pinexq-client 0.4.0.2024.717.1__py3-none-any.whl → 0.5.0.20241018.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pinexq_client/core/hco/action_hco.py +6 -2
- pinexq_client/core/hco/action_with_parameters_hco.py +6 -2
- pinexq_client/core/hco/link_hco.py +7 -2
- pinexq_client/core/hco/unavailable.py +21 -2
- pinexq_client/core/model/error.py +3 -0
- pinexq_client/core/sirenaccess.py +3 -3
- pinexq_client/job_management/__init__.py +1 -1
- pinexq_client/job_management/hcos/job_hco.py +134 -120
- pinexq_client/job_management/hcos/jobsroot_hco.py +54 -39
- pinexq_client/job_management/model/open_api_generated.py +24 -1
- pinexq_client/job_management/tool/job.py +17 -7
- pinexq_client/job_management/tool/job_group.py +148 -130
- pinexq_client/job_management/tool/processing_step.py +83 -5
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/METADATA +2 -2
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/RECORD +18 -17
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/WHEEL +1 -1
- pinexq_client-0.5.0.20241018.1.dist-info/entry_points.txt +4 -0
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/licenses/LICENSE +0 -0
pinexq_client/core/hco/action_hco.py

@@ -5,13 +5,13 @@ from httpx import URL

 from pinexq_client.core import Entity, Action, execute_action, raise_exception_on_error, ClientException
 from pinexq_client.core.hco.hco_base import ClientContainer
-from pinexq_client.core.hco.unavailable import UnavailableAction
+from pinexq_client.core.hco.unavailable import UnavailableAction, HypermediaAvailability

 TEntity = TypeVar('TEntity', bound=Entity)
 THcoEntity = TypeVar('THcoEntity', bound=Entity)


-class ActionHco(ClientContainer):
+class ActionHco(ClientContainer, HypermediaAvailability):
     _client: httpx.Client
     _action: Action

@@ -51,6 +51,10 @@ class ActionHco(ClientContainer):
                 f"Error while mapping mandatory action {name}: does not exist")
         return result

+    @staticmethod
+    def is_available() -> bool:
+        return True
+
     def _execute_internal(self) -> None | URL:
         response = execute_action(self._client, self._action)
         raise_exception_on_error(f"Error while executing action, unexpected response", response)
pinexq_client/core/hco/action_with_parameters_hco.py

@@ -6,12 +6,12 @@ from pydantic import BaseModel

 from pinexq_client.core import Entity, Action, execute_action, raise_exception_on_error, ClientException
 from pinexq_client.core.hco.hco_base import ClientContainer
-from pinexq_client.core.hco.unavailable import UnavailableAction
+from pinexq_client.core.hco.unavailable import UnavailableAction, HypermediaAvailability

 TParameters = TypeVar('TParameters', bound=BaseModel)


-class ActionWithParametersHco(ClientContainer, Generic[TParameters]):
+class ActionWithParametersHco(ClientContainer, Generic[TParameters], HypermediaAvailability):
     _client: httpx.Client
     _action: Action

@@ -52,6 +52,10 @@ class ActionWithParametersHco(ClientContainer, Generic[TParameters]):
                 f"Error while mapping mandatory action {name}: action does not exist")
         return result

+    @staticmethod
+    def is_available() -> bool:
+        return True
+
     def _execute_internal(self, parameters: BaseModel) -> None | URL:
         if parameters is None:
             raise ClientException(f"Error while executing action: action requires parameters")
pinexq_client/core/hco/link_hco.py

@@ -5,10 +5,11 @@ from httpx import URL

 from pinexq_client.core import Link, Entity, navigate, ensure_siren_response, ClientException
 from pinexq_client.core.hco.hco_base import ClientContainer, TEntity
-from pinexq_client.core.hco.unavailable import UnavailableLink
+from pinexq_client.core.hco.unavailable import UnavailableLink, HypermediaAvailability


-class LinkHco(ClientContainer):
+class LinkHco(ClientContainer, HypermediaAvailability):
+
     _client: httpx.Client
     _link: Link

@@ -46,6 +47,10 @@ class LinkHco(ClientContainer):

         return result

+    @staticmethod
+    def is_available() -> bool:
+        return True
+
     def _navigate_internal(self, parse_type: Type[TEntity] = Entity) -> TEntity:
         response = navigate(self._client, self._link, parse_type)
         return ensure_siren_response(response)
pinexq_client/core/hco/unavailable.py

@@ -1,17 +1,36 @@
+from abc import ABC, ABCMeta, abstractmethod
+
 from pinexq_client.core import NotAvailableException


-class UnavailableAction:
+class HypermediaAvailability(ABC):
+    def __bool__(self) -> bool:
+        return self.is_available()
+
+    @staticmethod
+    @abstractmethod
+    def is_available() -> bool:
+        ...
+
+class UnavailableAction(HypermediaAvailability):
     """This class is used to represent an action that is not available. It is used to avoid None
     checks in the code."""

     def execute(self, *args, **kwargs):
         raise NotAvailableException(f"Error while executing action: action is not available")

+    @staticmethod
+    def is_available() -> bool:
+        return False

-class UnavailableLink:
+
+class UnavailableLink(HypermediaAvailability):
     """This class is used to represent a link that is not available. It is used to avoid None
     checks in the code."""

     def navigate(self):
         raise NotAvailableException(f"Error while navigating: link is not available")
+
+    @staticmethod
+    def is_available() -> bool:
+        return False
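A minimal usage sketch of the new availability API (illustrative only, not part of the package): `job_hco` stands in for any HCO obtained from the API, and the action attribute names come from the JobHco shown further below.

# Explicit check via the new static method:
if job_hco.rename_action.is_available():
    params = job_hco.rename_action.default_parameters()
    params.name = "renamed job"  # field name assumed from the generated RenameJobParameters model
    job_hco.rename_action.execute(params)

# __bool__ delegates to is_available(), so a plain truthiness test works as well:
if job_hco.delete_action:
    job_hco.delete_action.execute()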
pinexq_client/core/model/error.py

@@ -1,3 +1,5 @@
+from typing import Any
+
 from pydantic import BaseModel, Field, ConfigDict


@@ -7,6 +9,7 @@ class ProblemDetails(BaseModel):
     status: int | None = None
     detail: str | None = None
     instance: str | None = None
+    errors: Any = None

     def __str__(self):
         message = [f" {key}: {value}" for key, value in self.model_dump().items() if value]
pinexq_client/core/sirenaccess.py

@@ -22,7 +22,7 @@ def get_resource(client: httpx.Client, href: str, media_type: str = MediaTypes.S
         # assume get for links
         response = client.get(href)
     except (httpx.ConnectTimeout, httpx.ConnectError) as exc:
-        raise ClientException(f"Http-client error requesting resource: {href}") from exc
+        raise ClientException(f"Http-client error requesting resource: {href}\nMessage:{exc}\nCurrent client timeout:{client.timeout}") from exc
     expected_type = media_type or MediaTypes.SIREN  # if not specified expect siren

     if response.status_code == httpx.codes.OK:

@@ -84,7 +84,7 @@ def upload_file(client: httpx.Client, action: Action, file: BinaryIO, filename:
     try:
         response = client.request(method=action.method, url=action.href, files=files)
     except httpx.RequestError as exc:
-        raise ClientException(f"Error from httpx while uploading data to: {action.href}") from exc
+        raise ClientException(f"Error from httpx while uploading data to: {action.href}\nMessage:{exc}\nCurrent client timeout:{client.timeout}") from exc
     return handle_action_result(response)


@@ -119,7 +119,7 @@ def execute_action(client: httpx.Client, action: Action,
             headers={Headers.CONTENT_TYPE.value: MediaTypes.APPLICATION_JSON.value}
         )
     except httpx.RequestError as exc:
-        raise ClientException(f"Error from httpx while executing action: {action.href}") from exc
+        raise ClientException(f"Error from httpx while executing action: {action.href}\nMessage:{exc}\nCurrent client timeout:{client.timeout}") from exc

     return handle_action_result(response)
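The enriched messages now report the configured client timeout, which makes it worth setting one explicitly. A small sketch using the standard httpx API (URL and values are illustrative; any pre-configured httpx.Client can be passed to the HCO helpers):

import httpx

client = httpx.Client(
    base_url="https://jma.example.com",        # illustrative endpoint
    timeout=httpx.Timeout(30.0, connect=5.0),  # generous read timeout, short connect timeout
)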
pinexq_client/job_management/hcos/job_hco.py

@@ -13,171 +13,185 @@ from pinexq_client.job_management.hcos import InputDataSlotHco
 from pinexq_client.job_management.hcos.output_dataslot_hco import OutputDataSlotHco
 from pinexq_client.job_management.hcos.processing_step_hco import ProcessingStepLink
 from pinexq_client.job_management.known_relations import Relations
+from pinexq_client.job_management.model import SetJobErrorStateParameters
 from pinexq_client.job_management.model.open_api_generated import JobStates, ProcessingView, RenameJobParameters, \
+    SelectProcessingParameters, SetJobTagsParameters
 from pinexq_client.job_management.model.sirenentities import JobEntity, InputDataSlotEntity, OutputDataSlotEntity


 class JobRenameAction(ActionWithParametersHco[RenameJobParameters]):
+    def execute(self, parameters: RenameJobParameters):
+        self._execute(parameters)

+    def default_parameters(self) -> RenameJobParameters:
+        return self._get_default_parameters(RenameJobParameters, RenameJobParameters())


 class JobSelectProcessingAction(ActionWithParametersHco[SelectProcessingParameters]):
+    def execute(self, parameters: SelectProcessingParameters):
+        self._execute(parameters)

+    def default_parameters(self) -> SelectProcessingParameters:
+        return self._get_default_parameters(SelectProcessingParameters, SelectProcessingParameters())


 class JobHideAction(ActionHco):
+    def execute(self):
+        self._execute_internal()


 class JobUnHideAction(ActionHco):
+    def execute(self):
+        self._execute_internal()


 class JobDeleteAction(ActionHco):
+    def execute(self):
+        self._execute_internal()


 class JobAllowOutputDataDeletionAction(ActionHco):
+    def execute(self):
+        self._execute_internal()


 class JobDisAllowOutputDataDeletionAction(ActionHco):
+    def execute(self):
+        self._execute_internal()


 class GenericProcessingConfigureParameters(BaseModel):
+    """Generic parameter model, that can be set with any dictionary"""
+    model_config = ConfigDict(extra='allow')


 class JobConfigureProcessingAction(ActionWithParametersHco[GenericProcessingConfigureParameters]):
+    def execute(self, parameters: GenericProcessingConfigureParameters):
+        self._execute(parameters)

+    def default_parameters(self) -> GenericProcessingConfigureParameters:
+        return self._get_default_parameters(GenericProcessingConfigureParameters,
+                                             GenericProcessingConfigureParameters())


 class JobStartProcessingAction(ActionHco):
+    def execute(self):
+        self._execute_internal()


 class JobLink(LinkHco):
+    def navigate(self) -> 'JobHco':
+        return JobHco.from_entity(self._navigate_internal(JobEntity), self._client)


 class ParentJobLink(LinkHco):
+    def navigate(self) -> 'JobHco':
+        return JobHco.from_entity(self._navigate_internal(JobEntity), self._client)


 class JobEditTagsAction(ActionWithParametersHco[SetJobTagsParameters]):
+    def execute(self, parameters: SetJobTagsParameters):
+        self._execute(parameters)

+    def default_parameters(self) -> SetJobTagsParameters:
+        # todo check why we have to manually set tags
+        return self._get_default_parameters(SetJobTagsParameters, SetJobTagsParameters(tags=[]))
+
+
+class JobSetToErrorStateAction(ActionWithParametersHco[SetJobErrorStateParameters]):
+    def execute(self, parameters: SetJobErrorStateParameters):
+        self._execute(parameters)
+
+    def default_parameters(self) -> SetJobErrorStateParameters:
+        return self._get_default_parameters(SetJobErrorStateParameters, SetJobErrorStateParameters(
+            message='Manually set to error state by admin'
+        ))


 class JobHco(Hco[JobEntity]):
+    name: str = Property()
+    state: JobStates = Property()
+    hidden: bool = Property()
+    tags: list[str] | None = Property()
+    output_is_deletable: bool = Property()
+    created_at: datetime = Property()
+    completed_at: datetime = Property()
+    error_description: str = Property()
+    processing: ProcessingView = Property()
+    result: str = Property()
+
+    self_link: JobLink
+    parent_link: ParentJobLink | UnavailableLink
+    selected_processing_step_link: ProcessingStepLink | UnavailableLink
+
+    rename_action: JobRenameAction | UnavailableAction
+    select_processing_action: JobSelectProcessingAction | UnavailableAction
+    configure_processing_action: JobConfigureProcessingAction | UnavailableAction
+    start_processing_action: JobStartProcessingAction | UnavailableAction
+    hide_action: JobHideAction | UnavailableAction
+    unhide_action: JobUnHideAction | UnavailableAction
+    delete_action: JobDeleteAction | UnavailableAction
+    allow_output_data_deletion_action: JobAllowOutputDataDeletionAction | UnavailableAction
+    disallow_output_data_deletion_action: JobDisAllowOutputDataDeletionAction | UnavailableAction
+    edit_tags_action: JobEditTagsAction | UnavailableAction
+    set_to_error_state_action: JobSetToErrorStateAction | UnavailableAction
+
+    input_dataslots: List[InputDataSlotHco]
+    output_dataslots: List[OutputDataSlotHco]
+
+    @classmethod
+    def from_entity(cls, entity: JobEntity, client: httpx.Client) -> Self:
+        instance = cls(client, entity)
+
+        Hco.check_classes(instance._entity.class_, ["Job"])
+
+        instance.self_link = JobLink.from_entity(
+            instance._client, instance._entity, Relations.SELF)
+        instance.parent_link = ParentJobLink.from_entity_optional(
+            instance._client, instance._entity, Relations.PARENT_JOB)
+        instance.selected_processing_step_link = ProcessingStepLink.from_entity_optional(
+            instance._client, instance._entity, Relations.SELECTED_PROCESSING_STEP)
+
+        # actions
+        instance.hide_action = JobHideAction.from_entity_optional(
+            client, instance._entity, "Hide")
+        instance.unhide_action = JobUnHideAction.from_entity_optional(
+            client, instance._entity, "UnHide")
+        instance.delete_action = JobDeleteAction.from_entity_optional(
+            client, instance._entity, "Delete")
+        instance.rename_action = JobRenameAction.from_entity_optional(
+            client, instance._entity, "Rename")
+        instance.select_processing_action = JobSelectProcessingAction.from_entity_optional(
+            client, instance._entity, "SelectProcessing")
+        instance.configure_processing_action = JobConfigureProcessingAction.from_entity_optional(
+            client, instance._entity, "ConfigureProcessing")
+        instance.start_processing_action = JobStartProcessingAction.from_entity_optional(
+            client, instance._entity, "StartProcessing")
+        instance.allow_output_data_deletion_action = JobAllowOutputDataDeletionAction.from_entity_optional(
+            client, instance._entity, "AllowOutputDataDeletion")
+        instance.disallow_output_data_deletion_action = JobDisAllowOutputDataDeletionAction.from_entity_optional(
+            client, instance._entity, "DisallowOutputDataDeletion")
+        instance.set_to_error_state_action = JobSetToErrorStateAction.from_entity_optional(
+            client, instance._entity, "SetJobToErrorState")
+        instance.edit_tags_action = JobEditTagsAction.from_entity_optional(
+            client, instance._entity, "EditTags")
+
+        # entities
+        instance._extract_input_dataslots()
+        instance._extract_output_dataslots()
+
+        return instance
+
+    def _extract_input_dataslots(self):
+        self.input_dataslots = []
+        input_dataslots = self._entity.find_all_entities_with_relation(Relations.INPUT_DATASLOT, InputDataSlotEntity)
+        for input_dataslot in input_dataslots:
+            input_dataslot_hco: InputDataSlotHco = InputDataSlotHco.from_entity(input_dataslot, self._client)
+            self.input_dataslots.append(input_dataslot_hco)
+
+    def _extract_output_dataslots(self):
+        self.output_dataslots = []
+        output_dataslots = self._entity.find_all_entities_with_relation(Relations.OUTPUT_DATASLOT, OutputDataSlotEntity)
+        for output_dataslot in output_dataslots:
+            output_dataslot_hco: OutputDataSlotHco = OutputDataSlotHco.from_entity(output_dataslot, self._client)
+            self.output_dataslots.append(output_dataslot_hco)
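All rewritten action classes follow the same pattern: `default_parameters()` returns the server-provided defaults and `execute()` submits them. A sketch against the JobHco surface above (the `job_hco` object and tag values are illustrative):

if job_hco.edit_tags_action.is_available():
    params = job_hco.edit_tags_action.default_parameters()
    params.tags = ["experiment-42", "baseline"]  # illustrative tags
    job_hco.edit_tags_action.execute(params)

if job_hco.set_to_error_state_action:
    job_hco.set_to_error_state_action.execute(
        SetJobErrorStateParameters(message="Manually set to error state by admin"))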
pinexq_client/job_management/hcos/jobsroot_hco.py

@@ -18,65 +18,80 @@ from pinexq_client.job_management.known_relations import Relations
 from pinexq_client.job_management.model.open_api_generated import (
     CreateJobParameters,
     JobQueryParameters,
-    CreateSubJobParameters
+    CreateSubJobParameters, SetJobsErrorStateParameters
 )
 from pinexq_client.job_management.model.sirenentities import JobsRootEntity


 class CreateJobAction(ActionWithParametersHco[CreateJobParameters]):
+    def execute(self, parameters: CreateJobParameters) -> JobLink:
+        url: URL = self._execute_returns_url(parameters)
+        link = Link.from_url(url, [str(Relations.CREATED_RESSOURCE)], "Created job", MediaTypes.SIREN)
+        return JobLink.from_link(self._client, link)

+    def default_parameters(self) -> CreateJobParameters:
+        return self._get_default_parameters(CreateJobParameters, CreateJobParameters())


 class CreateSubJobAction(ActionWithParametersHco[CreateSubJobParameters]):
+    def execute(self, parameters: CreateSubJobParameters) -> JobLink:
+        url = self._execute_returns_url(parameters)
+        link = Link.from_url(url, [str(Relations.CREATED_RESSOURCE)], "Created sub-job", MediaTypes.SIREN)
+        return JobLink.from_link(self._client, link)

+    def default_parameters(self) -> CreateSubJobParameters:
+        return self._get_default_parameters(CreateSubJobParameters, CreateSubJobParameters())


 class JobQueryAction(ActionWithParametersHco):
+    def execute(self, parameters: JobQueryParameters) -> JobQueryResultHco:
+        url = self._execute_returns_url(parameters)
+        link = Link.from_url(url, [str(Relations.CREATED_RESSOURCE)], "Created job query", MediaTypes.SIREN)
+        # resolve link immediately
+        return JobQueryResultLink.from_link(self._client, link).navigate()

+    def default_parameters(self) -> JobQueryParameters:
+        return self._get_default_parameters(JobQueryParameters, JobQueryParameters())
+
+
+class JobSetToErrorStateAction(ActionWithParametersHco[SetJobsErrorStateParameters]):
+    def execute(self, parameters: SetJobsErrorStateParameters):
+        self._execute(parameters)
+
+    def default_parameters(self) -> SetJobsErrorStateParameters:
+        return self._get_default_parameters(SetJobsErrorStateParameters, SetJobsErrorStateParameters(
+            message='Manually set to error state by admin',
+            created_this_many_hours_ago=0,
+        ))


 class JobsRootHco(Hco[JobsRootEntity]):
+    create_job_action: CreateJobAction | UnavailableAction
+    job_query_action: JobQueryAction | UnavailableAction
+    create_subjob_action: CreateSubJobAction | UnavailableAction
+    used_tags_link: JobUsedTagsLink | UnavailableLink
+    set_jobs_to_error_state: JobSetToErrorStateAction | UnavailableAction
+
+    self_link: 'JobsRootLink'

+    @classmethod
+    def from_entity(cls, entity: JobsRootEntity, client: httpx.Client) -> Self:
+        instance = cls(client, entity)

+        Hco.check_classes(instance._entity.class_, ["JobsRoot"])
+        instance.create_job_action = CreateJobAction.from_entity_optional(client, instance._entity, "CreateJob")
+        instance.create_subjob_action = CreateSubJobAction.from_entity_optional(client, instance._entity,
+                                                                                "CreateSubJob")
+        instance.job_query_action = JobQueryAction.from_entity_optional(client, instance._entity, "CreateJobQuery")
+        instance.used_tags_link = JobUsedTagsLink.from_entity_optional(
+            instance._client, instance._entity, Relations.USED_TAGS)
+        instance.self_link = JobsRootLink.from_entity(instance._client, instance._entity, Relations.SELF)
+        instance.set_jobs_to_error_state = JobSetToErrorStateAction.from_entity_optional(instance._client, instance._entity,
+                                                                                         "SetJobsToErrorState")

-        instance.create_job_action = CreateJobAction.from_entity_optional(client, instance._entity, "CreateJob")
-        instance.create_subjob_action = CreateSubJobAction.from_entity_optional(client, instance._entity,
-                                                                                "CreateSubJob")
-        instance.job_query_action = JobQueryAction.from_entity_optional(client, instance._entity, "CreateJobQuery")
-        instance.used_tags_link = JobUsedTagsLink.from_entity_optional(
-            instance._client, instance._entity, Relations.USED_TAGS)
-        instance.self_link = JobsRootLink.from_entity(instance._client, instance._entity, Relations.SELF)
-        return instance
+        return instance


 class JobsRootLink(LinkHco):
+    def navigate(self) -> JobsRootHco:
+        return JobsRootHco.from_entity(self._navigate_internal(JobsRootEntity), self._client)
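A sketch of the new root-level bulk action, assuming `jobs_root` is a JobsRootHco reached through the entry point (how it is navigated to is not shown in this diff) and that the generated parameter model accepts Python field names:

from datetime import datetime, timedelta, timezone

if jobs_root.set_jobs_to_error_state.is_available():
    jobs_root.set_jobs_to_error_state.execute(SetJobsErrorStateParameters(
        message="Stuck jobs cleaned up by admin",  # illustrative message
        created_before=datetime.now(timezone.utc) - timedelta(hours=24),
    ))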
pinexq_client/job_management/model/open_api_generated.py

@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 # filename: openapi.json
-# timestamp: 2024-
+# timestamp: 2024-10-18T08:23:13+00:00

 from __future__ import annotations


@@ -58,6 +58,7 @@ class CreateProcessingStepParameters(BaseModel):
     )
     title: constr(min_length=1) = Field(..., alias='Title')
     function_name: constr(min_length=1) = Field(..., alias='FunctionName')
+    version: constr(min_length=1) = Field(..., alias='Version')


 class CreateSubJobParameters(BaseModel):

@@ -355,6 +356,14 @@ class SetCommentWorkDataParameters(BaseModel):
     comment: str | None = Field(None, alias='Comment')


+class SetJobErrorStateParameters(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+        populate_by_name=True,
+    )
+    message: constr(min_length=1) = Field(..., alias='Message')
+
+
 class SetJobTagsParameters(BaseModel):
     model_config = ConfigDict(
         extra='allow',

@@ -363,6 +372,15 @@ class SetJobTagsParameters(BaseModel):
     tags: List[str] = Field(..., alias='Tags')


+class SetJobsErrorStateParameters(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+        populate_by_name=True,
+    )
+    message: constr(min_length=1) = Field(..., alias='Message')
+    created_before: AwareDatetime = Field(..., alias='CreatedBefore')
+
+
 class SetNameWorkDataParameters(BaseModel):
     model_config = ConfigDict(
         extra='allow',

@@ -485,6 +503,8 @@ class AdminJobFilterParameter(BaseModel):
     state: JobStates | None = Field(None, alias='State')
     show_hidden: bool | None = Field(None, alias='ShowHidden')
     parent_job_url: str | None = Field(None, alias='ParentJobUrl')
+    created_before: AwareDatetime | None = Field(None, alias='CreatedBefore')
+    created_after: AwareDatetime | None = Field(None, alias='CreatedAfter')
     tags_by_and: List[str] | None = Field(None, alias='TagsByAnd')
     tags_by_or: List[str] | None = Field(None, alias='TagsByOr')
     user_url: str | None = Field(None, alias='UserUrl')

@@ -610,6 +630,8 @@ class JobFilterParameter(BaseModel):
     name: str | None = Field(None, alias='Name')
     show_hidden: bool | None = Field(None, alias='ShowHidden')
     processing_step_url: str | None = Field(None, alias='ProcessingStepUrl')
+    created_before: AwareDatetime | None = Field(None, alias='CreatedBefore')
+    created_after: AwareDatetime | None = Field(None, alias='CreatedAfter')
     tags_by_and: List[str] | None = Field(None, alias='TagsByAnd')
     tags_by_or: List[str] | None = Field(None, alias='TagsByOr')
     parent_job_url: str | None = Field(None, alias='ParentJobUrl')

@@ -785,6 +807,7 @@ class WorkDataHtoOpenApiProperties(BaseModel):
     media_type: str | None = Field(None, alias='MediaType')
     kind: WorkDataKind | None = Field(None, alias='Kind')
     is_deletable: bool | None = Field(None, alias='IsDeletable')
+    is_used_as_job_input: bool | None = Field(None, alias='IsUsedAsJobInput')
     hidden: bool | None = Field(None, alias='Hidden')
     comments: str | None = Field(None, alias='Comments')
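The new `created_before`/`created_after` fields allow time-windowed job queries. A sketch constructing such a filter (field names are taken from the generated models above; keyword construction assumes `populate_by_name` as in the other generated models, and the `filter` keyword on JobQueryParameters is assumed):

from datetime import datetime, timedelta, timezone

recent = JobQueryParameters(filter=JobFilterParameter(
    created_after=datetime.now(timezone.utc) - timedelta(days=7),
    tags_by_and=["nightly"],  # illustrative tag
))
# jobs_root.job_query_action.execute(recent) would then return the matching jobs.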
pinexq_client/job_management/tool/job.py

@@ -40,6 +40,7 @@ from pinexq_client.job_management.model (
 from pinexq_client.job_management.tool.processing_step import ProcessingStep
 from pinexq_client.job_management.tool.workdata import WorkData

+
 class Job:
     """Convenience wrapper for handling JobHcos in the JobManagement-Api.


@@ -197,15 +198,15 @@ class Job:

         if processing_step_link is not None:
             if not isinstance(processing_step_link, ProcessingStepLink):
-                raise
+                raise TypeError('Instance passed to "processing_step_link" is not of type "ProcessingStepLink"')
             processing_url = processing_step_link.get_url()
         elif processing_step_instance is not None:
             if not isinstance(processing_step_instance, ProcessingStep):
-                raise
+                raise TypeError('Instance passed to "processing_step_instance" is not of type "ProcessingStep"')
             processing_url = processing_step_instance.self_link().get_url()
         else:
             if not isinstance(function_name, str):
-                raise
+                raise TypeError('Instance passed to "function_name" is not of type "str"')
             # ToDo: provide more parameters to query a processing step
             query_param = ProcessingStepQueryParameters(
                 filter=ProcessingStepFilterParameter(

@@ -215,7 +216,7 @@ class Job:
             )
             query_result = self._processing_step_root.query_action.execute(query_param)
             if len(query_result.processing_steps) != 1:
-                raise
+                raise NameError(f"No processing step with the name '{function_name}' registered!")
             # Todo: For now we choose the first and only result. Make this more flexible?
             processing_url = query_result.processing_steps[0].self_link.get_url()

@@ -292,15 +293,15 @@ class Job:
                 polling_interval_ms=polling_interval_ms
             )
         except TimeoutError as timeout:
-            raise
+            raise TimeoutError(
                 f"Job did not reach state: '{state.value}' "
                 f"current state: '{self.get_state().value}'. Error:{str(timeout)}"
             )
         except PollingException:
             if self._job.state == JobStates.error:
                 error_reason = self._job.error_description
-                raise
-            raise
+                raise PollingException(f"Job failed'. Error:{error_reason}")
+            raise PollingException("Job failed")

         return self

@@ -615,3 +616,12 @@ class Job:

     def self_link(self) -> JobLink:
         return self._job.self_link
+
+    def set_to_error_state(self) -> Self:
+        """Set this job to error state.
+
+        Returns:
+            This `Job` object
+        """
+        self._job.set_to_error_state_action.execute()
+        return self
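A sketch of how the new `set_to_error_state()` helper might be combined with `wait_for_state()` (the timeout value is illustrative; `job` is a tool.Job instance, e.g. from `Job.from_hco(client, job_hco)`):

try:
    job.wait_for_state(JobStates.completed, timeout_ms=60_000)
except TimeoutError:
    # Give up and mark the job as failed on the server as well.
    job.set_to_error_state()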
pinexq_client/job_management/tool/job_group.py

@@ -1,3 +1,4 @@
+import time
 from typing import Union, Self

 import httpx

@@ -8,133 +9,150 @@ from pinexq_client.job_management.model import JobStates


 class JobGroup:
+    """
+    A wrapper class for a group of jobs for easier execution and waiting
+
+    Attributes:
+        _client:
+            The http client
+        _jobs:
+            List of jobs in the group
+    """
+
+    _client: httpx.Client
+
+    def __init__(self, client: httpx.Client):
+        self._jobs: list[Job] = []
+        self._client = client
+
+    @classmethod
+    def from_query_result(cls, client: httpx.Client, job_query_result: JobQueryResultHco) -> Self:
+        """
+        Initializes a `JobGroup` object from a JobQueryResultHco object
+        Args:
+            client: The http client
+            job_query_result: The JobQueryResultHco object whose jobs are to be added to the JobGroup
+
+        Returns:
+            The newly created `JobGroup` instance
+        """
+        instance = cls(client)
+        for job in job_query_result.iter_flat():
+            instance.add_jobs(Job.from_hco(instance._client, job))
+        return instance
+
+    def add_jobs(self, jobs: Union[Job, list[Job]]) -> Self:
+        """
+        Add a job or multiple jobs to the group
+
+        Args:
+            jobs: A job or a list of job objects to be added to the JobGroup
+
+        Returns:
+            This `JobGroup` object
+        """
+
+        if isinstance(jobs, list):
+            self._jobs.extend(jobs)
+        else:
+            self._jobs.append(jobs)
+        return self
+
+    def start_all(self) -> Self:
+        """
+        Start all jobs
+
+        Returns:
+            This `JobGroup` object
+        """
+        for job in self._jobs:
+            job.start()
+        return self
+
+    def wait_all(self, *, job_timeout_ms: int = 5000, total_timeout_ms: int | None = None) -> Self:
+        """
+        Wait for all jobs to complete or error state.
+        If the overall timeout elapses and some jobs are not complete, then exception.
+
+        Args:
+            job_timeout_ms:
+                Individual job timeout in milliseconds. Default is 5000 ms.
+            total_timeout_ms:
+                Timeout for the whole operation in milliseconds. Default is no timeout.
+        Returns:
+            This `JobGroup` object
+        """
+        start_time = time.time()
+        for job in self._jobs:
+            if total_timeout_ms is not None:
+                elapsed_time_ms = (time.time() - start_time) * 1000
+                if total_timeout_ms - elapsed_time_ms <= 0:
+                    raise Exception("Total timeout exceeded while waiting for jobs.")
+
+            try:
+                job.wait_for_state(JobStates.completed, timeout_ms=job_timeout_ms)
+            except Exception:
+                pass
+        return self
+
+    def all_jobs_completed_ok(self) -> bool:
+        for job in self._jobs:
+            state = job.get_state()
+            if state is not JobStates.completed:
+                return False
+        return True
+
+    def incomplete_jobs(self) -> list[Job]:
+        """
+        Returns the incomplete jobs
+
+        Returns:
+            Count of incomplete jobs
+        """
+        incomplete_jobs = []
+        for job in self._jobs:
+            state = job.get_state()
+            if state in (JobStates.processing, JobStates.pending):
+                incomplete_jobs.append(job)
+        return incomplete_jobs
+
+    def jobs_with_error(self) -> list[Job]:
+        """
+        Returns the list of jobs that produced errors
+
+        Returns:
+            List of jobs that produced errors
+        """
+        return [job for job in self._jobs if job.get_state() == JobStates.error]
+
+    def remove(self, job_name: str) -> Self:
+        """
+        Removes all jobs from the group whose name matches the provided name
+
+        Args:
+            job_name:
+                The name of the job(s) to be removed
+        Returns:
+            This `JobGroup` object
+        """
+        self._jobs = [job for job in self._jobs if job.get_name() != job_name]
+        return self
+
+    def clear(self) -> Self:
+        """
+        Removes all jobs from the group
+
+        Returns:
+            This `JobGroup` object
+        """
+        self._jobs = []
+        return self
+
+    def get_jobs(self) -> list[Job]:
+        """
+        Returns the list of jobs in the group
+
+        Returns:
+            List of jobs in the group
+        """
+        return self._jobs
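A sketch tying the JobGroup helpers together (the jobs and timeouts are illustrative; `client` is the httpx.Client used for the JobManagement API):

group = (JobGroup(client)
         .add_jobs([job_a, job_b])
         .start_all()
         .wait_all(job_timeout_ms=10_000, total_timeout_ms=120_000))

if not group.all_jobs_completed_ok():
    for failed in group.jobs_with_error():
        print(f"Job '{failed.get_name()}' ended in error state")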
pinexq_client/job_management/tool/processing_step.py

@@ -1,4 +1,4 @@
-from typing import Any, Self
+from typing import Any, Self, List, Optional

 import httpx
 from httpx import URL

@@ -6,7 +6,7 @@ from httpx import URL
 from pinexq_client.core import Link, MediaTypes
 from pinexq_client.core.hco.upload_action_hco import UploadParameters
 from pinexq_client.job_management.enterjma import enter_jma
-from pinexq_client.job_management.hcos import ProcessingStepHco, ProcessingStepLink
+from pinexq_client.job_management.hcos import ProcessingStepHco, ProcessingStepLink, ProcessingStepQueryResultHco
 from pinexq_client.job_management.hcos.entrypoint_hco import EntryPointHco
 from pinexq_client.job_management.hcos.job_hco import GenericProcessingConfigureParameters
 from pinexq_client.job_management.hcos.processingsteproot_hco import ProcessingStepsRootHco

@@ -14,7 +14,8 @@ from pinexq_client.job_management.known_relations import Relations
 from pinexq_client.job_management.model import (
     CreateProcessingStepParameters,
     EditProcessingStepParameters,
-    SetProcessingStepTagsParameters,
+    SetProcessingStepTagsParameters, ProcessingStepQueryParameters, ProcessingStepFilterParameter,
+    FunctionNameMatchTypes,
 )


@@ -37,20 +38,22 @@ class ProcessingStep:
         self._entrypoint = enter_jma(client)
         self._processing_steps_root = self._entrypoint.processing_step_root_link.navigate()

-    def create(self, title: str, function_name: str) -> Self:
+    def create(self, title: str, function_name: str, version: str = "0") -> Self:
         """
         Creates a new ProcessingStep by name.

         Args:
            title: Title of the ProcessingStep to be created
            function_name: Function name of the ProcessingStep to be created
+           version: Version of the ProcessingStep to be created

         Returns:
             The newly created ProcessingStep as `ProcessingStep` object
         """
         processing_step_hco = self._processing_steps_root.register_new_action.execute(CreateProcessingStepParameters(
             title=title,
-            function_name=function_name
+            function_name=function_name,
+            version=version
         ))
         self._processing_step = processing_step_hco
         return self

@@ -94,6 +97,81 @@ class ProcessingStep:
         processing_step_instance._get_by_link(ProcessingStepLink.from_link(client, link))
         return processing_step_instance

+    @classmethod
+    def from_name(cls, client: httpx.Client, step_name: str, version: str = "0") -> Self:
+        """Create a ProcessingStep object from an existing name.
+
+        Args:
+            client: Create a ProcessingStep object from an existing name.
+            step_name: Name of the registered processing step.
+            version: Version of the ProcessingStep to be created
+
+        Returns:
+            The newly created processing step as `ProcessingStep` object
+        """
+
+        # Attempt to find the processing step
+        query_result = cls._query_processing_steps(client, step_name, version)
+
+        # Check if exactly one result is found
+        if len(query_result.processing_steps) != 1:
+            # Attempt to suggest alternative steps if exact match not found
+            suggested_steps = cls._processing_steps_by_name(client, step_name)
+            raise NameError(
+                f"No processing step with the name {step_name} and version {version} registered. "
+                f"Suggestions: {suggested_steps}"
+            )
+
+        # Todo: For now we choose the first and only result. Make this more flexible?
+        processing_step_hco = query_result.processing_steps[0]
+        return ProcessingStep.from_hco(client, processing_step_hco)
+
+    @staticmethod
+    def _query_processing_steps(client: httpx.Client, step_name: str, version: Optional[str] = None) -> ProcessingStepQueryResultHco:
+        """
+        Helper function to query processing steps based on name and optional version.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the processing step.
+            version: Optional version to match.
+
+        Returns:
+            Query result object containing the matching processing steps.
+        """
+        query_param = ProcessingStepQueryParameters(
+            filter=ProcessingStepFilterParameter(
+                function_name=step_name,
+                function_name_match_type=FunctionNameMatchTypes.match_exact,
+                version=version
+            )
+        )
+        instance = ProcessingStep(client)
+        return instance._processing_steps_root.query_action.execute(query_param)
+
+    @staticmethod
+    def _processing_steps_by_name(client: httpx.Client, step_name: str) -> list:
+        """
+        Suggest processing steps if the exact step is not found.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the processing step.
+
+        Returns:
+            A list of alternative processing steps matching the step name.
+        """
+        # Query for steps without version to get suggestions
+        instance = ProcessingStep(client)
+        query_result = instance._query_processing_steps(client, step_name)
+
+        # If no suggestions are found, raise an error
+        if len(query_result.processing_steps) == 0:
+            raise NameError(f"No processing steps found with the name '{step_name}'.")
+
+        # Return list of alternative steps as suggestions
+        return [f"{step.function_name}:{step.version}" for step in query_result.processing_steps]
+
     def refresh(self) -> Self:
         """Updates the processing step from the server
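A sketch of the new lookup helper (the function name and version are illustrative):

step = ProcessingStep.from_name(client, "fft_analysis", version="1")
# On no exact match, a NameError is raised that lists "function_name:version"
# suggestions collected by _processing_steps_by_name().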
{pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/METADATA
RENAMED

@@ -1,8 +1,8 @@
 Metadata-Version: 2.1
 Name: pinexq-client
-Version: 0.4.0.2024.717.1
+Version: 0.5.0.20241018.1
 Summary: A hypermedia-based client for the DataCybernetics PinexQ platform.
-Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
+Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Jasim Ahmed <ahmed@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
 Maintainer-Email: Mathias Reichardt <reichardt@data-cybernetics.com>, =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Carsten Blank <blank@data-cybernetics.com>
 License: MIT
 Requires-Python: >=3.11
{pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/RECORD
RENAMED

@@ -1,35 +1,36 @@
-pinexq_client-0.
-pinexq_client-0.
-pinexq_client-0.
+pinexq_client-0.5.0.20241018.1.dist-info/METADATA,sha256=iPZaVcmKxfXwSxywFo5dj9iyfOnBo_igmxoWRHo-PJU,3278
+pinexq_client-0.5.0.20241018.1.dist-info/WHEEL,sha256=pM0IBB6ZwH3nkEPhtcp50KvKNX-07jYtnb1g1m6Z4Co,90
+pinexq_client-0.5.0.20241018.1.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+pinexq_client-0.5.0.20241018.1.dist-info/licenses/LICENSE,sha256=3oz3tAhM7kOgRukkRe7wmh5T_HihZY77ZtJDJm91ZN8,1072
 pinexq_client/core/__init__.py,sha256=8SVD_PRgJtpUCOtVjdR6fRrv6KPNk7HD6UQrn0FKR04,235
 pinexq_client/core/base_relations.py,sha256=oIUS58pkbMDdqm-3YOdsenhL1smtzeAk4fp7-U595MY,162
 pinexq_client/core/enterapi.py,sha256=sL9TmF1L5LaDJnre1l_tiHDUo9vTbZ8cvPSov3Q1UTs,671
 pinexq_client/core/exceptions.py,sha256=NqB3y1ufjOfG3kv7Rz4ppXqJRAugt2zlgxkto2nIVQU,2228
 pinexq_client/core/hco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pinexq_client/core/hco/action_hco.py,sha256=
-pinexq_client/core/hco/action_with_parameters_hco.py,sha256=
+pinexq_client/core/hco/action_hco.py,sha256=6bWea-SkFjn52m5sYywkZeNOe74Nxmsvj338jSB6RZo,2338
+pinexq_client/core/hco/action_with_parameters_hco.py,sha256=qvtoqTfDHddxB0ALanWXNDobSpazCmK2_ulTuvax2A4,3280
 pinexq_client/core/hco/download_link_hco.py,sha256=_ijLtRfzy0qKG_QXpsWBZ8FJfp60o5Lw7QVv4_EpgPY,1335
 pinexq_client/core/hco/hco_base.py,sha256=NglQuS6p1EimgtIx55G8ngBDGXc4lzyq08jsXzztE1I,3367
-pinexq_client/core/hco/link_hco.py,sha256=
-pinexq_client/core/hco/unavailable.py,sha256=
+pinexq_client/core/hco/link_hco.py,sha256=LamrK6XyD6Gf_pl1tnNO_aIphPqcklPMjrxx8jx9HVw,2183
+pinexq_client/core/hco/unavailable.py,sha256=CBJN_YnQRPPTwYr-3MIXQIgYXIDLw9ozIM2TNo0uewY,942
 pinexq_client/core/hco/upload_action_hco.py,sha256=oOGw0Z_nx67CbqJ-QkQJbffdXajBgKmBAfSfFtyq8wk,4690
 pinexq_client/core/http_headers.py,sha256=tY4h3DKVDMkjw9DLFn57HdEnwR1NAixMF8Yjgy3gjQY,183
 pinexq_client/core/media_types.py,sha256=qU-Jd92H5HKczWDk51EstFHadO6A_QJL4lLXHRlfgjQ,600
 pinexq_client/core/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pinexq_client/core/model/error.py,sha256=
+pinexq_client/core/model/error.py,sha256=MZHma9zCxcTCUp21KZeD22EXK0xhcrKWDQVTEk7WAn4,437
 pinexq_client/core/model/sirenmodels.py,sha256=vGRQlhM2cSa2caxQel91Jr48KWqM-vMYX32iaQCzIds,5547
 pinexq_client/core/polling.py,sha256=Z6VXn-PCGk4XX-4tQWZG59qJyCIS0eIrpPUORQDIGrs,1077
-pinexq_client/core/sirenaccess.py,sha256=
-pinexq_client/job_management/__init__.py,sha256=
+pinexq_client/core/sirenaccess.py,sha256=moZQz2ujFsl8_WP_jfl8eM6lOzTHD6CK0dXBD8Wrg2g,6939
+pinexq_client/job_management/__init__.py,sha256=kvOmgq1qw5ksPm9tbEg1p1XiiZRrwsdKBEgDR_qsk08,164
 pinexq_client/job_management/enterjma.py,sha256=Ivl_jVPw_gaLyU5nKbywM-bbVGpn0MoCrZ0DMbJYf3s,1411
 pinexq_client/job_management/hcos/__init__.py,sha256=vMIdxGHBsVcKYKrVkCzD4a_VaheKSNxCimospFn5N88,396
 pinexq_client/job_management/hcos/entrypoint_hco.py,sha256=qodjAwO_MtawUuhmaYjhGXHV-uW1k94V9gKRYZRkhn4,2234
 pinexq_client/job_management/hcos/info_hco.py,sha256=zWjR63SCEO_sUcZ9ha_aIoC_qUzAWLf50Xu4AHddAn8,1328
 pinexq_client/job_management/hcos/input_dataslot_hco.py,sha256=SDflhyW8kjpcTUfKAXnJxNR-etPzAHfoTqlYUcJZrxs,3442
-pinexq_client/job_management/hcos/job_hco.py,sha256=
+pinexq_client/job_management/hcos/job_hco.py,sha256=CwF0eD07pd88iZPQUb_10iDtotiemy48BJ5exYN5gRc,8649
 pinexq_client/job_management/hcos/job_query_result_hco.py,sha256=I0G8YIlYDhTahLz8n06L8BywlcsMGNWUEsmEr4Sk0GU,3315
 pinexq_client/job_management/hcos/job_used_tags_hco.py,sha256=nys6E97NNXATdnvX6KZ46JR9qEb2lnqol9ZvJVEiNpQ,944
-pinexq_client/job_management/hcos/jobsroot_hco.py,sha256=
+pinexq_client/job_management/hcos/jobsroot_hco.py,sha256=_mHEw_gVvWDZ_LcD7T6zLajl-TrmS1x4ULrBk6AmknY,4637
 pinexq_client/job_management/hcos/output_dataslot_hco.py,sha256=zxpo-fI9eHcp_pMKcf2l-gRoPHX1RzQO53auHMRB_T8,1549
 pinexq_client/job_management/hcos/processing_step_hco.py,sha256=MykmFssysIc8fIUoPyNOLEGLGQNoC4zFgjIAIjf0v7U,5190
 pinexq_client/job_management/hcos/processing_step_used_tags_hco.py,sha256=90-2IWlYTcYX62NzmAPnmcUCwMDhmMZyBrNs_G3yigs,1067

@@ -42,11 +43,11 @@ pinexq_client/job_management/hcos/workdata_used_tags_query_result_hco.py,sha256=
 pinexq_client/job_management/hcos/workdataroot_hco.py,sha256=LdEPW2JJTqAWi-6zj-40lfREhthcDL6nPXQk_nfMtCA,3936
 pinexq_client/job_management/known_relations.py,sha256=UlOF-sua8SyOPNNKzT_j6JVG8T-aewHIzn7S2ajXBhI,593
 pinexq_client/job_management/model/__init__.py,sha256=ApHhNfjx4bPuz10sQnyBA2zajYbU7loDTZSKC5H_jBY,34
-pinexq_client/job_management/model/open_api_generated.py,sha256
+pinexq_client/job_management/model/open_api_generated.py,sha256=-7TWWzqSeaNld3w_a5sSQWAGZvqY6Oq5c-b_RkL9RrE,31137
 pinexq_client/job_management/model/sirenentities.py,sha256=OInvxHpC6mnnYQjOMM2xAw7uLtvWwj9E2EQSRJe2jDo,3202
 pinexq_client/job_management/tool/__init__.py,sha256=58CRDcP8ifSx9eA2uyTLEg0_fX3FUuNUogY_lirx9AY,96
-pinexq_client/job_management/tool/job.py,sha256=
-pinexq_client/job_management/tool/job_group.py,sha256=
-pinexq_client/job_management/tool/processing_step.py,sha256=
+pinexq_client/job_management/tool/job.py,sha256=aUa03aZ8rYPJA1SPAbCmdYS65G1s2xIPC9kEhzEnw08,22427
+pinexq_client/job_management/tool/job_group.py,sha256=4zW_SEbQMLVNqu_zLUP57epkypBbRc5gxjPLp_FZgZo,4515
+pinexq_client/job_management/tool/processing_step.py,sha256=0qq64JOESPFAgpUpmM9hWndh21BnCB95EExd21ZHYOg,9730
 pinexq_client/job_management/tool/workdata.py,sha256=8DwvzcjefKL-wBuCxy9ziaUC7gTf0TMXAa1WoCmHvZM,4903
-pinexq_client-0.
+pinexq_client-0.5.0.20241018.1.dist-info/RECORD,,

{pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.5.0.20241018.1.dist-info}/licenses/LICENSE
RENAMED
File without changes