pinexq-client 0.5.0.20241018.1__py3-none-any.whl → 0.7.0.20241113.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pinexq_client/core/hco/link_hco.py +42 -1
- pinexq_client/core/media_types.py +3 -0
- pinexq_client/core/model/error.py +1 -1
- pinexq_client/core/sirenaccess.py +2 -3
- pinexq_client/job_management/__init__.py +1 -1
- pinexq_client/job_management/hcos/jobsroot_hco.py +5 -19
- pinexq_client/job_management/model/open_api_generated.py +2 -10
- pinexq_client/job_management/tool/job.py +90 -40
- pinexq_client/job_management/tool/job_group.py +18 -6
- pinexq_client/job_management/tool/processing_step.py +27 -16
- pinexq_client/job_management/tool/workdata.py +28 -15
- {pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/METADATA +1 -1
- {pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/RECORD +16 -16
- {pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/WHEEL +1 -1
- {pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/entry_points.txt +0 -0
- {pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/licenses/LICENSE +0 -0

pinexq_client/core/hco/link_hco.py

@@ -2,10 +2,13 @@ from typing import Self, Type
 
 import httpx
 from httpx import URL
+from httpx import Response
 
-from pinexq_client.core import Link, Entity, navigate, ensure_siren_response, ClientException
+from pinexq_client.core import Link, Entity, navigate, ensure_siren_response, ClientException, raise_exception_on_error, \
+    ApiException
 from pinexq_client.core.hco.hco_base import ClientContainer, TEntity
 from pinexq_client.core.hco.unavailable import UnavailableLink, HypermediaAvailability
+from pinexq_client.core.model.error import ProblemDetails
 
 
 class LinkHco(ClientContainer, HypermediaAvailability):
@@ -51,6 +54,37 @@ class LinkHco(ClientContainer, HypermediaAvailability):
     def is_available() -> bool:
         return True
 
+    """Checks if the resource can be retrieved (no 404) """
+    def exists(self) -> bool:
+        response = navigate(self._client, self._link)
+        match response:
+            case Response() as r:
+                if r.status_code == 404:
+                    # resource is not found
+                    return False
+                elif r.status_code == 200:
+                    # resource found
+                    return True
+                elif r.status_code >= 400:
+                    raise_exception_on_error("Expected a resource or none, got error", r)
+                else:
+                    raise ApiException(f"Unexpected status code in response: {r.status_code}")
+
+            case ProblemDetails() as p:
+                if p.status == 404:
+                    # resource is not found
+                    return False
+                elif p.status == 200:
+                    # resource found
+                    return True
+                elif p.status >= 400:
+                    raise_exception_on_error("Expected a resource or none, got error", p)
+                else:
+                    raise ApiException(f"Unexpected status code in response: {p.status_code}")
+            case _:
+                # got entity
+                return True
+
     def _navigate_internal(self, parse_type: Type[TEntity] = Entity) -> TEntity:
         response = navigate(self._client, self._link, parse_type)
         return ensure_siren_response(response)
@@ -58,6 +92,13 @@ class LinkHco(ClientContainer, HypermediaAvailability):
     def get_url(self) -> URL:
         return URL(self._link.href)
 
+    def __eq__(self, other):
+        """Compares the link url to determine if the link is pointing to the same resource."""
+        if isinstance(other, LinkHco):
+            return self.get_url() == other.get_url()
+        else:
+            return NotImplemented
+
     def __repr__(self):
         rel_names = ', '.join((f"'{r}'" for r in self._link.rel))
         return f"<{self.__class__.__name__}: {rel_names}>"

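The new `exists()` probe and the URL-based `__eq__` let callers check whether a linked resource is still retrievable without parsing it, and compare links by target rather than by object identity. A minimal usage sketch, assuming `some_link` and `other_link` are concrete `LinkHco` subclasses (placeholder names) obtained from entities you already navigated to:

```python
# Hedged sketch: `some_link` / `other_link` stand for concrete LinkHco subclasses
# (e.g. a JobLink) taken from navigated entities.
if some_link.exists():                # False on 404, True on 200 or a parsed entity
    entity = some_link.navigate()     # concrete link types expose a typed navigate()

# Links now compare by href instead of object identity:
if some_link == other_link:
    print("both links point to the same resource")
```
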
pinexq_client/core/media_types.py

@@ -19,6 +19,9 @@ class MediaTypes(StrEnum):
     JPEG = "image/jpeg"
     BMP = "image/bmp"
 
+    WORKFLOW_DEFINITION = "application/vnd.pinexq.workflow.definition+json"
+    WORKFLOW_REPORT = "application/vnd.pinexq.workflow.report+json"
+
 
 class SirenClasses(StrEnum):
     FileUploadAction = "FileUploadAction"

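Since `MediaTypes` is a `StrEnum`, the two new workflow media types compare equal to their plain string values, which keeps content-type checks simple. A small sketch (the `media_type` value is a placeholder for whatever content type the API reports for an artifact):

```python
from pinexq_client.core.media_types import MediaTypes

media_type = "application/vnd.pinexq.workflow.report+json"  # placeholder value

if media_type == MediaTypes.WORKFLOW_REPORT:
    print("this artifact is a workflow report")
elif media_type == MediaTypes.WORKFLOW_DEFINITION:
    print("this artifact is a workflow definition")
```
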
pinexq_client/core/sirenaccess.py

@@ -43,12 +43,11 @@ def get_resource(client: httpx.Client, href: str, media_type: str = MediaTypes.S
         warnings.warn(f"Unexpected return code: {response.status_code}")
     return response
 
-
-
+def navigate(client: httpx.Client,
+             link: Link,
              parse_type: type[TEntity] = Entity) -> TEntity | ProblemDetails | Response:
     return get_resource(client, link.href, link.type, parse_type)
 
-
 def ensure_siren_response(response: TEntity | ProblemDetails | Response) -> TEntity:
     raise_exception_on_error(f"Error while navigating, unexpected response", response)
     return response

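`navigate()` returns a union of a parsed Siren entity, a `ProblemDetails` object or a raw `httpx.Response`; `ensure_siren_response()` collapses that union by raising on anything that is not an entity. A rough sketch of both call styles (`client` and `link` are placeholders for an `httpx.Client` and a `Link`):

```python
from httpx import Response

from pinexq_client.core import navigate, ensure_siren_response
from pinexq_client.core.model.error import ProblemDetails

result = navigate(client, link)  # client/link: placeholders

# Either branch on the union yourself ...
match result:
    case ProblemDetails() as problem:
        print(f"API problem, status {problem.status}")
    case Response() as raw:
        print(f"non-Siren response, status {raw.status_code}")
    case entity:
        print(f"parsed entity: {entity!r}")

# ... or let ensure_siren_response() raise on errors and hand back the entity.
entity = ensure_siren_response(navigate(client, link))
```
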
pinexq_client/job_management/hcos/jobsroot_hco.py

@@ -10,15 +10,15 @@ from pinexq_client.core.hco.link_hco import LinkHco
 from pinexq_client.core.hco.unavailable import UnavailableAction, UnavailableLink
 from pinexq_client.job_management.hcos.job_hco import JobLink
 from pinexq_client.job_management.hcos.job_query_result_hco import (
-
-
+    JobQueryResultHco,
+    JobQueryResultLink
 )
 from pinexq_client.job_management.hcos.job_used_tags_hco import JobUsedTagsLink
 from pinexq_client.job_management.known_relations import Relations
 from pinexq_client.job_management.model.open_api_generated import (
-
-
-
+    CreateJobParameters,
+    JobQueryParameters,
+    CreateSubJobParameters
 )
 from pinexq_client.job_management.model.sirenentities import JobsRootEntity
 
@@ -54,23 +54,11 @@ class JobQueryAction(ActionWithParametersHco):
         return self._get_default_parameters(JobQueryParameters, JobQueryParameters())
 
 
-class JobSetToErrorStateAction(ActionWithParametersHco[SetJobsErrorStateParameters]):
-    def execute(self, parameters: SetJobsErrorStateParameters):
-        self._execute(parameters)
-
-    def default_parameters(self) -> SetJobsErrorStateParameters:
-        return self._get_default_parameters(SetJobsErrorStateParameters, SetJobsErrorStateParameters(
-            message='Manually set to error state by admin',
-            created_this_many_hours_ago=0,
-        ))
-
-
 class JobsRootHco(Hco[JobsRootEntity]):
     create_job_action: CreateJobAction | UnavailableAction
     job_query_action: JobQueryAction | UnavailableAction
     create_subjob_action: CreateSubJobAction | UnavailableAction
     used_tags_link: JobUsedTagsLink | UnavailableLink
-    set_jobs_to_error_state: JobSetToErrorStateAction | UnavailableAction
 
     self_link: 'JobsRootLink'
 
@@ -86,8 +74,6 @@ class JobsRootHco(Hco[JobsRootEntity]):
         instance.used_tags_link = JobUsedTagsLink.from_entity_optional(
             instance._client, instance._entity, Relations.USED_TAGS)
         instance.self_link = JobsRootLink.from_entity(instance._client, instance._entity, Relations.SELF)
-        instance.set_jobs_to_error_state = JobSetToErrorStateAction.from_entity_optional(instance._client, instance._entity,
-                                                                                         "SetJobsToErrorState")
 
         return instance
 

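The bulk `SetJobsToErrorState` action and its `JobSetToErrorStateAction` wrapper disappear from the jobs root in this release; setting an individual job to the error state remains available through the `Job` tool class shown further below. A hedged sketch, assuming the server advertises the action for the job in question (`job_hco` is a placeholder for an existing `JobHco`):

```python
from pinexq_client.job_management.tool.job import Job  # module path as listed in the RECORD below

job = Job.from_hco(job_hco)   # job_hco: placeholder
job.set_to_error_state()      # executes the per-job set_to_error_state_action
```
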
pinexq_client/job_management/model/open_api_generated.py

@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename:  openapi.json
-#   timestamp: 2024-
+#   timestamp: 2024-11-04T09:06:44+00:00
 
 from __future__ import annotations
 
@@ -372,15 +372,6 @@ class SetJobTagsParameters(BaseModel):
     tags: List[str] = Field(..., alias='Tags')
 
 
-class SetJobsErrorStateParameters(BaseModel):
-    model_config = ConfigDict(
-        extra='allow',
-        populate_by_name=True,
-    )
-    message: constr(min_length=1) = Field(..., alias='Message')
-    created_before: AwareDatetime = Field(..., alias='CreatedBefore')
-
-
 class SetNameWorkDataParameters(BaseModel):
     model_config = ConfigDict(
         extra='allow',
@@ -653,6 +644,7 @@ class JobHtoOpenApiProperties(BaseModel):
     error_description: str | None = Field(None, alias='ErrorDescription')
     processing: ProcessingView | None = Field(None, alias='Processing')
     result: str | None = Field(None, alias='Result')
+    configured_parameters: str | None = Field(None, alias='ConfiguredParameters')
 
 
 class JobQueryResultHtoOpenApi(BaseModel):

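`JobHtoOpenApiProperties` gains an optional `configured_parameters` string that is populated from the PascalCase `ConfiguredParameters` field of the API, mirroring how `result` maps to `Result`. A cautious sketch of reading it (`job_properties` is a placeholder for a parsed instance; treating the value as a JSON string is an assumption based on how `Result` is handled by the `Job` tool class):

```python
import json

# job_properties: placeholder for a JobHtoOpenApiProperties parsed from an API response
raw = job_properties.configured_parameters  # filled from the 'ConfiguredParameters' JSON field

if raw is not None:
    configured = json.loads(raw)  # assumption: serialised as a JSON string, like 'Result'
    print(configured)
```
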
pinexq_client/job_management/tool/job.py

@@ -60,8 +60,8 @@ class Job:
     _client: httpx.Client
     _entrypoint: EntryPointHco
     _jobs_root: JobsRootHco
-    _job: JobHco | None = None
     _processing_step_root: ProcessingStepsRootHco
+    job_hco: JobHco | None = None  # Internal hco of the wrapper. This is updated by this class. You should not take a reference to this object.
 
     def __init__(self, client: httpx.Client):
         """
@@ -93,21 +93,20 @@ class Job:
         return self
 
     def _get_by_link(self, job_link: JobLink):
-        self.
+        self.job_hco = job_link.navigate()
 
     @classmethod
-    def from_hco(cls,
+    def from_hco(cls, job: JobHco) -> Self:
         """Initializes a `Job` object from an existing JobHco object.
 
         Args:
-            client: An httpx.Client instance initialized with the api-host-url as `base_url`
            job: The 'JobHco' to initialize this job from.
 
         Returns:
            The newly created job as `Job` object.
        """
-        job_instance = cls(
-        job_instance.
+        job_instance = cls(job._client)
+        job_instance.job_hco = job
         return job_instance
 
     @classmethod
@@ -140,7 +139,8 @@ class Job:
         Returns:
             The newly created job as `Job` object
         """
-
+        self._raise_if_no_hco()
+        parent_job_url = self.job_hco.self_link.get_url()
         sub_job_link = self._jobs_root.create_subjob_action.execute(
             CreateSubJobParameters(name=name, parent_job_url=str(parent_job_url))
         )
@@ -154,7 +154,8 @@ class Job:
         Returns:
             This `Job` object, but with updated properties.
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco = self.job_hco.self_link.navigate()
         return self
 
     def get_state(self) -> JobStates:
@@ -164,7 +165,7 @@ class Job:
             The current state of this `Job` from JobStates
         """
         self.refresh()
-        return self.
+        return self.job_hco.state
 
     def get_name(self) -> str:
         """Returns the name of this job
@@ -173,7 +174,7 @@ class Job:
             The name of this `Job`
         """
         self.refresh()
-        return self.
+        return self.job_hco.name
 
     def select_processing(
             self,
@@ -193,6 +194,7 @@ class Job:
         Returns:
             This `Job` object
         """
+        self._raise_if_no_hco()
         if sum(p is not None for p in [function_name, processing_step_link, processing_step_instance]) != 1:
             raise ValueError("Exactly one parameter must be provided")
 
@@ -220,7 +222,7 @@ class Job:
         # Todo: For now we choose the first and only result. Make this more flexible?
         processing_url = query_result.processing_steps[0].self_link.get_url()
 
-        self.
+        self.job_hco.select_processing_action.execute(
             SelectProcessingParameters(processing_step_url=str(processing_url))
         )
 
@@ -238,7 +240,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.configure_processing_action.execute(
             GenericProcessingConfigureParameters.model_validate(parameters)
         )
 
@@ -251,7 +254,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.start_processing_action.execute()
         self.refresh()
         return self
 
@@ -267,7 +271,7 @@ class Job:
         # TODO: return Sentinel or Exception on 'NotDoneYet'
         # TODO: handle return value equivalent to asyncio's Future objects
         self.refresh()
-        result = self.
+        result = self.job_hco.result
         return json_.loads(result) if result else None
 
     def wait_for_state(self, state: JobStates, timeout_ms: int = 5000, polling_interval_ms: int = 1000) -> Self:
@@ -284,12 +288,13 @@ class Job:
 
 
         """
+        self._raise_if_no_hco()
         try:
             wait_until(
                 condition=lambda: self.get_state() == state,
                 timeout_ms=timeout_ms,
                 timeout_message="Waiting for job completion",
-                error_condition=lambda: self.
+                error_condition=lambda: self.job_hco.state == JobStates.error,
                 polling_interval_ms=polling_interval_ms
             )
         except TimeoutError as timeout:
@@ -298,8 +303,8 @@ class Job:
                 f"current state: '{self.get_state().value}'. Error:{str(timeout)}"
            )
        except PollingException:
-            if self.
-                error_reason = self.
+            if self.job_hco.state == JobStates.error:
+                error_reason = self.job_hco.error_description
                 raise PollingException(f"Job failed'. Error:{error_reason}")
             raise PollingException("Job failed")
 
@@ -322,6 +327,7 @@ class Job:
         Returns:
             This `Job` object
         """
+        self._raise_if_no_hco()
         if sum(p is not None for p in [work_data_link, work_data_instance]) != 1:
             raise ValueError("Exactly one parameter must be provided")
 
@@ -334,7 +340,7 @@ class Job:
             raise Exception('Instance passed to "work_data_link" is not of type "WorkDataLink"')
         work_data = work_data_link
 
-        dataslot = self.
+        dataslot = self.job_hco.input_dataslots[index]
         dataslot.select_workdata_action.execute(
             parameters=SelectWorkDataForDataSlotParameters(
                 work_data_url=str(work_data.get_url())
@@ -361,6 +367,7 @@ class Job:
         Returns:
             This `Job` object
         """
+        self._raise_if_no_hco()
         if sum(p is not None for p in [work_data_links, work_data_instances]) != 1:
             raise ValueError("Exactly one parameter must be provided")
 
@@ -374,7 +381,7 @@ class Job:
             raise Exception('Instance passed to "work_data_links" is not of type "list[WorkDataLink]"')
         work_datas = work_data_links
 
-        dataslot = self.
+        dataslot = self.job_hco.input_dataslots[index]
         dataslot.select_workdata_collection_action.execute(
             parameters=SelectWorkDataCollectionForDataSlotParameters(
                 work_data_urls=[str(workdata_link.get_url()) for workdata_link in work_datas]
@@ -393,7 +400,8 @@ class Job:
         Returns:
             This `Job` object
         """
-
+        self._raise_if_no_hco()
+        dataslot = self.job_hco.input_dataslots[index]
 
         # already cleared
         if not dataslot.clear_workdata_action:
@@ -412,14 +420,16 @@ class Job:
             show_deleted: bool | None = None,
             processing_step_url: str | None = None,
     ) -> JobQueryResultHco:
+        self._raise_if_no_hco()
         filter_param = JobFilterParameter(
             is_sub_job=True,
-            parent_job_url=str(self.
+            parent_job_url=str(self.job_hco.self_link.get_url()),
             state=state,
             name=name,
             show_deleted=show_deleted,
             processing_step_url=processing_step_url,
         )
+
         query_param = JobQueryParameters(sort_by=sort_by, filter=filter_param)
         job_query_result = self._jobs_root.job_query_action.execute(query_param)
         return job_query_result
@@ -479,7 +489,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.hide_action.execute()
         self.refresh()
         return self
 
@@ -489,7 +500,9 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.delete_action.execute()
+        self.job_hco = None
         return self
 
     def delete_with_associated(
@@ -499,7 +512,7 @@ class Job:
             delete_input_workdata: bool = False,
             delete_subjobs_with_data: bool = True
     ) -> Self:
-        """Delete this job after deleting output workdata and subjobs depending on the flag.
+        """Delete this job after deleting output workdata and subjobs (recursive call) depending on the flag.
         Afterward, also deletes input workdata depending on the flag. This is a best effort operation,
         if an operation can not be executed a warning will be printed but the process continues.
 
@@ -511,22 +524,34 @@ class Job:
         Returns:
             This `Job` object
         """
+        self._raise_if_no_hco()
 
         # delete subjobs
         if delete_subjobs_with_data is True:
             for subjob in self._get_sub_jobs().iter_flat():
                 try:
-
+                    # recursion
+                    subjob_wrapper = Job.from_hco(subjob)
+                    subjob_wrapper.delete_with_associated(
+                        delete_output_workdata=delete_output_workdata,
+                        delete_input_workdata=delete_input_workdata,
+                        delete_subjobs_with_data=delete_subjobs_with_data)
+                    if subjob.self_link.exists():
+                        warnings.warn(f"Could not delete subjob: {subjob.self_link.get_url()}")
                 except (ClientException, ApiException) as e:
                     warnings.warn(f"Could not delete subjob: {subjob.self_link.get_url()}\n{e}")
                     pass
+            self.refresh()
 
         # delete output workdatas
         if delete_output_workdata is True:
-            for slot in self.
+            for slot in self.job_hco.output_dataslots:
                 for wd in slot.assigned_workdatas:
                     try:
-                        wd.delete_action.
+                        if wd.delete_action.is_available():
+                            wd.delete_action.execute()
+                        else:
+                            warnings.warn(f"Could not delete output workdata: {wd.self_link.get_url()}")
                     except (ClientException, ApiException) as e:
                         warnings.warn(f"Could not delete output workdata: {wd.self_link.get_url()}\n{e}")
                         pass
@@ -534,23 +559,35 @@ class Job:
         # delete this job
         self.refresh()
 
+        job_was_deleted = False
         try:
-            self.
+            if self.job_hco.delete_action.is_available():
+                self.job_hco.delete_action.execute()
+                # do not delete the hco here since we want to access its data slots just below
+                job_was_deleted = True
+
+            else:
+                warnings.warn(f"Could not delete job: {self.self_link().get_url()}")
         except (ClientException, ApiException) as e:
             warnings.warn(f"Could not delete job: {self.self_link().get_url()}\n{e}")
 
         # finally delete input workdatas
         if delete_input_workdata is True:
-            for slot in self.
+            for slot in self.job_hco.input_dataslots:
                 for wd in slot.selected_workdatas:
                     try:
-                        wd.allow_deletion_action.execute()
                         wd = wd.self_link.navigate()
-                        wd.delete_action.
+                        if wd.delete_action.is_available():
+                            wd.delete_action.execute()
+                        else:
+                            warnings.warn(f"Could not delete input workdata: {wd.self_link.get_url()}")
                     except (ClientException, ApiException) as e:
                         warnings.warn(f"Could not delete input workdata: {wd.self_link.get_url()}\n{e}")
                         pass
 
+        # we are done with the hco, set to none now since the resource was deleted. The wrapper makes no sense anymore
+        if job_was_deleted:
+            self.job_hco = None
         return self
 
     def unhide(self) -> Self:
@@ -559,7 +596,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.unhide_action.execute()
         self.refresh()
         return self
 
@@ -569,7 +607,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.allow_output_data_deletion_action.execute()
         self.refresh()
         return self
 
@@ -579,7 +618,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.disallow_output_data_deletion_action.execute()
         self.refresh()
         return self
 
@@ -589,7 +629,8 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.edit_tags_action.execute(
             SetJobTagsParameters(tags=tags)
         )
         self.refresh()
@@ -601,7 +642,8 @@ class Job:
         Returns:
             `list[InputDataSlotHco]` object
         """
-
+        self._raise_if_no_hco()
+        return self.job_hco.input_dataslots
 
     def get_output_data_slots(self) -> list[OutputDataSlotHco]:
         """Returns list of OutputDataSlotHco objects.
@@ -609,13 +651,16 @@ class Job:
         Returns:
             `list[OutputDataSlotHco]` object
         """
-
+        self._raise_if_no_hco()
+        return self.job_hco.output_dataslots
 
     def get_processing_info(self) -> ProcessingView:
-
+        self._raise_if_no_hco()
+        return self.job_hco.processing
 
     def self_link(self) -> JobLink:
-
+        self._raise_if_no_hco()
+        return self.job_hco.self_link
 
     def set_to_error_state(self) -> Self:
         """Set this job to error state.
@@ -623,5 +668,10 @@ class Job:
         Returns:
             This `Job` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.job_hco.set_to_error_state_action.execute()
         return self
+
+    def _raise_if_no_hco(self):
+        if self.job_hco is None:
+            raise Exception("No job hco present. Maybe this class is used after resource deletion.")

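The wrapper's hco is now the public `job_hco` attribute (replacing the private `_job`), and every operation first runs `_raise_if_no_hco()`, so using a `Job` after its resource was deleted fails with a clear message instead of an `AttributeError` on `None`. A usage sketch built only from methods visible in this diff (`existing_job_hco` is a placeholder for a `JobHco`, e.g. taken from a job query result):

```python
from pinexq_client.job_management.tool.job import Job  # module path as listed in the RECORD below

job = Job.from_hco(existing_job_hco)       # the wrapped hco is available as job.job_hco
print(job.get_name(), job.get_state())

# Best-effort clean-up: recursively deletes sub jobs, then output workdata, the job
# itself and finally input workdata, warning instead of aborting when a step fails.
job.delete_with_associated(
    delete_output_workdata=True,
    delete_input_workdata=True,
    delete_subjobs_with_data=True,
)

# If the job resource was actually deleted, job.job_hco is reset to None and any further
# call raises "No job hco present. Maybe this class is used after resource deletion."
```
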
pinexq_client/job_management/tool/job_group.py

@@ -38,7 +38,7 @@ class JobGroup:
         """
         instance = cls(client)
         for job in job_query_result.iter_flat():
-            instance.add_jobs(Job.from_hco(
+            instance.add_jobs(Job.from_hco(job))
         return instance
 
     def add_jobs(self, jobs: Union[Job, list[Job]]) -> Self:
@@ -125,17 +125,29 @@ class JobGroup:
         """
         return [job for job in self._jobs if job.get_state() == JobStates.error]
 
-    def remove(self,
+    def remove(self, jobs: Job | list[Job]) -> Self:
         """
-        Removes
+        Removes given job(s) from the group
 
         Args:
-
-            The
+            jobs:
+                The Job instance(s) to be removed
         Returns:
             This `JobGroup` object
         """
-
+
+        def remove_by_url(job_url: str):
+            for existing_job in self._jobs:
+                if existing_job.self_link().get_url() == job_url:
+                    self._jobs.remove(existing_job)
+                    break
+
+        if isinstance(jobs, list):
+            for job in jobs:
+                remove_by_url(str(job.self_link().get_url()))
+        else:
+            remove_by_url(str(jobs.self_link().get_url()))
+
         return self
 
     def clear(self) -> Self:

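`JobGroup.remove()` now accepts a single `Job` or a list and matches group members by their self-link URL rather than by object identity, so a freshly re-wrapped job still removes the original entry. A minimal sketch (`client`, `job_a` and `job_b` are placeholders):

```python
from pinexq_client.job_management.tool.job_group import JobGroup  # module path as listed in the RECORD below

group = JobGroup(client)            # client: httpx.Client, as in the diff above
group.add_jobs([job_a, job_b])      # Job instances (placeholders)

group.remove(job_a)                 # a single Job ...
group.remove([job_b])               # ... or a list; matching is done via self_link().get_url()
```
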
pinexq_client/job_management/tool/processing_step.py

@@ -1,4 +1,4 @@
-from typing import Any, Self,
+from typing import Any, Self, Optional
 
 import httpx
 from httpx import URL
@@ -26,7 +26,8 @@ class ProcessingStep:
     _client: httpx.Client
     _entrypoint: EntryPointHco
     _processing_steps_root: ProcessingStepsRootHco
-
+    processing_step_hco: ProcessingStepHco | None = None  # Internal hco of the wrapper. This is updated by this class. You should not take a reference to this object.
+
 
     def __init__(self, client: httpx.Client):
         """
@@ -55,25 +56,24 @@ class ProcessingStep:
             function_name=function_name,
             version=version
         ))
-        self.
+        self.processing_step_hco = processing_step_hco
         return self
 
     def _get_by_link(self, processing_step_link: ProcessingStepLink):
-        self.
+        self.processing_step_hco = processing_step_link.navigate()
 
     @classmethod
-    def from_hco(cls,
+    def from_hco(cls, processing_step: ProcessingStepHco) -> Self:
         """Initializes a `ProcessingStep` object from an existing ProcessingStepHco object.
 
         Args:
-            client: An httpx.Client instance initialized with the api-host-url as `base_url`
             processing_step: The 'ProcessingStepHco' to initialize this ProcessingStep from.
 
         Returns:
             The newly created processing step as `ProcessingStep` object.
         """
-        processing_step_instance = cls(
-        processing_step_instance.
+        processing_step_instance = cls(processing_step._client)
+        processing_step_instance.processing_step_hco = processing_step
         return processing_step_instance
 
     @classmethod
@@ -124,7 +124,7 @@ class ProcessingStep:
 
         # Todo: For now we choose the first and only result. Make this more flexible?
         processing_step_hco = query_result.processing_steps[0]
-        return ProcessingStep.from_hco(
+        return ProcessingStep.from_hco(processing_step_hco)
 
     @staticmethod
     def _query_processing_steps(client: httpx.Client, step_name: str, version: Optional[str] = None) -> ProcessingStepQueryResultHco:
@@ -178,7 +178,8 @@ class ProcessingStep:
         Returns:
             This `ProcessingStep` object, but with updated properties.
         """
-        self.
+        self._raise_if_no_hco()
+        self.processing_step_hco = self.processing_step_hco.self_link.navigate()
         return self
 
     def set_tags(self, tags: list[str]) -> Self:
@@ -186,7 +187,8 @@ class ProcessingStep:
 
         Returns:
             This `ProcessingStep` object"""
-        self.
+        self._raise_if_no_hco()
+        self.processing_step_hco.edit_tags_action.execute(SetProcessingStepTagsParameters(
             tags=tags
         ))
         self.refresh()
@@ -203,7 +205,8 @@ class ProcessingStep:
 
         Returns:
             This `ProcessingStep` object"""
-        self.
+        self._raise_if_no_hco()
+        self.processing_step_hco.edit_properties_action.execute(EditProcessingStepParameters(
             title=new_title,
             function_name=new_function_name,
             is_public=is_public
@@ -221,7 +224,8 @@ class ProcessingStep:
         Returns:
             This `ProcessingStep` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.processing_step_hco.configure_default_parameters_action.execute(
             GenericProcessingConfigureParameters.model_validate(parameters)
         )
 
@@ -234,7 +238,8 @@ class ProcessingStep:
         Returns:
             This `ProcessingStep` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.processing_step_hco.clear_default_parameters_action.execute()
         self.refresh()
 
         return self
@@ -245,7 +250,8 @@ class ProcessingStep:
         Returns:
             This `ProcessingStep` object
         """
-        self.
+        self._raise_if_no_hco()
+        self.processing_step_hco.upload_configuration_action.execute(
             UploadParameters(
                 filename="config.json",  # placeholder, jma does not care about filename
                 mediatype=MediaTypes.APPLICATION_JSON,
@@ -257,4 +263,9 @@ class ProcessingStep:
         return self
 
     def self_link(self) -> ProcessingStepLink:
-
+        self._raise_if_no_hco()
+        return self.processing_step_hco.self_link
+
+    def _raise_if_no_hco(self):
+        if self.processing_step_hco is None:
+            raise Exception("No processing step hco present. Maybe this class is used after resource deletion.")

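`ProcessingStep.from_hco()` now takes only the hco (the `httpx.Client` is taken from it), and the wrapped hco is public as `processing_step_hco` with the same `_raise_if_no_hco()` guard as the other tool classes. A short sketch (`step_hco` is a placeholder for a `ProcessingStepHco`, e.g. from a processing-step query result):

```python
from pinexq_client.job_management.tool.processing_step import ProcessingStep  # path as in the RECORD below

step = ProcessingStep.from_hco(step_hco)   # no separate httpx.Client argument anymore
step.set_tags(["demo", "v2"])
print(step.self_link().get_url())          # raises once the wrapped hco is gone
```
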
pinexq_client/job_management/tool/workdata.py

@@ -20,7 +20,7 @@ class WorkData:
     _client: httpx.Client
     _entrypoint: EntryPointHco
     _work_data_root: WorkDataRootHco
-
+    work_data_hco: WorkDataHco | None = None  # Internal hco of the wrapper. This is updated by this class. You should not take a reference to this object.
 
     def __init__(self, client: httpx.Client):
         """
@@ -49,23 +49,23 @@ class WorkData:
         self._get_by_link(work_data_link)
         return self
 
-    def _get_by_link(self,
-        self.
+    def _get_by_link(self, work_data_link: WorkDataLink):
+        self.work_data_hco = work_data_link.navigate()
 
     @classmethod
-    def from_hco(cls,
+    def from_hco(cls, work_data_hco: WorkDataHco) -> Self:
         """Initializes a `WorkData` object from an existing WorkDataHco object.
 
         Args:
             client: An httpx.Client instance initialized with the api-host-url as `base_url`
-
+            work_data_hco: The WorkDataHco to initialize this WorkData from.
 
         Returns:
             The newly created work data as `WorkData` object.
         """
 
-        work_data_instance = cls(
-        work_data_instance.
+        work_data_instance = cls(work_data_hco._client)
+        work_data_instance.work_data_hco = work_data_hco
         return work_data_instance
 
     @classmethod
@@ -95,7 +95,8 @@ class WorkData:
         Returns:
             This `WorkData` object, but with updated properties.
         """
-        self.
+        self._raise_if_no_hco()
+        self.work_data_hco = self.work_data_hco.self_link.navigate()
         return self
 
     def set_tags(self, tags: list[str]):
@@ -103,7 +104,8 @@ class WorkData:
 
         Returns:
             This `WorkData` object"""
-        self.
+        self._raise_if_no_hco()
+        self.work_data_hco.edit_tags_action.execute(SetTagsWorkDataParameters(
             tags=tags
         ))
         self.refresh()
@@ -114,7 +116,8 @@ class WorkData:
 
         Returns:
             This `WorkData` object"""
-        self.
+        self._raise_if_no_hco()
+        self.work_data_hco.allow_deletion_action.execute()
         self.refresh()
         return self
 
@@ -123,7 +126,8 @@ class WorkData:
 
         Returns:
             This `WorkData` object"""
-        self.
+        self._raise_if_no_hco()
+        self.work_data_hco.disallow_deletion_action.execute()
         self.refresh()
         return self
 
@@ -132,7 +136,8 @@ class WorkData:
 
         Returns:
             This `WorkData` object"""
-        self.
+        self._raise_if_no_hco()
+        self.work_data_hco.hide_action.execute()
         self.refresh()
         return self
 
@@ -141,7 +146,9 @@ class WorkData:
 
         Returns:
             This `WorkData` object"""
-        self.
+        self._raise_if_no_hco()
+        self.work_data_hco.delete_action.execute()
+        self.work_data_hco = None
         return self
 
     def download(self) -> bytes:
@@ -150,7 +157,13 @@ class WorkData:
         Returns:
             Downloaded WorkData in bytes
         """
-
+        self._raise_if_no_hco()
+        return self.work_data_hco.download_link.download()
 
     def self_link(self) -> WorkDataLink:
-
+        self._raise_if_no_hco()
+        return self.work_data_hco.self_link
+
+    def _raise_if_no_hco(self):
+        if self.work_data_hco is None:
+            raise Exception("No work data hco present. Maybe this class is used after resource deletion.")

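`WorkData` gets the same treatment: the wrapped hco is public as `work_data_hco`, the delete operation resets it to `None`, and every later call goes through `_raise_if_no_hco()`. A brief sketch using only methods named in this diff (`wd_hco` is a placeholder for a `WorkDataHco`, e.g. an output workdata of a job):

```python
from pinexq_client.job_management.tool.workdata import WorkData  # path as in the RECORD below

workdata = WorkData.from_hco(wd_hco)     # wd_hco: placeholder
workdata.set_tags(["intermediate", "run-42"])
payload: bytes = workdata.download()     # fetched through the hco's download link
print(workdata.self_link().get_url())
```

After the wrapper's delete operation has executed `delete_action`, `work_data_hco` is `None` and each of the calls above raises "No work data hco present. ..." instead of crashing on a `None` access.
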
{pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/METADATA
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pinexq-client
-Version: 0.5.0.20241018.1
+Version: 0.7.0.20241113.1
 Summary: A hypermedia-based client for the DataCybernetics PinexQ platform.
 Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Jasim Ahmed <ahmed@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
 Maintainer-Email: Mathias Reichardt <reichardt@data-cybernetics.com>, =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Carsten Blank <blank@data-cybernetics.com>

{pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/RECORD
RENAMED

@@ -1,7 +1,7 @@
-pinexq_client-0.
-pinexq_client-0.
-pinexq_client-0.
-pinexq_client-0.
+pinexq_client-0.7.0.20241113.1.dist-info/METADATA,sha256=5aTcxNIUNdX1Szxha37WAKXXSbG_jia6h84xiw2CEy8,3278
+pinexq_client-0.7.0.20241113.1.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+pinexq_client-0.7.0.20241113.1.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+pinexq_client-0.7.0.20241113.1.dist-info/licenses/LICENSE,sha256=3oz3tAhM7kOgRukkRe7wmh5T_HihZY77ZtJDJm91ZN8,1072
 pinexq_client/core/__init__.py,sha256=8SVD_PRgJtpUCOtVjdR6fRrv6KPNk7HD6UQrn0FKR04,235
 pinexq_client/core/base_relations.py,sha256=oIUS58pkbMDdqm-3YOdsenhL1smtzeAk4fp7-U595MY,162
 pinexq_client/core/enterapi.py,sha256=sL9TmF1L5LaDJnre1l_tiHDUo9vTbZ8cvPSov3Q1UTs,671
@@ -11,17 +11,17 @@ pinexq_client/core/hco/action_hco.py,sha256=6bWea-SkFjn52m5sYywkZeNOe74Nxmsvj338
 pinexq_client/core/hco/action_with_parameters_hco.py,sha256=qvtoqTfDHddxB0ALanWXNDobSpazCmK2_ulTuvax2A4,3280
 pinexq_client/core/hco/download_link_hco.py,sha256=_ijLtRfzy0qKG_QXpsWBZ8FJfp60o5Lw7QVv4_EpgPY,1335
 pinexq_client/core/hco/hco_base.py,sha256=NglQuS6p1EimgtIx55G8ngBDGXc4lzyq08jsXzztE1I,3367
-pinexq_client/core/hco/link_hco.py,sha256=
+pinexq_client/core/hco/link_hco.py,sha256=XUk8anEu4hJET0ytKooB7-2Ntuzfes1rQziYXRE6D40,3845
 pinexq_client/core/hco/unavailable.py,sha256=CBJN_YnQRPPTwYr-3MIXQIgYXIDLw9ozIM2TNo0uewY,942
 pinexq_client/core/hco/upload_action_hco.py,sha256=oOGw0Z_nx67CbqJ-QkQJbffdXajBgKmBAfSfFtyq8wk,4690
 pinexq_client/core/http_headers.py,sha256=tY4h3DKVDMkjw9DLFn57HdEnwR1NAixMF8Yjgy3gjQY,183
-pinexq_client/core/media_types.py,sha256=
+pinexq_client/core/media_types.py,sha256=GhrTEXqIb4vAM9Bl-i3WybYiL3T_Uy1Eu6hvLN7Oexc,745
 pinexq_client/core/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pinexq_client/core/model/error.py,sha256=
+pinexq_client/core/model/error.py,sha256=ZDbUlwsj7d8XPMolSSLTFwgs3RBLvOvgmlEtoBuon5Y,418
 pinexq_client/core/model/sirenmodels.py,sha256=vGRQlhM2cSa2caxQel91Jr48KWqM-vMYX32iaQCzIds,5547
 pinexq_client/core/polling.py,sha256=Z6VXn-PCGk4XX-4tQWZG59qJyCIS0eIrpPUORQDIGrs,1077
-pinexq_client/core/sirenaccess.py,sha256=
-pinexq_client/job_management/__init__.py,sha256=
+pinexq_client/core/sirenaccess.py,sha256=qopQKDfL1enJFb0Ja8UgsLMVOa0oBGy7PonhlV7YVBY,6950
+pinexq_client/job_management/__init__.py,sha256=pZ4G2mi6jqfKnUtcf9Mvrb0IpkmEvG2JDVXuYLyInnY,164
 pinexq_client/job_management/enterjma.py,sha256=Ivl_jVPw_gaLyU5nKbywM-bbVGpn0MoCrZ0DMbJYf3s,1411
 pinexq_client/job_management/hcos/__init__.py,sha256=vMIdxGHBsVcKYKrVkCzD4a_VaheKSNxCimospFn5N88,396
 pinexq_client/job_management/hcos/entrypoint_hco.py,sha256=qodjAwO_MtawUuhmaYjhGXHV-uW1k94V9gKRYZRkhn4,2234
@@ -30,7 +30,7 @@ pinexq_client/job_management/hcos/input_dataslot_hco.py,sha256=SDflhyW8kjpcTUfKA
 pinexq_client/job_management/hcos/job_hco.py,sha256=CwF0eD07pd88iZPQUb_10iDtotiemy48BJ5exYN5gRc,8649
 pinexq_client/job_management/hcos/job_query_result_hco.py,sha256=I0G8YIlYDhTahLz8n06L8BywlcsMGNWUEsmEr4Sk0GU,3315
 pinexq_client/job_management/hcos/job_used_tags_hco.py,sha256=nys6E97NNXATdnvX6KZ46JR9qEb2lnqol9ZvJVEiNpQ,944
-pinexq_client/job_management/hcos/jobsroot_hco.py,sha256=
+pinexq_client/job_management/hcos/jobsroot_hco.py,sha256=P8C5CaIEq_bkh6YgJBuQEp45Cc4fHuU5lYuWNdgtISY,3853
 pinexq_client/job_management/hcos/output_dataslot_hco.py,sha256=zxpo-fI9eHcp_pMKcf2l-gRoPHX1RzQO53auHMRB_T8,1549
 pinexq_client/job_management/hcos/processing_step_hco.py,sha256=MykmFssysIc8fIUoPyNOLEGLGQNoC4zFgjIAIjf0v7U,5190
 pinexq_client/job_management/hcos/processing_step_used_tags_hco.py,sha256=90-2IWlYTcYX62NzmAPnmcUCwMDhmMZyBrNs_G3yigs,1067
@@ -43,11 +43,11 @@ pinexq_client/job_management/hcos/workdata_used_tags_query_result_hco.py,sha256=
 pinexq_client/job_management/hcos/workdataroot_hco.py,sha256=LdEPW2JJTqAWi-6zj-40lfREhthcDL6nPXQk_nfMtCA,3936
 pinexq_client/job_management/known_relations.py,sha256=UlOF-sua8SyOPNNKzT_j6JVG8T-aewHIzn7S2ajXBhI,593
 pinexq_client/job_management/model/__init__.py,sha256=ApHhNfjx4bPuz10sQnyBA2zajYbU7loDTZSKC5H_jBY,34
-pinexq_client/job_management/model/open_api_generated.py,sha256
+pinexq_client/job_management/model/open_api_generated.py,sha256=Bcdy60VZtRPFXj05z6NjWeopAZsBo48QkVBdxwqScmI,30946
 pinexq_client/job_management/model/sirenentities.py,sha256=OInvxHpC6mnnYQjOMM2xAw7uLtvWwj9E2EQSRJe2jDo,3202
 pinexq_client/job_management/tool/__init__.py,sha256=58CRDcP8ifSx9eA2uyTLEg0_fX3FUuNUogY_lirx9AY,96
-pinexq_client/job_management/tool/job.py,sha256=
-pinexq_client/job_management/tool/job_group.py,sha256=
-pinexq_client/job_management/tool/processing_step.py,sha256=
-pinexq_client/job_management/tool/workdata.py,sha256=
-pinexq_client-0.
+pinexq_client/job_management/tool/job.py,sha256=_4HKeOAXsVx9f3DR-2E2Oyr3RqGqEb3Am4D1HwFh9JI,24795
+pinexq_client/job_management/tool/job_group.py,sha256=JyBKqtKAMpVXwdGaGKzh5IJOHgMk8zhK9b54jfvEHkk,4817
+pinexq_client/job_management/tool/processing_step.py,sha256=dM4IcNymFl9d7nbxWliFE7gLLxG4RFQ8B4w14lfF2Mc,10180
+pinexq_client/job_management/tool/workdata.py,sha256=wRy_yfFZUJDh-hoGUuAbQaRGtPysDmEOhLwD84Fgz04,5510
+pinexq_client-0.7.0.20241113.1.dist-info/RECORD,,

{pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/entry_points.txt
RENAMED
File without changes

{pinexq_client-0.5.0.20241018.1.dist-info → pinexq_client-0.7.0.20241113.1.dist-info}/licenses/LICENSE
RENAMED
File without changes