pinexq-client 0.3.0.20240620.2__py3-none-any.whl → 0.4.2.20241009.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pinexq_client/core/enterapi.py +8 -9
- pinexq_client/core/exceptions.py +73 -2
- pinexq_client/core/hco/action_hco.py +12 -22
- pinexq_client/core/hco/action_with_parameters_hco.py +15 -21
- pinexq_client/core/hco/download_link_hco.py +2 -3
- pinexq_client/core/hco/hco_base.py +2 -3
- pinexq_client/core/hco/link_hco.py +10 -9
- pinexq_client/core/hco/unavailable.py +17 -0
- pinexq_client/core/hco/upload_action_hco.py +11 -10
- pinexq_client/core/model/error.py +5 -1
- pinexq_client/core/sirenaccess.py +18 -27
- pinexq_client/job_management/__init__.py +1 -1
- pinexq_client/job_management/enterjma.py +2 -4
- pinexq_client/job_management/hcos/entrypoint_hco.py +36 -35
- pinexq_client/job_management/hcos/input_dataslot_hco.py +53 -51
- pinexq_client/job_management/hcos/job_hco.py +121 -120
- pinexq_client/job_management/hcos/job_query_result_hco.py +72 -47
- pinexq_client/job_management/hcos/jobsroot_hco.py +44 -42
- pinexq_client/job_management/hcos/output_dataslot_hco.py +1 -1
- pinexq_client/job_management/hcos/processing_step_hco.py +71 -70
- pinexq_client/job_management/hcos/processingstep_query_result_hco.py +76 -51
- pinexq_client/job_management/hcos/processingsteproot_hco.py +44 -43
- pinexq_client/job_management/hcos/workdata_hco.py +81 -80
- pinexq_client/job_management/hcos/workdata_query_result_hco.py +75 -52
- pinexq_client/job_management/hcos/workdataroot_hco.py +53 -52
- pinexq_client/job_management/model/open_api_generated.py +3 -1
- pinexq_client/job_management/tool/job.py +108 -11
- pinexq_client/job_management/tool/job_group.py +158 -0
- pinexq_client/job_management/tool/processing_step.py +83 -5
- pinexq_client/job_management/tool/workdata.py +8 -0
- {pinexq_client-0.3.0.20240620.2.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/METADATA +2 -2
- pinexq_client-0.4.2.20241009.1.dist-info/RECORD +53 -0
- {pinexq_client-0.3.0.20240620.2.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/WHEEL +1 -1
- pinexq_client-0.4.2.20241009.1.dist-info/entry_points.txt +4 -0
- pinexq_client-0.3.0.20240620.2.dist-info/RECORD +0 -50
- {pinexq_client-0.3.0.20240620.2.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/licenses/LICENSE +0 -0
pinexq_client/job_management/tool/job.py
@@ -1,10 +1,11 @@
 import json as json_
+import warnings
 from typing import Any, Self

 import httpx
 from httpx import URL

-from pinexq_client.core import Link, MediaTypes
+from pinexq_client.core import Link, MediaTypes, ClientException, ApiException
 from pinexq_client.core.polling import wait_until, PollingException
 from pinexq_client.job_management.enterjma import enter_jma
 from pinexq_client.job_management.hcos import WorkDataLink, ProcessingStepLink, InputDataSlotHco, OutputDataSlotHco
@@ -39,7 +40,6 @@ from pinexq_client.job_management.model import (
 from pinexq_client.job_management.tool.processing_step import ProcessingStep
 from pinexq_client.job_management.tool.workdata import WorkData

-
 class Job:
     """Convenience wrapper for handling JobHcos in the JobManagement-Api.

@@ -165,9 +165,19 @@
         self.refresh()
         return self._job.state

+    def get_name(self) -> str:
+        """Returns the name of this job
+
+        Returns:
+            The name of this `Job`
+        """
+        self.refresh()
+        return self._job.name
+
     def select_processing(
             self,
             function_name: str | None = None,
+            *,
             processing_step_link: ProcessingStepLink | None = None,
             processing_step_instance: ProcessingStep | None = None
     ) -> Self:
@@ -186,10 +196,16 @@
             raise ValueError("Exactly one parameter must be provided")

         if processing_step_link is not None:
+            if not isinstance(processing_step_link, ProcessingStepLink):
+                raise TypeError('Instance passed to "processing_step_link" is not of type "ProcessingStepLink"')
             processing_url = processing_step_link.get_url()
         elif processing_step_instance is not None:
+            if not isinstance(processing_step_instance, ProcessingStep):
+                raise TypeError('Instance passed to "processing_step_instance" is not of type "ProcessingStep"')
             processing_url = processing_step_instance.self_link().get_url()
         else:
+            if not isinstance(function_name, str):
+                raise TypeError('Instance passed to "function_name" is not of type "str"')
             # ToDo: provide more parameters to query a processing step
             query_param = ProcessingStepQueryParameters(
                 filter=ProcessingStepFilterParameter(
@@ -199,7 +215,7 @@
             )
             query_result = self._processing_step_root.query_action.execute(query_param)
             if len(query_result.processing_steps) != 1:
-                raise
+                raise NameError(f"No processing step with the name '{function_name}' registered!")
             # Todo: For now we choose the first and only result. Make this more flexible?
             processing_url = query_result.processing_steps[0].self_link.get_url()

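With the new keyword-only marker, the link and instance variants of `select_processing` can no longer be passed positionally, and a failed lookup now raises a descriptive `NameError` instead of a bare `raise`. A minimal usage sketch; `job` and `step_link` are assumed to be an existing `Job` and a `ProcessingStepLink` obtained elsewhere:

# function_name stays positional; the other selectors are keyword-only as of 0.4.x
job.select_processing("my_function")                    # query by registered name
job.select_processing(processing_step_link=step_link)   # or by link, keyword-only
# An unknown name now fails loudly:
# NameError: No processing step with the name 'my_function' registered!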
@@ -253,16 +269,19 @@
         result = self._job.result
         return json_.loads(result) if result else None

-    def wait_for_state(self, state: JobStates, timeout_ms: int = 5000) -> Self:
-        """Wait for this job to reach a state.
+    def wait_for_state(self, state: JobStates, timeout_ms: int = 5000, polling_interval_ms: int = 1000) -> Self:
+        """Wait for this job to reach a state. If the job enters the error state, an exception is raised.

         Args:
             state: The state to wait for. After the job enters this state this function returns.
-            timeout_ms: Time span in milliseconds to wait for reaching the state before
-
+            timeout_ms: Time span in milliseconds to wait for reaching the state before raising an exception.
+            polling_interval_ms: Determines how fast the API is polled for updates.
+                Note that low values will produce unnecessary load.

         Returns:
             This `Job` object
+
+
         """
         try:
             wait_until(
@@ -270,23 +289,25 @@
                 timeout_ms=timeout_ms,
                 timeout_message="Waiting for job completion",
                 error_condition=lambda: self._job.state == JobStates.error,
+                polling_interval_ms=polling_interval_ms
             )
         except TimeoutError as timeout:
-            raise
+            raise TimeoutError(
                 f"Job did not reach state: '{state.value}' "
                 f"current state: '{self.get_state().value}'. Error:{str(timeout)}"
             )
         except PollingException:
             if self._job.state == JobStates.error:
                 error_reason = self._job.error_description
-                raise
-            raise
+                raise PollingException(f"Job failed. Error: {error_reason}")
+            raise PollingException("Job failed")

         return self

     def assign_input_dataslot(
             self,
             index: int,
+            *,
             work_data_link: WorkDataLink | None = None,
             work_data_instance: WorkData | None = None
     ) -> Self:
@@ -304,8 +325,12 @@
             raise ValueError("Exactly one parameter must be provided")

         if work_data_instance is not None:
+            if not isinstance(work_data_instance, WorkData):
+                raise Exception('Instance passed to "work_data_instance" is not of type "WorkData"')
             work_data = work_data_instance.self_link()
         else:
+            if not isinstance(work_data_link, WorkDataLink):
+                raise Exception('Instance passed to "work_data_link" is not of type "WorkDataLink"')
             work_data = work_data_link

         dataslot = self._job.input_dataslots[index]
@@ -321,6 +346,7 @@
     def assign_collection_input_dataslot(
             self,
             index: int,
+            *,
             work_data_links: list[WorkDataLink] | None = None,
             work_data_instances: list[WorkData] | None = None
     ) -> Self:
@@ -338,8 +364,13 @@
             raise ValueError("Exactly one parameter must be provided")

         if work_data_instances is not None:
+            if not isinstance(work_data_instances, list) or any(
+                    not isinstance(i, WorkData) for i in work_data_instances):
+                raise Exception('Instance passed to "work_data_instances" is not of type "list[WorkData]"')
             work_datas = [work_data_instance.self_link() for work_data_instance in work_data_instances]
         else:
+            if not isinstance(work_data_links, list) or any(not isinstance(i, WorkDataLink) for i in work_data_links):
+                raise Exception('Instance passed to "work_data_links" is not of type "list[WorkDataLink]"')
             work_datas = work_data_links

         dataslot = self._job.input_dataslots[index]
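The same keyword-only pattern applies to the dataslot helpers, with up-front type validation. A sketch; `wd` is assumed to be a `WorkData` instance and `wd_link` a `WorkDataLink`:

job.assign_input_dataslot(0, work_data_instance=wd)                  # single slot
job.assign_collection_input_dataslot(1, work_data_links=[wd_link])   # collection slot
# Passing the wrong type now raises immediately at the call site.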
@@ -408,7 +439,7 @@
         query_result = self._get_sub_jobs(state=state)
         return query_result.total_entities

-    def wait_for_sub_jobs_complete(self, timeout_ms: int =
+    def wait_for_sub_jobs_complete(self, timeout_ms: int = 60000, polling_interval_ms: int = 1000) -> Self:
         """Wait for all sub-jobs to reach the state 'completed'.

         This function will block execution until the state is reached or raise an exception
@@ -419,16 +450,21 @@

         Returns:
             This `Job` object
+        :param timeout_ms: Determines how long to wait for success.
+        :param polling_interval_ms: Determines how fast the API is polled for updates.
+            Note that low values will produce unnecessary load.
         """
         wait_until(
             condition=lambda: self.sub_jobs_in_state(JobStates.pending) == 0,
             timeout_ms=timeout_ms,
             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
+            polling_interval_ms=polling_interval_ms
         )
         wait_until(
             condition=lambda: self.sub_jobs_in_state(JobStates.processing) == 0,
             timeout_ms=timeout_ms,
             timeout_message=f"Timeout while waiting for sub-jobs to complete! [timeout: {timeout_ms}ms]",
+            polling_interval_ms=polling_interval_ms
         )

         error_count = self.sub_jobs_in_state(JobStates.error)
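Both waiting helpers now expose `polling_interval_ms`, trading status latency against request load, and `wait_for_sub_jobs_complete` gains an explicit 60000 ms default timeout. A sketch, assuming `job` is a started `Job`:

# Poll every 2 s instead of the 1 s default, allowing up to 10 minutes overall.
job.wait_for_state(JobStates.completed, timeout_ms=600_000, polling_interval_ms=2_000)

# The same knob applies when waiting on sub-jobs:
job.wait_for_sub_jobs_complete(timeout_ms=600_000, polling_interval_ms=2_000)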
@@ -455,6 +491,67 @@
         self._job.delete_action.execute()
         return self

+    def delete_with_associated(
+            self,
+            *,
+            delete_output_workdata: bool = True,
+            delete_input_workdata: bool = False,
+            delete_subjobs_with_data: bool = True
+    ) -> Self:
+        """Delete this job after deleting output workdata and sub-jobs, depending on the flags.
+        Afterward, also deletes input workdata depending on the flag. This is a best-effort operation:
+        if an operation cannot be executed, a warning is printed but the process continues.
+
+        Args:
+            delete_output_workdata: boolean flag to specify if output WorkData should be attempted for deletion. Default: True
+            delete_input_workdata: boolean flag to specify if input WorkData should be attempted for deletion. Default: False
+            delete_subjobs_with_data: boolean flag to specify if sub-jobs should be attempted for deletion. Default: True
+
+        Returns:
+            This `Job` object
+        """
+
+        # delete subjobs
+        if delete_subjobs_with_data is True:
+            for subjob in self._get_sub_jobs().iter_flat():
+                try:
+                    subjob.delete_action.execute()
+                except (ClientException, ApiException) as e:
+                    warnings.warn(f"Could not delete subjob: {subjob.self_link.get_url()}\n{e}")
+                    pass
+
+        # delete output workdatas
+        if delete_output_workdata is True:
+            for slot in self._job.output_dataslots:
+                for wd in slot.assigned_workdatas:
+                    try:
+                        wd.delete_action.execute()
+                    except (ClientException, ApiException) as e:
+                        warnings.warn(f"Could not delete output workdata: {wd.self_link.get_url()}\n{e}")
+                        pass
+
+        # delete this job
+        self.refresh()
+
+        try:
+            self.delete()
+        except (ClientException, ApiException) as e:
+            warnings.warn(f"Could not delete job: {self.self_link().get_url()}\n{e}")
+
+        # finally delete input workdatas
+        if delete_input_workdata is True:
+            for slot in self._job.input_dataslots:
+                for wd in slot.selected_workdatas:
+                    try:
+                        wd.allow_deletion_action.execute()
+                        wd = wd.self_link.navigate()
+                        wd.delete_action.execute()
+                    except (ClientException, ApiException) as e:
+                        warnings.warn(f"Could not delete input workdata: {wd.self_link.get_url()}\n{e}")
+                        pass
+
+        return self
+
     def unhide(self) -> Self:
         """Reveal this job again.

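A sketch of the new best-effort cleanup (all flags keyword-only); failures surface via `warnings.warn` rather than exceptions. `job` is assumed to be a finished `Job`:

# Defaults: remove sub-jobs and output workdata, keep input workdata.
job.delete_with_associated()

# Full cleanup including inputs (inputs are first released via the allow_deletion action):
job.delete_with_associated(
    delete_output_workdata=True,
    delete_input_workdata=True,
    delete_subjobs_with_data=True,
)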
pinexq_client/job_management/tool/job_group.py
@@ -0,0 +1,158 @@
+import time
+from typing import Union, Self
+
+import httpx
+
+from pinexq_client.job_management import Job
+from pinexq_client.job_management.hcos import JobQueryResultHco
+from pinexq_client.job_management.model import JobStates
+
+
+class JobGroup:
+    """
+    A wrapper class for a group of jobs for easier execution and waiting
+
+    Attributes:
+        _client:
+            The http client
+        _jobs:
+            List of jobs in the group
+    """
+
+    _client: httpx.Client
+
+    def __init__(self, client: httpx.Client):
+        self._jobs: list[Job] = []
+        self._client = client
+
+    @classmethod
+    def from_query_result(cls, client: httpx.Client, job_query_result: JobQueryResultHco) -> Self:
+        """
+        Initializes a `JobGroup` object from a JobQueryResultHco object
+        Args:
+            client: The http client
+            job_query_result: The JobQueryResultHco object whose jobs are to be added to the JobGroup
+
+        Returns:
+            The newly created `JobGroup` instance
+        """
+        instance = cls(client)
+        for job in job_query_result.iter_flat():
+            instance.add_jobs(Job.from_hco(instance._client, job))
+        return instance
+
+    def add_jobs(self, jobs: Union[Job, list[Job]]) -> Self:
+        """
+        Add a job or multiple jobs to the group
+
+        Args:
+            jobs: A job or a list of job objects to be added to the JobGroup
+
+        Returns:
+            This `JobGroup` object
+        """
+
+        if isinstance(jobs, list):
+            self._jobs.extend(jobs)
+        else:
+            self._jobs.append(jobs)
+        return self
+
+    def start_all(self) -> Self:
+        """
+        Start all jobs
+
+        Returns:
+            This `JobGroup` object
+        """
+        for job in self._jobs:
+            job.start()
+        return self
+
+    def wait_all(self, *, job_timeout_ms: int = 5000, total_timeout_ms: int | None = None) -> Self:
+        """
+        Wait for all jobs to reach the completed or error state.
+        If the overall timeout elapses while some jobs are still incomplete, an exception is raised.
+
+        Args:
+            job_timeout_ms:
+                Individual job timeout in milliseconds. Default is 5000 ms.
+            total_timeout_ms:
+                Timeout for the whole operation in milliseconds. Default is no timeout.
+        Returns:
+            This `JobGroup` object
+        """
+        start_time = time.time()
+        for job in self._jobs:
+            if total_timeout_ms is not None:
+                elapsed_time_ms = (time.time() - start_time) * 1000
+                if total_timeout_ms - elapsed_time_ms <= 0:
+                    raise Exception("Total timeout exceeded while waiting for jobs.")
+
+            try:
+                job.wait_for_state(JobStates.completed, timeout_ms=job_timeout_ms)
+            except Exception:
+                pass
+        return self
+
+    def all_jobs_completed_ok(self) -> bool:
+        for job in self._jobs:
+            state = job.get_state()
+            if state is not JobStates.completed:
+                return False
+        return True
+
+    def incomplete_jobs(self) -> list[Job]:
+        """
+        Returns the incomplete jobs
+
+        Returns:
+            List of incomplete jobs
+        """
+        incomplete_jobs = []
+        for job in self._jobs:
+            state = job.get_state()
+            if state in (JobStates.processing, JobStates.pending):
+                incomplete_jobs.append(job)
+        return incomplete_jobs
+
+    def jobs_with_error(self) -> list[Job]:
+        """
+        Returns the list of jobs that produced errors
+
+        Returns:
+            List of jobs that produced errors
+        """
+        return [job for job in self._jobs if job.get_state() == JobStates.error]
+
+    def remove(self, job_name: str) -> Self:
+        """
+        Removes all jobs from the group whose name matches the provided name
+
+        Args:
+            job_name:
+                The name of the job(s) to be removed
+        Returns:
+            This `JobGroup` object
+        """
+        self._jobs = [job for job in self._jobs if job.get_name() != job_name]
+        return self
+
+    def clear(self) -> Self:
+        """
+        Removes all jobs from the group
+
+        Returns:
+            This `JobGroup` object
+        """
+        self._jobs = []
+        return self
+
+    def get_jobs(self) -> list[Job]:
+        """
+        Returns the list of jobs in the group
+
+        Returns:
+            List of jobs in the group
+        """
+        return self._jobs
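A minimal sketch of the new `JobGroup` helper, importing from the module path added in this release; `client` is assumed to be an authenticated `httpx.Client` and `job_a`/`job_b` configured `Job` instances:

from pinexq_client.job_management.tool.job_group import JobGroup

group = JobGroup(client).add_jobs([job_a, job_b])
group.start_all().wait_all(job_timeout_ms=30_000, total_timeout_ms=120_000)

# wait_all() swallows per-job timeouts, so check the outcome explicitly:
if not group.all_jobs_completed_ok():
    failed_names = [job.get_name() for job in group.jobs_with_error()]
    print(f"Jobs with errors: {failed_names}")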
pinexq_client/job_management/tool/processing_step.py
@@ -1,4 +1,4 @@
-from typing import Any, Self
+from typing import Any, Self, List, Optional

 import httpx
 from httpx import URL
@@ -6,7 +6,7 @@ from httpx import URL
 from pinexq_client.core import Link, MediaTypes
 from pinexq_client.core.hco.upload_action_hco import UploadParameters
 from pinexq_client.job_management.enterjma import enter_jma
-from pinexq_client.job_management.hcos import ProcessingStepHco, ProcessingStepLink
+from pinexq_client.job_management.hcos import ProcessingStepHco, ProcessingStepLink, ProcessingStepQueryResultHco
 from pinexq_client.job_management.hcos.entrypoint_hco import EntryPointHco
 from pinexq_client.job_management.hcos.job_hco import GenericProcessingConfigureParameters
 from pinexq_client.job_management.hcos.processingsteproot_hco import ProcessingStepsRootHco
@@ -14,7 +14,8 @@ from pinexq_client.job_management.known_relations import Relations
 from pinexq_client.job_management.model import (
     CreateProcessingStepParameters,
     EditProcessingStepParameters,
-    SetProcessingStepTagsParameters,
+    SetProcessingStepTagsParameters, ProcessingStepQueryParameters, ProcessingStepFilterParameter,
+    FunctionNameMatchTypes,
 )

@@ -37,20 +38,22 @@ class ProcessingStep:
         self._entrypoint = enter_jma(client)
         self._processing_steps_root = self._entrypoint.processing_step_root_link.navigate()

-    def create(self, title: str, function_name: str) -> Self:
+    def create(self, title: str, function_name: str, version: str = "0") -> Self:
         """
         Creates a new ProcessingStep by name.

         Args:
             title: Title of the ProcessingStep to be created
             function_name: Function name of the ProcessingStep to be created
+            version: Version of the ProcessingStep to be created

         Returns:
             The newly created ProcessingStep as `ProcessingStep` object
         """
         processing_step_hco = self._processing_steps_root.register_new_action.execute(CreateProcessingStepParameters(
             title=title,
-            function_name=function_name
+            function_name=function_name,
+            version=version
         ))
         self._processing_step = processing_step_hco
         return self
@@ -94,6 +97,81 @@ class ProcessingStep:
         processing_step_instance._get_by_link(ProcessingStepLink.from_link(client, link))
         return processing_step_instance

+    @classmethod
+    def from_name(cls, client: httpx.Client, step_name: str, version: str = "0") -> Self:
+        """Create a ProcessingStep object from an existing name.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the registered processing step.
+            version: Version of the registered processing step.
+
+        Returns:
+            The newly created processing step as `ProcessingStep` object
+        """
+
+        # Attempt to find the processing step
+        query_result = cls._query_processing_steps(client, step_name, version)
+
+        # Check if exactly one result is found
+        if len(query_result.processing_steps) != 1:
+            # Attempt to suggest alternative steps if exact match not found
+            suggested_steps = cls._processing_steps_by_name(client, step_name)
+            raise NameError(
+                f"No processing step with the name {step_name} and version {version} registered. "
+                f"Suggestions: {suggested_steps}"
+            )
+
+        # Todo: For now we choose the first and only result. Make this more flexible?
+        processing_step_hco = query_result.processing_steps[0]
+        return ProcessingStep.from_hco(client, processing_step_hco)
+
+    @staticmethod
+    def _query_processing_steps(client: httpx.Client, step_name: str, version: Optional[str] = None) -> ProcessingStepQueryResultHco:
+        """
+        Helper function to query processing steps based on name and optional version.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the processing step.
+            version: Optional version to match.
+
+        Returns:
+            Query result object containing the matching processing steps.
+        """
+        query_param = ProcessingStepQueryParameters(
+            filter=ProcessingStepFilterParameter(
+                function_name=step_name,
+                function_name_match_type=FunctionNameMatchTypes.match_exact,
+                version=version
+            )
+        )
+        instance = ProcessingStep(client)
+        return instance._processing_steps_root.query_action.execute(query_param)
+
+    @staticmethod
+    def _processing_steps_by_name(client: httpx.Client, step_name: str) -> list:
+        """
+        Suggest processing steps if the exact step is not found.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the processing step.
+
+        Returns:
+            A list of alternative processing steps matching the step name.
+        """
+        # Query for steps without version to get suggestions
+        instance = ProcessingStep(client)
+        query_result = instance._query_processing_steps(client, step_name)
+
+        # If no suggestions are found, raise an error
+        if len(query_result.processing_steps) == 0:
+            raise NameError(f"No processing steps found with the name '{step_name}'.")
+
+        # Return list of alternative steps as suggestions
+        return [f"{step.function_name}:{step.version}" for step in query_result.processing_steps]
+
     def refresh(self) -> Self:
         """Updates the processing step from the server

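Sketch of the versioned registration and lookup, assuming `client` is an authenticated `httpx.Client` and `"my_function"` a registered step name:

# Register a step with an explicit version (defaults to "0"):
step = ProcessingStep(client).create(title="My Step", function_name="my_function", version="1")

# Resolve an already-registered step; a miss raises NameError with
# "function_name:version" suggestions gathered from a name-only query:
step = ProcessingStep.from_name(client, "my_function", version="1")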
pinexq_client/job_management/tool/workdata.py
@@ -144,5 +144,13 @@ class WorkData:
         self._work_data.delete_action.execute()
         return self

+    def download(self) -> bytes:
+        """Download WorkData.
+
+        Returns:
+            Downloaded WorkData in bytes
+        """
+        return self._work_data.download_link.download()
+
     def self_link(self) -> WorkDataLink:
         return self._work_data.self_link
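Sketch of the new download helper, assuming `wd` is an existing `WorkData` instance with an available download link:

payload: bytes = wd.download()   # delegates to the Hco's download_link
with open("result.bin", "wb") as file:
    file.write(payload)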
{pinexq_client-0.3.0.20240620.2.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/METADATA
RENAMED
@@ -1,8 +1,8 @@
 Metadata-Version: 2.1
 Name: pinexq-client
-Version: 0.3.0.20240620.2
+Version: 0.4.2.20241009.1
 Summary: A hypermedia-based client for the DataCybernetics PinexQ platform.
-Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
+Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Jasim Ahmed <ahmed@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
 Maintainer-Email: Mathias Reichardt <reichardt@data-cybernetics.com>, =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Carsten Blank <blank@data-cybernetics.com>
 License: MIT
 Requires-Python: >=3.11
pinexq_client-0.4.2.20241009.1.dist-info/RECORD
@@ -0,0 +1,53 @@
+pinexq_client-0.4.2.20241009.1.dist-info/METADATA,sha256=OwWRTY8LrZ0sTtVPcvKWudbDLVg_25sdw9B8inJczfc,3278
+pinexq_client-0.4.2.20241009.1.dist-info/WHEEL,sha256=Vza3XR51HW1KmFP0iIMUVYIvz0uQuKJpIXKYOBGQyFQ,90
+pinexq_client-0.4.2.20241009.1.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+pinexq_client-0.4.2.20241009.1.dist-info/licenses/LICENSE,sha256=3oz3tAhM7kOgRukkRe7wmh5T_HihZY77ZtJDJm91ZN8,1072
+pinexq_client/core/__init__.py,sha256=8SVD_PRgJtpUCOtVjdR6fRrv6KPNk7HD6UQrn0FKR04,235
+pinexq_client/core/base_relations.py,sha256=oIUS58pkbMDdqm-3YOdsenhL1smtzeAk4fp7-U595MY,162
+pinexq_client/core/enterapi.py,sha256=sL9TmF1L5LaDJnre1l_tiHDUo9vTbZ8cvPSov3Q1UTs,671
+pinexq_client/core/exceptions.py,sha256=NqB3y1ufjOfG3kv7Rz4ppXqJRAugt2zlgxkto2nIVQU,2228
+pinexq_client/core/hco/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pinexq_client/core/hco/action_hco.py,sha256=lctChgDHHwRqv0LU_8r8YRS708n5blYFxVkFNYf8YC8,2219
+pinexq_client/core/hco/action_with_parameters_hco.py,sha256=V-8n7-X2oSglQ4fYbLm4YKNcFhIoaT3SNIUVDflI9yY,3161
+pinexq_client/core/hco/download_link_hco.py,sha256=_ijLtRfzy0qKG_QXpsWBZ8FJfp60o5Lw7QVv4_EpgPY,1335
+pinexq_client/core/hco/hco_base.py,sha256=NglQuS6p1EimgtIx55G8ngBDGXc4lzyq08jsXzztE1I,3367
+pinexq_client/core/hco/link_hco.py,sha256=m-TxPtlBg7bVEcj_fztD84Kwvf5lWpqcAS_MBHdggkc,2063
+pinexq_client/core/hco/unavailable.py,sha256=rbIAXEtIjW7G0lQB5xsFSrakDDc_xAnr100sh2VE_Ds,566
+pinexq_client/core/hco/upload_action_hco.py,sha256=oOGw0Z_nx67CbqJ-QkQJbffdXajBgKmBAfSfFtyq8wk,4690
+pinexq_client/core/http_headers.py,sha256=tY4h3DKVDMkjw9DLFn57HdEnwR1NAixMF8Yjgy3gjQY,183
+pinexq_client/core/media_types.py,sha256=qU-Jd92H5HKczWDk51EstFHadO6A_QJL4lLXHRlfgjQ,600
+pinexq_client/core/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pinexq_client/core/model/error.py,sha256=FNJrXVTkVnh6oDkHHc3sSZ2wNtEPCZyqgzZT01MG8Tc,390
+pinexq_client/core/model/sirenmodels.py,sha256=vGRQlhM2cSa2caxQel91Jr48KWqM-vMYX32iaQCzIds,5547
+pinexq_client/core/polling.py,sha256=Z6VXn-PCGk4XX-4tQWZG59qJyCIS0eIrpPUORQDIGrs,1077
+pinexq_client/core/sirenaccess.py,sha256=5GDTolgIlt9rW2g9qG7LerUlmz0BQNKYGp78T2UPrv0,6771
+pinexq_client/job_management/__init__.py,sha256=pvdID8cHtpXcjx-PKW9svc9DxMGG58cXyW-mhYEJPQE,164
+pinexq_client/job_management/enterjma.py,sha256=Ivl_jVPw_gaLyU5nKbywM-bbVGpn0MoCrZ0DMbJYf3s,1411
+pinexq_client/job_management/hcos/__init__.py,sha256=vMIdxGHBsVcKYKrVkCzD4a_VaheKSNxCimospFn5N88,396
+pinexq_client/job_management/hcos/entrypoint_hco.py,sha256=qodjAwO_MtawUuhmaYjhGXHV-uW1k94V9gKRYZRkhn4,2234
+pinexq_client/job_management/hcos/info_hco.py,sha256=zWjR63SCEO_sUcZ9ha_aIoC_qUzAWLf50Xu4AHddAn8,1328
+pinexq_client/job_management/hcos/input_dataslot_hco.py,sha256=SDflhyW8kjpcTUfKAXnJxNR-etPzAHfoTqlYUcJZrxs,3442
+pinexq_client/job_management/hcos/job_hco.py,sha256=0ZzeBXjwJd3kbe3peIdGgnIHR_cvwSz1_yuO5135g34,7340
+pinexq_client/job_management/hcos/job_query_result_hco.py,sha256=I0G8YIlYDhTahLz8n06L8BywlcsMGNWUEsmEr4Sk0GU,3315
+pinexq_client/job_management/hcos/job_used_tags_hco.py,sha256=nys6E97NNXATdnvX6KZ46JR9qEb2lnqol9ZvJVEiNpQ,944
+pinexq_client/job_management/hcos/jobsroot_hco.py,sha256=aLVlGEl4BG3HboelTu3srTdbQOXPQJWVZrYPHeWe3m4,3594
+pinexq_client/job_management/hcos/output_dataslot_hco.py,sha256=zxpo-fI9eHcp_pMKcf2l-gRoPHX1RzQO53auHMRB_T8,1549
+pinexq_client/job_management/hcos/processing_step_hco.py,sha256=MykmFssysIc8fIUoPyNOLEGLGQNoC4zFgjIAIjf0v7U,5190
+pinexq_client/job_management/hcos/processing_step_used_tags_hco.py,sha256=90-2IWlYTcYX62NzmAPnmcUCwMDhmMZyBrNs_G3yigs,1067
+pinexq_client/job_management/hcos/processingstep_query_result_hco.py,sha256=YcCgigKvOIggILixgaEbmnM23FlkjCgxnhZC2Eh98dY,3817
+pinexq_client/job_management/hcos/processingsteproot_hco.py,sha256=gQBGMWEKX5kq_HwC7-eEjjfAm6oYTuIxGX5kKw_GKUM,3684
+pinexq_client/job_management/hcos/user_hco.py,sha256=z6USe-4nYzBfOoEx3n9_UbgomMTimg4EIa_XeVBj01A,1095
+pinexq_client/job_management/hcos/workdata_hco.py,sha256=lyqUQ93oNOIOS3J9V0BKNq7opoHdTQTTCcpJTubEbVU,4734
+pinexq_client/job_management/hcos/workdata_query_result_hco.py,sha256=yxEnu_COMxP3mt553JZD13jjPyqSp3DJjgd8es5Nq_E,3520
+pinexq_client/job_management/hcos/workdata_used_tags_query_result_hco.py,sha256=qB1iQpwD63579dq3tUF4DBB_rZRMqJ80y1ysf-41aOo,1087
+pinexq_client/job_management/hcos/workdataroot_hco.py,sha256=LdEPW2JJTqAWi-6zj-40lfREhthcDL6nPXQk_nfMtCA,3936
+pinexq_client/job_management/known_relations.py,sha256=UlOF-sua8SyOPNNKzT_j6JVG8T-aewHIzn7S2ajXBhI,593
+pinexq_client/job_management/model/__init__.py,sha256=ApHhNfjx4bPuz10sQnyBA2zajYbU7loDTZSKC5H_jBY,34
+pinexq_client/job_management/model/open_api_generated.py,sha256=9VQtoenAmAVsRdR9GaQSBqrJIPrWFYkF5DKv8n9Z6Ow,30354
+pinexq_client/job_management/model/sirenentities.py,sha256=OInvxHpC6mnnYQjOMM2xAw7uLtvWwj9E2EQSRJe2jDo,3202
+pinexq_client/job_management/tool/__init__.py,sha256=58CRDcP8ifSx9eA2uyTLEg0_fX3FUuNUogY_lirx9AY,96
+pinexq_client/job_management/tool/job.py,sha256=BA1S2NY6D4Cpa3Mmoly0M3qCzCiv6p-MP7_decSlP-0,22209
+pinexq_client/job_management/tool/job_group.py,sha256=4zW_SEbQMLVNqu_zLUP57epkypBbRc5gxjPLp_FZgZo,4515
+pinexq_client/job_management/tool/processing_step.py,sha256=0qq64JOESPFAgpUpmM9hWndh21BnCB95EExd21ZHYOg,9730
+pinexq_client/job_management/tool/workdata.py,sha256=8DwvzcjefKL-wBuCxy9ziaUC7gTf0TMXAa1WoCmHvZM,4903
+pinexq_client-0.4.2.20241009.1.dist-info/RECORD,,