pinexq-client 0.4.0.2024.717.1__py3-none-any.whl → 0.4.2.20241009.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pinexq_client/job_management/__init__.py +1 -1
- pinexq_client/job_management/model/open_api_generated.py +3 -1
- pinexq_client/job_management/tool/job.py +7 -7
- pinexq_client/job_management/tool/job_group.py +148 -130
- pinexq_client/job_management/tool/processing_step.py +83 -5
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/METADATA +2 -2
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/RECORD +10 -9
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/WHEEL +1 -1
- pinexq_client-0.4.2.20241009.1.dist-info/entry_points.txt +4 -0
- {pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/licenses/LICENSE +0 -0

pinexq_client/job_management/model/open_api_generated.py

@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename: openapi.json
-#   timestamp: 2024-
+#   timestamp: 2024-09-24T10:14:32+00:00
 
 from __future__ import annotations
 
@@ -58,6 +58,7 @@ class CreateProcessingStepParameters(BaseModel):
     )
     title: constr(min_length=1) = Field(..., alias='Title')
     function_name: constr(min_length=1) = Field(..., alias='FunctionName')
+    version: constr(min_length=1) = Field(..., alias='Version')
 
 
 class CreateSubJobParameters(BaseModel):
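The generated `CreateProcessingStepParameters` model therefore gains a required `Version` field next to `Title` and `FunctionName`. A minimal sketch of constructing it after this change; the concrete values are placeholders, and passing field names (rather than the serialization aliases) mirrors how `processing_step.py` uses the model further down in this diff:

```python
from pinexq_client.job_management.model import CreateProcessingStepParameters

# `version` is now a required, non-empty string; omitting it fails validation.
params = CreateProcessingStepParameters(
    title="Resample time series",         # serialized as 'Title'
    function_name="resample_timeseries",  # serialized as 'FunctionName'
    version="1",                          # serialized as 'Version' (new in this release)
)
```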
@@ -785,6 +786,7 @@ class WorkDataHtoOpenApiProperties(BaseModel):
     media_type: str | None = Field(None, alias='MediaType')
     kind: WorkDataKind | None = Field(None, alias='Kind')
     is_deletable: bool | None = Field(None, alias='IsDeletable')
+    is_used_as_job_input: bool | None = Field(None, alias='IsUsedAsJobInput')
     hidden: bool | None = Field(None, alias='Hidden')
     comments: str | None = Field(None, alias='Comments')
 
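`WorkDataHtoOpenApiProperties` likewise gains an optional `is_used_as_job_input` flag (alias `IsUsedAsJobInput`). A hedged sketch of a helper that reads it, treating an omitted value as "not used"; the helper name is illustrative and not part of the library:

```python
from pinexq_client.job_management.model.open_api_generated import WorkDataHtoOpenApiProperties

def is_input_of_some_job(properties: WorkDataHtoOpenApiProperties) -> bool:
    # The field is `bool | None`; None means the server did not report 'IsUsedAsJobInput'.
    return bool(properties.is_used_as_job_input)
```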

pinexq_client/job_management/tool/job.py

@@ -197,15 +197,15 @@ class Job:
 
         if processing_step_link is not None:
             if not isinstance(processing_step_link, ProcessingStepLink):
-                raise
+                raise TypeError('Instance passed to "processing_step_link" is not of type "ProcessingStepLink"')
             processing_url = processing_step_link.get_url()
         elif processing_step_instance is not None:
             if not isinstance(processing_step_instance, ProcessingStep):
-                raise
+                raise TypeError('Instance passed to "processing_step_instance" is not of type "ProcessingStep"')
             processing_url = processing_step_instance.self_link().get_url()
         else:
             if not isinstance(function_name, str):
-                raise
+                raise TypeError('Instance passed to "function_name" is not of type "str"')
             # ToDo: provide more parameters to query a processing step
             query_param = ProcessingStepQueryParameters(
                 filter=ProcessingStepFilterParameter(
@@ -215,7 +215,7 @@ class Job:
             )
             query_result = self._processing_step_root.query_action.execute(query_param)
             if len(query_result.processing_steps) != 1:
-                raise
+                raise NameError(f"No processing step with the name '{function_name}' registered!")
             # Todo: For now we choose the first and only result. Make this more flexible?
             processing_url = query_result.processing_steps[0].self_link.get_url()
 
@@ -292,15 +292,15 @@ class Job:
                 polling_interval_ms=polling_interval_ms
             )
         except TimeoutError as timeout:
-            raise
+            raise TimeoutError(
                 f"Job did not reach state: '{state.value}' "
                 f"current state: '{self.get_state().value}'. Error:{str(timeout)}"
             )
         except PollingException:
             if self._job.state == JobStates.error:
                 error_reason = self._job.error_description
-                raise
-                raise
+                raise PollingException(f"Job failed'. Error:{error_reason}")
+            raise PollingException("Job failed")
 
         return self
 
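The net effect of these hunks is that the previously bare `raise` statements now surface as descriptive `TypeError`, `NameError`, `TimeoutError`, and `PollingException` messages. A hedged sketch of how that might look from calling code; the `job` object is assumed to be an already configured and started `Job`, and `wait_for_state` with its `timeout_ms` keyword is taken from the `job_group.py` code later in this diff:

```python
from pinexq_client.job_management.model import JobStates

def wait_or_report(job) -> None:
    try:
        job.wait_for_state(JobStates.completed, timeout_ms=5_000)
    except TimeoutError as err:
        # The message now names the target state, the current state, and the underlying error.
        print(f"Timed out: {err}")
    except Exception as err:
        # A failed job now reports its error_description instead of an empty re-raise.
        print(f"Job failed: {err}")
```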

pinexq_client/job_management/tool/job_group.py

@@ -1,3 +1,4 @@
+import time
 from typing import Union, Self
 
 import httpx
@@ -8,133 +9,150 @@ from pinexq_client.job_management.model import JobStates
 
 
 class JobGroup:
-    ... (old lines 11-140: previous JobGroup implementation; content not shown in this diff view)
+    """
+    A wrapper class for a group of jobs for easier execution and waiting
+
+    Attributes:
+        _client:
+            The http client
+        _jobs:
+            List of jobs in the group
+    """
+
+    _client: httpx.Client
+
+    def __init__(self, client: httpx.Client):
+        self._jobs: list[Job] = []
+        self._client = client
+
+    @classmethod
+    def from_query_result(cls, client: httpx.Client, job_query_result: JobQueryResultHco) -> Self:
+        """
+        Initializes a `JobGroup` object from a JobQueryResultHco object
+        Args:
+            client: The http client
+            job_query_result: The JobQueryResultHco object whose jobs are to be added to the JobGroup
+
+        Returns:
+            The newly created `JobGroup` instance
+        """
+        instance = cls(client)
+        for job in job_query_result.iter_flat():
+            instance.add_jobs(Job.from_hco(instance._client, job))
+        return instance
+
+    def add_jobs(self, jobs: Union[Job, list[Job]]) -> Self:
+        """
+        Add a job or multiple jobs to the group
+
+        Args:
+            jobs: A job or a list of job objects to be added to the JobGroup
+
+        Returns:
+            This `JobGroup` object
+        """
+
+        if isinstance(jobs, list):
+            self._jobs.extend(jobs)
+        else:
+            self._jobs.append(jobs)
+        return self
+
+    def start_all(self) -> Self:
+        """
+        Start all jobs
+
+        Returns:
+            This `JobGroup` object
+        """
+        for job in self._jobs:
+            job.start()
+        return self
+
+    def wait_all(self, *, job_timeout_ms: int = 5000, total_timeout_ms: int | None = None) -> Self:
+        """
+        Wait for all jobs to complete or error state.
+        If the overall timeout elapses and some jobs are not complete, then exception.
+
+        Args:
+            job_timeout_ms:
+                Individual job timeout in milliseconds. Default is 5000 ms.
+            total_timeout_ms:
+                Timeout for the whole operation in milliseconds. Default is no timeout.
+        Returns:
+            This `JobGroup` object
+        """
+        start_time = time.time()
+        for job in self._jobs:
+            if total_timeout_ms is not None:
+                elapsed_time_ms = (time.time() - start_time) * 1000
+                if total_timeout_ms - elapsed_time_ms <= 0:
+                    raise Exception("Total timeout exceeded while waiting for jobs.")
+
+            try:
+                job.wait_for_state(JobStates.completed, timeout_ms=job_timeout_ms)
+            except Exception:
+                pass
+        return self
+
+    def all_jobs_completed_ok(self) -> bool:
+        for job in self._jobs:
+            state = job.get_state()
+            if state is not JobStates.completed:
+                return False
+        return True
+
+    def incomplete_jobs(self) -> list[Job]:
+        """
+        Returns the incomplete jobs
+
+        Returns:
+            Count of incomplete jobs
+        """
+        incomplete_jobs = []
+        for job in self._jobs:
+            state = job.get_state()
+            if state in (JobStates.processing, JobStates.pending):
+                incomplete_jobs.append(job)
+        return incomplete_jobs
+
+    def jobs_with_error(self) -> list[Job]:
+        """
+        Returns the list of jobs that produced errors
+
+        Returns:
+            List of jobs that produced errors
+        """
+        return [job for job in self._jobs if job.get_state() == JobStates.error]
+
+    def remove(self, job_name: str) -> Self:
+        """
+        Removes all jobs from the group whose name matches the provided name
+
+        Args:
+            job_name:
+                The name of the job(s) to be removed
+        Returns:
+            This `JobGroup` object
+        """
+        self._jobs = [job for job in self._jobs if job.get_name() != job_name]
+        return self
+
+    def clear(self) -> Self:
+        """
+        Removes all jobs from the group
+
+        Returns:
+            This `JobGroup` object
+        """
+        self._jobs = []
+        return self
+
+    def get_jobs(self) -> list[Job]:
+        """
+        Returns the list of jobs in the group
+
+        Returns:
+            List of jobs in the group
+        """
+        return self._jobs
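Taken together, the rewritten module offers a small fluent API for running a batch of jobs. A hedged usage sketch based only on the methods visible in this hunk; the base URL and the `job_a`/`job_b` objects are placeholders for Job instances created elsewhere with the existing `Job` tooling:

```python
import httpx

from pinexq_client.job_management.tool.job_group import JobGroup

client = httpx.Client(base_url="http://localhost:8080")  # placeholder URL

group = (
    JobGroup(client)
    .add_jobs([job_a, job_b])  # placeholder Job instances
    .start_all()
    .wait_all(job_timeout_ms=10_000, total_timeout_ms=60_000)
)

if not group.all_jobs_completed_ok():
    for failed in group.jobs_with_error():
        print(f"Failed: {failed.get_name()}")
    print(f"Still incomplete: {len(group.incomplete_jobs())} job(s)")
```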

pinexq_client/job_management/tool/processing_step.py

@@ -1,4 +1,4 @@
-from typing import Any, Self
+from typing import Any, Self, List, Optional
 
 import httpx
 from httpx import URL
@@ -6,7 +6,7 @@ from httpx import URL
 from pinexq_client.core import Link, MediaTypes
 from pinexq_client.core.hco.upload_action_hco import UploadParameters
 from pinexq_client.job_management.enterjma import enter_jma
-from pinexq_client.job_management.hcos import ProcessingStepHco, ProcessingStepLink
+from pinexq_client.job_management.hcos import ProcessingStepHco, ProcessingStepLink, ProcessingStepQueryResultHco
 from pinexq_client.job_management.hcos.entrypoint_hco import EntryPointHco
 from pinexq_client.job_management.hcos.job_hco import GenericProcessingConfigureParameters
 from pinexq_client.job_management.hcos.processingsteproot_hco import ProcessingStepsRootHco
@@ -14,7 +14,8 @@ from pinexq_client.job_management.known_relations import Relations
 from pinexq_client.job_management.model import (
     CreateProcessingStepParameters,
     EditProcessingStepParameters,
-    SetProcessingStepTagsParameters,
+    SetProcessingStepTagsParameters, ProcessingStepQueryParameters, ProcessingStepFilterParameter,
+    FunctionNameMatchTypes,
 )
 
 
@@ -37,20 +38,22 @@ class ProcessingStep:
         self._entrypoint = enter_jma(client)
         self._processing_steps_root = self._entrypoint.processing_step_root_link.navigate()
 
-    def create(self, title: str, function_name: str) -> Self:
+    def create(self, title: str, function_name: str, version: str = "0") -> Self:
         """
         Creates a new ProcessingStep by name.
 
         Args:
             title: Title of the ProcessingStep to be created
             function_name: Function name of the ProcessingStep to be created
+            version: Version of the ProcessingStep to be created
 
         Returns:
             The newly created ProcessingStep as `ProcessingStep` object
         """
         processing_step_hco = self._processing_steps_root.register_new_action.execute(CreateProcessingStepParameters(
             title=title,
-            function_name=function_name
+            function_name=function_name,
+            version=version
         ))
         self._processing_step = processing_step_hco
         return self
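A hedged sketch of registering a step with the extended `create` signature; the title, function name, and base URL are placeholders. Leaving `version` out keeps the previous behaviour through the `"0"` default:

```python
import httpx

from pinexq_client.job_management.tool.processing_step import ProcessingStep

client = httpx.Client(base_url="http://localhost:8080")  # placeholder URL

# The new `version` argument is forwarded into CreateProcessingStepParameters.
step = ProcessingStep(client).create(
    title="Resample time series",
    function_name="resample_timeseries",
    version="1",
)
```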
@@ -94,6 +97,81 @@ class ProcessingStep:
         processing_step_instance._get_by_link(ProcessingStepLink.from_link(client, link))
         return processing_step_instance
 
+    @classmethod
+    def from_name(cls, client: httpx.Client, step_name: str, version: str = "0") -> Self:
+        """Create a ProcessingStep object from an existing name.
+
+        Args:
+            client: Create a ProcessingStep object from an existing name.
+            step_name: Name of the registered processing step.
+            version: Version of the ProcessingStep to be created
+
+        Returns:
+            The newly created processing step as `ProcessingStep` object
+        """
+
+        # Attempt to find the processing step
+        query_result = cls._query_processing_steps(client, step_name, version)
+
+        # Check if exactly one result is found
+        if len(query_result.processing_steps) != 1:
+            # Attempt to suggest alternative steps if exact match not found
+            suggested_steps = cls._processing_steps_by_name(client, step_name)
+            raise NameError(
+                f"No processing step with the name {step_name} and version {version} registered. "
+                f"Suggestions: {suggested_steps}"
+            )
+
+        # Todo: For now we choose the first and only result. Make this more flexible?
+        processing_step_hco = query_result.processing_steps[0]
+        return ProcessingStep.from_hco(client, processing_step_hco)
+
+    @staticmethod
+    def _query_processing_steps(client: httpx.Client, step_name: str, version: Optional[str] = None) -> ProcessingStepQueryResultHco:
+        """
+        Helper function to query processing steps based on name and optional version.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the processing step.
+            version: Optional version to match.
+
+        Returns:
+            Query result object containing the matching processing steps.
+        """
+        query_param = ProcessingStepQueryParameters(
+            filter=ProcessingStepFilterParameter(
+                function_name=step_name,
+                function_name_match_type=FunctionNameMatchTypes.match_exact,
+                version=version
+            )
+        )
+        instance = ProcessingStep(client)
+        return instance._processing_steps_root.query_action.execute(query_param)
+
+    @staticmethod
+    def _processing_steps_by_name(client: httpx.Client, step_name: str) -> list:
+        """
+        Suggest processing steps if the exact step is not found.
+
+        Args:
+            client: HTTP client for executing queries.
+            step_name: Name of the processing step.
+
+        Returns:
+            A list of alternative processing steps matching the step name.
+        """
+        # Query for steps without version to get suggestions
+        instance = ProcessingStep(client)
+        query_result = instance._query_processing_steps(client, step_name)
+
+        # If no suggestions are found, raise an error
+        if len(query_result.processing_steps) == 0:
+            raise NameError(f"No processing steps found with the name '{step_name}'.")
+
+        # Return list of alternative steps as suggestions
+        return [f"{step.function_name}:{step.version}" for step in query_result.processing_steps]
+
     def refresh(self) -> Self:
         """Updates the processing step from the server
 
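And a hedged sketch of the new lookup path: `from_name` resolves an already registered step by exact function name and version, and on a miss raises a `NameError` whose message lists `function_name:version` suggestions collected by `_processing_steps_by_name`. The step name and URL below are placeholders:

```python
import httpx

from pinexq_client.job_management.tool.processing_step import ProcessingStep

client = httpx.Client(base_url="http://localhost:8080")  # placeholder URL

try:
    step = ProcessingStep.from_name(client, "resample_timeseries", version="1")
except NameError as err:
    # e.g. "No processing step with the name resample_timeseries and version 1 registered. Suggestions: [...]"
    print(err)
```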
{pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/METADATA
RENAMED
@@ -1,8 +1,8 @@
 Metadata-Version: 2.1
 Name: pinexq-client
-Version: 0.4.0.2024.717.1
+Version: 0.4.2.20241009.1
 Summary: A hypermedia-based client for the DataCybernetics PinexQ platform.
-Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
+Author-Email: =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Mathias Reichardt <reichardt@data-cybernetics.com>, Jasim Ahmed <ahmed@data-cybernetics.com>, Pratik Poudel <poudel@data-cybernetics.com>
 Maintainer-Email: Mathias Reichardt <reichardt@data-cybernetics.com>, =?utf-8?q?Sebastian_H=C3=B6fer?= <hoefer@data-cybernetics.com>, Carsten Blank <blank@data-cybernetics.com>
 License: MIT
 Requires-Python: >=3.11
{pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/RECORD
RENAMED
@@ -1,6 +1,7 @@
-pinexq_client-0.4.
-pinexq_client-0.4.
-pinexq_client-0.4.
+pinexq_client-0.4.2.20241009.1.dist-info/METADATA,sha256=OwWRTY8LrZ0sTtVPcvKWudbDLVg_25sdw9B8inJczfc,3278
+pinexq_client-0.4.2.20241009.1.dist-info/WHEEL,sha256=Vza3XR51HW1KmFP0iIMUVYIvz0uQuKJpIXKYOBGQyFQ,90
+pinexq_client-0.4.2.20241009.1.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34
+pinexq_client-0.4.2.20241009.1.dist-info/licenses/LICENSE,sha256=3oz3tAhM7kOgRukkRe7wmh5T_HihZY77ZtJDJm91ZN8,1072
 pinexq_client/core/__init__.py,sha256=8SVD_PRgJtpUCOtVjdR6fRrv6KPNk7HD6UQrn0FKR04,235
 pinexq_client/core/base_relations.py,sha256=oIUS58pkbMDdqm-3YOdsenhL1smtzeAk4fp7-U595MY,162
 pinexq_client/core/enterapi.py,sha256=sL9TmF1L5LaDJnre1l_tiHDUo9vTbZ8cvPSov3Q1UTs,671

@@ -20,7 +21,7 @@ pinexq_client/core/model/error.py,sha256=FNJrXVTkVnh6oDkHHc3sSZ2wNtEPCZyqgzZT01M
 pinexq_client/core/model/sirenmodels.py,sha256=vGRQlhM2cSa2caxQel91Jr48KWqM-vMYX32iaQCzIds,5547
 pinexq_client/core/polling.py,sha256=Z6VXn-PCGk4XX-4tQWZG59qJyCIS0eIrpPUORQDIGrs,1077
 pinexq_client/core/sirenaccess.py,sha256=5GDTolgIlt9rW2g9qG7LerUlmz0BQNKYGp78T2UPrv0,6771
-pinexq_client/job_management/__init__.py,sha256=
+pinexq_client/job_management/__init__.py,sha256=pvdID8cHtpXcjx-PKW9svc9DxMGG58cXyW-mhYEJPQE,164
 pinexq_client/job_management/enterjma.py,sha256=Ivl_jVPw_gaLyU5nKbywM-bbVGpn0MoCrZ0DMbJYf3s,1411
 pinexq_client/job_management/hcos/__init__.py,sha256=vMIdxGHBsVcKYKrVkCzD4a_VaheKSNxCimospFn5N88,396
 pinexq_client/job_management/hcos/entrypoint_hco.py,sha256=qodjAwO_MtawUuhmaYjhGXHV-uW1k94V9gKRYZRkhn4,2234

@@ -42,11 +43,11 @@ pinexq_client/job_management/hcos/workdata_used_tags_query_result_hco.py,sha256=
 pinexq_client/job_management/hcos/workdataroot_hco.py,sha256=LdEPW2JJTqAWi-6zj-40lfREhthcDL6nPXQk_nfMtCA,3936
 pinexq_client/job_management/known_relations.py,sha256=UlOF-sua8SyOPNNKzT_j6JVG8T-aewHIzn7S2ajXBhI,593
 pinexq_client/job_management/model/__init__.py,sha256=ApHhNfjx4bPuz10sQnyBA2zajYbU7loDTZSKC5H_jBY,34
-pinexq_client/job_management/model/open_api_generated.py,sha256=
+pinexq_client/job_management/model/open_api_generated.py,sha256=9VQtoenAmAVsRdR9GaQSBqrJIPrWFYkF5DKv8n9Z6Ow,30354
 pinexq_client/job_management/model/sirenentities.py,sha256=OInvxHpC6mnnYQjOMM2xAw7uLtvWwj9E2EQSRJe2jDo,3202
 pinexq_client/job_management/tool/__init__.py,sha256=58CRDcP8ifSx9eA2uyTLEg0_fX3FUuNUogY_lirx9AY,96
-pinexq_client/job_management/tool/job.py,sha256=
-pinexq_client/job_management/tool/job_group.py,sha256=
-pinexq_client/job_management/tool/processing_step.py,sha256=
+pinexq_client/job_management/tool/job.py,sha256=BA1S2NY6D4Cpa3Mmoly0M3qCzCiv6p-MP7_decSlP-0,22209
+pinexq_client/job_management/tool/job_group.py,sha256=4zW_SEbQMLVNqu_zLUP57epkypBbRc5gxjPLp_FZgZo,4515
+pinexq_client/job_management/tool/processing_step.py,sha256=0qq64JOESPFAgpUpmM9hWndh21BnCB95EExd21ZHYOg,9730
 pinexq_client/job_management/tool/workdata.py,sha256=8DwvzcjefKL-wBuCxy9ziaUC7gTf0TMXAa1WoCmHvZM,4903
-pinexq_client-0.4.
+pinexq_client-0.4.2.20241009.1.dist-info/RECORD,,

{pinexq_client-0.4.0.2024.717.1.dist-info → pinexq_client-0.4.2.20241009.1.dist-info}/licenses/LICENSE
RENAMED
File without changes