pinexq-client 0.9.2.20251028.52__py3-none-any.whl → 0.10.4rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pinexq_client/core/api_event_manager.py +141 -0
- pinexq_client/core/hco/hco_base.py +4 -1
- pinexq_client/core/polling.py +3 -3
- pinexq_client/job_management/__init__.py +1 -1
- pinexq_client/job_management/enterjma.py +49 -0
- pinexq_client/job_management/hcos/entrypoint_hco.py +35 -35
- pinexq_client/job_management/hcos/info_hco.py +15 -0
- pinexq_client/job_management/hcos/input_dataslot_hco.py +3 -1
- pinexq_client/job_management/hcos/output_dataslot_hco.py +1 -0
- pinexq_client/job_management/hcos/processing_step_hco.py +75 -4
- pinexq_client/job_management/known_relations.py +7 -0
- pinexq_client/job_management/model/open_api_generated.py +238 -129
- pinexq_client/job_management/tool/job.py +134 -52
- pinexq_client/job_management/tool/job_group.py +207 -60
- pinexq_client/job_management/tool/processing_step.py +114 -2
- {pinexq_client-0.9.2.20251028.52.dist-info → pinexq_client-0.10.4rc1.dist-info}/METADATA +102 -99
- {pinexq_client-0.9.2.20251028.52.dist-info → pinexq_client-0.10.4rc1.dist-info}/RECORD +18 -19
- pinexq_client-0.10.4rc1.dist-info/WHEEL +4 -0
- pinexq_client-0.9.2.20251028.52.dist-info/WHEEL +0 -4
- pinexq_client-0.9.2.20251028.52.dist-info/entry_points.txt +0 -4
- pinexq_client-0.9.2.20251028.52.dist-info/licenses/LICENSE +0 -19
pinexq_client/core/api_event_manager.py ADDED
@@ -0,0 +1,141 @@
+import queue
+import threading
+import time
+from datetime import datetime, timedelta
+from pydantic import BaseModel
+
+import httpx
+from httpx_sse import connect_sse
+
+from dataclasses import dataclass
+
+from pinexq_client.job_management import EntryPointHco, enter_jma
+
+
+@dataclass
+class WorkerInfo:
+    worker: threading.Thread
+    close_after: datetime | None = None
+
+class ApiEvent(BaseModel):
+    EventType: str
+    ResourceType: str
+    ResourceLink: str
+
+
+class ApiEventManagerSingleton:
+    _instance = None
+    listening_clients: dict[httpx.Client, dict[str, list[queue.Queue]]]
+    workers: dict[httpx.Client, WorkerInfo]
+    sse_connection_cooldown_s: int
+    connection_create_lock: threading.Lock
+    reconnect_time_s: int
+
+    def __new__(cls):
+        if cls._instance is None:
+            cls._instance = super().__new__(cls)
+        return cls._instance
+
+    def __init__(self):
+        # Only initialize once
+        if not hasattr(self, '_initialized'):
+            self._initialized = True
+            self.listening_clients = {}
+            self.workers = {}
+            self.connection_create_lock = threading.Lock()
+            self.sse_connection_cooldown_s = 30 # avoid opening and closing fast
+            self.reconnect_time_s = 5
+
+    def subscribe_waiter(self, client: httpx.Client, resource_link:str, wait_queue: queue.Queue):
+        open_sse_connections = False
+
+        if not client in self.listening_clients:
+            self.listening_clients[client] = {}
+            open_sse_connections = True
+
+        if resource_link not in self.listening_clients[client]:
+            self.listening_clients[client][resource_link] = []
+
+        self.listening_clients[client][resource_link].append(wait_queue)
+
+        with self.connection_create_lock:
+            if open_sse_connections:
+                self.open_sse_connection(client)
+            self.workers[client].close_after = None # we use the connection now, do not close
+
+    def unsubscribe_waiter(self, client: httpx.Client, resource_link: str, wait_queue: queue.Queue):
+        if not client in self.listening_clients:
+            return
+
+        if resource_link not in self.listening_clients[client]:
+            return
+
+        # event should only be in here once
+        self.listening_clients[client][resource_link].remove(wait_queue)
+
+        # clean up if no events left
+        if len(self.listening_clients[client][resource_link]) == 0:
+            del self.listening_clients[client][resource_link]
+
+        # if noone is subscribed anymore set shutdown time
+        if not self.listening_clients[client]:
+            self.workers[client].close_after = datetime.now() + timedelta(seconds=self.sse_connection_cooldown_s)
+
+
+    def open_sse_connection(self, client: httpx.Client):
+        sse_endpoint = self.get_sse_endpoint_from_api(client)
+
+        # start worker for client
+        worker_thread = threading.Thread(target=self.consume_sse_events, args=(client, sse_endpoint))
+        self.workers[client] = WorkerInfo(worker=worker_thread)
+        worker_thread.start()
+
+    def consume_sse_events(self, client: httpx.Client, sse_endpoint: str):
+        while True:
+            try:
+                if client.is_closed:
+                    print("Client was closed, will close sse connection too")
+                    self.close_connections(client)
+                    return
+
+                with connect_sse(client, "GET", sse_endpoint) as event_source:
+                    for sse in event_source.iter_sse():
+                        # close the thread if no one is using it anymore and cooldown has passed
+                        close_after = self.workers[client].close_after
+                        if close_after and datetime.now() > close_after:
+                            self.close_connections(client)
+                            return
+
+                        if len(sse.data) <= 0: # keep alive has no data
+                            continue
+
+                        api_event = ApiEvent.model_validate_json(sse.data)
+
+                        # for now, we only process job events
+                        if api_event.ResourceType != "Job":
+                            continue
+
+                        job_link = api_event.ResourceLink
+                        subscribed = self.listening_clients[client]
+                        if job_link in subscribed:
+                            for wait_queue in subscribed[job_link]:
+                                wait_queue.put(job_link)
+            except httpx.ReadError:
+                print(f"SSE Connection lost. Reconnecting in {self.reconnect_time_s} seconds...")
+                time.sleep(self.reconnect_time_s)
+            except httpx.ConnectError as e:
+                print(f"Failed to connect to SSE endpoint: {e}. Retrying in {self.reconnect_time_s} seconds...")
+                time.sleep(self.reconnect_time_s)
+            except Exception as ex:
+                raise
+
+    def close_connections(self, client: httpx.Client):
+        del self.listening_clients[client]
+        del self.workers[client] # worker not needed any more
+
+    @staticmethod
+    def get_sse_endpoint_from_api(client)-> str:
+        entrypoint: EntryPointHco = enter_jma(client)
+        endpoint = str(entrypoint.info_link.navigate().api_events_endpoint.get_url())
+        return endpoint
+
pinexq_client/core/hco/hco_base.py CHANGED
@@ -1,3 +1,4 @@
+import inspect
 from dataclasses import dataclass
 from typing import List, TypeVar, Generic, Callable, Any
 
@@ -51,6 +52,7 @@ class Hco(ClientContainer, Generic[THcoEntity]):
         if candidate_self_link is not None:
             return hash(URL(candidate_self_link.href).path)
         else:
+            # might be an issue if self link is not present from the beginning but is added later.
            return super().__hash__()
 
     def __eq__(self, other):
@@ -75,7 +77,8 @@ class Hco(ClientContainer, Generic[THcoEntity]):
         with `Property()` the variable gets set from a property in self._entity.properties
         with the same name.
         """
-
+        annotations = inspect.get_annotations(self.__class__)
+        for var_name, var_type in annotations.items():
             # Get the initialized object. The annotation contain only the annotated type.
             hco_property = getattr(self, var_name, None)
             # Skip everything that is not initialized as `Property()`
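The rewritten property binding now asks `inspect.get_annotations` for the annotations declared directly on the concrete HCO class and walks those. A tiny standalone sketch (deliberately not using the package's own classes) of what that stdlib call returns:

import inspect

class Example:
    title: str | None = None
    count: int = 0

# own annotations of Example only, mapped name -> annotated type
print(inspect.get_annotations(Example))
# {'title': str | None, 'count': <class 'int'>}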
pinexq_client/core/polling.py CHANGED
@@ -9,14 +9,14 @@ class PollingException(Exception):
 def wait_until(
         condition: Callable,
         polling_interval_ms: int = 200,
-        timeout_ms: int = 5000,
+        timeout_ms: int | None = 5000,
         timeout_message: str | None = None,
         error_condition: Callable | None = None,
         error_condition_message: str | None = None,
 
 ) -> None:
     start = time.time()
-    timeout = start + timeout_ms / 1000
+    timeout = (start + timeout_ms / 1000) if timeout_ms is not None else None
     while True:
         now = time.time()
         next_due = now + polling_interval_ms / 1000
@@ -30,7 +30,7 @@ def wait_until(
            raise PollingException(
                f"{f': {error_condition_message}' if error_condition_message else 'Error condition meet while waiting'}")
 
-        if (timeout > 0) and (now > timeout):
+        if timeout is not None and (timeout > 0) and (now > timeout):
            raise TimeoutError(
                f"{f': {timeout_message}' if timeout_message else f'Timeout while waiting. Waited: {timeout_ms}ms'}")
 
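Since `timeout_ms` now accepts `None`, `wait_until` can poll without a deadline instead of always failing after a fixed time. A short sketch of both call styles; the condition here is a stand-in for a real readiness check:

import time

from pinexq_client.core.polling import wait_until

ready_at = time.time() + 1.0  # pretend the resource becomes ready in one second

# as before: poll every 200 ms and raise TimeoutError after 5 s
wait_until(lambda: time.time() >= ready_at, polling_interval_ms=200, timeout_ms=5000)

# new: pass None to keep polling until the condition holds
wait_until(lambda: time.time() >= ready_at, timeout_ms=None)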
pinexq_client/job_management/enterjma.py CHANGED
@@ -1,6 +1,9 @@
 from typing import TypeVar, Type
 
 import httpx
+from httpx import Client, _config
+# from hishel.httpx import SyncCacheClient
+from httpx_caching import CachingClient
 
 import warnings
 
@@ -17,6 +20,42 @@ THco = TypeVar("THco", bound=Hco)
 def _version_match_major_minor(ver1: list[int], ver2: list[int]) -> bool:
     return all([v1 == v2 for v1, v2 in zip(ver1[:2], ver2[:2])])
 
+def create_pinexq_client(
+        pinexq_api_endpoint: str,
+        api_key:str,
+        timeout: _config.TimeoutTypes = _config.DEFAULT_TIMEOUT_CONFIG,
+        use_client_cache: bool = True) -> httpx.Client:
+    """
+    Will create a httpx client, optional with caching, to be used with the API objects.
+
+    Args:
+        pinexq_api_endpoint: The endpoint for the pinexq API.
+        api_key: The API key for the pinexq API.
+        timeout: The timeout passed to the http client for operations.
+        use_client_cache: Whether to use a httpx client with caching.
+
+    """
+
+    headers = { 'x-api-key': api_key }
+    if use_client_cache:
+        # for now, we use the persistent cache, which is also shared between instances
+        # use if you need each client to have a own cache storage=InMemoryStorage()
+        # broken, will cache SSE stream
+        #return SyncCacheClient(
+        #    base_url=pinexq_api_endpoint,
+        #    headers=headers,
+        #    timeout=timeout)
+        client = Client(
+            base_url=pinexq_api_endpoint,
+            headers=headers,
+            timeout=timeout)
+        return CachingClient(client)
+    else:
+        return Client(
+            base_url=pinexq_api_endpoint,
+            headers=headers,
+            timeout=timeout)
+
 
 def enter_jma(
     client: httpx.Client,
@@ -24,6 +63,16 @@ def enter_jma(
     entrypoint_entity_type: Type[Entity] = EntryPointEntity,
     entrypoint: str = "api/EntryPoint",
 ) -> EntryPointHco:
+    """
+    Gets the entrypoint object for the pinexq API. Will use the configured base url from the client.
+    Will check the current API version against the version of the client to ensure compatibility.
+
+    Args:
+        client: The configured pinexq client.
+        entrypoint_hco_type: The type to represent the api resource of the entrypoint object [optional].
+        entrypoint_entity_type: The type of the entrypoint object containing typed properties [optional].
+        entrypoint: The path segment leading to the api entrypoint resource [optional].
+    """
     entry_point_hco = enter_api(client, entrypoint_hco_type, entrypoint_entity_type, entrypoint)
 
     info = entry_point_hco.info_link.navigate()
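Taken together, the new factory and the now-documented entrypoint helper give a two-step way into the API. A minimal sketch, assuming `create_pinexq_client` is importable from this module as the hunks above suggest; the endpoint and key are placeholders:

from pinexq_client.job_management import enter_jma
from pinexq_client.job_management.enterjma import create_pinexq_client

# placeholder endpoint and key, not a real deployment
client = create_pinexq_client(
    pinexq_api_endpoint="https://jma.example.invalid",
    api_key="my-api-key",
    use_client_cache=True,  # wraps the httpx.Client in httpx_caching.CachingClient
)

entrypoint = enter_jma(client)  # also checks client/API version compatibility
info = entrypoint.info_link.navigate()
print(info.api_version)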
pinexq_client/job_management/hcos/entrypoint_hco.py CHANGED
@@ -15,44 +15,44 @@ from pinexq_client.job_management.model.sirenentities import EntryPointEntity
 
 
 class EntryPointLink(LinkHco):
-
-
+    def navigate(self) -> 'EntryPointHco':
+        return EntryPointHco.from_entity(self._navigate_internal(EntryPointEntity), self._client)
 
 
 class EntrypointRelations(StrEnum):
-
-
-
-
-
+    JOBS_ROOT = "JobsRoot"
+    WORKDATA_ROOT = "WorkDataRoot"
+    PROCESSINGSTEPS_ROOT = "ProcessingStepsRoot"
+    API_EVENTS_ROOT = "ApiEvents"
+    INFO = "Info"
+    ADMIN = "Admin"
 
 
 class EntryPointHco(Hco[EntryPointEntity]):
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        return instance
+    self_link: EntryPointLink
+    job_root_link: JobsRootLink
+    work_data_root_link: WorkDataRootLink
+    processing_step_root_link: ProcessingStepsRootLink
+    info_link: InfoLink
+    admin_link: LinkHco | UnavailableLink
+
+    @classmethod
+    def from_entity(cls, entity: EntryPointEntity, client: httpx.Client) -> Self:
+        instance = cls(client, entity)
+        Hco.check_classes(instance._entity.class_, ["EntryPoint"])
+
+        instance.self_link = EntryPointLink.from_entity(
+            instance._client, instance._entity, Relations.SELF)
+        instance.info_link = InfoLink.from_entity(
+            instance._client, instance._entity, EntrypointRelations.INFO)
+        instance.job_root_link = JobsRootLink.from_entity(
+            instance._client, instance._entity, EntrypointRelations.JOBS_ROOT)
+        instance.work_data_root_link = WorkDataRootLink.from_entity(
+            instance._client, instance._entity, EntrypointRelations.WORKDATA_ROOT)
+        instance.processing_step_root_link = ProcessingStepsRootLink.from_entity(
+            instance._client, instance._entity, EntrypointRelations.PROCESSINGSTEPS_ROOT)
+
+        instance.admin_link = LinkHco.from_entity_optional(
+            instance._client, instance._entity, EntrypointRelations.ADMIN)
+
+        return instance
pinexq_client/job_management/hcos/info_hco.py CHANGED
@@ -13,6 +13,14 @@ class InfoLink(LinkHco):
     def navigate(self) -> "InfoHco":
         return InfoHco.from_entity(self._navigate_internal(InfoEntity), self._client)
 
+class ApiEventsEndpointLink(LinkHco):
+    pass
+
+class DeploymentRegistryEndpointLink(LinkHco):
+    pass
+
+class RemoteEndpointLink(LinkHco):
+    pass
 
 class InfoHco(Hco[InfoEntity]):
     api_version: str = Property()
@@ -22,6 +30,9 @@ class InfoHco(Hco[InfoEntity]):
     used_storage_in_bytes: int = Property()
 
     self_link: InfoLink
+    api_events_endpoint: ApiEventsEndpointLink
+    deployment_registry_endpoint: DeploymentRegistryEndpointLink
+    remote_endpoint: RemoteEndpointLink
 
     @classmethod
     def from_entity(cls, entity: InfoEntity, client: httpx.Client) -> Self:
@@ -33,6 +44,10 @@ class InfoHco(Hco[InfoEntity]):
             instance._client, instance._entity, Relations.SELF
         )
 
+        instance.api_events_endpoint = ApiEventsEndpointLink.from_entity(instance._client, instance._entity, Relations.API_EVENTS_ENDPOINT)
+        instance.deployment_registry_endpoint = DeploymentRegistryEndpointLink.from_entity_optional(instance._client, instance._entity, Relations.DEPLOYMENT_REGISTRY_ENDPOINT)
+        instance.remote_endpoint = RemoteEndpointLink.from_entity_optional(instance._client, instance._entity, Relations.REMOTE_ENDPOINT)
+
        instance._extract_current_user()
 
        return instance
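Of the three new links, only the API-events one is resolved as required; the registry and remote endpoints use `from_entity_optional`, so on servers that do not expose them they come back as unavailable-link placeholders rather than navigable links. A hedged sketch of reading the required link (placeholder URL, client setup as in the earlier sketches):

import httpx

from pinexq_client.job_management import enter_jma

client = httpx.Client(base_url="https://jma.example.invalid")  # placeholder

info = enter_jma(client).info_link.navigate()

# the SSE endpoint the ApiEventManagerSingleton subscribes to
print(str(info.api_events_endpoint.get_url()))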
pinexq_client/job_management/hcos/input_dataslot_hco.py CHANGED
@@ -45,11 +45,13 @@ class InputDataSlotClearDataAction(ActionHco):
 
 
 class InputDataSlotHco(Hco[InputDataSlotEntity]):
-
+
     title: str | None = Property()
     description: str | None = Property()
+    name: str | None = Property()
     media_type: str | None = Property()
     selected_workdatas: list[WorkDataHco]
+    is_configured: bool | None = Property()
 
     select_workdata_action: InputDataSlotSelectWorkDataAction | UnavailableAction
     select_workdata_collection_action: InputDataSlotSelectWorkDataCollectionAction | UnavailableAction
pinexq_client/job_management/hcos/output_dataslot_hco.py CHANGED
@@ -20,6 +20,7 @@ class OutputDataSlotLink(LinkHco):
 class OutputDataSlotHco(Hco[OutputDataSlotEntity]):
     title: str | None = Property()
     description: str | None = Property()
+    name: str | None = Property()
     media_type: str | None = Property()
     assigned_workdatas: list[WorkDataHco]
 
pinexq_client/job_management/hcos/processing_step_hco.py CHANGED
@@ -14,9 +14,11 @@ from pinexq_client.core.hco.unavailable import UnavailableAction
 from pinexq_client.core.hco.upload_action_hco import UploadAction, UploadParameters
 from pinexq_client.job_management.known_relations import Relations
 from pinexq_client.job_management.model import CopyPsFromUserToOrgActionParameters, CopyPsFromOrgToUserActionParameters, \
-    DeprecatePsActionParameters
-from pinexq_client.job_management.model.open_api_generated import DataSpecificationHto,
-
+    DeprecatePsActionParameters, AssignCodeHashParameters
+from pinexq_client.job_management.model.open_api_generated import (DataSpecificationHto,
+                                                                   SetProcessingStepTagsParameters,
+                                                                   EditProcessingStepParameters,
+                                                                   ConfigureDeploymentParameters, DeploymentStates, ProcessingStepDeploymentHto)
 from pinexq_client.job_management.model.sirenentities import ProcessingStepEntity
 
 
@@ -81,6 +83,32 @@ class RestoreAction(ActionHco):
     def execute(self):
         self._execute_internal()
 
+
+class ConfigureExternalDeploymentAction(ActionHco):
+    def execute(self):
+        self._execute_internal()
+
+
+class RemoveDeploymentAction(ActionHco):
+    def execute(self):
+        self._execute_internal()
+
+
+class SuspendDeploymentAction(ActionHco):
+    def execute(self):
+        self._execute_internal()
+
+
+class ResumeDeploymentAction(ActionHco):
+    def execute(self):
+        self._execute_internal()
+
+
+class ClearCodeHashAction(ActionHco):
+    def execute(self):
+        self._execute_internal()
+
+
 class UploadConfigurationAction(UploadAction):
     def execute(self, parameters: UploadParameters):
         upload_json(self._client, self._action, parameters.json_, parameters.filename)
@@ -117,6 +145,24 @@ class ProcessingStepDeprecateAction(ActionWithParametersHco[DeprecatePsActionPar
                                             DeprecatePsActionParameters())
 
 
+class ConfigureDeploymentAction(ActionWithParametersHco[ConfigureDeploymentParameters]):
+    def execute(self, parameters: ConfigureDeploymentParameters):
+        self._execute(parameters)
+
+    def default_parameters(self) -> ConfigureDeploymentParameters:
+        return self._get_default_parameters(ConfigureDeploymentParameters,
+                                            ConfigureDeploymentParameters())
+
+
+class AssignCodeHashAction(ActionWithParametersHco[AssignCodeHashParameters]):
+    def execute(self, parameters: AssignCodeHashParameters):
+        self._execute(parameters)
+
+    def default_parameters(self) -> AssignCodeHashParameters:
+        return self._get_default_parameters(AssignCodeHashParameters,
+                                            AssignCodeHashParameters())
+
+
 class ProcessingStepHco(Hco[ProcessingStepEntity]):
     title: str = Property()
     version: str | None = Property()
@@ -134,8 +180,12 @@ class ProcessingStepHco(Hco[ProcessingStepEntity]):
     return_schema: str | None = Property()
     error_schema: str | None = Property()
     hidden: bool | None = Property()
-
+
+    deprecated_at: datetime | None = Property()
     reason_for_deprecation: str | None = Property()
+    is_deprecated: bool | None = Property()
+    deployment_state: DeploymentStates = Property()
+    deployment: ProcessingStepDeploymentHto = Property()
 
     input_data_slot_specification: List[DataSpecificationHto] | None = Property()
     output_data_slot_specification: List[DataSpecificationHto] | None = Property()
@@ -151,6 +201,13 @@ class ProcessingStepHco(Hco[ProcessingStepEntity]):
     delete_action: DeleteAction | UnavailableAction
     deprecate_ps_action: ProcessingStepDeprecateAction | UnavailableAction
     restore_ps_action: RestoreAction | UnavailableAction
+    assign_code_hash_action: AssignCodeHashAction | UnavailableAction
+    configure_deployment_action: ConfigureDeploymentAction | UnavailableAction
+    configure_external_deployment_action: ConfigureExternalDeploymentAction | UnavailableAction
+    remove_deployment_action: RemoveDeploymentAction | UnavailableAction
+    suspend_deployment_action: SuspendDeploymentAction | UnavailableAction
+    resume_deployment_action: ResumeDeploymentAction | UnavailableAction
+    clear_code_hash_action: ClearCodeHashAction | UnavailableAction
 
     self_link: ProcessingStepLink
     download_link: DownloadLinkHco
@@ -189,5 +246,19 @@ class ProcessingStepHco(Hco[ProcessingStepEntity]):
             client, instance._entity, "Deprecate")
         instance.restore_ps_action = RestoreAction.from_entity_optional(
             client, instance._entity, "Restore")
+        instance.assign_code_hash_action = AssignCodeHashAction.from_entity_optional(
+            client, instance._entity, "AssignCodeHash")
+        instance.configure_deployment_action = ConfigureDeploymentAction.from_entity_optional(
+            client, instance._entity, "ConfigureDeployment")
+        instance.configure_external_deployment_action = ConfigureExternalDeploymentAction.from_entity_optional(
+            client, instance._entity, "ConfigureExternalDeployment")
+        instance.remove_deployment_action = RemoveDeploymentAction.from_entity_optional(
+            client, instance._entity, "RemoveDeployment")
+        instance.suspend_deployment_action = SuspendDeploymentAction.from_entity_optional(
+            client, instance._entity, "SuspendDeployment")
+        instance.resume_deployment_action = ResumeDeploymentAction.from_entity_optional(
+            client, instance._entity, "ResumeDeployment")
+        instance.clear_code_hash_action = ClearCodeHashAction.from_entity_optional(
+            client, instance._entity, "ClearCodeHash")
 
         return instance
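The new deployment actions follow the library's existing optional-action pattern: each attribute is an executable action only when the server advertises it, otherwise it holds an UnavailableAction. A sketch of configuring a deployment on a processing step; the step object is assumed to come from elsewhere (e.g. via the processing-steps root), and the parameters are left at their server-provided defaults because this diff does not spell out their fields:

from pinexq_client.core.hco.unavailable import UnavailableAction

def configure_deployment(step) -> None:
    """`step` is a ProcessingStepHco obtained elsewhere."""
    print(step.deployment_state)  # new DeploymentStates property

    action = step.configure_deployment_action
    if isinstance(action, UnavailableAction):
        print("server does not offer ConfigureDeployment for this step")
        return

    # start from the defaults and execute; real code would fill in
    # ConfigureDeploymentParameters fields first
    parameters = action.default_parameters()
    action.execute(parameters)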