processcube-etw-library 2026.1.22.145211__py3-none-any.whl → 2026.1.29.71849b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- processcube_etw_library/__init__.py +7 -5
- processcube_etw_library/create_external_task_client.py +4 -2
- processcube_etw_library/etw_app.py +18 -5
- processcube_etw_library/health/built_in.py +3 -4
- processcube_etw_library/health/check.py +14 -4
- processcube_etw_library/identity_provider.py +0 -2
- processcube_etw_library/processcube_client/__init__.py +22 -0
- processcube_etw_library/processcube_client/app_info/__init__.py +1 -0
- processcube_etw_library/processcube_client/app_info/app_info_client.py +36 -0
- processcube_etw_library/processcube_client/client_factory.py +37 -0
- processcube_etw_library/processcube_client/core/__init__.py +2 -0
- processcube_etw_library/processcube_client/core/api/__init__.py +13 -0
- processcube_etw_library/processcube_client/core/api/base_client.py +235 -0
- processcube_etw_library/processcube_client/core/api/client.py +816 -0
- processcube_etw_library/processcube_client/core/api/helpers/__init__.py +0 -0
- processcube_etw_library/processcube_client/core/api/helpers/application_info.py +34 -0
- processcube_etw_library/processcube_client/core/api/helpers/data_object_instances.py +61 -0
- processcube_etw_library/processcube_client/core/api/helpers/empty_tasks.py +86 -0
- processcube_etw_library/processcube_client/core/api/helpers/events.py +39 -0
- processcube_etw_library/processcube_client/core/api/helpers/external_tasks.py +142 -0
- processcube_etw_library/processcube_client/core/api/helpers/flow_node_instances.py +80 -0
- processcube_etw_library/processcube_client/core/api/helpers/manual_tasks.py +87 -0
- processcube_etw_library/processcube_client/core/api/helpers/process_definitions.py +46 -0
- processcube_etw_library/processcube_client/core/api/helpers/process_instances.py +96 -0
- processcube_etw_library/processcube_client/core/api/helpers/process_models.py +51 -0
- processcube_etw_library/processcube_client/core/api/helpers/user_tasks.py +130 -0
- processcube_etw_library/processcube_client/core/base_client.py +175 -0
- processcube_etw_library/processcube_client/core/loop_helper.py +200 -0
- processcube_etw_library/processcube_client/event/__init__.py +1 -0
- processcube_etw_library/processcube_client/event/event_client.py +43 -0
- processcube_etw_library/processcube_client/external_task/__init__.py +3 -0
- processcube_etw_library/processcube_client/external_task/client_wrapper.py +28 -0
- processcube_etw_library/processcube_client/external_task/external_task_client.py +195 -0
- processcube_etw_library/processcube_client/external_task/external_task_worker.py +205 -0
- processcube_etw_library/processcube_client/external_task/functional_error.py +17 -0
- processcube_etw_library/processcube_client/flow_node_instance/__init__.py +1 -0
- processcube_etw_library/processcube_client/flow_node_instance/flow_node_instance_client.py +43 -0
- processcube_etw_library/processcube_client/notification/__init__.py +1 -0
- processcube_etw_library/processcube_client/notification/notification_client.py +103 -0
- processcube_etw_library/processcube_client/process_definition/__init__.py +2 -0
- processcube_etw_library/processcube_client/process_definition/process_definition_client.py +94 -0
- processcube_etw_library/processcube_client/process_definition/start_callback_type.py +6 -0
- processcube_etw_library/processcube_client/process_instance/__init__.py +1 -0
- processcube_etw_library/processcube_client/process_instance/process_instance_client.py +32 -0
- processcube_etw_library/processcube_client/user_task/__init__.py +1 -0
- processcube_etw_library/processcube_client/user_task/user_task_client.py +63 -0
- processcube_etw_library/settings.py +35 -9
- {processcube_etw_library-2026.1.22.145211.dist-info → processcube_etw_library-2026.1.29.71849b0.dist-info}/METADATA +13 -11
- processcube_etw_library-2026.1.29.71849b0.dist-info/RECORD +58 -0
- {processcube_etw_library-2026.1.22.145211.dist-info → processcube_etw_library-2026.1.29.71849b0.dist-info}/WHEEL +1 -1
- processcube_etw_library-2026.1.22.145211.dist-info/RECORD +0 -18
- {processcube_etw_library-2026.1.22.145211.dist-info → processcube_etw_library-2026.1.29.71849b0.dist-info}/top_level.txt +0 -0
processcube_etw_library/processcube_client/core/api/helpers/process_models.py

@@ -0,0 +1,51 @@
+from dataclasses import dataclass, field
+from dataclasses_json import dataclass_json, LetterCase
+from enum import IntFlag
+from typing import Callable, Optional
+
+from ..base_client import BaseClient
+
+
+class StartCallbackType(IntFlag):
+    CallbackOnProcessInstanceCreated = 1
+    CallbackOnProcessInstanceFinished = 2
+    CallbackOnEndEventReached = 3
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL)
+@dataclass
+class ProcessStartRequest:
+    process_model_id: str
+    return_on: StartCallbackType = StartCallbackType.CallbackOnEndEventReached
+    end_event_id: str = None
+    start_event_id: str = None
+    correlation_id: str = None
+    initial_token: dict = None
+    parent_process_instance_id: str = None
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL)
+@dataclass
+class ProcessStartResponse:
+    process_instance_id: str
+    correlation_id: str
+    end_event_id: Optional[str] = None
+    token_payload: dict = field(default_factory=dict)
+
+
+class ProcessModelHandler(BaseClient):
+    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
+        super(ProcessModelHandler, self).__init__(url, identity, api_version)
+
+    def start(
+        self, process_model_id: str, request: ProcessStartRequest, options: dict = {}
+    ) -> ProcessStartResponse:
+        path = f"process_models/{process_model_id}/start"
+
+        payload = request.to_dict()
+
+        response_json = self.do_post(path, payload, options)
+
+        response = ProcessStartResponse.from_dict(response_json)
+
+        return response
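For orientation, here is a minimal usage sketch of the new ProcessModelHandler (not part of the diff). The engine URL, token, and process model id are placeholders, and the identity callable is assumed to return a dict with a "token" entry, mirroring the BaseClient shown later in this diff.

    # Hypothetical usage sketch; URL, token, and ids are placeholders.
    from processcube_etw_library.processcube_client.core.api.helpers.process_models import (
        ProcessModelHandler,
        ProcessStartRequest,
        StartCallbackType,
    )

    handler = ProcessModelHandler(
        "http://localhost:56000/atlas_engine/api/",      # assumed engine base URL
        identity=lambda: {"token": "ZHVtbXlfdG9rZW4="},  # assumed identity shape
    )

    request = ProcessStartRequest(
        process_model_id="Order_Process",                # hypothetical model id
        return_on=StartCallbackType.CallbackOnProcessInstanceCreated,
        initial_token={"orderId": 42},
    )

    response = handler.start("Order_Process", request)
    print(response.process_instance_id, response.correlation_id)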
processcube_etw_library/processcube_client/core/api/helpers/user_tasks.py

@@ -0,0 +1,130 @@
+from dataclasses import dataclass, field
+from dataclasses_json import dataclass_json, LetterCase, Undefined, config
+from typing import Any, Callable, List, Dict, Optional
+from urllib import parse
+
+from ..base_client import BaseClient
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL)
+@dataclass
+class UserTaskQuery:
+    limit: int = 0
+    offset: int = 0
+    user_task_instance_id: str = field(
+        metadata=config(field_name="flowNodeInstanceId"), default=None
+    )
+    flow_node_id: str = None
+    flow_node_name: str = None
+    flow_node_lane: str = None
+    correlation_id: str = None
+    process_definition_id: str = None
+    process_model_id: str = None
+    process_instance_id: str = None
+    owner_id: str = None
+    state: str = None
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL)
+@dataclass
+class FormFields:
+    id: str
+    type: str
+    label: str
+    default_value: Optional[str] = None
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL)
+@dataclass
+class UserTaskConfig:
+    form_fields: List[FormFields]
+    custom_form: str = "DynamicForm"
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE)
+# @dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.INCLUDE)
+@dataclass
+class UserTaskResponse:
+    user_task_instance_id: str = field(metadata=config(field_name="flowNodeInstanceId"))
+    user_task_config: UserTaskConfig
+    owner_id: str
+    correlation_id: str
+    process_instance_id: str
+    process_model_id: str
+    flow_node_name: str
+    actual_owner_id: Optional[str] = None
+    # place_holder: CatchAll
+
+
+@dataclass_json(letter_case=LetterCase.CAMEL)
+@dataclass
+class ReserveUserTaskRequest:
+    actual_owner_id: str = None
+
+
+class UserTaskHandler(BaseClient):
+    BPMN_TYPE = "bpmn:UserTask"
+
+    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
+        super(UserTaskHandler, self).__init__(url, identity, api_version)
+
+    def query(
+        self, request: UserTaskQuery = UserTaskQuery(), options: dict = {}
+    ) -> List[UserTaskResponse]:
+        query_dict = request.to_dict()
+        query_dict.update(
+            {
+                "state": "suspended",
+                "flowNodeType": UserTaskHandler.BPMN_TYPE,
+            }
+        )
+
+        filtered_query = list(
+            filter(lambda dict_entry: dict_entry[1] is not None, query_dict.items())
+        )
+
+        query_str = parse.urlencode(filtered_query, doseq=False)
+
+        path = f"flow_node_instances?{query_str}"
+
+        response_list_of_dict = self.do_get(path, options)
+
+        if response_list_of_dict.get("totalCount", 0) > 0:
+            json_data = response_list_of_dict.get("flowNodeInstances", {})
+            response = UserTaskResponse.schema().load(json_data, many=True)
+        else:
+            response = []
+
+        return response
+
+    def reserve(
+        self,
+        user_task_instance_id: str,
+        request: ReserveUserTaskRequest,
+        options: dict = {},
+    ) -> bool:
+        path = f"user_tasks/{user_task_instance_id}/reserve"
+
+        payload = request.to_dict()
+
+        _ = self.do_put(path, payload, options)
+
+        return True
+
+    def finish(
+        self, user_task_instance_id: str, request: Dict[str, Any], options: dict = {}
+    ) -> bool:
+        path = f"user_tasks/{user_task_instance_id}/finish"
+
+        _ = self.do_put(path, request, options)
+
+        return True
+
+    def cancel_reservation(
+        self, user_task_instance_id: str, options: dict = {}
+    ) -> bool:
+        path = f"user_tasks/{user_task_instance_id}/cancel-reservation"
+
+        _ = self.do_delete(path, options)
+
+        return True
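A similar hedged sketch for the user task helper: query() lists suspended bpmn:UserTask flow node instances, reserve() claims one, and finish() completes it. The finish payload shape below is a guess, since the diff passes the dict through unchanged; consult the engine documentation for the exact form.

    # Hypothetical usage sketch; URL, ids, and the finish payload are placeholders.
    from processcube_etw_library.processcube_client.core.api.helpers.user_tasks import (
        ReserveUserTaskRequest,
        UserTaskHandler,
        UserTaskQuery,
    )

    handler = UserTaskHandler(
        "http://localhost:56000/atlas_engine/api/",      # assumed engine base URL
        identity=lambda: {"token": "ZHVtbXlfdG9rZW4="},
    )

    # query() forces state="suspended" and the bpmn:UserTask flow node type.
    tasks = handler.query(UserTaskQuery(process_model_id="Order_Process", limit=10))

    for task in tasks:
        handler.reserve(task.user_task_instance_id,
                        ReserveUserTaskRequest(actual_owner_id="alice"))
        handler.finish(task.user_task_instance_id, {"approved": True})  # assumed payload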
processcube_etw_library/processcube_client/core/base_client.py

@@ -0,0 +1,175 @@
+import asyncio
+import aiohttp
+import json
+import logging
+
+logger = logging.getLogger(__name__)
+
+
+def run_async_in_sync_context(coro):
+    """Run an async coroutine in a synchronous context, compatible with Python 3.10+
+
+    This function handles the proper creation and management of event loops
+    for running async code from sync code. It's compatible with nested event loops
+    (e.g., in Jupyter notebooks when nest_asyncio is applied).
+
+    Args:
+        coro: The coroutine to run
+
+    Returns:
+        The result of the coroutine
+    """
+    try:
+        # Try to get the running loop (will work in async context)
+        loop = asyncio.get_running_loop()
+        # If we're here, we're already in an async context - this shouldn't happen
+        # in normal sync-to-async transitions but can happen with nest_asyncio
+        raise RuntimeError("Cannot use run_async_in_sync_context from an async context")
+    except RuntimeError:
+        # No running loop - we're in a sync context, which is what we want
+        pass
+
+    # Try to get or create an event loop
+    try:
+        loop = asyncio.get_event_loop()
+        if loop.is_closed():
+            raise RuntimeError("Event loop is closed")
+        if loop.is_running():
+            # Loop is running but we're not in it - this can happen with nest_asyncio
+            # In this case, we need to schedule the coroutine on the existing loop
+            import concurrent.futures
+            future = asyncio.run_coroutine_threadsafe(coro, loop)
+            return future.result()
+    except RuntimeError:
+        # No event loop or it's closed - create a new one
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+
+    # Run the coroutine
+    try:
+        result = loop.run_until_complete(coro)
+        return result
+    finally:
+        # Don't close the loop if it was already set as the event loop for this thread
+        # This allows it to be reused in subsequent calls
+        pass
+
+
+class BaseClient:
+
+    def __init__(self, url, session=None, identity=None):
+        self._url = url
+        self._session = session
+
+        if identity is None:
+            self._identity = {"token": "ZHVtbXlfdG9rZW4="}
+        else:
+            self._identity = identity
+
+    async def __aenter__(self):
+        return self
+
+    async def __aexit__(self, exception_type, exception_value, traceback):
+        await self.close()
+
+    async def close(self):
+        if self._session:
+            await self._session.close()
+
+    async def do_get(self, path, options={}):
+        headers = self.__get_default_headers()
+        headers.update(options.get('headers', {}))
+        headers.update(self.__get_auth_headers())
+
+        request_url = f"{self._url}{path}"
+
+        async with aiohttp.ClientSession() as session:
+
+            current_session = self._session if self._session else session
+
+            async with current_session.get(request_url, headers=headers) as response:
+                response.raise_for_status()
+                if response.status == 200:
+                    return await response.json()
+
+    async def do_delete(self, path, options={}):
+        headers = self.__get_default_headers()
+        headers.update(options.get('headers', {}))
+        headers.update(self.__get_auth_headers())
+
+        request_url = f"{self._url}{path}"
+
+        async with aiohttp.ClientSession() as session:
+
+            current_session = self._session if self._session else session
+
+            async with current_session.delete(request_url, headers=headers) as response:
+                response.raise_for_status()
+                if response.status == 200:
+                    return await response.json()
+
+    async def do_post(self, path, payload, options={}):
+        headers = self.__get_default_headers()
+        headers.update(options.get('headers', {}))
+        headers.update(self.__get_auth_headers())
+
+        request_url = f"{self._url}{path}"
+
+        logger.debug(f"post request to {request_url} with json payload {payload}")
+
+        async with aiohttp.ClientSession() as session:
+
+            current_session = self._session if self._session else session
+
+            async with current_session.post(request_url, json=payload, headers=headers) as response:
+                logger.debug(f"handle response {response.status}")
+                response.raise_for_status()
+                if response.status in [200, 201, 202]:
+                    return await response.json()
+                elif response.status == 204:
+                    return ""
+                else:
+                    raise Exception(f"TODO: need a better error {response.status}")
+
+    async def do_put(self, path, payload, options={}):
+        headers = self.__get_default_headers()
+        headers.update(options.get('headers', {}))
+        headers.update(self.__get_auth_headers())
+
+        request_url = f"{self._url}{path}"
+
+        logger.debug(f"put request to {request_url} with json payload {payload}")
+
+        async with aiohttp.ClientSession() as session:
+
+            current_session = self._session if self._session else session
+
+            async with current_session.put(request_url, json=payload, headers=headers) as response:
+                logger.debug(f"handle response {response.status}")
+                response.raise_for_status()
+                if response.status in [200, 201, 202]:
+                    return await response.json()
+                elif response.status == 204:
+                    return ""
+                else:
+                    raise Exception(f"TODO: need a better error {response.status}")
+
+    async def get_serverinfo(self):
+        return await self.do_get('/atlas_engine/api/v1/info')
+
+    def __get_auth_headers(self):
+        identity = self.__get_identity()
+        token = identity['token']
+        return {'Authorization': 'Bearer {}'.format(token)}
+
+    def __get_default_headers(self):
+        return {'Content-Type': 'application/json'}
+
+    def __get_identity(self):
+        identity = self._identity
+
+        if callable(self._identity):
+            identity = self._identity()
+
+        return identity
+
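run_async_in_sync_context is the bridge the synchronous client methods use. A minimal sketch, assuming only what this file shows: BaseClient's built-in dummy token and the get_serverinfo() coroutine; the URL is a placeholder.

    # Minimal sketch of the sync-over-async bridge; the URL is a placeholder.
    from processcube_etw_library.processcube_client.core.base_client import (
        BaseClient,
        run_async_in_sync_context,
    )

    client = BaseClient("http://localhost:56000")  # no identity -> built-in dummy token

    # get_serverinfo() is a coroutine; the helper reuses or creates an event
    # loop so it can be awaited from plain synchronous code.
    info = run_async_in_sync_context(client.get_serverinfo())
    print(info)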
processcube_etw_library/processcube_client/core/loop_helper.py

@@ -0,0 +1,200 @@
+import asyncio
+import logging
+import signal
+import os
+
+logger = logging.getLogger(__name__)
+
+_DEFAULT_DELAY = 0.1
+
+def ensure_has_loop():
+    loop = get_or_create_loop()
+
+    return loop
+
+def get_or_create_loop():
+    """Get or create an event loop, compatible with Python 3.10+
+
+    In Python 3.10+, asyncio.get_event_loop() raises RuntimeError if there's
+    no running event loop in the current thread. This function handles that gracefully.
+    """
+    try:
+        # Try to get the current event loop
+        loop = asyncio.get_event_loop()
+        # Check if the loop is closed (also an error condition)
+        if loop.is_closed():
+            raise RuntimeError("Event loop is closed")
+        return loop
+    except RuntimeError:
+        # No event loop in current thread or loop is closed - create a new one
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        return loop
+
+class LoopHelper:
+
+    # TODO: mm - remove kwargs with better readable params
+    def __init__(self, loop=get_or_create_loop(), **kwargs):
+        self._loop = loop
+        self._tasks = []
+        self._run_forever = kwargs.get('run_forever', not self._loop.is_running())
+        self._install_signals = kwargs.get('install_signals', True)
+
+        self.on_shutdown = kwargs.get('on_shutdown', self.__internal_on_shutdown)
+
+    def create_task(self, task_callback):
+        task = asyncio.run_coroutine_threadsafe(task_callback(), self._loop)
+        self._tasks.append(task)
+
+    def register_delayed_task(self, task_func, **options):
+        logger.info(f"Create delayed tasks with options ({options}).")
+        task = asyncio.run_coroutine_threadsafe(self.__create_delayed_task(task_func, **options), self._loop)
+        self._tasks.append(task)
+
+        return task
+
+    def unregister_delayed_task(self, delayed_task, msg=""):
+        return self.__unregister_task(delayed_task, msg)
+
+    async def __create_delayed_task(self, task_func, **options):
+        async def _worker(delay):
+            try:
+                logger.info("sleep for {deplay} ms")
+                await asyncio.sleep(delay)
+
+                if asyncio.iscoroutinefunction(task_func):
+                    logger.debug("running delayed job (async)")
+                    await task_func()
+                else:
+                    logger.debug("running delayed job (sync)")
+                    task_func()
+
+            except asyncio.CancelledError as ce:
+                logger.debug(f"Cancel the task {ce}")
+
+
+        if options.get('delay', None) is not None:
+            logger.warning('delay is deprecated, please use delay_in_ms')
+            options['delay_in_ms'] = options['delay']
+
+        delay = options.get('delay_in_ms', _DEFAULT_DELAY)
+        return await _worker(delay)
+
+    def register_background_task(self, task_func, **options):
+        logger.info(f"Create background worker with options ({options}).")
+
+        task = asyncio.run_coroutine_threadsafe(self.__create_background_task(task_func, **options), self._loop)
+        self._tasks.append(task)
+
+        return task
+
+    def unregister_background_task(self, background_task, msg=""):
+        return self.__unregister_task(background_task, msg)
+
+    def __unregister_task(self, task, msg):
+        can_unregister = True
+
+        if self._tasks.index(task) >= 0:
+            logger.debug(f"cancel and unregister task: {msg}")
+            self._tasks.remove(task)
+
+            try:
+                task.cancel()
+                logger.debug(f"cancelled task: {msg}")
+            except asyncio.CancelledError as ce:
+                logger.error(f"__unregister_task: {ce}")
+                pass
+        else:
+            logger.warning("did'nt found task to unregister")
+            can_unregister = False
+
+        return can_unregister
+
+    async def __create_background_task(self, task_func, **options):
+        async def _task(delay):
+            running = True
+
+            while running:
+                try:
+                    if with_delay:
+                        logger.debug(f"background worker delay for {delay}")
+                        await asyncio.sleep(delay)
+
+                    if asyncio.iscoroutinefunction(task_func):
+                        logger.debug("running background job (async)")
+                        await task_func()
+                    else:
+                        logger.debug("running background job (sync)")
+                        task_func()
+
+                except asyncio.CancelledError:
+                    running = False
+                except Exception as e:
+                    logger.error(f"Failed run background job with error {e}")
+
+        if options.get('delay', None) is not None:
+            logger.warning('delay is deprecated, please use delay_in_seconds')
+            options['delay_in_seconds'] = options['delay']
+
+        delay_in_seconds = options.get('delay_in_seconds', _DEFAULT_DELAY)
+        with_delay = True if delay_in_seconds > 0 else False
+
+        return await _task(delay_in_seconds)
+
+    def start(self, **kwargs):
+        logger.info(f"Starting event loop {kwargs}.")
+        try:
+            self._run_forever = kwargs.get('run_forever', self._run_forever)
+
+            if self._install_signals:
+                self.__register_shutdown()
+
+            if self._run_forever:
+                self._loop.run_forever()
+        except KeyboardInterrupt:
+            self._loop.close()
+
+    def run_forever(self):
+        self.start(run_forever=True)
+
+    def stop_all_tasks(self):
+        logger.info("Stopping tasks.")
+        for task in self._tasks:
+            try:
+                task.cancel()
+            except Exception as e:
+                logger.warning(f"Task stopped with exception {e}")
+
+    def stop(self):
+        logger.info("Stopping event loop.")
+        self.stop_all_tasks()
+
+        if self._run_forever:
+            logger.info("close the event loop.")
+            self._loop.stop()
+
+    async def __internal_on_shutdown(self):
+        logger.debug('only internal on_shutdown called')
+        await asyncio.sleep(0)
+
+    def __register_shutdown(self):
+        async def shutdown(sig):
+            logger.info(f"Received exit signal {sig.name}...")
+
+            await self.on_shutdown()
+
+            self.stop()
+
+        signal_handler = lambda sig: asyncio.create_task(shutdown(sig))
+
+        if self.is_win():
+            # See https://docs.python.org/3/library/asyncio-platforms.html#windows
+            logger.warning("Cannot register signal handler on windows. To indicate shutdown use the close() method.")
+        else:
+            signals = (signal.SIGHUP, signal.SIGTERM, signal.SIGINT, signal.SIGQUIT)
+
+            for s in signals:
+                self._loop.add_signal_handler(s, signal_handler, s)
+
+    def is_win(self):
+        return os.name == 'nt'
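LoopHelper wraps an asyncio loop for long-running workers. A minimal sketch under the API shown above; the tick() job and the delay value are placeholders.

    # Hypothetical LoopHelper sketch; the tick() job and delay are placeholders.
    from processcube_etw_library.processcube_client.core.loop_helper import LoopHelper

    async def tick():
        print("heartbeat")

    helper = LoopHelper(install_signals=False)

    # Re-runs tick() with a 5 second delay before each iteration until cancelled.
    task = helper.register_background_task(tick, delay_in_seconds=5)

    # Blocks this thread and drives the loop until stop() is called
    # (e.g. from another thread or a shutdown hook).
    helper.run_forever()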
processcube_etw_library/processcube_client/event/__init__.py

@@ -0,0 +1 @@
+from .event_client import EventClient
processcube_etw_library/processcube_client/event/event_client.py

@@ -0,0 +1,43 @@
+import logging
+
+from ..core.base_client import BaseClient, run_async_in_sync_context
+
+logger = logging.getLogger(__name__)
+
+
+class EventClient(BaseClient):
+    def __init__(self, url, session=None, identity=None):
+        super(EventClient, self).__init__(url, session, identity)
+
+    async def __trigger_message(self, event_name, payload, process_instance_id):
+        url = f"/atlas_engine/api/v1/messages/{event_name}/trigger"
+
+        if process_instance_id is not None:
+            url = f"{url}?process_instance_id={process_instance_id}"
+
+        result = await self.do_post(url, payload)
+
+        return result
+
+    def trigger_message(self, event_name, payload={}, process_instance_id=None):
+        logger.info(f"Connection to atlas engine at url '{self._url}'.")
+        logger.info(
+            f"Trigger message event {event_name} for process instance {process_instance_id} with payload {payload}."
+        )
+
+        return run_async_in_sync_context(
+            self.__trigger_message(event_name, payload, process_instance_id)
+        )
+
+    async def __trigger_signal(self, signal_name):
+        url = f"/atlas_engine/api/v1/signals/{signal_name}/trigger"
+
+        result = await self.do_post(url, {})
+
+        return result
+
+    def trigger_signal(self, signal_name):
+        logger.info(f"Connection to atlas engine at url '{self._url}'.")
+        logger.info(f"Trigger signal event {signal_name}.")
+
+        return run_async_in_sync_context(self.__trigger_signal(signal_name))
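EventClient exposes the two trigger endpoints as synchronous calls via run_async_in_sync_context. A short sketch; the URL, event names, and instance id are placeholders.

    # Hypothetical EventClient sketch; URL, event names, and ids are placeholders.
    from processcube_etw_library.processcube_client.event import EventClient

    client = EventClient("http://localhost:56000")

    # Both wrappers drive the async do_post helpers from synchronous code.
    client.trigger_message("OrderApproved", payload={"orderId": 42},
                           process_instance_id="some-process-instance-id")
    client.trigger_signal("NightlyBatchStart")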
processcube_etw_library/processcube_client/external_task/client_wrapper.py

@@ -0,0 +1,28 @@
+import asyncio
+import warnings
+
+from .external_task_client import ExternalTaskClient
+from ..core.loop_helper import get_or_create_loop
+
+
+class ClientWrapper:
+
+    def __init__(self, atlas_engine_url):
+        self._atlas_engine_url = atlas_engine_url
+
+    def subscribe_to_external_task_for_topic(self, topic, handler, **task_options):
+
+        loop = task_options.get('loop', get_or_create_loop())
+
+        warnings.warn("Please use 'subscribe_to_external_task_topic' instead of 'subscribe_to_external_task_for_topic'.", DeprecationWarning)
+        return self.subscribe_to_external_task_topic(topic, handler, loop=loop)
+
+    def subscribe_to_external_task_topic(self, topic, handler, **task_options):
+        loop = task_options.get('loop', get_or_create_loop())
+
+        external_task_client = ExternalTaskClient(self._atlas_engine_url, loop=loop)
+
+        external_task_client.subscribe_to_external_task_for_topic(topic, handler)
+
+        return external_task_client
+
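ClientWrapper is a convenience facade over ExternalTaskClient. A sketch under two assumptions not confirmed by this diff: that external_task/__init__.py (three lines, not shown) re-exports ClientWrapper, and that the handler receives the task payload and returns the result token.

    # Hypothetical ClientWrapper sketch; import path and handler contract are assumptions.
    from processcube_etw_library.processcube_client.external_task import ClientWrapper

    def handle_sample_task(payload):
        # Assumed worker contract: receive the task payload, return the result.
        return {"processed": True, "input": payload}

    wrapper = ClientWrapper("http://localhost:56000")
    client = wrapper.subscribe_to_external_task_topic("SampleTopic", handle_sample_task)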