processcube-etw-library 2026.1.22.145211__py3-none-any.whl → 2026.1.29.71849b0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- processcube_etw_library/__init__.py +7 -5
- processcube_etw_library/create_external_task_client.py +4 -2
- processcube_etw_library/etw_app.py +18 -5
- processcube_etw_library/health/built_in.py +3 -4
- processcube_etw_library/health/check.py +14 -4
- processcube_etw_library/identity_provider.py +0 -2
- processcube_etw_library/processcube_client/__init__.py +22 -0
- processcube_etw_library/processcube_client/app_info/__init__.py +1 -0
- processcube_etw_library/processcube_client/app_info/app_info_client.py +36 -0
- processcube_etw_library/processcube_client/client_factory.py +37 -0
- processcube_etw_library/processcube_client/core/__init__.py +2 -0
- processcube_etw_library/processcube_client/core/api/__init__.py +13 -0
- processcube_etw_library/processcube_client/core/api/base_client.py +235 -0
- processcube_etw_library/processcube_client/core/api/client.py +816 -0
- processcube_etw_library/processcube_client/core/api/helpers/__init__.py +0 -0
- processcube_etw_library/processcube_client/core/api/helpers/application_info.py +34 -0
- processcube_etw_library/processcube_client/core/api/helpers/data_object_instances.py +61 -0
- processcube_etw_library/processcube_client/core/api/helpers/empty_tasks.py +86 -0
- processcube_etw_library/processcube_client/core/api/helpers/events.py +39 -0
- processcube_etw_library/processcube_client/core/api/helpers/external_tasks.py +142 -0
- processcube_etw_library/processcube_client/core/api/helpers/flow_node_instances.py +80 -0
- processcube_etw_library/processcube_client/core/api/helpers/manual_tasks.py +87 -0
- processcube_etw_library/processcube_client/core/api/helpers/process_definitions.py +46 -0
- processcube_etw_library/processcube_client/core/api/helpers/process_instances.py +96 -0
- processcube_etw_library/processcube_client/core/api/helpers/process_models.py +51 -0
- processcube_etw_library/processcube_client/core/api/helpers/user_tasks.py +130 -0
- processcube_etw_library/processcube_client/core/base_client.py +175 -0
- processcube_etw_library/processcube_client/core/loop_helper.py +200 -0
- processcube_etw_library/processcube_client/event/__init__.py +1 -0
- processcube_etw_library/processcube_client/event/event_client.py +43 -0
- processcube_etw_library/processcube_client/external_task/__init__.py +3 -0
- processcube_etw_library/processcube_client/external_task/client_wrapper.py +28 -0
- processcube_etw_library/processcube_client/external_task/external_task_client.py +195 -0
- processcube_etw_library/processcube_client/external_task/external_task_worker.py +205 -0
- processcube_etw_library/processcube_client/external_task/functional_error.py +17 -0
- processcube_etw_library/processcube_client/flow_node_instance/__init__.py +1 -0
- processcube_etw_library/processcube_client/flow_node_instance/flow_node_instance_client.py +43 -0
- processcube_etw_library/processcube_client/notification/__init__.py +1 -0
- processcube_etw_library/processcube_client/notification/notification_client.py +103 -0
- processcube_etw_library/processcube_client/process_definition/__init__.py +2 -0
- processcube_etw_library/processcube_client/process_definition/process_definition_client.py +94 -0
- processcube_etw_library/processcube_client/process_definition/start_callback_type.py +6 -0
- processcube_etw_library/processcube_client/process_instance/__init__.py +1 -0
- processcube_etw_library/processcube_client/process_instance/process_instance_client.py +32 -0
- processcube_etw_library/processcube_client/user_task/__init__.py +1 -0
- processcube_etw_library/processcube_client/user_task/user_task_client.py +63 -0
- processcube_etw_library/settings.py +35 -9
- {processcube_etw_library-2026.1.22.145211.dist-info → processcube_etw_library-2026.1.29.71849b0.dist-info}/METADATA +13 -11
- processcube_etw_library-2026.1.29.71849b0.dist-info/RECORD +58 -0
- {processcube_etw_library-2026.1.22.145211.dist-info → processcube_etw_library-2026.1.29.71849b0.dist-info}/WHEEL +1 -1
- processcube_etw_library-2026.1.22.145211.dist-info/RECORD +0 -18
- {processcube_etw_library-2026.1.22.145211.dist-info → processcube_etw_library-2026.1.29.71849b0.dist-info}/top_level.txt +0 -0
|
File without changes
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase
|
|
3
|
+
from typing import Callable
|
|
4
|
+
|
|
5
|
+
from ..base_client import BaseClient
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ApplicationInfo:
    """Engine application metadata returned by the ``info`` endpoint.

    Serialized to and from camelCase JSON via ``dataclasses_json``.
    """

    id: str
    name: str
    package_name: str
    version: str
    authority_url: str
    # NOTE(review): annotated ``str`` although the name suggests a boolean —
    # confirm against the engine's /info payload before changing the type.
    allow_anonymous_root_access: str
    extra_info: dict
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class ApplicationInfoHandler(BaseClient):
    """Read-only client for the engine's application-info endpoints."""

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def info(self) -> ApplicationInfo:
        """Fetch the engine's application info and deserialize it."""
        raw = self.do_get("info")
        return ApplicationInfo.from_dict(raw)

    def authority(self) -> str:
        """Return the authority URL reported by the engine."""
        return self.do_get("authority")
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase, Undefined
|
|
3
|
+
from dataclasses_json import CatchAll
|
|
4
|
+
from typing import Any, Callable, Dict, List, Optional
|
|
5
|
+
from urllib import parse
|
|
6
|
+
|
|
7
|
+
from ..base_client import BaseClient
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class DataObjectInstancesQuery:
    """Filter parameters for a data-object-instance query.

    Fields left as ``None`` are dropped from the request by the handler.
    """

    limit: int = None
    offset: int = None
    data_object_id: str = None
    process_definition_id: str = None
    process_model_id: str = None
    process_instance_id: str = None
    flow_node_instance_id: str = None
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.INCLUDE)
@dataclass
class DataObjectInstanceResponse:
    """One data-object instance as returned by the engine.

    Unknown JSON keys are collected into ``place_holder`` (``CatchAll``).
    """

    data_object_id: Optional[str] = None
    flow_node_instance_id: Optional[str] = None
    process_definition_id: Optional[str] = None
    process_model_id: Optional[str] = None
    process_instance_id: Optional[str] = None
    value: Dict[str, Any] = field(default_factory=dict)
    created_at: Optional[str] = None
    updated_at: Optional[str] = None
    place_holder: CatchAll = None
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
class DataObjectInstanceHandler(BaseClient):
    """Query client for data object instances."""

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def query(
        self, request: DataObjectInstancesQuery, options: dict = None
    ) -> List[DataObjectInstanceResponse]:
        """Run a filtered data-object-instance query.

        Returns the matching instances, or an empty list when the engine
        reports ``totalCount == 0``.

        Fix: ``options`` previously defaulted to a shared mutable ``{}``
        (mutable-default-argument pitfall); it now defaults to ``None``.
        """
        if options is None:
            options = {}

        query_dict = request.to_dict()

        # Drop unset filters so they do not appear in the query string.
        filtered_query = [
            (key, value) for key, value in query_dict.items() if value is not None
        ]

        query_str = parse.urlencode(filtered_query, doseq=False)

        path = f"data_object_instances/query?{query_str}"

        response_dict = self.do_get(path, options)

        if response_dict.get("totalCount", 0) > 0:
            json_data = response_dict["dataObjectInstances"]
            return DataObjectInstanceResponse.schema().load(json_data, many=True)
        return []
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase, Undefined, config
|
|
3
|
+
from dataclasses_json import CatchAll
|
|
4
|
+
from typing import Callable, List
|
|
5
|
+
from urllib import parse
|
|
6
|
+
|
|
7
|
+
from ..base_client import BaseClient
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class EmptyTaskQuery:
    """Filter parameters for querying suspended empty activities."""

    limit: int = None
    offset: int = None
    # Serialized as "flowNodeInstanceId" — the engine query API speaks in
    # flow-node-instance terms.
    empty_task_instance_id: str = field(
        metadata=config(field_name="flowNodeInstanceId"), default=None
    )
    flow_node_id: str = None
    flow_node_name: str = None
    flow_node_lane: str = None
    correlation_id: str = None
    process_definition_id: str = None
    process_model_id: str = None
    process_instance_id: str = None
    state: str = None
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE)
@dataclass
class EmptyTaskResponse:
    """One suspended empty activity; unknown JSON keys are discarded."""

    # Mapped from the engine's "flowNodeInstanceId" field.
    empty_task_instance_id: str = field(
        metadata=config(field_name="flowNodeInstanceId")
    )
    correlation_id: str
    process_instance_id: str
    process_model_id: str
    flow_node_name: str = None
    flow_node_id: str = None
    owner_id: str = None
    flow_node_lane: str = None
    process_definition_id: str = None
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
class EmptyTaskHandler(BaseClient):
    """Client for suspended empty activities (``bpmn:Task`` flow nodes)."""

    BPMN_TYPE = "bpmn:Task"

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def query(
        self, request: EmptyTaskQuery = None, options: dict = None
    ) -> List[EmptyTaskResponse]:
        """Query suspended empty activities.

        Fix: both defaults were shared mutable objects created once at
        definition time (``EmptyTaskQuery()`` and ``{}``); they now default
        to ``None`` and are created per call.
        """
        if request is None:
            request = EmptyTaskQuery()
        if options is None:
            options = {}

        query_dict = request.to_dict()
        # This hits the generic flow-node-instance endpoint; state and type
        # are forced so only suspended empty activities come back (this
        # intentionally overrides any caller-supplied ``state``).
        query_dict.update(
            {
                "state": "suspended",
                "flowNodeType": EmptyTaskHandler.BPMN_TYPE,
            }
        )

        filtered_query = [
            (key, value) for key, value in query_dict.items() if value is not None
        ]

        query_str = parse.urlencode(filtered_query, doseq=False)

        response_dict = self.do_get(f"flow_node_instances?{query_str}", options)

        if response_dict.get("totalCount", 0) > 0:
            json_data = response_dict.get("flowNodeInstances", {})
            return EmptyTaskResponse.schema().load(json_data, many=True)
        return []

    def finish(self, empty_task_instance_id: str, options: dict = None) -> bool:
        """Finish the given empty activity; returns ``True`` on success."""
        if options is None:
            options = {}

        path = f"empty_activities/{empty_task_instance_id}/finish"
        self.do_put(path, {}, options)
        return True
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase
|
|
3
|
+
from typing import Callable
|
|
4
|
+
|
|
5
|
+
from ..base_client import BaseClient
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class MessageTriggerRequest:
    """Payload carried along when triggering a message event."""

    payload: dict
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class EventsHandler(BaseClient):
    """Client for triggering message and signal events."""

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def trigger_message(
        self, event_name: str, request: MessageTriggerRequest, options: dict = None
    ) -> bool:
        """Trigger a message event; returns ``True`` on success.

        ``options`` may carry ``process_instance_id`` to scope the trigger
        to a single process instance.

        Fix: ``options`` previously defaulted to a shared mutable ``{}``.
        """
        if options is None:
            options = {}

        url = f"messages/{event_name}/trigger"

        process_instance_id = options.get("process_instance_id", None)
        if process_instance_id is not None:
            url = f"{url}?process_instance_id={process_instance_id}"

        self.do_post(url, request.to_dict(), options)
        return True

    def trigger_signal(self, signal_name: str, options: dict = None) -> bool:
        """Trigger a signal event; returns ``True`` on success."""
        if options is None:
            options = {}

        self.do_post(f"signals/{signal_name}/trigger", {}, options)
        return True
|
|
@@ -0,0 +1,142 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase
|
|
3
|
+
from typing import Callable, List, Optional
|
|
4
|
+
|
|
5
|
+
from dataclasses_json.undefined import Undefined
|
|
6
|
+
|
|
7
|
+
from ..base_client import BaseClient
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class FetchAndLockRequestPayload:
    """Request body for fetching and locking external tasks.

    Timeouts and lock duration are in milliseconds.
    """

    worker_id: str
    topic_name: str
    max_tasks: int = 10
    long_polling_timeout: int = 10 * 1000
    lock_duration: int = 100 * 1000
    payload_filter: str = None
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE)
@dataclass
class ExternalTask:
    """One locked external task; unknown JSON keys are discarded."""

    id: str
    worker_id: str
    topic: str
    flow_node_instance_id: str
    correlation_id: str
    process_definition_id: str
    process_instance_id: str
    owner_id: str = None
    payload: dict = None
    lock_expiration_time: str = None
    # Engine task state, e.g. "pending" or "finished".
    state: str = None
    created_at: str = None
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ExtendLockRequest:
    """Request body to extend an external task's lock by some duration."""

    worker_id: str
    additional_duration: int
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class FinishExternalTaskRequestPayload:
    """Request body to finish an external task with a result payload."""

    worker_id: str
    result: dict
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class BpmnError:
    """A BPMN (business) error with its code and message."""

    error_code: str
    error_message: str
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class BpmnErrorRequest:
    """Request body reporting a BPMN error for an external task."""

    worker_id: str
    bpmn_error: BpmnError
|
|
64
|
+
|
|
65
|
+
|
|
66
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ServiceError:
    """A technical (non-BPMN) error raised while processing an external task.

    Bug fix: the decorator previously received the ``LetterCase`` enum class
    itself (``letter_case=LetterCase``) instead of a member, unlike every
    sibling payload in this module, so field names were not converted to
    camelCase (``errorCode``/``errorMessage``/``errorDetails``) on the wire.
    """

    error_code: str
    error_message: str
    error_details: dict = None
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ServiceErrorRequest:
    """Request body reporting a technical error for an external task."""

    worker_id: str
    error: ServiceError
|
|
79
|
+
|
|
80
|
+
|
|
81
|
+
class ExternalTaskHandler(BaseClient):
    """Client for the engine's external-task lifecycle endpoints.

    Fix applied throughout: every method's ``options`` parameter previously
    defaulted to a shared mutable ``{}`` (mutable-default-argument pitfall);
    they now default to ``None`` and are created per call.
    """

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def fetch_and_lock(
        self, request: FetchAndLockRequestPayload, options: dict = None
    ) -> List[ExternalTask]:
        """Fetch and lock up to ``request.max_tasks`` tasks for a topic."""
        if options is None:
            options = {}

        path = "external_tasks/fetch_and_lock"
        response_list = self.do_post(path, request.to_dict(), options)
        return ExternalTask.schema().load(response_list, many=True)

    def extend_lock(
        self, external_task_id: str, request: ExtendLockRequest, options: dict = None
    ) -> bool:
        """Extend the lock on a task; returns ``True`` on success."""
        if options is None:
            options = {}

        path = f"external_tasks/{external_task_id}/extend_lock"
        self.do_put(path, request.to_dict(), options)
        return True

    def finish(
        self,
        external_task_id: str,
        request: FinishExternalTaskRequestPayload,
        options: dict = None,
    ) -> bool:
        """Finish a task with its result payload; returns ``True``."""
        if options is None:
            options = {}

        path = f"external_tasks/{external_task_id}/finish"
        self.do_put(path, request.to_dict(), options)
        return True

    def handle_error(
        self, external_task_id: str, request: ServiceErrorRequest, options: dict = None
    ) -> bool:
        """Report an error for a task; returns ``True`` on success."""
        if options is None:
            options = {}

        path = f"external_tasks/{external_task_id}/error"
        self.do_put(path, request.to_dict(), options)
        return True

    def handle_bpmn_error(
        self, external_task_id: str, request: BpmnErrorRequest, options: dict = None
    ) -> bool:
        """Report a BPMN (business) error; delegates to ``handle_error``."""
        return self.handle_error(external_task_id, request, options)

    def handle_service_error(
        self, external_task_id: str, request: ServiceErrorRequest, options: dict = None
    ) -> bool:
        """Report a technical error; delegates to ``handle_error``."""
        return self.handle_error(external_task_id, request, options)
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase, Undefined
|
|
3
|
+
from dataclasses_json import CatchAll
|
|
4
|
+
from typing import Any, Callable, List, Dict, Optional
|
|
5
|
+
from urllib import parse
|
|
6
|
+
|
|
7
|
+
from ..base_client import BaseClient
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class FlowNodeInstancesQuery:
    """Filter parameters for a flow-node-instance query.

    Fields left as ``None`` are dropped from the request by the handler.
    """

    limit: int = None
    offset: int = None
    flow_node_instance_id: Optional[str] = None
    flow_node_id: Optional[str] = None
    flow_node_name: Optional[str] = None
    flow_node_lane: Optional[str] = None
    flow_node_type: Optional[str] = None
    event_type: Optional[str] = None
    correlation_id: Optional[str] = None
    process_definition_id: Optional[str] = None
    process_model_id: Optional[str] = None
    process_instance_id: Optional[str] = None
    owner_id: Optional[str] = None
    state: Optional[str] = None
    previous_flow_node_instance_id: Optional[str] = None
    parent_process_instance_id: Optional[str] = None
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.INCLUDE)
@dataclass
class FlowNodeInstanceResponse:
    """One flow-node instance as returned by the engine.

    Unknown JSON keys are collected into ``place_holder`` (``CatchAll``).
    """

    flow_node_instance_id: Optional[str] = None
    flow_node_id: Optional[str] = None
    flow_node_name: Optional[str] = None
    flow_node_lane: Optional[str] = None
    flow_node_type: Optional[str] = None
    event_type: Optional[str] = None
    previous_flow_node_instance_id: Optional[str] = None
    parent_process_instance_id: Optional[str] = None
    state: Optional[str] = None
    process_definition_id: Optional[str] = None
    process_model_id: Optional[str] = None
    process_instance_id: Optional[str] = None
    correlation_id: Optional[str] = None
    tokens: List[Dict[str, Any]] = field(default_factory=list)
    end_token: Dict[str, Any] = field(default_factory=dict)
    owner_id: str = None
    error: Dict[str, Any] = field(default_factory=dict)
    meta_info: List[Dict[str, Any]] = field(default_factory=list)
    place_holder: CatchAll = None
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class FlowNodeInstanceHandler(BaseClient):
    """Query client for flow-node instances."""

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def query(
        self, request: FlowNodeInstancesQuery, options: dict = None
    ) -> List[FlowNodeInstanceResponse]:
        """Run a filtered flow-node-instance query.

        Returns the matching instances, or an empty list when the engine
        reports ``totalCount == 0``.

        Fix: ``options`` previously defaulted to a shared mutable ``{}``
        (mutable-default-argument pitfall); it now defaults to ``None``.
        """
        if options is None:
            options = {}

        query_dict = request.to_dict()

        # Drop unset filters so they do not appear in the query string.
        filtered_query = [
            (key, value) for key, value in query_dict.items() if value is not None
        ]

        query_str = parse.urlencode(filtered_query, doseq=False)

        response_dict = self.do_get(f"flow_node_instances?{query_str}", options)

        if response_dict.get("totalCount", 0) > 0:
            json_data = response_dict.get("flowNodeInstances", {})
            return FlowNodeInstanceResponse.schema().load(json_data, many=True)
        return []
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
from dataclasses import dataclass, field
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase, Undefined, config
|
|
3
|
+
|
|
4
|
+
# from dataclasses_json import CatchAll
|
|
5
|
+
from typing import Callable, List
|
|
6
|
+
from urllib import parse
|
|
7
|
+
|
|
8
|
+
from ..base_client import BaseClient
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ManualTaskQuery:
    """Filter parameters for querying suspended manual tasks."""

    limit: int = None
    offset: int = None
    # Serialized as "flowNodeInstanceId" — the engine query API speaks in
    # flow-node-instance terms.
    manual_task_instance_id: str = field(
        metadata=config(field_name="flowNodeInstanceId"), default=None
    )
    flow_node_id: str = None
    flow_node_name: str = None
    flow_node_lane: str = None
    correlation_id: str = None
    process_definition_id: str = None
    process_model_id: str = None
    process_instance_id: str = None
    state: str = None
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE)
@dataclass
class ManualTaskResponse:
    """One suspended manual task; unknown JSON keys are discarded."""

    # Mapped from the engine's "flowNodeInstanceId" field.
    manual_task_instance_id: str = field(
        metadata=config(field_name="flowNodeInstanceId")
    )
    correlation_id: str
    process_instance_id: str
    process_model_id: str
    flow_node_name: str = None
    flow_node_id: str = None
    owner_id: str = None
    flow_node_lane: str = None
    process_definition_id: str = None
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class ManualTaskHandler(BaseClient):
    """Client for suspended manual tasks (``bpmn:ManualTask`` flow nodes)."""

    BPMN_TYPE = "bpmn:ManualTask"

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def query(
        self, request: ManualTaskQuery = None, options: dict = None
    ) -> List[ManualTaskResponse]:
        """Query suspended manual tasks.

        Fix: both defaults were shared mutable objects created once at
        definition time (``ManualTaskQuery()`` and ``{}``); they now default
        to ``None`` and are created per call.
        """
        if request is None:
            request = ManualTaskQuery()
        if options is None:
            options = {}

        query_dict = request.to_dict()
        # This hits the generic flow-node-instance endpoint; state and type
        # are forced so only suspended manual tasks come back (this
        # intentionally overrides any caller-supplied ``state``).
        query_dict.update(
            {
                "state": "suspended",
                "flowNodeType": ManualTaskHandler.BPMN_TYPE,
            }
        )

        filtered_query = [
            (key, value) for key, value in query_dict.items() if value is not None
        ]

        query_str = parse.urlencode(filtered_query, doseq=False)

        response_dict = self.do_get(f"flow_node_instances?{query_str}", options)

        if response_dict.get("totalCount", 0) > 0:
            json_data = response_dict.get("flowNodeInstances", {})
            return ManualTaskResponse.schema().load(json_data, many=True)
        return []

    def finish(self, manual_task_instance_id: str, options: dict = None) -> bool:
        """Finish the given manual task; returns ``True`` on success."""
        if options is None:
            options = {}

        path = f"manual_tasks/{manual_task_instance_id}/finish"
        self.do_put(path, {}, options)
        return True
|
|
@@ -0,0 +1,46 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from dataclasses_json import dataclass_json, LetterCase
|
|
3
|
+
from enum import IntFlag
|
|
4
|
+
from typing import Callable
|
|
5
|
+
|
|
6
|
+
from ..base_client import BaseClient
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class StartCallbackType(IntFlag):
    """When a process-start call should return to the caller.

    NOTE(review): under ``IntFlag`` the value 3 equals 1 | 2; the values
    presumably mirror the engine API's numeric constants — confirm before
    changing them.
    """

    CallbackOnProcessInstanceCreated = 1
    CallbackOnProcessInstanceFinished = 2
    CallbackOnEndEventReached = 3
|
|
13
|
+
|
|
14
|
+
|
|
15
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ProcessDefinitionUploadPayload:
    """Request body for uploading a BPMN process definition (XML)."""

    xml: str
    overwrite_existing: bool = False
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
class ProcessDefinitionHandler(BaseClient):
    """Client for uploading and deleting process definitions.

    Fix applied throughout: ``options`` previously defaulted to a shared
    mutable ``{}`` (mutable-default-argument pitfall); it now defaults to
    ``None`` and is created per call.
    """

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def upload(
        self, request: ProcessDefinitionUploadPayload, options: dict = None
    ) -> None:
        """Upload a process definition XML to the engine."""
        if options is None:
            options = {}

        self.do_post("process_definitions", request.to_dict(), options)

    def delete(
        self,
        process_definition_id: str,
        delete_all_related_data: bool = False,
        options: dict = None,
    ) -> None:
        """Delete a process definition, optionally with all related data."""
        if options is None:
            options = {}

        path = f"process_definitions/{process_definition_id}"
        if delete_all_related_data:
            path = f"{path}?delete_all_related_data=true"

        self.do_delete(path, options)
|
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
from datetime import date
|
|
2
|
+
from dataclasses import dataclass, field
|
|
3
|
+
from dataclasses_json import dataclass_json, LetterCase, Undefined
|
|
4
|
+
from typing import Callable, List, Optional
|
|
5
|
+
from urllib.parse import urlencode
|
|
6
|
+
|
|
7
|
+
from ..base_client import BaseClient
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
@dataclass_json(letter_case=LetterCase.CAMEL)
@dataclass
class ProcessInstanceQueryRequest:
    """Filter parameters for a process-instance query.

    ``limit = -1`` requests an unbounded result set; fields left as ``None``
    are dropped from the request by the handler.
    """

    offset: int = 0
    limit: int = -1
    correlation_id: str = None
    process_instance_id: str = None
    process_definition_id: str = None
    process_model_id: str = None
    process_model_name: str = None
    process_model_hash: str = None
    owner_id: str = None
    state: str = None
    parent_process_instance_id: str = None
    terminated_by_user_id: str = None
    created_before: date = None
    created_at: date = None
    created_after: date = None
    updated_before: date = None
    updated_at: date = None
    updated_after: date = None
    finished_before: date = None
    finished_at: date = None
    finished_after: date = None
    start_token: dict = None
    end_token: dict = None
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
@dataclass_json(letter_case=LetterCase.CAMEL, undefined=Undefined.EXCLUDE)
@dataclass
class ProcessInstanceQueryResponse:
    """One process instance as returned by the engine.

    Unknown JSON keys are discarded (``Undefined.EXCLUDE``).
    """

    correlation_id: Optional[str] = None
    process_instance_id: Optional[str] = None
    process_definition_id: Optional[str] = None
    process_model_id: Optional[str] = None
    process_model_name: Optional[str] = None
    parent_process_instance_id: Optional[str] = None
    hash: Optional[str] = None
    xml: Optional[str] = None
    state: Optional[str] = None
    error: dict = field(default_factory=dict)
    owner_id: Optional[str] = None
    created_at: Optional[str] = None
    finished_at: Optional[str] = None
    terminated_by_user_id: Optional[str] = None
    start_token: Optional[dict] = None
    end_token: Optional[dict] = None
    metadata: dict = field(default_factory=dict)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
class ProcessInstanceHandler(BaseClient):
    """Client for querying and terminating process instances.

    Fix applied throughout: ``options`` previously defaulted to a shared
    mutable ``{}`` (mutable-default-argument pitfall); it now defaults to
    ``None`` and is created per call.
    """

    def __init__(self, url: str, identity: Callable = None, api_version: str = "v1"):
        super().__init__(url, identity, api_version)

    def query(
        self, request: ProcessInstanceQueryRequest, options: dict = None
    ) -> List[ProcessInstanceQueryResponse]:
        """Run a filtered process-instance query.

        Returns the matching instances, or an empty list when the engine
        reports ``totalCount == 0``.
        """
        if options is None:
            options = {}

        path = "process_instances/query"

        all_fields = request.to_dict()

        # Drop unset filters so they do not appear in the query string.
        query_fields = [
            (key, value) for key, value in all_fields.items() if value is not None
        ]

        query = urlencode(query_fields)
        if query:
            path = f"{path}?{query}"

        response_dict = self.do_get(path, options)

        if response_dict.get("totalCount", 0) > 0:
            json_data = response_dict["processInstances"]
            return ProcessInstanceQueryResponse.schema().load(json_data, many=True)
        return []

    def terminate(self, process_instance_id: str, options: dict = None) -> bool:
        """Terminate the given process instance; returns ``True`` on success."""
        if options is None:
            options = {}

        path = f"process_instances/{process_instance_id}/terminate"
        self.do_put(path, {}, options)
        return True
|