windmill-api 1.425.0__py3-none-any.whl → 1.426.0__py3-none-any.whl
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Potentially problematic release: this version of windmill-api might be problematic.
- windmill_api/api/kafka_trigger/__init__.py +0 -0
- windmill_api/api/kafka_trigger/create_kafka_trigger.py +105 -0
- windmill_api/api/kafka_trigger/delete_kafka_trigger.py +101 -0
- windmill_api/api/kafka_trigger/exists_kafka_trigger.py +160 -0
- windmill_api/api/kafka_trigger/get_kafka_trigger.py +166 -0
- windmill_api/api/kafka_trigger/list_kafka_triggers.py +237 -0
- windmill_api/api/kafka_trigger/set_kafka_trigger_enabled.py +113 -0
- windmill_api/api/kafka_trigger/update_kafka_trigger.py +113 -0
- windmill_api/models/add_granular_acls_kind.py +1 -0
- windmill_api/models/create_kafka_trigger_json_body.py +104 -0
- windmill_api/models/edit_kafka_trigger.py +94 -0
- windmill_api/models/get_granular_acls_kind.py +1 -0
- windmill_api/models/get_kafka_trigger_response_200.py +180 -0
- windmill_api/models/get_kafka_trigger_response_200_extra_perms.py +44 -0
- windmill_api/models/get_triggers_count_of_flow_response_200.py +8 -0
- windmill_api/models/get_triggers_count_of_script_response_200.py +8 -0
- windmill_api/models/get_used_triggers_response_200.py +7 -0
- windmill_api/models/kafka_trigger.py +180 -0
- windmill_api/models/kafka_trigger_extra_perms.py +44 -0
- windmill_api/models/list_kafka_triggers_response_200_item.py +182 -0
- windmill_api/models/list_kafka_triggers_response_200_item_extra_perms.py +44 -0
- windmill_api/models/new_kafka_trigger.py +104 -0
- windmill_api/models/remove_granular_acls_kind.py +1 -0
- windmill_api/models/set_kafka_trigger_enabled_json_body.py +58 -0
- windmill_api/models/triggers_count.py +8 -0
- windmill_api/models/update_kafka_trigger_json_body.py +94 -0
- {windmill_api-1.425.0.dist-info → windmill_api-1.426.0.dist-info}/METADATA +1 -1
- {windmill_api-1.425.0.dist-info → windmill_api-1.426.0.dist-info}/RECORD +30 -11
- {windmill_api-1.425.0.dist-info → windmill_api-1.426.0.dist-info}/LICENSE +0 -0
- {windmill_api-1.425.0.dist-info → windmill_api-1.426.0.dist-info}/WHEEL +0 -0
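The new windmill_api/api/kafka_trigger/ endpoint modules follow the same conventions as the other generated endpoint modules in this package. A minimal sketch of listing the Kafka triggers of a workspace is shown below; the AuthenticatedClient import and the sync(workspace=..., client=...) signature are assumptions based on those conventions, so check list_kafka_triggers.py for the exact parameters.

from windmill_api.api.kafka_trigger import list_kafka_triggers
from windmill_api.client import AuthenticatedClient

# Hypothetical instance URL and token; substitute your own.
client = AuthenticatedClient(base_url="https://app.windmill.dev/api", token="***")

# Expected to return a list of ListKafkaTriggersResponse200Item on success.
triggers = list_kafka_triggers.sync(workspace="my-workspace", client=client)
for trigger in triggers or []:
    print(trigger.path, trigger.topics, trigger.enabled)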
windmill_api/models/list_kafka_triggers_response_200_item.py
@@ -0,0 +1,182 @@
+import datetime
+from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+from dateutil.parser import isoparse
+
+from ..types import UNSET, Unset
+
+if TYPE_CHECKING:
+    from ..models.list_kafka_triggers_response_200_item_extra_perms import ListKafkaTriggersResponse200ItemExtraPerms
+
+
+T = TypeVar("T", bound="ListKafkaTriggersResponse200Item")
+
+
+@_attrs_define
+class ListKafkaTriggersResponse200Item:
+    """
+    Attributes:
+        path (str):
+        edited_by (str):
+        edited_at (datetime.datetime):
+        script_path (str):
+        kafka_resource_path (str):
+        group_id (str):
+        topics (List[str]):
+        is_flow (bool):
+        extra_perms (ListKafkaTriggersResponse200ItemExtraPerms):
+        email (str):
+        workspace_id (str):
+        enabled (bool):
+        server_id (Union[Unset, str]):
+        last_server_ping (Union[Unset, datetime.datetime]):
+        error (Union[Unset, str]):
+    """
+
+    path: str
+    edited_by: str
+    edited_at: datetime.datetime
+    script_path: str
+    kafka_resource_path: str
+    group_id: str
+    topics: List[str]
+    is_flow: bool
+    extra_perms: "ListKafkaTriggersResponse200ItemExtraPerms"
+    email: str
+    workspace_id: str
+    enabled: bool
+    server_id: Union[Unset, str] = UNSET
+    last_server_ping: Union[Unset, datetime.datetime] = UNSET
+    error: Union[Unset, str] = UNSET
+    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> Dict[str, Any]:
+        path = self.path
+        edited_by = self.edited_by
+        edited_at = self.edited_at.isoformat()
+
+        script_path = self.script_path
+        kafka_resource_path = self.kafka_resource_path
+        group_id = self.group_id
+        topics = self.topics
+
+        is_flow = self.is_flow
+        extra_perms = self.extra_perms.to_dict()
+
+        email = self.email
+        workspace_id = self.workspace_id
+        enabled = self.enabled
+        server_id = self.server_id
+        last_server_ping: Union[Unset, str] = UNSET
+        if not isinstance(self.last_server_ping, Unset):
+            last_server_ping = self.last_server_ping.isoformat()
+
+        error = self.error
+
+        field_dict: Dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "path": path,
+                "edited_by": edited_by,
+                "edited_at": edited_at,
+                "script_path": script_path,
+                "kafka_resource_path": kafka_resource_path,
+                "group_id": group_id,
+                "topics": topics,
+                "is_flow": is_flow,
+                "extra_perms": extra_perms,
+                "email": email,
+                "workspace_id": workspace_id,
+                "enabled": enabled,
+            }
+        )
+        if server_id is not UNSET:
+            field_dict["server_id"] = server_id
+        if last_server_ping is not UNSET:
+            field_dict["last_server_ping"] = last_server_ping
+        if error is not UNSET:
+            field_dict["error"] = error
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+        from ..models.list_kafka_triggers_response_200_item_extra_perms import (
+            ListKafkaTriggersResponse200ItemExtraPerms,
+        )
+
+        d = src_dict.copy()
+        path = d.pop("path")
+
+        edited_by = d.pop("edited_by")
+
+        edited_at = isoparse(d.pop("edited_at"))
+
+        script_path = d.pop("script_path")
+
+        kafka_resource_path = d.pop("kafka_resource_path")
+
+        group_id = d.pop("group_id")
+
+        topics = cast(List[str], d.pop("topics"))
+
+        is_flow = d.pop("is_flow")
+
+        extra_perms = ListKafkaTriggersResponse200ItemExtraPerms.from_dict(d.pop("extra_perms"))
+
+        email = d.pop("email")
+
+        workspace_id = d.pop("workspace_id")
+
+        enabled = d.pop("enabled")
+
+        server_id = d.pop("server_id", UNSET)
+
+        _last_server_ping = d.pop("last_server_ping", UNSET)
+        last_server_ping: Union[Unset, datetime.datetime]
+        if isinstance(_last_server_ping, Unset):
+            last_server_ping = UNSET
+        else:
+            last_server_ping = isoparse(_last_server_ping)
+
+        error = d.pop("error", UNSET)
+
+        list_kafka_triggers_response_200_item = cls(
+            path=path,
+            edited_by=edited_by,
+            edited_at=edited_at,
+            script_path=script_path,
+            kafka_resource_path=kafka_resource_path,
+            group_id=group_id,
+            topics=topics,
+            is_flow=is_flow,
+            extra_perms=extra_perms,
+            email=email,
+            workspace_id=workspace_id,
+            enabled=enabled,
+            server_id=server_id,
+            last_server_ping=last_server_ping,
+            error=error,
+        )
+
+        list_kafka_triggers_response_200_item.additional_properties = d
+        return list_kafka_triggers_response_200_item
+
+    @property
+    def additional_keys(self) -> List[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
windmill_api/models/list_kafka_triggers_response_200_item_extra_perms.py
@@ -0,0 +1,44 @@
+from typing import Any, Dict, List, Type, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="ListKafkaTriggersResponse200ItemExtraPerms")
+
+
+@_attrs_define
+class ListKafkaTriggersResponse200ItemExtraPerms:
+    """ """
+
+    additional_properties: Dict[str, bool] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> Dict[str, Any]:
+        field_dict: Dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update({})
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+        d = src_dict.copy()
+        list_kafka_triggers_response_200_item_extra_perms = cls()
+
+        list_kafka_triggers_response_200_item_extra_perms.additional_properties = d
+        return list_kafka_triggers_response_200_item_extra_perms
+
+    @property
+    def additional_keys(self) -> List[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> bool:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: bool) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
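ListKafkaTriggersResponse200ItemExtraPerms declares no fixed fields; it is a thin dict-like wrapper over a mapping of permission path to boolean, exposed through the __getitem__/__setitem__/__contains__ methods above. For example:

from windmill_api.models.list_kafka_triggers_response_200_item_extra_perms import (
    ListKafkaTriggersResponse200ItemExtraPerms,
)

# Arbitrary keys land in additional_properties; values are booleans.
perms = ListKafkaTriggersResponse200ItemExtraPerms.from_dict({"g/all": False, "u/admin": True})
assert "u/admin" in perms and perms["u/admin"] is True
perms["g/devops"] = True
assert sorted(perms.additional_keys) == ["g/all", "g/devops", "u/admin"]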
windmill_api/models/new_kafka_trigger.py
@@ -0,0 +1,104 @@
+from typing import Any, Dict, List, Type, TypeVar, Union, cast
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="NewKafkaTrigger")
+
+
+@_attrs_define
+class NewKafkaTrigger:
+    """
+    Attributes:
+        path (str):
+        script_path (str):
+        is_flow (bool):
+        kafka_resource_path (str):
+        group_id (str):
+        topics (List[str]):
+        enabled (Union[Unset, bool]):
+    """
+
+    path: str
+    script_path: str
+    is_flow: bool
+    kafka_resource_path: str
+    group_id: str
+    topics: List[str]
+    enabled: Union[Unset, bool] = UNSET
+    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> Dict[str, Any]:
+        path = self.path
+        script_path = self.script_path
+        is_flow = self.is_flow
+        kafka_resource_path = self.kafka_resource_path
+        group_id = self.group_id
+        topics = self.topics
+
+        enabled = self.enabled
+
+        field_dict: Dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "path": path,
+                "script_path": script_path,
+                "is_flow": is_flow,
+                "kafka_resource_path": kafka_resource_path,
+                "group_id": group_id,
+                "topics": topics,
+            }
+        )
+        if enabled is not UNSET:
+            field_dict["enabled"] = enabled
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+        d = src_dict.copy()
+        path = d.pop("path")
+
+        script_path = d.pop("script_path")
+
+        is_flow = d.pop("is_flow")
+
+        kafka_resource_path = d.pop("kafka_resource_path")
+
+        group_id = d.pop("group_id")
+
+        topics = cast(List[str], d.pop("topics"))
+
+        enabled = d.pop("enabled", UNSET)
+
+        new_kafka_trigger = cls(
+            path=path,
+            script_path=script_path,
+            is_flow=is_flow,
+            kafka_resource_path=kafka_resource_path,
+            group_id=group_id,
+            topics=topics,
+            enabled=enabled,
+        )
+
+        new_kafka_trigger.additional_properties = d
+        return new_kafka_trigger
+
+    @property
+    def additional_keys(self) -> List[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
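NewKafkaTrigger is the model for a new trigger definition (the file list also adds CreateKafkaTriggerJsonBody with the same line count). enabled is the only optional field and is dropped from the serialized body while it is UNSET:

from windmill_api.models.new_kafka_trigger import NewKafkaTrigger

# Example values only; the paths refer to hypothetical workspace objects.
body = NewKafkaTrigger(
    path="f/examples/my_kafka_trigger",
    script_path="f/examples/consume_message",
    is_flow=False,
    kafka_resource_path="f/examples/kafka_res",
    group_id="windmill-consumer",
    topics=["orders", "payments"],
)
# enabled was left UNSET, so it does not appear in the serialized payload.
assert "enabled" not in body.to_dict()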
windmill_api/models/set_kafka_trigger_enabled_json_body.py
@@ -0,0 +1,58 @@
+from typing import Any, Dict, List, Type, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="SetKafkaTriggerEnabledJsonBody")
+
+
+@_attrs_define
+class SetKafkaTriggerEnabledJsonBody:
+    """
+    Attributes:
+        enabled (bool):
+    """
+
+    enabled: bool
+    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> Dict[str, Any]:
+        enabled = self.enabled
+
+        field_dict: Dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "enabled": enabled,
+            }
+        )
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+        d = src_dict.copy()
+        enabled = d.pop("enabled")
+
+        set_kafka_trigger_enabled_json_body = cls(
+            enabled=enabled,
+        )
+
+        set_kafka_trigger_enabled_json_body.additional_properties = d
+        return set_kafka_trigger_enabled_json_body
+
+    @property
+    def additional_keys(self) -> List[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
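SetKafkaTriggerEnabledJsonBody is the one-field body for the enable/disable endpoint. A sketch of pausing a trigger follows; the sync_detailed call and its workspace/path/client/json_body parameters are assumptions based on the other generated endpoint modules, so verify against set_kafka_trigger_enabled.py.

from windmill_api.api.kafka_trigger import set_kafka_trigger_enabled
from windmill_api.client import AuthenticatedClient
from windmill_api.models.set_kafka_trigger_enabled_json_body import SetKafkaTriggerEnabledJsonBody

client = AuthenticatedClient(base_url="https://app.windmill.dev/api", token="***")  # hypothetical instance

response = set_kafka_trigger_enabled.sync_detailed(
    workspace="my-workspace",
    path="f/examples/my_kafka_trigger",
    client=client,
    json_body=SetKafkaTriggerEnabledJsonBody(enabled=False),
)
print(response.status_code)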
windmill_api/models/triggers_count.py
@@ -22,6 +22,7 @@ class TriggersCount:
         webhook_count (Union[Unset, float]):
         email_count (Union[Unset, float]):
         websocket_count (Union[Unset, float]):
+        kafka_count (Union[Unset, float]):
     """
 
     primary_schedule: Union[Unset, "TriggersCountPrimarySchedule"] = UNSET
@@ -30,6 +31,7 @@ class TriggersCount:
     webhook_count: Union[Unset, float] = UNSET
     email_count: Union[Unset, float] = UNSET
     websocket_count: Union[Unset, float] = UNSET
+    kafka_count: Union[Unset, float] = UNSET
     additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> Dict[str, Any]:
@@ -42,6 +44,7 @@ class TriggersCount:
         webhook_count = self.webhook_count
         email_count = self.email_count
         websocket_count = self.websocket_count
+        kafka_count = self.kafka_count
 
         field_dict: Dict[str, Any] = {}
         field_dict.update(self.additional_properties)
@@ -58,6 +61,8 @@ class TriggersCount:
             field_dict["email_count"] = email_count
         if websocket_count is not UNSET:
             field_dict["websocket_count"] = websocket_count
+        if kafka_count is not UNSET:
+            field_dict["kafka_count"] = kafka_count
 
         return field_dict
 
@@ -83,6 +88,8 @@ class TriggersCount:
 
         websocket_count = d.pop("websocket_count", UNSET)
 
+        kafka_count = d.pop("kafka_count", UNSET)
+
         triggers_count = cls(
             primary_schedule=primary_schedule,
             schedule_count=schedule_count,
@@ -90,6 +97,7 @@ class TriggersCount:
             webhook_count=webhook_count,
             email_count=email_count,
             websocket_count=websocket_count,
+            kafka_count=kafka_count,
         )
 
         triggers_count.additional_properties = d
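TriggersCount gains kafka_count next to the existing counters; like the others it defaults to UNSET and is only serialized when the server actually returns it:

from windmill_api.models.triggers_count import TriggersCount

# Partial payload with made-up numbers; every counter is optional.
counts = TriggersCount.from_dict({"webhook_count": 2.0, "kafka_count": 1.0})
assert counts.kafka_count == 1.0
# Counters that were never set (websocket_count here) stay out of to_dict().
assert "websocket_count" not in counts.to_dict()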
windmill_api/models/update_kafka_trigger_json_body.py
@@ -0,0 +1,94 @@
+from typing import Any, Dict, List, Type, TypeVar, cast
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="UpdateKafkaTriggerJsonBody")
+
+
+@_attrs_define
+class UpdateKafkaTriggerJsonBody:
+    """
+    Attributes:
+        kafka_resource_path (str):
+        group_id (str):
+        topics (List[str]):
+        path (str):
+        script_path (str):
+        is_flow (bool):
+    """
+
+    kafka_resource_path: str
+    group_id: str
+    topics: List[str]
+    path: str
+    script_path: str
+    is_flow: bool
+    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> Dict[str, Any]:
+        kafka_resource_path = self.kafka_resource_path
+        group_id = self.group_id
+        topics = self.topics
+
+        path = self.path
+        script_path = self.script_path
+        is_flow = self.is_flow
+
+        field_dict: Dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "kafka_resource_path": kafka_resource_path,
+                "group_id": group_id,
+                "topics": topics,
+                "path": path,
+                "script_path": script_path,
+                "is_flow": is_flow,
+            }
+        )
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
+        d = src_dict.copy()
+        kafka_resource_path = d.pop("kafka_resource_path")
+
+        group_id = d.pop("group_id")
+
+        topics = cast(List[str], d.pop("topics"))
+
+        path = d.pop("path")
+
+        script_path = d.pop("script_path")
+
+        is_flow = d.pop("is_flow")
+
+        update_kafka_trigger_json_body = cls(
+            kafka_resource_path=kafka_resource_path,
+            group_id=group_id,
+            topics=topics,
+            path=path,
+            script_path=script_path,
+            is_flow=is_flow,
+        )
+
+        update_kafka_trigger_json_body.additional_properties = d
+        return update_kafka_trigger_json_body
+
+    @property
+    def additional_keys(self) -> List[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
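UpdateKafkaTriggerJsonBody carries the full replacement definition of a trigger, with every field required. A sketch of an update call, again assuming the workspace/path/client/json_body convention of the other generated trigger endpoints (see update_kafka_trigger.py for the real signature):

from windmill_api.api.kafka_trigger import update_kafka_trigger
from windmill_api.client import AuthenticatedClient
from windmill_api.models.update_kafka_trigger_json_body import UpdateKafkaTriggerJsonBody

client = AuthenticatedClient(base_url="https://app.windmill.dev/api", token="***")  # hypothetical instance

update_kafka_trigger.sync_detailed(
    workspace="my-workspace",
    path="f/examples/my_kafka_trigger",
    client=client,
    json_body=UpdateKafkaTriggerJsonBody(
        kafka_resource_path="f/examples/kafka_res",
        group_id="windmill-consumer",
        topics=["orders", "payments", "refunds"],
        path="f/examples/my_kafka_trigger",
        script_path="f/examples/consume_message",
        is_flow=False,
    ),
)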