windmill-api 1.425.1__py3-none-any.whl → 1.427.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of windmill-api has been flagged as potentially problematic; see the linked advisory for more details.
- windmill_api/api/kafka_trigger/__init__.py +0 -0
- windmill_api/api/kafka_trigger/create_kafka_trigger.py +105 -0
- windmill_api/api/kafka_trigger/delete_kafka_trigger.py +101 -0
- windmill_api/api/kafka_trigger/exists_kafka_trigger.py +160 -0
- windmill_api/api/kafka_trigger/get_kafka_trigger.py +166 -0
- windmill_api/api/kafka_trigger/list_kafka_triggers.py +237 -0
- windmill_api/api/kafka_trigger/set_kafka_trigger_enabled.py +113 -0
- windmill_api/api/kafka_trigger/update_kafka_trigger.py +113 -0
- windmill_api/models/add_granular_acls_kind.py +1 -0
- windmill_api/models/create_kafka_trigger_json_body.py +104 -0
- windmill_api/models/edit_kafka_trigger.py +94 -0
- windmill_api/models/get_granular_acls_kind.py +1 -0
- windmill_api/models/get_kafka_trigger_response_200.py +180 -0
- windmill_api/models/get_kafka_trigger_response_200_extra_perms.py +44 -0
- windmill_api/models/get_triggers_count_of_flow_response_200.py +8 -0
- windmill_api/models/get_triggers_count_of_script_response_200.py +8 -0
- windmill_api/models/get_used_triggers_response_200.py +7 -0
- windmill_api/models/kafka_trigger.py +180 -0
- windmill_api/models/kafka_trigger_extra_perms.py +44 -0
- windmill_api/models/list_kafka_triggers_response_200_item.py +182 -0
- windmill_api/models/list_kafka_triggers_response_200_item_extra_perms.py +44 -0
- windmill_api/models/new_kafka_trigger.py +104 -0
- windmill_api/models/remove_granular_acls_kind.py +1 -0
- windmill_api/models/set_kafka_trigger_enabled_json_body.py +58 -0
- windmill_api/models/triggers_count.py +8 -0
- windmill_api/models/update_kafka_trigger_json_body.py +94 -0
- {windmill_api-1.425.1.dist-info → windmill_api-1.427.0.dist-info}/METADATA +1 -1
- {windmill_api-1.425.1.dist-info → windmill_api-1.427.0.dist-info}/RECORD +30 -11
- {windmill_api-1.425.1.dist-info → windmill_api-1.427.0.dist-info}/LICENSE +0 -0
- {windmill_api-1.425.1.dist-info → windmill_api-1.427.0.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast
|
|
3
|
+
|
|
4
|
+
from attrs import define as _attrs_define
|
|
5
|
+
from attrs import field as _attrs_field
|
|
6
|
+
from dateutil.parser import isoparse
|
|
7
|
+
|
|
8
|
+
from ..types import UNSET, Unset
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from ..models.get_kafka_trigger_response_200_extra_perms import GetKafkaTriggerResponse200ExtraPerms
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
T = TypeVar("T", bound="GetKafkaTriggerResponse200")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@_attrs_define
class GetKafkaTriggerResponse200:
    """Response body for the "get Kafka trigger" endpoint.

    Attributes:
        path (str):
        edited_by (str):
        edited_at (datetime.datetime):
        script_path (str):
        kafka_resource_path (str):
        group_id (str):
        topics (List[str]):
        is_flow (bool):
        extra_perms (GetKafkaTriggerResponse200ExtraPerms):
        email (str):
        workspace_id (str):
        enabled (bool):
        server_id (Union[Unset, str]):
        last_server_ping (Union[Unset, datetime.datetime]):
        error (Union[Unset, str]):
    """

    path: str
    edited_by: str
    edited_at: datetime.datetime
    script_path: str
    kafka_resource_path: str
    group_id: str
    topics: List[str]
    is_flow: bool
    extra_perms: "GetKafkaTriggerResponse200ExtraPerms"
    email: str
    workspace_id: str
    enabled: bool
    server_id: Union[Unset, str] = UNSET
    last_server_ping: Union[Unset, datetime.datetime] = UNSET
    error: Union[Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; datetimes become ISO-8601 strings and
        UNSET optional fields are omitted entirely."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "path": self.path,
                "edited_by": self.edited_by,
                "edited_at": self.edited_at.isoformat(),
                "script_path": self.script_path,
                "kafka_resource_path": self.kafka_resource_path,
                "group_id": self.group_id,
                "topics": self.topics,
                "is_flow": self.is_flow,
                "extra_perms": self.extra_perms.to_dict(),
                "email": self.email,
                "workspace_id": self.workspace_id,
                "enabled": self.enabled,
            }
        )
        # Optional fields: only emit the ones that were actually set.
        if self.server_id is not UNSET:
            serialized["server_id"] = self.server_id
        if not isinstance(self.last_server_ping, Unset):
            serialized["last_server_ping"] = self.last_server_ping.isoformat()
        if self.error is not UNSET:
            serialized["error"] = self.error

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a decoded JSON dict.

        Keys consumed here are removed from a working copy of *src_dict*;
        whatever remains is kept as ``additional_properties``.
        """
        from ..models.get_kafka_trigger_response_200_extra_perms import GetKafkaTriggerResponse200ExtraPerms

        data = src_dict.copy()

        raw_ping = data.pop("last_server_ping", UNSET)
        last_server_ping: Union[Unset, datetime.datetime]
        last_server_ping = UNSET if isinstance(raw_ping, Unset) else isoparse(raw_ping)

        instance = cls(
            path=data.pop("path"),
            edited_by=data.pop("edited_by"),
            edited_at=isoparse(data.pop("edited_at")),
            script_path=data.pop("script_path"),
            kafka_resource_path=data.pop("kafka_resource_path"),
            group_id=data.pop("group_id"),
            topics=cast(List[str], data.pop("topics")),
            is_flow=data.pop("is_flow"),
            extra_perms=GetKafkaTriggerResponse200ExtraPerms.from_dict(data.pop("extra_perms")),
            email=data.pop("email"),
            workspace_id=data.pop("workspace_id"),
            enabled=data.pop("enabled"),
            server_id=data.pop("server_id", UNSET),
            last_server_ping=last_server_ping,
            error=data.pop("error", UNSET),
        )

        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties carried by this object."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
from typing import Any, Dict, List, Type, TypeVar
|
|
2
|
+
|
|
3
|
+
from attrs import define as _attrs_define
|
|
4
|
+
from attrs import field as _attrs_field
|
|
5
|
+
|
|
6
|
+
T = TypeVar("T", bound="GetKafkaTriggerResponse200ExtraPerms")
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@_attrs_define
class GetKafkaTriggerResponse200ExtraPerms:
    """Free-form mapping of string keys to boolean flags (the schema declares
    no fixed properties, so everything lives in ``additional_properties``)."""

    additional_properties: Dict[str, bool] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Create an instance holding a copy of *src_dict*'s entries."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> bool:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: bool) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -24,6 +24,7 @@ class GetTriggersCountOfFlowResponse200:
|
|
|
24
24
|
webhook_count (Union[Unset, float]):
|
|
25
25
|
email_count (Union[Unset, float]):
|
|
26
26
|
websocket_count (Union[Unset, float]):
|
|
27
|
+
kafka_count (Union[Unset, float]):
|
|
27
28
|
"""
|
|
28
29
|
|
|
29
30
|
primary_schedule: Union[Unset, "GetTriggersCountOfFlowResponse200PrimarySchedule"] = UNSET
|
|
@@ -32,6 +33,7 @@ class GetTriggersCountOfFlowResponse200:
|
|
|
32
33
|
webhook_count: Union[Unset, float] = UNSET
|
|
33
34
|
email_count: Union[Unset, float] = UNSET
|
|
34
35
|
websocket_count: Union[Unset, float] = UNSET
|
|
36
|
+
kafka_count: Union[Unset, float] = UNSET
|
|
35
37
|
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
|
|
36
38
|
|
|
37
39
|
def to_dict(self) -> Dict[str, Any]:
|
|
@@ -44,6 +46,7 @@ class GetTriggersCountOfFlowResponse200:
|
|
|
44
46
|
webhook_count = self.webhook_count
|
|
45
47
|
email_count = self.email_count
|
|
46
48
|
websocket_count = self.websocket_count
|
|
49
|
+
kafka_count = self.kafka_count
|
|
47
50
|
|
|
48
51
|
field_dict: Dict[str, Any] = {}
|
|
49
52
|
field_dict.update(self.additional_properties)
|
|
@@ -60,6 +63,8 @@ class GetTriggersCountOfFlowResponse200:
|
|
|
60
63
|
field_dict["email_count"] = email_count
|
|
61
64
|
if websocket_count is not UNSET:
|
|
62
65
|
field_dict["websocket_count"] = websocket_count
|
|
66
|
+
if kafka_count is not UNSET:
|
|
67
|
+
field_dict["kafka_count"] = kafka_count
|
|
63
68
|
|
|
64
69
|
return field_dict
|
|
65
70
|
|
|
@@ -87,6 +92,8 @@ class GetTriggersCountOfFlowResponse200:
|
|
|
87
92
|
|
|
88
93
|
websocket_count = d.pop("websocket_count", UNSET)
|
|
89
94
|
|
|
95
|
+
kafka_count = d.pop("kafka_count", UNSET)
|
|
96
|
+
|
|
90
97
|
get_triggers_count_of_flow_response_200 = cls(
|
|
91
98
|
primary_schedule=primary_schedule,
|
|
92
99
|
schedule_count=schedule_count,
|
|
@@ -94,6 +101,7 @@ class GetTriggersCountOfFlowResponse200:
|
|
|
94
101
|
webhook_count=webhook_count,
|
|
95
102
|
email_count=email_count,
|
|
96
103
|
websocket_count=websocket_count,
|
|
104
|
+
kafka_count=kafka_count,
|
|
97
105
|
)
|
|
98
106
|
|
|
99
107
|
get_triggers_count_of_flow_response_200.additional_properties = d
|
|
@@ -24,6 +24,7 @@ class GetTriggersCountOfScriptResponse200:
|
|
|
24
24
|
webhook_count (Union[Unset, float]):
|
|
25
25
|
email_count (Union[Unset, float]):
|
|
26
26
|
websocket_count (Union[Unset, float]):
|
|
27
|
+
kafka_count (Union[Unset, float]):
|
|
27
28
|
"""
|
|
28
29
|
|
|
29
30
|
primary_schedule: Union[Unset, "GetTriggersCountOfScriptResponse200PrimarySchedule"] = UNSET
|
|
@@ -32,6 +33,7 @@ class GetTriggersCountOfScriptResponse200:
|
|
|
32
33
|
webhook_count: Union[Unset, float] = UNSET
|
|
33
34
|
email_count: Union[Unset, float] = UNSET
|
|
34
35
|
websocket_count: Union[Unset, float] = UNSET
|
|
36
|
+
kafka_count: Union[Unset, float] = UNSET
|
|
35
37
|
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
|
|
36
38
|
|
|
37
39
|
def to_dict(self) -> Dict[str, Any]:
|
|
@@ -44,6 +46,7 @@ class GetTriggersCountOfScriptResponse200:
|
|
|
44
46
|
webhook_count = self.webhook_count
|
|
45
47
|
email_count = self.email_count
|
|
46
48
|
websocket_count = self.websocket_count
|
|
49
|
+
kafka_count = self.kafka_count
|
|
47
50
|
|
|
48
51
|
field_dict: Dict[str, Any] = {}
|
|
49
52
|
field_dict.update(self.additional_properties)
|
|
@@ -60,6 +63,8 @@ class GetTriggersCountOfScriptResponse200:
|
|
|
60
63
|
field_dict["email_count"] = email_count
|
|
61
64
|
if websocket_count is not UNSET:
|
|
62
65
|
field_dict["websocket_count"] = websocket_count
|
|
66
|
+
if kafka_count is not UNSET:
|
|
67
|
+
field_dict["kafka_count"] = kafka_count
|
|
63
68
|
|
|
64
69
|
return field_dict
|
|
65
70
|
|
|
@@ -87,6 +92,8 @@ class GetTriggersCountOfScriptResponse200:
|
|
|
87
92
|
|
|
88
93
|
websocket_count = d.pop("websocket_count", UNSET)
|
|
89
94
|
|
|
95
|
+
kafka_count = d.pop("kafka_count", UNSET)
|
|
96
|
+
|
|
90
97
|
get_triggers_count_of_script_response_200 = cls(
|
|
91
98
|
primary_schedule=primary_schedule,
|
|
92
99
|
schedule_count=schedule_count,
|
|
@@ -94,6 +101,7 @@ class GetTriggersCountOfScriptResponse200:
|
|
|
94
101
|
webhook_count=webhook_count,
|
|
95
102
|
email_count=email_count,
|
|
96
103
|
websocket_count=websocket_count,
|
|
104
|
+
kafka_count=kafka_count,
|
|
97
105
|
)
|
|
98
106
|
|
|
99
107
|
get_triggers_count_of_script_response_200.additional_properties = d
|
|
@@ -12,15 +12,18 @@ class GetUsedTriggersResponse200:
|
|
|
12
12
|
Attributes:
|
|
13
13
|
http_routes_used (bool):
|
|
14
14
|
websocket_used (bool):
|
|
15
|
+
kafka_used (bool):
|
|
15
16
|
"""
|
|
16
17
|
|
|
17
18
|
http_routes_used: bool
|
|
18
19
|
websocket_used: bool
|
|
20
|
+
kafka_used: bool
|
|
19
21
|
additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)
|
|
20
22
|
|
|
21
23
|
def to_dict(self) -> Dict[str, Any]:
|
|
22
24
|
http_routes_used = self.http_routes_used
|
|
23
25
|
websocket_used = self.websocket_used
|
|
26
|
+
kafka_used = self.kafka_used
|
|
24
27
|
|
|
25
28
|
field_dict: Dict[str, Any] = {}
|
|
26
29
|
field_dict.update(self.additional_properties)
|
|
@@ -28,6 +31,7 @@ class GetUsedTriggersResponse200:
|
|
|
28
31
|
{
|
|
29
32
|
"http_routes_used": http_routes_used,
|
|
30
33
|
"websocket_used": websocket_used,
|
|
34
|
+
"kafka_used": kafka_used,
|
|
31
35
|
}
|
|
32
36
|
)
|
|
33
37
|
|
|
@@ -40,9 +44,12 @@ class GetUsedTriggersResponse200:
|
|
|
40
44
|
|
|
41
45
|
websocket_used = d.pop("websocket_used")
|
|
42
46
|
|
|
47
|
+
kafka_used = d.pop("kafka_used")
|
|
48
|
+
|
|
43
49
|
get_used_triggers_response_200 = cls(
|
|
44
50
|
http_routes_used=http_routes_used,
|
|
45
51
|
websocket_used=websocket_used,
|
|
52
|
+
kafka_used=kafka_used,
|
|
46
53
|
)
|
|
47
54
|
|
|
48
55
|
get_used_triggers_response_200.additional_properties = d
|
|
@@ -0,0 +1,180 @@
|
|
|
1
|
+
import datetime
|
|
2
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Type, TypeVar, Union, cast
|
|
3
|
+
|
|
4
|
+
from attrs import define as _attrs_define
|
|
5
|
+
from attrs import field as _attrs_field
|
|
6
|
+
from dateutil.parser import isoparse
|
|
7
|
+
|
|
8
|
+
from ..types import UNSET, Unset
|
|
9
|
+
|
|
10
|
+
if TYPE_CHECKING:
|
|
11
|
+
from ..models.kafka_trigger_extra_perms import KafkaTriggerExtraPerms
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
T = TypeVar("T", bound="KafkaTrigger")
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@_attrs_define
class KafkaTrigger:
    """Model of a Kafka trigger as stored and returned by the API.

    Attributes:
        path (str):
        edited_by (str):
        edited_at (datetime.datetime):
        script_path (str):
        kafka_resource_path (str):
        group_id (str):
        topics (List[str]):
        is_flow (bool):
        extra_perms (KafkaTriggerExtraPerms):
        email (str):
        workspace_id (str):
        enabled (bool):
        server_id (Union[Unset, str]):
        last_server_ping (Union[Unset, datetime.datetime]):
        error (Union[Unset, str]):
    """

    path: str
    edited_by: str
    edited_at: datetime.datetime
    script_path: str
    kafka_resource_path: str
    group_id: str
    topics: List[str]
    is_flow: bool
    extra_perms: "KafkaTriggerExtraPerms"
    email: str
    workspace_id: str
    enabled: bool
    server_id: Union[Unset, str] = UNSET
    last_server_ping: Union[Unset, datetime.datetime] = UNSET
    error: Union[Unset, str] = UNSET
    additional_properties: Dict[str, Any] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to a plain dict; datetimes become ISO-8601 strings and
        UNSET optional fields are omitted entirely."""
        serialized: Dict[str, Any] = dict(self.additional_properties)
        serialized.update(
            {
                "path": self.path,
                "edited_by": self.edited_by,
                "edited_at": self.edited_at.isoformat(),
                "script_path": self.script_path,
                "kafka_resource_path": self.kafka_resource_path,
                "group_id": self.group_id,
                "topics": self.topics,
                "is_flow": self.is_flow,
                "extra_perms": self.extra_perms.to_dict(),
                "email": self.email,
                "workspace_id": self.workspace_id,
                "enabled": self.enabled,
            }
        )
        # Optional fields: only emit the ones that were actually set.
        if self.server_id is not UNSET:
            serialized["server_id"] = self.server_id
        if not isinstance(self.last_server_ping, Unset):
            serialized["last_server_ping"] = self.last_server_ping.isoformat()
        if self.error is not UNSET:
            serialized["error"] = self.error

        return serialized

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Build an instance from a decoded JSON dict.

        Keys consumed here are removed from a working copy of *src_dict*;
        whatever remains is kept as ``additional_properties``.
        """
        from ..models.kafka_trigger_extra_perms import KafkaTriggerExtraPerms

        data = src_dict.copy()

        raw_ping = data.pop("last_server_ping", UNSET)
        last_server_ping: Union[Unset, datetime.datetime]
        last_server_ping = UNSET if isinstance(raw_ping, Unset) else isoparse(raw_ping)

        instance = cls(
            path=data.pop("path"),
            edited_by=data.pop("edited_by"),
            edited_at=isoparse(data.pop("edited_at")),
            script_path=data.pop("script_path"),
            kafka_resource_path=data.pop("kafka_resource_path"),
            group_id=data.pop("group_id"),
            topics=cast(List[str], data.pop("topics")),
            is_flow=data.pop("is_flow"),
            extra_perms=KafkaTriggerExtraPerms.from_dict(data.pop("extra_perms")),
            email=data.pop("email"),
            workspace_id=data.pop("workspace_id"),
            enabled=data.pop("enabled"),
            server_id=data.pop("server_id", UNSET),
            last_server_ping=last_server_ping,
            error=data.pop("error", UNSET),
        )

        instance.additional_properties = data
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of the extra (unmodeled) properties carried by this object."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> Any:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: Any) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
from typing import Any, Dict, List, Type, TypeVar
|
|
2
|
+
|
|
3
|
+
from attrs import define as _attrs_define
|
|
4
|
+
from attrs import field as _attrs_field
|
|
5
|
+
|
|
6
|
+
T = TypeVar("T", bound="KafkaTriggerExtraPerms")
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
@_attrs_define
class KafkaTriggerExtraPerms:
    """Free-form mapping of string keys to boolean flags (the schema declares
    no fixed properties, so everything lives in ``additional_properties``)."""

    additional_properties: Dict[str, bool] = _attrs_field(init=False, factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy of the stored properties."""
        return dict(self.additional_properties)

    @classmethod
    def from_dict(cls: Type[T], src_dict: Dict[str, Any]) -> T:
        """Create an instance holding a copy of *src_dict*'s entries."""
        instance = cls()
        instance.additional_properties = src_dict.copy()
        return instance

    @property
    def additional_keys(self) -> List[str]:
        """Names of all stored properties."""
        return list(self.additional_properties)

    def __getitem__(self, key: str) -> bool:
        return self.additional_properties[key]

    def __setitem__(self, key: str, value: bool) -> None:
        self.additional_properties[key] = value

    def __delitem__(self, key: str) -> None:
        del self.additional_properties[key]

    def __contains__(self, key: str) -> bool:
        return key in self.additional_properties
|