moru-0.1.0-py3-none-any.whl → moru-0.2.0-py3-none-any.whl
- moru/__init__.py +8 -0
- moru/api/__init__.py +4 -0
- moru/api/client/__init__.py +1 -1
- moru/api/client/api/sandboxes/delete_sandboxes_sandbox_id.py +4 -0
- moru/api/client/api/sandboxes/get_sandboxes.py +4 -0
- moru/api/client/api/sandboxes/get_sandboxes_metrics.py +5 -1
- moru/api/client/api/sandboxes/get_sandboxes_sandbox_id.py +4 -0
- moru/api/client/api/sandboxes/get_sandboxes_sandbox_id_logs.py +67 -23
- moru/api/client/api/sandboxes/get_sandboxes_sandbox_id_metrics.py +5 -0
- moru/api/client/api/sandboxes/get_v2_sandbox_runs.py +218 -0
- moru/api/client/api/sandboxes/get_v2_sandboxes.py +5 -2
- moru/api/client/api/sandboxes/post_sandboxes.py +4 -0
- moru/api/client/api/sandboxes/post_sandboxes_sandbox_id_connect.py +6 -0
- moru/api/client/api/sandboxes/post_sandboxes_sandbox_id_pause.py +5 -0
- moru/api/client/api/sandboxes/post_sandboxes_sandbox_id_refreshes.py +3 -0
- moru/api/client/api/sandboxes/post_sandboxes_sandbox_id_resume.py +5 -0
- moru/api/client/api/sandboxes/post_sandboxes_sandbox_id_timeout.py +4 -0
- moru/api/client/api/templates/delete_templates_template_id.py +3 -0
- moru/api/client/api/templates/get_templates.py +3 -0
- moru/api/client/api/templates/get_templates_template_id.py +3 -0
- moru/api/client/api/templates/get_templates_template_id_builds_build_id_logs.py +276 -0
- moru/api/client/api/templates/get_templates_template_id_builds_build_id_status.py +23 -4
- moru/api/client/api/templates/get_templates_template_id_files_hash.py +5 -0
- moru/api/client/api/templates/patch_templates_template_id.py +4 -0
- moru/api/client/api/templates/post_templates.py +4 -0
- moru/api/client/api/templates/post_templates_template_id.py +3 -0
- moru/api/client/api/templates/post_templates_template_id_builds_build_id.py +3 -0
- moru/api/client/api/templates/post_v2_templates.py +4 -0
- moru/api/client/api/templates/post_v3_templates.py +4 -0
- moru/api/client/api/templates/post_v_2_templates_template_id_builds_build_id.py +3 -0
- moru/api/client/models/__init__.py +30 -0
- moru/api/client/models/admin_sandbox_kill_result.py +67 -0
- moru/api/client/models/build_log_entry.py +1 -1
- moru/api/client/models/create_volume_request.py +59 -0
- moru/api/client/models/file_info.py +105 -0
- moru/api/client/models/file_info_type.py +9 -0
- moru/api/client/models/file_list_response.py +84 -0
- moru/api/client/models/logs_direction.py +9 -0
- moru/api/client/models/logs_source.py +9 -0
- moru/api/client/models/machine_info.py +83 -0
- moru/api/client/models/new_sandbox.py +19 -0
- moru/api/client/models/node.py +10 -0
- moru/api/client/models/node_detail.py +10 -0
- moru/api/client/models/sandbox_log_entry.py +9 -9
- moru/api/client/models/sandbox_log_event_type.py +11 -0
- moru/api/client/models/sandbox_run.py +130 -0
- moru/api/client/models/sandbox_run_end_reason.py +11 -0
- moru/api/client/models/sandbox_run_status.py +10 -0
- moru/api/client/models/template_build_logs_response.py +73 -0
- moru/api/client/models/upload_response.py +67 -0
- moru/api/client/models/volume.py +105 -0
- moru/sandbox/mcp.py +835 -6
- moru/sandbox_async/commands/command.py +5 -1
- moru/sandbox_async/filesystem/filesystem.py +5 -1
- moru/sandbox_async/main.py +21 -0
- moru/sandbox_async/sandbox_api.py +17 -11
- moru/sandbox_sync/filesystem/filesystem.py +5 -1
- moru/sandbox_sync/main.py +21 -0
- moru/sandbox_sync/sandbox_api.py +17 -11
- moru/volume/__init__.py +11 -0
- moru/volume/types.py +83 -0
- moru/volume/volume_api.py +330 -0
- moru/volume_async/__init__.py +5 -0
- moru/volume_async/main.py +327 -0
- moru/volume_async/volume_api.py +290 -0
- moru/volume_sync/__init__.py +5 -0
- moru/volume_sync/main.py +325 -0
- moru-0.2.0.dist-info/METADATA +122 -0
- {moru-0.1.0.dist-info → moru-0.2.0.dist-info}/RECORD +71 -46
- {moru-0.1.0.dist-info → moru-0.2.0.dist-info}/WHEEL +1 -1
- moru-0.1.0.dist-info/METADATA +0 -63
- {moru-0.1.0.dist-info/licenses → moru-0.2.0.dist-info}/LICENSE +0 -0
moru/api/client/models/node.py
CHANGED
@@ -7,6 +7,7 @@ from attrs import field as _attrs_field
 from ..models.node_status import NodeStatus
 
 if TYPE_CHECKING:
+    from ..models.machine_info import MachineInfo
     from ..models.node_metrics import NodeMetrics
 
 
@@ -22,6 +23,7 @@ class Node:
         create_fails (int): Number of sandbox create fails
         create_successes (int): Number of sandbox create successes
         id (str): Identifier of the node
+        machine_info (MachineInfo):
         metrics (NodeMetrics): Node metrics
         node_id (str): Identifier of the nomad node
         sandbox_count (int): Number of sandboxes running on the node
@@ -36,6 +38,7 @@ class Node:
     create_fails: int
     create_successes: int
     id: str
+    machine_info: "MachineInfo"
     metrics: "NodeMetrics"
     node_id: str
     sandbox_count: int
@@ -56,6 +59,8 @@ class Node:
 
         id = self.id
 
+        machine_info = self.machine_info.to_dict()
+
         metrics = self.metrics.to_dict()
 
         node_id = self.node_id
@@ -79,6 +84,7 @@ class Node:
                 "createFails": create_fails,
                 "createSuccesses": create_successes,
                 "id": id,
+                "machineInfo": machine_info,
                 "metrics": metrics,
                 "nodeID": node_id,
                 "sandboxCount": sandbox_count,
@@ -93,6 +99,7 @@ class Node:
 
     @classmethod
     def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+        from ..models.machine_info import MachineInfo
         from ..models.node_metrics import NodeMetrics
 
         d = dict(src_dict)
@@ -106,6 +113,8 @@ class Node:
 
         id = d.pop("id")
 
+        machine_info = MachineInfo.from_dict(d.pop("machineInfo"))
+
         metrics = NodeMetrics.from_dict(d.pop("metrics"))
 
         node_id = d.pop("nodeID")
@@ -126,6 +135,7 @@ class Node:
             create_fails=create_fails,
             create_successes=create_successes,
             id=id,
+            machine_info=machine_info,
             metrics=metrics,
             node_id=node_id,
             sandbox_count=sandbox_count,
moru/api/client/models/node_detail.py
CHANGED
@@ -8,6 +8,7 @@ from ..models.node_status import NodeStatus
 
 if TYPE_CHECKING:
     from ..models.listed_sandbox import ListedSandbox
+    from ..models.machine_info import MachineInfo
     from ..models.node_metrics import NodeMetrics
 
 
@@ -24,6 +25,7 @@ class NodeDetail:
         create_fails (int): Number of sandbox create fails
         create_successes (int): Number of sandbox create successes
         id (str): Identifier of the node
+        machine_info (MachineInfo):
         metrics (NodeMetrics): Node metrics
         node_id (str): Identifier of the nomad node
         sandboxes (list['ListedSandbox']): List of sandboxes running on the node
@@ -38,6 +40,7 @@ class NodeDetail:
     create_fails: int
     create_successes: int
    id: str
+    machine_info: "MachineInfo"
     metrics: "NodeMetrics"
     node_id: str
     sandboxes: list["ListedSandbox"]
@@ -59,6 +62,8 @@ class NodeDetail:
 
         id = self.id
 
+        machine_info = self.machine_info.to_dict()
+
         metrics = self.metrics.to_dict()
 
         node_id = self.node_id
@@ -84,6 +89,7 @@ class NodeDetail:
                 "createFails": create_fails,
                 "createSuccesses": create_successes,
                 "id": id,
+                "machineInfo": machine_info,
                 "metrics": metrics,
                 "nodeID": node_id,
                 "sandboxes": sandboxes,
@@ -98,6 +104,7 @@ class NodeDetail:
     @classmethod
     def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
         from ..models.listed_sandbox import ListedSandbox
+        from ..models.machine_info import MachineInfo
         from ..models.node_metrics import NodeMetrics
 
         d = dict(src_dict)
@@ -113,6 +120,8 @@ class NodeDetail:
 
         id = d.pop("id")
 
+        machine_info = MachineInfo.from_dict(d.pop("machineInfo"))
+
         metrics = NodeMetrics.from_dict(d.pop("metrics"))
 
         node_id = d.pop("nodeID")
@@ -137,6 +146,7 @@ class NodeDetail:
             create_fails=create_fails,
             create_successes=create_successes,
             id=id,
+            machine_info=machine_info,
             metrics=metrics,
             node_id=node_id,
             sandboxes=sandboxes,
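In 0.2.0 both `Node.from_dict` and `NodeDetail.from_dict` pop `machineInfo` unconditionally, so the key is now effectively required in API payloads. A minimal sketch of a pre-parse guard; the helper below is illustrative only and not part of the package, and the shape of the `MachineInfo` payload is not shown in this diff:

```python
from collections.abc import Mapping
from typing import Any

from moru.api.client.models.node import Node


def parse_node(payload: Mapping[str, Any]) -> Node:
    # Node.from_dict pops "machineInfo" unconditionally in 0.2.0, so a payload
    # from an older server that lacks the key would raise KeyError inside from_dict.
    if "machineInfo" not in payload:
        raise ValueError("payload is missing 'machineInfo'; is the server running the 0.2.0 API?")
    return Node.from_dict(payload)
```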
moru/api/client/models/sandbox_log_entry.py
CHANGED
@@ -6,7 +6,7 @@ from attrs import define as _attrs_define
 from attrs import field as _attrs_field
 from dateutil.parser import isoparse
 
-from ..models.
+from ..models.sandbox_log_event_type import SandboxLogEventType
 
 if TYPE_CHECKING:
     from ..models.sandbox_log_entry_fields import SandboxLogEntryFields
@@ -19,22 +19,22 @@ T = TypeVar("T", bound="SandboxLogEntry")
 class SandboxLogEntry:
     """
     Attributes:
+        event_type (SandboxLogEventType): Type of sandbox log event
         fields (SandboxLogEntryFields):
-        level (LogLevel): State of the sandbox
         message (str): Log message content
         timestamp (datetime.datetime): Timestamp of the log entry
     """
 
+    event_type: SandboxLogEventType
     fields: "SandboxLogEntryFields"
-    level: LogLevel
     message: str
     timestamp: datetime.datetime
     additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
 
     def to_dict(self) -> dict[str, Any]:
-        fields = self.fields.to_dict()
+        event_type = self.event_type.value
 
-        level = self.level.value
+        fields = self.fields.to_dict()
 
         message = self.message
 
@@ -44,8 +44,8 @@ class SandboxLogEntry:
         field_dict.update(self.additional_properties)
         field_dict.update(
             {
+                "eventType": event_type,
                 "fields": fields,
-                "level": level,
                 "message": message,
                 "timestamp": timestamp,
             }
@@ -58,17 +58,17 @@ class SandboxLogEntry:
         from ..models.sandbox_log_entry_fields import SandboxLogEntryFields
 
         d = dict(src_dict)
-        fields = SandboxLogEntryFields.from_dict(d.pop("fields"))
+        event_type = SandboxLogEventType(d.pop("eventType"))
 
-        level = LogLevel(d.pop("level"))
+        fields = SandboxLogEntryFields.from_dict(d.pop("fields"))
 
         message = d.pop("message")
 
         timestamp = isoparse(d.pop("timestamp"))
 
         sandbox_log_entry = cls(
+            event_type=event_type,
             fields=fields,
-            level=level,
             message=message,
             timestamp=timestamp,
         )
moru/api/client/models/sandbox_run.py
ADDED
@@ -0,0 +1,130 @@
+import datetime
+from collections.abc import Mapping
+from typing import Any, TypeVar, Union
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+from dateutil.parser import isoparse
+
+from ..models.sandbox_run_end_reason import SandboxRunEndReason
+from ..models.sandbox_run_status import SandboxRunStatus
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="SandboxRun")
+
+
+@_attrs_define
+class SandboxRun:
+    """
+    Attributes:
+        created_at (datetime.datetime): When the sandbox was created
+        sandbox_id (str): Unique sandbox identifier
+        status (SandboxRunStatus): Status of a sandbox run
+        template_id (str): Template used to create the sandbox
+        alias (Union[Unset, str]): Template alias
+        end_reason (Union[Unset, SandboxRunEndReason]): Reason the sandbox stopped
+        ended_at (Union[Unset, datetime.datetime]): When the sandbox stopped
+    """
+
+    created_at: datetime.datetime
+    sandbox_id: str
+    status: SandboxRunStatus
+    template_id: str
+    alias: Union[Unset, str] = UNSET
+    end_reason: Union[Unset, SandboxRunEndReason] = UNSET
+    ended_at: Union[Unset, datetime.datetime] = UNSET
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        created_at = self.created_at.isoformat()
+
+        sandbox_id = self.sandbox_id
+
+        status = self.status.value
+
+        template_id = self.template_id
+
+        alias = self.alias
+
+        end_reason: Union[Unset, str] = UNSET
+        if not isinstance(self.end_reason, Unset):
+            end_reason = self.end_reason.value
+
+        ended_at: Union[Unset, str] = UNSET
+        if not isinstance(self.ended_at, Unset):
+            ended_at = self.ended_at.isoformat()
+
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "createdAt": created_at,
+                "sandboxID": sandbox_id,
+                "status": status,
+                "templateID": template_id,
+            }
+        )
+        if alias is not UNSET:
+            field_dict["alias"] = alias
+        if end_reason is not UNSET:
+            field_dict["endReason"] = end_reason
+        if ended_at is not UNSET:
+            field_dict["endedAt"] = ended_at
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+        d = dict(src_dict)
+        created_at = isoparse(d.pop("createdAt"))
+
+        sandbox_id = d.pop("sandboxID")
+
+        status = SandboxRunStatus(d.pop("status"))
+
+        template_id = d.pop("templateID")
+
+        alias = d.pop("alias", UNSET)
+
+        _end_reason = d.pop("endReason", UNSET)
+        end_reason: Union[Unset, SandboxRunEndReason]
+        if isinstance(_end_reason, Unset):
+            end_reason = UNSET
+        else:
+            end_reason = SandboxRunEndReason(_end_reason)
+
+        _ended_at = d.pop("endedAt", UNSET)
+        ended_at: Union[Unset, datetime.datetime]
+        if isinstance(_ended_at, Unset):
+            ended_at = UNSET
+        else:
+            ended_at = isoparse(_ended_at)
+
+        sandbox_run = cls(
+            created_at=created_at,
+            sandbox_id=sandbox_id,
+            status=status,
+            template_id=template_id,
+            alias=alias,
+            end_reason=end_reason,
+            ended_at=ended_at,
+        )
+
+        sandbox_run.additional_properties = d
+        return sandbox_run
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
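As the `to_dict` above shows, `SandboxRun` keeps omitted optional fields as the generated client's `UNSET` sentinel rather than `None`, so callers check with `isinstance(..., Unset)`. A minimal sketch; the `describe` helper is illustrative and not part of the package, and `moru.api.client.types` as the home of `Unset` is an assumption inferred from the `from ..types import UNSET, Unset` line above:

```python
from moru.api.client.models.sandbox_run import SandboxRun
from moru.api.client.types import Unset  # sentinel type used by the generated models


def describe(run: SandboxRun) -> str:
    # Illustrative only: optional fields are UNSET (not None) when the API omitted them.
    if isinstance(run.ended_at, Unset):
        return f"{run.sandbox_id} ({run.template_id}) is still running"
    reason = "unknown" if isinstance(run.end_reason, Unset) else run.end_reason.value
    return f"{run.sandbox_id} ended at {run.ended_at.isoformat()} (reason: {reason})"
```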
moru/api/client/models/template_build_logs_response.py
ADDED
@@ -0,0 +1,73 @@
+from collections.abc import Mapping
+from typing import TYPE_CHECKING, Any, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+if TYPE_CHECKING:
+    from ..models.build_log_entry import BuildLogEntry
+
+
+T = TypeVar("T", bound="TemplateBuildLogsResponse")
+
+
+@_attrs_define
+class TemplateBuildLogsResponse:
+    """
+    Attributes:
+        logs (list['BuildLogEntry']): Build logs structured
+    """
+
+    logs: list["BuildLogEntry"]
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        logs = []
+        for logs_item_data in self.logs:
+            logs_item = logs_item_data.to_dict()
+            logs.append(logs_item)
+
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "logs": logs,
+            }
+        )
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+        from ..models.build_log_entry import BuildLogEntry
+
+        d = dict(src_dict)
+        logs = []
+        _logs = d.pop("logs")
+        for logs_item_data in _logs:
+            logs_item = BuildLogEntry.from_dict(logs_item_data)
+
+            logs.append(logs_item)
+
+        template_build_logs_response = cls(
+            logs=logs,
+        )
+
+        template_build_logs_response.additional_properties = d
+        return template_build_logs_response
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
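Like the other generated models, this wrapper round-trips through plain dicts and stashes unrecognized keys in `additional_properties`. A minimal sketch using an empty `logs` list so it does not depend on the `BuildLogEntry` shape; the `"nextToken"` key is an arbitrary illustrative extra, not a documented field:

```python
from moru.api.client.models.template_build_logs_response import TemplateBuildLogsResponse

# Keys other than "logs" are preserved as additional properties.
resp = TemplateBuildLogsResponse.from_dict({"logs": [], "nextToken": "abc"})
assert resp.logs == []
assert resp["nextToken"] == "abc"                        # __getitem__ reads additional_properties
assert resp.to_dict() == {"nextToken": "abc", "logs": []}
```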
moru/api/client/models/upload_response.py
ADDED
@@ -0,0 +1,67 @@
+from collections.abc import Mapping
+from typing import Any, TypeVar
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+
+T = TypeVar("T", bound="UploadResponse")
+
+
+@_attrs_define
+class UploadResponse:
+    """
+    Attributes:
+        path (str): Path of uploaded file
+        size (int): Size of uploaded file in bytes
+    """
+
+    path: str
+    size: int
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        path = self.path
+
+        size = self.size
+
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "path": path,
+                "size": size,
+            }
+        )
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+        d = dict(src_dict)
+        path = d.pop("path")
+
+        size = d.pop("size")
+
+        upload_response = cls(
+            path=path,
+            size=size,
+        )
+
+        upload_response.additional_properties = d
+        return upload_response
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
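UploadResponse is a plain two-field model, so a round-trip sketch follows directly from the code above; the payload values below are made up for illustration:

```python
from moru.api.client.models.upload_response import UploadResponse

payload = {"path": "/data/report.csv", "size": 2048}  # illustrative values only
resp = UploadResponse.from_dict(payload)
assert (resp.path, resp.size) == ("/data/report.csv", 2048)
assert resp.to_dict() == payload  # no optional fields, so the round trip is exact
```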
moru/api/client/models/volume.py
ADDED
@@ -0,0 +1,105 @@
+import datetime
+from collections.abc import Mapping
+from typing import Any, TypeVar, Union
+
+from attrs import define as _attrs_define
+from attrs import field as _attrs_field
+from dateutil.parser import isoparse
+
+from ..types import UNSET, Unset
+
+T = TypeVar("T", bound="Volume")
+
+
+@_attrs_define
+class Volume:
+    """
+    Attributes:
+        created_at (datetime.datetime): When the volume was created
+        name (str): Volume name
+        updated_at (datetime.datetime): When the volume was last updated
+        volume_id (str): Unique volume identifier
+        total_file_count (Union[Unset, int]): Total number of files in volume
+        total_size_bytes (Union[Unset, int]): Total size of files in volume (bytes)
+    """
+
+    created_at: datetime.datetime
+    name: str
+    updated_at: datetime.datetime
+    volume_id: str
+    total_file_count: Union[Unset, int] = UNSET
+    total_size_bytes: Union[Unset, int] = UNSET
+    additional_properties: dict[str, Any] = _attrs_field(init=False, factory=dict)
+
+    def to_dict(self) -> dict[str, Any]:
+        created_at = self.created_at.isoformat()
+
+        name = self.name
+
+        updated_at = self.updated_at.isoformat()
+
+        volume_id = self.volume_id
+
+        total_file_count = self.total_file_count
+
+        total_size_bytes = self.total_size_bytes
+
+        field_dict: dict[str, Any] = {}
+        field_dict.update(self.additional_properties)
+        field_dict.update(
+            {
+                "createdAt": created_at,
+                "name": name,
+                "updatedAt": updated_at,
+                "volumeID": volume_id,
+            }
+        )
+        if total_file_count is not UNSET:
+            field_dict["totalFileCount"] = total_file_count
+        if total_size_bytes is not UNSET:
+            field_dict["totalSizeBytes"] = total_size_bytes
+
+        return field_dict
+
+    @classmethod
+    def from_dict(cls: type[T], src_dict: Mapping[str, Any]) -> T:
+        d = dict(src_dict)
+        created_at = isoparse(d.pop("createdAt"))
+
+        name = d.pop("name")
+
+        updated_at = isoparse(d.pop("updatedAt"))
+
+        volume_id = d.pop("volumeID")
+
+        total_file_count = d.pop("totalFileCount", UNSET)
+
+        total_size_bytes = d.pop("totalSizeBytes", UNSET)
+
+        volume = cls(
+            created_at=created_at,
+            name=name,
+            updated_at=updated_at,
+            volume_id=volume_id,
+            total_file_count=total_file_count,
+            total_size_bytes=total_size_bytes,
+        )
+
+        volume.additional_properties = d
+        return volume
+
+    @property
+    def additional_keys(self) -> list[str]:
+        return list(self.additional_properties.keys())
+
+    def __getitem__(self, key: str) -> Any:
+        return self.additional_properties[key]
+
+    def __setitem__(self, key: str, value: Any) -> None:
+        self.additional_properties[key] = value
+
+    def __delitem__(self, key: str) -> None:
+        del self.additional_properties[key]
+
+    def __contains__(self, key: str) -> bool:
+        return key in self.additional_properties
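Volume has no enum-typed fields, so a full deserialization sketch follows from this diff alone: the two count/size fields stay `UNSET` when the server omits them and are skipped on re-serialization. The timestamps, name, and ID below are made-up placeholders, and `moru.api.client.types` as the home of `UNSET` is inferred from the `from ..types import UNSET, Unset` line above:

```python
from moru.api.client.models.volume import Volume
from moru.api.client.types import UNSET

payload = {
    "createdAt": "2024-01-01T00:00:00Z",   # placeholder values, not real data
    "updatedAt": "2024-01-02T12:30:00Z",
    "name": "training-data",
    "volumeID": "vol_123",
    "totalFileCount": 42,
}
vol = Volume.from_dict(payload)
assert vol.total_file_count == 42
assert vol.total_size_bytes is UNSET          # omitted by the server, stays UNSET
assert "totalSizeBytes" not in vol.to_dict()  # and is skipped on re-serialization
```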