meilisearch-python-sdk 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -0,0 +1,358 @@
+ from __future__ import annotations
+
+ import asyncio
+ import time
+ from datetime import datetime
+ from typing import TYPE_CHECKING
+ from urllib.parse import urlencode
+
+ from httpx import AsyncClient as HttpxAsyncClient
+ from httpx import Client as HttpxClient
+
+ from meilisearch_python_sdk._http_requests import AsyncHttpRequests, HttpRequests
+ from meilisearch_python_sdk.errors import MeilisearchTaskFailedError, MeilisearchTimeoutError
+ from meilisearch_python_sdk.models.task import TaskInfo, TaskResult, TaskStatus
+
+ if TYPE_CHECKING:
+     from meilisearch_python_sdk._client import AsyncClient, Client  # pragma: no cover
+
+
+ async def async_cancel_tasks(
+     client: HttpxAsyncClient | AsyncClient,
+     *,
+     uids: list[str] | None = None,
+     index_uids: list[str] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> TaskInfo:
+     """Cancel a list of enqueued or processing tasks.
+
+     Defaults to cancelling all tasks.
+
+     Args:
+
+         client: An httpx AsyncClient or meilisearch_python_sdk AsyncClient instance.
+         uids: A list of task UIDs to cancel.
+         index_uids: A list of index UIDs for which to cancel tasks.
+         statuses: A list of statuses to cancel.
+         types: A list of types to cancel.
+         before_enqueued_at: Cancel tasks that were enqueued before the specified date and time.
+         after_enqueued_at: Cancel tasks that were enqueued after the specified date and time.
+         before_started_at: Cancel tasks that were started before the specified date and time.
+         after_finished_at: Cancel tasks that finished after the specified date and time.
+
+     Returns:
+
+         The details of the cancelation task.
+
+     Raises:
+
+         MeilisearchCommunicationError: If there was an error communicating with the server.
+         MeilisearchApiError: If the Meilisearch API returned an error.
+         MeilisearchTimeoutError: If the connection times out.
+
+     Examples:
+
+         >>> from meilisearch_python_sdk import AsyncClient
+         >>> from meilisearch_python_sdk.task import async_cancel_tasks
+         >>>
+         >>> async with AsyncClient("http://localhost:7700", "masterKey") as client:
+         >>>     await async_cancel_tasks(client, uids=[1, 2])
+     """
+     parameters = _process_params(
+         uids,
+         index_uids,
+         statuses,
+         types,
+         before_enqueued_at,
+         after_enqueued_at,
+         before_started_at,
+         after_finished_at,
+     )
+
+     if not parameters:
+         # Cancel all tasks if no parameters are provided.
+         parameters["statuses"] = "enqueued,processing"
+
+     url = f"tasks/cancel?{urlencode(parameters)}"
+     client_ = _get_async_client(client)
+     response = await client_.post(url)
+
+     return TaskInfo(**response.json())
+
+
+ async def async_delete_tasks(
+     client: HttpxAsyncClient | AsyncClient,
+     *,
+     uids: list[str] | None = None,
+     index_uids: list[str] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> TaskInfo:
+     parameters = _process_params(
+         uids,
+         index_uids,
+         statuses,
+         types,
+         before_enqueued_at,
+         after_enqueued_at,
+         before_started_at,
+         after_finished_at,
+     )
+
+     if not parameters:
+         # Delete all tasks if no parameters are provided.
+         parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"
+
+     url = f"tasks?{urlencode(parameters)}"
+     client_ = _get_async_client(client)
+     response = await client_.delete(url)
+
+     return TaskInfo(**response.json())
+
+
+ async def async_get_task(client: HttpxAsyncClient | AsyncClient, task_id: int) -> TaskResult:
+     client_ = _get_async_client(client)
+     response = await client_.get(f"tasks/{task_id}")
+
+     return TaskResult(**response.json())
+
+
+ async def async_get_tasks(
+     client: HttpxAsyncClient | AsyncClient,
+     *,
+     index_ids: list[str] | None = None,
+     types: str | list[str] | None = None,
+ ) -> TaskStatus:
+     url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
+     if types:
+         formatted_types = ",".join(types) if isinstance(types, list) else types
+         url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
+     client_ = _get_async_client(client)
+     response = await client_.get(url)
+
+     return TaskStatus(**response.json())
+
+
+ async def async_wait_for_task(
+     client: HttpxAsyncClient | AsyncClient,
+     task_id: int,
+     *,
+     timeout_in_ms: int | None = 5000,
+     interval_in_ms: int = 50,
+     raise_for_status: bool = False,
+ ) -> TaskResult:
+     client_ = _get_async_client(client)
+     url = f"tasks/{task_id}"
+     http_requests = AsyncHttpRequests(client_)
+     start_time = datetime.now()
+     elapsed_time = 0.0
+
+     if timeout_in_ms:
+         while elapsed_time < timeout_in_ms:
+             response = await http_requests.get(url)
+             status = TaskResult(**response.json())
+             if status.status in ("succeeded", "failed"):
+                 if raise_for_status and status.status == "failed":
+                     raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                 return status
+             await asyncio.sleep(interval_in_ms / 1000)
+             time_delta = datetime.now() - start_time
+             elapsed_time = time_delta.total_seconds() * 1000
+         raise MeilisearchTimeoutError(
+             f"timeout of {timeout_in_ms}ms exceeded while waiting for task {task_id} to resolve."
+         )
+     else:
+         while True:
+             response = await http_requests.get(url)
+             status = TaskResult(**response.json())
+             if status.status in ("succeeded", "failed"):
+                 if raise_for_status and status.status == "failed":
+                     raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                 return status
+             await asyncio.sleep(interval_in_ms / 1000)
+
+
+ def cancel_tasks(
+     client: HttpxClient | Client,
+     *,
+     uids: list[str] | None = None,
+     index_uids: list[str] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> TaskInfo:
+     parameters = _process_params(
+         uids,
+         index_uids,
+         statuses,
+         types,
+         before_enqueued_at,
+         after_enqueued_at,
+         before_started_at,
+         after_finished_at,
+     )
+
+     if not parameters:
+         # Cancel all tasks if no parameters are provided.
+         parameters["statuses"] = "enqueued,processing"
+
+     url = f"tasks/cancel?{urlencode(parameters)}"
+     client_ = _get_client(client)
+     response = client_.post(url)
+
+     return TaskInfo(**response.json())
+
+
+ def delete_tasks(
+     client: HttpxClient | Client,
+     *,
+     uids: list[str] | None = None,
+     index_uids: list[str] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> TaskInfo:
+     parameters = _process_params(
+         uids,
+         index_uids,
+         statuses,
+         types,
+         before_enqueued_at,
+         after_enqueued_at,
+         before_started_at,
+         after_finished_at,
+     )
+
+     if not parameters:
+         # Delete all tasks if no parameters are provided.
+         parameters["statuses"] = "canceled,enqueued,failed,processing,succeeded"
+
+     url = f"tasks?{urlencode(parameters)}"
+     client_ = _get_client(client)
+     response = client_.delete(url)
+
+     return TaskInfo(**response.json())
+
+
+ def get_task(client: HttpxClient | Client, task_id: int) -> TaskResult:
+     client_ = _get_client(client)
+     response = client_.get(f"tasks/{task_id}")
+
+     return TaskResult(**response.json())
+
+
+ def get_tasks(
+     client: HttpxClient | Client,
+     *,
+     index_ids: list[str] | None = None,
+     types: str | list[str] | None = None,
+ ) -> TaskStatus:
+     url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
+     if types:
+         formatted_types = ",".join(types) if isinstance(types, list) else types
+         url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
+     client_ = _get_client(client)
+     response = client_.get(url)
+
+     return TaskStatus(**response.json())
+
+
+ def wait_for_task(
+     client: HttpxClient | Client,
+     task_id: int,
+     *,
+     timeout_in_ms: int | None = 5000,
+     interval_in_ms: int = 50,
+     raise_for_status: bool = False,
+ ) -> TaskResult:
+     client_ = _get_client(client)
+     url = f"tasks/{task_id}"
+     http_requests = HttpRequests(client_)
+     start_time = datetime.now()
+     elapsed_time = 0.0
+
+     if timeout_in_ms:
+         while elapsed_time < timeout_in_ms:
+             response = http_requests.get(url)
+             status = TaskResult(**response.json())
+             if status.status in ("succeeded", "failed"):
+                 if raise_for_status and status.status == "failed":
+                     raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                 return status
+             time.sleep(interval_in_ms / 1000)
+             time_delta = datetime.now() - start_time
+             elapsed_time = time_delta.total_seconds() * 1000
+         raise MeilisearchTimeoutError(
+             f"timeout of {timeout_in_ms}ms exceeded while waiting for task {task_id} to resolve."
+         )
+     else:
+         while True:
+             response = http_requests.get(url)
+             status = TaskResult(**response.json())
+             if status.status in ("succeeded", "failed"):
+                 if raise_for_status and status.status == "failed":
+                     raise MeilisearchTaskFailedError(f"Task {task_id} failed")
+                 return status
+             time.sleep(interval_in_ms / 1000)
+
+
+ def _get_async_client(client: AsyncClient | HttpxAsyncClient) -> HttpxAsyncClient:
+     if isinstance(client, HttpxAsyncClient):
+         return client
+
+     return client.http_client
+
+
+ def _get_client(
+     client: Client | HttpxClient,
+ ) -> HttpxClient:
+     if isinstance(client, HttpxClient):
+         return client
+
+     return client.http_client
+
+
+ def _process_params(
+     uids: list[str] | None = None,
+     index_uids: list[str] | None = None,
+     statuses: list[str] | None = None,
+     types: list[str] | None = None,
+     before_enqueued_at: datetime | None = None,
+     after_enqueued_at: datetime | None = None,
+     before_started_at: datetime | None = None,
+     after_finished_at: datetime | None = None,
+ ) -> dict[str, str]:
+     parameters: dict[str, str] = {}
+     if uids:
+         parameters["uids"] = ",".join([str(x) for x in uids])
+     if index_uids:
+         parameters["indexUids"] = ",".join([str(x) for x in index_uids])
+     if statuses:
+         parameters["statuses"] = ",".join(statuses)
+     if types:
+         parameters["types"] = ",".join(types)
+     if before_enqueued_at:
+         parameters["beforeEnqueuedAt"] = f"{before_enqueued_at.isoformat()}Z"
+     if after_enqueued_at:
+         parameters["afterEnqueuedAt"] = f"{after_enqueued_at.isoformat()}Z"
+     if before_started_at:
+         parameters["beforeStartedAt"] = f"{before_started_at.isoformat()}Z"
+     if after_finished_at:
+         parameters["afterFinishedAt"] = f"{after_finished_at.isoformat()}Z"
+
+     return parameters
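For orientation, here is a minimal usage sketch of the synchronous helpers in this module, assuming a Meilisearch instance at http://localhost:7700 with the placeholder key masterKey, and assuming the returned TaskInfo exposes the task UID as task_uid:

```python
from datetime import datetime, timedelta

from meilisearch_python_sdk import Client
from meilisearch_python_sdk.errors import (
    MeilisearchTaskFailedError,
    MeilisearchTimeoutError,
)
from meilisearch_python_sdk.task import cancel_tasks, wait_for_task

client = Client("http://localhost:7700", "masterKey")  # placeholder URL and key

# Cancel document-addition tasks for one index enqueued in the last hour.
# _process_params serializes these keyword arguments into the query string,
# e.g. indexUids=movies&types=documentAdditionOrUpdate&afterEnqueuedAt=...Z
task_info = cancel_tasks(
    client,
    index_uids=["movies"],
    types=["documentAdditionOrUpdate"],
    after_enqueued_at=datetime.now() - timedelta(hours=1),
)

# Poll the cancelation task itself, raising instead of returning a failed result.
try:
    result = wait_for_task(
        client, task_info.task_uid, timeout_in_ms=10_000, raise_for_status=True
    )
    print(result.status)  # "succeeded"
except MeilisearchTaskFailedError:
    print("cancelation task failed")
except MeilisearchTimeoutError:
    print("task did not finish within 10 seconds")
```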
@@ -0,0 +1,50 @@
+ from __future__ import annotations
+
+ import sys
+ from datetime import datetime
+ from functools import lru_cache
+
+ import pydantic
+
+
+ @lru_cache(maxsize=1)
+ def is_pydantic_2() -> bool:
+     try:
+         # __version__ was added in Pydantic 2, so if accessing it errors the version is < 2.
+         # Still check the version as a fail-safe in case __version__ gets added to version 1.
+         if int(pydantic.__version__[:1]) >= 2:  # type: ignore[attr-defined]
+             return True
+         else:  # pragma: no cover
+             # Raise an AttributeError to match the AttributeError on __version__ because in
+             # either case we need to get to the same place.
+             raise AttributeError
+     except AttributeError:  # pragma: no cover
+         return False
+
+
+ def iso_to_date_time(iso_date: datetime | str | None) -> datetime | None:
+     """Handle conversion of an ISO string to a datetime.
+
+     The microseconds from Meilisearch are sometimes too long for Python to convert, so this
+     strips off the last digits to shorten the value when that happens.
+     """
+     if not iso_date:
+         return None
+
+     if isinstance(iso_date, datetime):
+         return iso_date
+
+     try:
+         return datetime.strptime(iso_date, "%Y-%m-%dT%H:%M:%S.%fZ")
+     except ValueError:
+         split = iso_date.split(".")
+         if len(split) < 2:
+             raise
+         reduce = len(split[1]) - 6
+         reduced = f"{split[0]}.{split[1][:-reduce]}Z"
+         return datetime.strptime(reduced, "%Y-%m-%dT%H:%M:%S.%fZ")
+
+
+ @lru_cache(maxsize=1)
+ def use_task_groups() -> bool:
+     return sys.version_info >= (3, 11)
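A quick illustration of the fractional-second handling in iso_to_date_time: strptime's %f accepts at most six digits, so a nine-digit (nanosecond) timestamp of the kind Meilisearch sometimes returns fails the first parse and is trimmed before the retry. A sketch:

```python
from meilisearch_python_sdk._utils import iso_to_date_time

# Nine fractional digits: the first strptime raises ValueError, so the helper
# splits on "." and trims the fraction to six digits before reparsing.
dt = iso_to_date_time("2023-05-11T03:12:22.563960100Z")
print(dt)  # 2023-05-11 03:12:22.563960

# Datetimes and empty values pass straight through.
assert iso_to_date_time(None) is None
assert iso_to_date_time(dt) is dt
```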
@@ -0,0 +1 @@
+ VERSION = "2.0.0"
@@ -0,0 +1,71 @@
+ from httpx import Response
+
+
+ class InvalidDocumentError(Exception):
+     """Error for documents that are not in a valid format for Meilisearch."""
+
+     pass
+
+
+ class InvalidRestriction(Exception):
+     pass
+
+
+ class MeilisearchError(Exception):
+     """Generic class for Meilisearch error handling."""
+
+     def __init__(self, message: str) -> None:
+         self.message = message
+         super().__init__(self.message)
+
+     def __str__(self) -> str:
+         return f"MeilisearchError. Error message: {self.message}."
+
+
+ class MeilisearchApiError(MeilisearchError):
+     """Error sent by the Meilisearch API."""
+
+     def __init__(self, error: str, response: Response) -> None:
+         self.status_code = response.status_code
+         self.code = ""
+         self.message = ""
+         self.link = ""
+         self.error_type = ""
+         if response.content:
+             # Parse the response body once instead of re-parsing it for each field.
+             body = response.json()
+             self.message = f"Error message: {body.get('message') or ''}"
+             self.code = f"{body.get('code') or ''}"
+             self.error_type = f"{body.get('type') or ''}"
+             self.link = f"Error documentation: {body.get('link') or ''}"
+         else:
+             self.message = error
+         super().__init__(self.message)
+
+     def __str__(self) -> str:
+         return f"MeilisearchApiError.{self.code} {self.message} {self.error_type} {self.link}"
+
+
+ class MeilisearchCommunicationError(MeilisearchError):
+     """Error when connecting to Meilisearch."""
+
+     def __str__(self) -> str:
+         return f"MeilisearchCommunicationError, {self.message}"
+
+
+ class MeilisearchTaskFailedError(MeilisearchError):
+     """Error when a task is in the failed status."""
+
+     def __str__(self) -> str:
+         return f"MeilisearchTaskFailedError, {self.message}"
+
+
+ class MeilisearchTimeoutError(MeilisearchError):
+     """Error when a Meilisearch operation takes longer than expected."""
+
+     def __str__(self) -> str:
+         return f"MeilisearchTimeoutError, {self.message}"
+
+
+ class PayloadTooLarge(Exception):
+     """Error when the payload is larger than the allowed payload size."""
+
+     pass
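To make the hierarchy concrete, a hypothetical caller might fan out on the classes above; everything except InvalidDocumentError, InvalidRestriction, and PayloadTooLarge derives from MeilisearchError, so that base class works as a catch-all. A sketch:

```python
from meilisearch_python_sdk.errors import (
    MeilisearchApiError,
    MeilisearchCommunicationError,
    MeilisearchError,
    MeilisearchTimeoutError,
)

def run_safely(operation):
    """Run an SDK call and translate errors; illustrative only."""
    try:
        return operation()
    except MeilisearchApiError as exc:
        # Fields populated from the response body in __init__ above.
        print(exc.status_code, exc.code, exc.error_type)
    except MeilisearchCommunicationError as exc:
        print(f"server unreachable: {exc}")
    except MeilisearchTimeoutError as exc:
        print(f"gave up waiting: {exc}")
    except MeilisearchError as exc:
        # Catch-all for the remaining subclasses, e.g. MeilisearchTaskFailedError.
        print(exc.message)
```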
@@ -0,0 +1,159 @@
+ from datetime import datetime
+ from typing import Dict, List, Optional, Union
+
+ import pydantic
+ from camel_converter.pydantic_base import CamelBase
+
+ from meilisearch_python_sdk._utils import is_pydantic_2, iso_to_date_time
+ from meilisearch_python_sdk.models.index import IndexStats
+
+
+ class ClientStats(CamelBase):
+     database_size: int
+     last_update: Optional[datetime] = None
+     indexes: Optional[Dict[str, IndexStats]] = None
+
+     if is_pydantic_2():
+
+         @pydantic.field_validator("last_update", mode="before")  # type: ignore[attr-defined]
+         @classmethod
+         def validate_last_update(cls, v: str) -> Union[datetime, None]:
+             return iso_to_date_time(v)
+
+     else:  # pragma: no cover
+
+         @pydantic.validator("last_update", pre=True)
+         @classmethod
+         def validate_last_update(cls, v: str) -> Union[datetime, None]:
+             return iso_to_date_time(v)
+
+
+ class _KeyBase(CamelBase):
+     uid: str
+     name: Optional[str] = None
+     description: str
+     actions: List[str]
+     indexes: List[str]
+     expires_at: Optional[datetime] = None
+
+     if is_pydantic_2():
+         model_config = pydantic.ConfigDict(ser_json_timedelta="iso8601")  # type: ignore[typeddict-unknown-key]
+
+         @pydantic.field_validator("expires_at", mode="before")  # type: ignore[attr-defined]
+         @classmethod
+         def validate_expires_at(cls, v: str) -> Union[datetime, None]:
+             return iso_to_date_time(v)
+
+     else:  # pragma: no cover
+
+         @pydantic.validator("expires_at", pre=True)
+         @classmethod
+         def validate_expires_at(cls, v: str) -> Union[datetime, None]:
+             return iso_to_date_time(v)
+
+         class Config:
+             json_encoders = {
+                 datetime: lambda v: None
+                 if not v
+                 else (
+                     f"{str(v).split('+')[0].replace(' ', 'T')}Z"
+                     if "+" in str(v)
+                     else f"{str(v).replace(' ', 'T')}Z"
+                 )
+             }
+
+
+ class Key(_KeyBase):
+     key: str
+     created_at: datetime
+     updated_at: Optional[datetime] = None
+
+     if is_pydantic_2():
+
+         @pydantic.field_validator("created_at", mode="before")  # type: ignore[attr-defined]
+         @classmethod
+         def validate_created_at(cls, v: str) -> datetime:
+             converted = iso_to_date_time(v)
+
+             if not converted:  # pragma: no cover
+                 raise ValueError("created_at is required")
+
+             return converted
+
+         @pydantic.field_validator("updated_at", mode="before")  # type: ignore[attr-defined]
+         @classmethod
+         def validate_updated_at(cls, v: str) -> Union[datetime, None]:
+             return iso_to_date_time(v)
+
+     else:  # pragma: no cover
+
+         @pydantic.validator("created_at", pre=True)
+         @classmethod
+         def validate_created_at(cls, v: str) -> datetime:
+             converted = iso_to_date_time(v)
+
+             if not converted:
+                 raise ValueError("created_at is required")
+
+             return converted
+
+         @pydantic.validator("updated_at", pre=True)
+         @classmethod
+         def validate_updated_at(cls, v: str) -> Union[datetime, None]:
+             return iso_to_date_time(v)
+
+
+ class KeyCreate(CamelBase):
+     name: Optional[str] = None
+     description: str
+     actions: List[str]
+     indexes: List[str]
+     expires_at: Optional[datetime] = None
+
+     if is_pydantic_2():
+         model_config = pydantic.ConfigDict(ser_json_timedelta="iso8601")  # type: ignore[typeddict-unknown-key]
+
+     else:  # pragma: no cover
+
+         class Config:
+             json_encoders = {
+                 datetime: lambda v: None
+                 if not v
+                 else (
+                     f"{str(v).split('+')[0].replace(' ', 'T')}Z"
+                     if "+" in str(v)
+                     else f"{str(v).replace(' ', 'T')}Z"
+                 )
+             }
+
+
+ class KeyUpdate(CamelBase):
+     key: str
+     name: Optional[str] = None
+     description: Optional[str] = None
+     actions: Optional[List[str]] = None
+     indexes: Optional[List[str]] = None
+     expires_at: Optional[datetime] = None
+
+     if is_pydantic_2():
+         model_config = pydantic.ConfigDict(ser_json_timedelta="iso8601")  # type: ignore[typeddict-unknown-key]
+
+     else:  # pragma: no cover
+
+         class Config:
+             json_encoders = {
+                 datetime: lambda v: None
+                 if not v
+                 else (
+                     f"{str(v).split('+')[0].replace(' ', 'T')}Z"
+                     if "+" in str(v)
+                     else f"{str(v).replace(' ', 'T')}Z"
+                 )
+             }
+
+
+ class KeySearch(CamelBase):
+     results: List[Key]
+     offset: int
+     limit: int
+     total: int
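A brief sketch of the Key model in use, assuming this models file is importable (the file path is not shown in the diff; meilisearch_python_sdk.models.client is used below as a placeholder). CamelBase maps the camelCase payload Meilisearch returns onto the snake_case fields, and the version-conditional validators route date strings through iso_to_date_time:

```python
from meilisearch_python_sdk.models.client import Key  # assumed import path

payload = {  # illustrative payload in Meilisearch's camelCase wire format
    "uid": "6062abda-a5aa-4414-ac91-ecd7944c0f8d",
    "key": "searchonlykey1234567890",
    "name": "Search key",
    "description": "Search-only key",
    "actions": ["search"],
    "indexes": ["movies"],
    "expiresAt": None,
    "createdAt": "2023-05-11T03:12:22.563960100Z",  # over-precise fraction is handled
    "updatedAt": None,
}

key = Key(**payload)
print(key.created_at)  # datetime.datetime(2023, 5, 11, 3, 12, 22, 563960)
print(key.expires_at)  # None
```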
@@ -0,0 +1,10 @@
+ from typing import Any, Dict, List
+
+ from camel_converter.pydantic_base import CamelBase
+
+
+ class DocumentsInfo(CamelBase):
+     results: List[Dict[str, Any]]
+     offset: int
+     limit: int
+     total: int
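DocumentsInfo mirrors Meilisearch's paginated documents response. A tiny hypothetical sketch (the import path is assumed, and the payload is illustrative):

```python
from meilisearch_python_sdk.models.documents import DocumentsInfo  # assumed path

page = DocumentsInfo(
    **{"results": [{"id": 1, "title": "Carol"}], "offset": 0, "limit": 20, "total": 1}
)
# offset/limit/total allow callers to decide whether another page exists.
has_more = page.offset + page.limit < page.total
```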
@@ -0,0 +1,5 @@
+ from camel_converter.pydantic_base import CamelBase
+
+
+ class Health(CamelBase):
+     status: str