matrice-inference 0.1.0__py3-none-manylinux_2_17_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of matrice-inference has been flagged as possibly problematic.
- matrice_inference/deploy/aggregator/aggregator.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/aggregator.pyi +55 -0
- matrice_inference/deploy/aggregator/analytics.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/analytics.pyi +63 -0
- matrice_inference/deploy/aggregator/ingestor.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/ingestor.pyi +79 -0
- matrice_inference/deploy/aggregator/pipeline.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/pipeline.pyi +139 -0
- matrice_inference/deploy/aggregator/publisher.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/publisher.pyi +59 -0
- matrice_inference/deploy/aggregator/synchronizer.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/synchronizer.pyi +58 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming.pyi +145 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming_utils.pyi +126 -0
- matrice_inference/deploy/client/client.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/client.pyi +337 -0
- matrice_inference/deploy/client/client_stream_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/client_stream_utils.pyi +83 -0
- matrice_inference/deploy/client/client_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/client_utils.pyi +77 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway.pyi +120 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway_utils.pyi +442 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_results_handler.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_results_handler.pyi +19 -0
- matrice_inference/deploy/optimize/cache_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/cache_manager.pyi +15 -0
- matrice_inference/deploy/optimize/frame_comparators.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/frame_comparators.pyi +203 -0
- matrice_inference/deploy/optimize/frame_difference.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/frame_difference.pyi +165 -0
- matrice_inference/deploy/optimize/transmission.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/transmission.pyi +97 -0
- matrice_inference/deploy/server/inference/batch_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/batch_manager.pyi +50 -0
- matrice_inference/deploy/server/inference/inference_interface.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/inference_interface.pyi +114 -0
- matrice_inference/deploy/server/inference/model_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/model_manager.pyi +80 -0
- matrice_inference/deploy/server/inference/triton_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/triton_utils.pyi +115 -0
- matrice_inference/deploy/server/proxy/proxy_interface.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/proxy/proxy_interface.pyi +90 -0
- matrice_inference/deploy/server/proxy/proxy_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/proxy/proxy_utils.pyi +113 -0
- matrice_inference/deploy/server/server.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/server.pyi +155 -0
- matrice_inference/deploy/server/stream/inference_worker.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/inference_worker.pyi +56 -0
- matrice_inference/deploy/server/stream/kafka_consumer_worker.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/kafka_consumer_worker.pyi +51 -0
- matrice_inference/deploy/server/stream/kafka_producer_worker.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/kafka_producer_worker.pyi +50 -0
- matrice_inference/deploy/server/stream/stream_debug_logger.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/stream_debug_logger.pyi +47 -0
- matrice_inference/deploy/server/stream/stream_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/stream_manager.pyi +69 -0
- matrice_inference/deploy/server/stream/video_buffer.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/video_buffer.pyi +120 -0
- matrice_inference/deploy/stream/kafka_stream.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/stream/kafka_stream.pyi +444 -0
- matrice_inference/deploy/stream/redis_stream.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/stream/redis_stream.pyi +447 -0
- matrice_inference/deployment/camera_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/camera_manager.pyi +669 -0
- matrice_inference/deployment/deployment.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/deployment.pyi +736 -0
- matrice_inference/deployment/inference_pipeline.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/inference_pipeline.pyi +527 -0
- matrice_inference/deployment/streaming_gateway_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/streaming_gateway_manager.pyi +275 -0
- matrice_inference/py.typed +0 -0
- matrice_inference-0.1.0.dist-info/METADATA +26 -0
- matrice_inference-0.1.0.dist-info/RECORD +80 -0
- matrice_inference-0.1.0.dist-info/WHEEL +5 -0
- matrice_inference-0.1.0.dist-info/licenses/LICENSE.txt +21 -0
- matrice_inference-0.1.0.dist-info/top_level.txt +1 -0
matrice_inference/deployment/deployment.pyi
@@ -0,0 +1,736 @@
"""Auto-generated stub for module: deployment."""
from typing import Any, Dict, List, Optional, Tuple

from camera_manager import CameraManager, CameraGroupConfig, CameraConfig, Camera, CameraGroup
from datetime import datetime, timedelta
from matrice_common.utils import handle_response, get_summary
from streaming_gateway_manager import StreamingGatewayManager, StreamingGatewayConfig, StreamingGateway
import json
import sys
import time

# Classes
class Deployment:
    """
    Class to manage deployment-related operations within a project.

    The `Deployment` class initializes with a given session and deployment details,
    allowing users to access and manage the deployment attributes such as status,
    configuration, and associated project information.

    Parameters
    ----------
    session : object
        The session object containing project and RPC information.
    deployment_id : str, optional
        The ID of the deployment to manage. Default is None.
    deployment_name : str, optional
        The name of the deployment. Default is None.

    Attributes
    ----------
    session : object
        The session object for RPC communication.
    rpc : object
        The RPC interface for backend API communication.
    project_id : str
        The project ID associated with the deployment.
    deployment_id : str
        The unique ID of the deployment.
    deployment_name : str
        Name of the deployment.
    model_id : str
        ID of the model associated with the deployment.
    user_id : str
        User ID of the deployment owner.
    user_name : str
        Username of the deployment owner.
    action_id : str
        ID of the action associated with the deployment.
    auth_keys : list
        List of authorization keys for the deployment.
    runtime_framework : str
        Framework used for the runtime of the model in the deployment.
    model_input : dict
        Input format expected by the model.
    model_type : str
        Type of model deployed (e.g., classification, detection).
    model_output : dict
        Output format of the deployed model.
    deployment_type : str
        Type of deployment (e.g., real-time, batch).
    suggested_classes : list
        Suggested classes for classification models.
    running_instances : list
        List of currently running instances.
    auto_shutdown : bool
        Whether the deployment has auto-shutdown enabled.
    auto_scale : bool
        Whether the deployment is configured for auto-scaling.
    gpu_required : bool
        Whether GPU is required for the deployment.
    status : str
        Current status of the deployment.
    hibernation_threshold : int
        Threshold for auto-hibernation in minutes.
    image_store_confidence_threshold : float
        Confidence threshold for storing images.
    image_store_count_threshold : int
        Count threshold for storing images.
    images_stored_count : int
        Number of images currently stored.
    bucket_alias : str
        Alias for the storage bucket associated with the deployment.
    credential_alias : str
        Alias for credentials used in the deployment.
    created_at : str
        Timestamp when the deployment was created.
    updated_at : str
        Timestamp when the deployment was last updated.
    compute_alias : str
        Alias of the compute resource associated with the deployment.
    is_optimized : bool
        Indicates whether the deployment is optimized.
    status_cards : list
        List of status cards related to the deployment.
    total_deployments : int or None
        Total number of deployments in the project.
    active_deployments : int or None
        Number of active deployments in the project.
    total_running_instances_count : int or None
        Total count of running instances in the project.
    hibernated_deployments : int or None
        Number of hibernated deployments.
    error_deployments : int or None
        Number of deployments with errors.
    camera_manager : CameraManager
        Manager for camera groups and configurations.
    streaming_gateway_manager : StreamingGatewayManager
        Manager for streaming gateways.

    Example
    -------
    >>> session = Session(account_number="account_number")
    >>> deployment = Deployment(session=session, deployment_id="deployment_id", deployment_name="MyDeployment")
    """

    def __init__(self: Any, session: Any, deployment_id: Any = None, deployment_name: Any = None) -> None: ...

    def add_camera_config(self: Any, config: Any) -> Any: ...
    """
    Legacy method - use create_camera instead.
    """

    def add_camera_groups_to_streaming_gateway(self: Any, gateway_id: str, camera_group_ids: List[str]) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Add camera groups to a streaming gateway.

    Args:
        gateway_id: The ID of the streaming gateway
        camera_group_ids: List of camera group IDs to add

    Returns:
        tuple: (result, error, message)
    """

    def add_cameras_to_group(self: Any, group_id: str, camera_configs: List[CameraConfig]) -> Tuple[Optional[List['Camera']], Optional[str], str]: ...
    """
    Add multiple cameras to a camera group in this deployment.

    Args:
        group_id: The ID of the camera group
        camera_configs: List of CameraConfig objects

    Returns:
        tuple: (camera_instances, error, message)
    """

    def create_auth_key(self: Any, expiry_days: Any) -> Any: ...
    """
    Create a new authentication key for the deployment, valid for the specified number of days.
    The `deployment_id` and `project_id` must be set during initialization.

    Parameters
    ----------
    expiry_days : int
        The number of days before the authentication key expires.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with details of the created authentication key,
          including keys such as:
            - `authKey` (str): The newly created authentication key.
            - `expiryDate` (str): Expiration date of the key.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Examples
    --------
    >>> auth_key, err, msg = deployment.create_auth_key(30)
    >>> if err:
    >>>     pprint(err)
    >>> else:
    >>>     pprint(auth_key)
    """

    def create_camera(self: Any, camera_config: Any) -> Tuple[Optional['Camera'], Optional[str], str]: ...
    """
    Create a new camera for this deployment.

    Args:
        camera_config: CameraConfig object containing the camera configuration

    Returns:
        tuple: (camera_instance, error, message)
    """

    def create_camera_group(self: Any, group: Any) -> Tuple[Optional['CameraGroup'], Optional[str], str]: ...
    """
    Create a new camera group for this deployment.

    Args:
        group: CameraGroup object containing the group configuration

    Returns:
        tuple: (camera_group_instance, error, message)
    """

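For orientation, a minimal sketch of the camera-group and camera creation flow exposed above, assuming a valid `Session` (its import path is not part of this stub) and config objects built from `camera_manager.pyi`; the exact `CameraGroup`/`CameraConfig` fields and the `id` attribute used below are assumptions, not taken from this diff:

from pprint import pprint

session = Session(account_number="account_number")      # Session import path not shown in this stub
deployment = Deployment(session=session, deployment_id="deployment_id")

# Create a group, then add cameras to it; every call returns (result, error, message).
group, err, msg = deployment.create_camera_group(CameraGroup(...))   # fields defined in camera_manager.pyi
if err:
    pprint(err)
else:
    cameras, err, msg = deployment.add_cameras_to_group(
        group_id=group.id,                               # attribute name assumed
        camera_configs=[CameraConfig(...), CameraConfig(...)],
    )
    pprint(err or cameras)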
    def create_dataset(self: Any, dataset_name: Any, is_unlabeled: Any, source: Any, source_url: Any, is_public: Any, dataset_description: Any = '', version_description: Any = '') -> Any: ...
    """
    Create a new dataset from a deployment. Only zip files are supported for upload,
    and the deployment ID must be set for this operation.

    Parameters
    ----------
    dataset_name : str
        The name of the new dataset.
    is_unlabeled : bool
        Indicates whether the dataset is unlabeled.
    source : str
        The source of the dataset (e.g., "aws").
    source_url : str
        The URL of the dataset to be created.
    is_public : bool
        Specifies if the dataset is public.
    dataset_description : str, optional
        A description for the dataset. Default is an empty string.
    version_description : str, optional
        A description for this version of the dataset. Default is an empty string.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with details of the dataset creation, structured as:
            - `datasetId` (str): ID of the created dataset.
            - `status` (str): Status of the dataset creation request.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Example
    -------
    >>> from pprint import pprint
    >>> resp, err, msg = deployment.create_dataset(
    ...     dataset_name="New Dataset",
    ...     is_unlabeled=False,
    ...     source="aws",
    ...     source_url="https://example.com/dataset.zip",
    ...     is_public=True,
    ...     dataset_description="Dataset description",
    ...     version_description="Version description"
    ... )
    >>> if err:
    ...     pprint(err)
    >>> else:
    ...     pprint(resp)
    """

    def create_streaming_gateway(self: Any, gateway_config: Any) -> Tuple[Optional['StreamingGateway'], Optional[str], str]: ...
    """
    Create a new streaming gateway for this deployment.

    Args:
        gateway_config: StreamingGatewayConfig object containing the gateway configuration

    Returns:
        tuple: (streaming_gateway, error, message)
    """

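Likewise, a hedged sketch of wiring a streaming gateway to existing camera groups with the methods above; the `StreamingGatewayConfig` fields and the gateway `id` attribute are assumptions based on the signatures, not on this diff:

gateway, err, msg = deployment.create_streaming_gateway(StreamingGatewayConfig(...))   # fields defined in streaming_gateway_manager.pyi
if not err:
    # Attach existing camera groups to the new gateway; IDs below are placeholders.
    result, err, msg = deployment.add_camera_groups_to_streaming_gateway(
        gateway_id=gateway.id,                           # attribute name assumed
        camera_group_ids=["camera_group_id_1", "camera_group_id_2"],
    )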
    def delete(self: Any) -> Any: ...
    """
    Delete the specified deployment.

    This method deletes the deployment identified by `deployment_id` from the backend system.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response confirming the deletion.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Raises
    ------
    SystemExit
        If `deployment_id` is not set.

    Examples
    --------
    >>> delete, err, msg = deployment.delete()
    >>> if err:
    >>>     pprint(err)
    >>> else:
    >>>     pprint(delete)
    """

    def delete_all_cameras(self: Any, confirm: bool = False) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Delete all cameras for this deployment.

    Args:
        confirm: Must be True to confirm bulk deletion

    Returns:
        tuple: (result, error, message)
    """

    def delete_auth_key(self: Any, auth_key: Any) -> Any: ...
    """
    Delete a specified authentication key for the current deployment.
    The `deployment_id` must be set during initialization.

    Parameters
    ----------
    auth_key : str
        The authentication key to be deleted.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response indicating the result of the delete operation.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Raises
    ------
    SystemExit
        If `deployment_id` is not set.

    Examples
    --------
    >>> delete_auth_key, err, msg = deployment.delete_auth_key("abcd1234")
    >>> if err:
    >>>     pprint(err)
    >>> else:
    >>>     pprint(delete_auth_key)
    """

    def delete_camera(self: Any, camera_id: str) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Delete a camera by its ID.

    Args:
        camera_id: The ID of the camera to delete

    Returns:
        tuple: (result, error, message)
    """

    def delete_camera_group(self: Any, group_id: str) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Delete a camera group by its ID.

    Args:
        group_id: The ID of the camera group to delete

    Returns:
        tuple: (result, error, message)
    """

    def delete_streaming_gateway(self: Any, gateway_id: str, force: bool = False) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Delete a streaming gateway by its ID.

    Args:
        gateway_id: The ID of the streaming gateway to delete
        force: Force delete even if active

    Returns:
        tuple: (result, error, message)
    """

    def get_camera_by_id(self: Any, camera_id: str) -> Tuple[Optional['Camera'], Optional[str], str]: ...
    """
    Get a camera by its ID.

    Args:
        camera_id: The ID of the camera to retrieve

    Returns:
        tuple: (camera_instance, error, message)
    """

    def get_camera_configs(self: Any, page: int = 1, limit: int = 10, search: str = None, group_id: str = None) -> Any: ...
    """
    Legacy method - use get_cameras instead.
    """

    def get_camera_group_by_id(self: Any, group_id: str) -> Tuple[Optional['CameraGroup'], Optional[str], str]: ...
    """
    Get a camera group by its ID.

    Args:
        group_id: The ID of the camera group to retrieve

    Returns:
        tuple: (camera_group_instance, error, message)
    """

    def get_camera_groups(self: Any, page: int = 1, limit: int = 10, search: str = None) -> Tuple[Optional[List['CameraGroup']], Optional[str], str]: ...
    """
    Get all camera groups for this deployment.

    Args:
        page: Page number for pagination
        limit: Items per page
        search: Optional search term

    Returns:
        tuple: (camera_group_instances, error, message)
    """

    def get_cameras(self: Any, page: int = 1, limit: int = 10, search: str = None, group_id: str = None) -> Tuple[Optional[List['Camera']], Optional[str], str]: ...
    """
    Get all cameras for this deployment.

    Args:
        page: Page number for pagination
        limit: Items per page
        search: Optional search term
        group_id: Optional filter by camera group ID

    Returns:
        tuple: (camera_instances, error, message)
    """

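A short sketch of paging through cameras and camera groups with the listing methods above; terminating on an empty page is an assumption about the API's behaviour, not something this stub documents:

page = 1
while True:
    cameras, err, msg = deployment.get_cameras(page=page, limit=50, group_id=None)
    if err or not cameras:
        break                                            # assumes an empty list once pages are exhausted
    for camera in cameras:
        ...                                              # process each Camera instance
    page += 1

groups, err, msg = deployment.get_camera_groups(page=1, limit=10, search="dock")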
    def get_deployment_server(self: Any, model_train_id: Any, model_type: Any) -> Any: ...
    """
    Fetch information about the deployment server for a specific model.

    Parameters
    ----------
    model_train_id : str
        The ID of the model training instance.
    model_type : str
        The type of model (e.g., 'trained', 'exported').

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with details of the deployment server.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Examples
    --------
    >>> deployment_server, err, msg = deployment.get_deployment_server("train123", "trained")
    >>> if err:
    >>>     pprint(err)
    >>> else:
    >>>     pprint(deployment_server)
    """

    def get_prediction(self: Any, input_path: Any = None, auth_key: Any = '', input_url: Any = None, extra_params: Any = {}) -> Any: ...
    """
    Fetch model predictions for a given image using a deployment.

    This method sends an image to the deployment for prediction. Either `deployment_id`
    or `deployment_name` must be provided in the instance to locate the deployment.

    Parameters
    ----------
    input_path : str
        The path to the input for prediction.
    auth_key : str
        The authentication key required for authorizing the prediction request.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with the prediction results, structured as:
            - `predictions` (list of dict): Each entry contains:
                - `class` (str): The predicted class label.
                - `confidence` (float): Confidence score of the prediction.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the prediction request.

    Raises
    ------
    ValueError
        If `auth_key` is not provided or if neither `deployment_id` nor `deployment_name` is
        set.

    Examples
    --------
    >>> from pprint import pprint
    >>> result, error, message = deployment.get_prediction(
    ...     input_path="/path/to/input.jpg",
    ...     auth_key="auth123"
    ... )
    >>> if error:
    ...     pprint(error)
    >>> else:
    ...     pprint(result)
    """

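The `get_prediction` signature also accepts `input_url` and `extra_params`, which the docstring above does not describe; a hedged sketch of the URL-based variant, on the assumption that it mirrors the `input_path` behaviour:

result, error, message = deployment.get_prediction(
    input_url="https://example.com/input.jpg",           # remote input instead of a local file (assumed behaviour)
    auth_key="auth123",
    extra_params={},                                     # semantics not documented in this stub
)
if error:
    pprint(error)
else:
    pprint(result)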
    def get_streaming_gateway_by_id(self: Any, gateway_id: str) -> Tuple[Optional['StreamingGateway'], Optional[str], str]: ...
    """
    Get a streaming gateway by its ID.

    Args:
        gateway_id: The ID of the streaming gateway to retrieve

    Returns:
        tuple: (streaming_gateway, error, message)
    """

    def get_streaming_gateways(self: Any, page: int = 1, limit: int = 10, search: str = None) -> Tuple[Optional[List['StreamingGateway']], Optional[str], str]: ...
    """
    Get all streaming gateways for this deployment.

    Args:
        page: Page number for pagination
        limit: Items per page
        search: Optional search term

    Returns:
        tuple: (streaming_gateways, error, message)
    """

    def refresh(self: Any) -> Any: ...
    """
    Refresh the instance by reinstantiating it with the previous values.
    """

    def remove_camera_groups_from_streaming_gateway(self: Any, gateway_id: str, camera_group_ids: List[str]) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Remove camera groups from a streaming gateway.

    Args:
        gateway_id: The ID of the streaming gateway
        camera_group_ids: List of camera group IDs to remove

    Returns:
        tuple: (result, error, message)
    """

    def rename(self: Any, updated_name: Any) -> Any: ...
    """
    Update the deployment name for the current deployment.

    Parameters
    ----------
    updated_name : str
        The new name for the deployment.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with details of the rename operation.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Raises
    ------
    SystemExit
        If `deployment_id` is not set.

    Examples
    --------
    >>> from pprint import pprint
    >>> deployment = Deployment(session, deployment_id="1234")
    >>> rename, err, msg = deployment.rename("NewDeploymentName")
    >>> if err:
    >>>     pprint(err)
    >>> else:
    >>>     pprint(rename)
    """

    def request_count_monitor(self: Any, start_date: Any, end_date: Any, granularity: Any = 'second') -> Any: ...
    """
    Monitor the count of requests within a specified time range and granularity for the current
    deployment.

    Parameters
    ----------
    start_date : str
        The start date of the monitoring period in ISO format (e.g., "YYYY-MM-DDTHH:MM:SSZ").
    end_date : str
        The end date of the monitoring period in ISO format.
    granularity : str, optional
        The time granularity for the request count (e.g., "second", "minute")
        . Default is "second".

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with the request count details, structured as:
            - `counts` (list of dict): Each entry contains:
                - `timestamp` (str): The timestamp of the request count.
                - `count` (int): The number of requests at that timestamp.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Examples
    --------
    >>> start = "2024-01-28T18:30:00.000Z"
    >>> end = "2024-02-29T10:11:27.000Z"
    >>> count_monitor, error, message = deployment.request_count_monitor(start, end)
    >>> if error:
    >>>     pprint(error)
    >>> else:
    >>>     pprint(count_monitor)
    """

    def request_latency_monitor(self: Any, start_date: Any, end_date: Any, granularity: Any = 'second') -> Any: ...
    """
    Monitor the request latency within a specified time range and granularity for the current
    deployment.

    Parameters
    ----------
    start_date : str
        The start date of the monitoring period in ISO format (e.g., "YYYY-MM-DDTHH:MM:SSZ").
    end_date : str
        The end date of the monitoring period in ISO format.
    granularity : str, optional
        The time granularity for latency tracking (e.g., "second", "minute"). Default is
        "second".

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with latency details, structured as:
            - `latencies` (list of dict): Each entry contains:
                - `timestamp` (str): The timestamp of the latency record.
                - `avg_latency` (float): The average latency in seconds for the requests at
                  that timestamp.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Examples
    --------
    >>> from pprint import pprint
    >>> start = "2024-01-28T18:30:00.000Z"
    >>> end = "2024-02-29T10:11:27.000Z"
    >>> result, error, message = deployment.request_latency_monitor(start, end)
    >>> if error:
    >>>     pprint(error)
    >>> else:
    >>>     pprint(result)
    """

    def request_total_monitor(self: Any) -> Any: ...
    """
    Monitor the total number of requests for the current deployment.

    This method checks the total request count for a deployment by its `deployment_id`.
    If `deployment_id` is not set, it attempts to fetch it using `deployment_name`.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with the total request count.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Raises
    ------
    SystemExit
        If both `deployment_id` and `deployment_name` are not set.

    Examples
    --------
    >>> from pprint import pprint
    >>> monitor, error, message = deployment.request_total_monitor()
    >>> if error:
    >>>     pprint(error)
    >>> else:
    >>>     pprint(monitor)
    """

    def set_auth_key(self: Any, auth_key: Any) -> Any: ...

    def update_camera(self: Any, camera_id: str, camera_config: Any) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Update an existing camera.

    Args:
        camera_id: The ID of the camera to update
        camera_config: CameraConfig object with updated configuration

    Returns:
        tuple: (result, error, message)
    """

    def update_camera_group(self: Any, group_id: str, group: Any) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Update an existing camera group.

    Args:
        group_id: The ID of the camera group to update
        group: CameraGroup object with updated configuration

    Returns:
        tuple: (result, error, message)
    """

    def update_streaming_gateway(self: Any, gateway_id: str, gateway_config: Any) -> Tuple[Optional[Dict], Optional[str], str]: ...
    """
    Update an existing streaming gateway.

    Args:
        gateway_id: The ID of the streaming gateway to update
        gateway_config: StreamingGatewayConfig object with updated configuration

    Returns:
        tuple: (result, error, message)
    """

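Finally, a minimal sketch of the update methods above, again assuming config objects built from the respective manager stubs (the IDs below are placeholders):

result, err, msg = deployment.update_camera("camera_id", CameraConfig(...))                        # updated fields go in the config
result, err, msg = deployment.update_camera_group("group_id", CameraGroup(...))
result, err, msg = deployment.update_streaming_gateway("gateway_id", StreamingGatewayConfig(...))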
    def wakeup_deployment_server(self: Any) -> Any: ...
    """
    Wake up the deployment server associated with the current deployment.
    The `deployment_id` must be set during initialization.

    Returns
    -------
    tuple
        A tuple containing three elements:
        - dict: The API response with details of the wake-up operation.
        - str or None: Error message if an error occurred, otherwise None.
        - str: Status message indicating success or failure of the API call.

    Raises
    ------
    SystemExit
        If `deployment_id` is not set.

    Examples
    --------
    >>> wakeup, err, msg = deployment.wakeup_deployment_server()
    >>> if err:
    >>>     pprint(err)
    >>> else:
    >>>     pprint(wakeup)
    """