viam-sdk 0.41.1__py3-none-linux_armv6l.whl → 0.66.0__py3-none-linux_armv6l.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of viam-sdk might be problematic.
- viam/app/app_client.py +225 -51
- viam/app/billing_client.py +47 -5
- viam/app/data_client.py +771 -234
- viam/app/ml_training_client.py +3 -5
- viam/app/provisioning_client.py +3 -5
- viam/app/viam_client.py +58 -11
- viam/components/arm/arm.py +1 -1
- viam/components/arm/service.py +1 -1
- viam/components/audio_in/__init__.py +24 -0
- viam/components/audio_in/audio_in.py +74 -0
- viam/components/audio_in/client.py +76 -0
- viam/components/audio_in/service.py +83 -0
- viam/components/audio_out/__init__.py +21 -0
- viam/components/audio_out/audio_out.py +72 -0
- viam/components/audio_out/client.py +67 -0
- viam/components/audio_out/service.py +63 -0
- viam/components/base/base.py +1 -1
- viam/components/board/board.py +8 -2
- viam/components/board/client.py +2 -1
- viam/components/board/service.py +1 -0
- viam/components/button/__init__.py +10 -0
- viam/components/button/button.py +41 -0
- viam/components/button/client.py +52 -0
- viam/components/button/service.py +46 -0
- viam/components/camera/camera.py +15 -30
- viam/components/camera/client.py +10 -21
- viam/components/camera/service.py +15 -28
- viam/components/component_base.py +2 -2
- viam/components/gantry/client.py +17 -2
- viam/components/gantry/gantry.py +32 -1
- viam/components/gantry/service.py +21 -5
- viam/components/gripper/__init__.py +2 -0
- viam/components/gripper/client.py +25 -2
- viam/components/gripper/gripper.py +76 -1
- viam/components/gripper/service.py +32 -3
- viam/components/input/input.py +1 -1
- viam/components/motor/motor.py +1 -1
- viam/components/power_sensor/power_sensor.py +1 -1
- viam/components/switch/__init__.py +10 -0
- viam/components/switch/client.py +83 -0
- viam/components/switch/service.py +72 -0
- viam/components/switch/switch.py +98 -0
- viam/gen/app/agent/v1/agent_pb2.py +1 -1
- viam/gen/app/cloudslam/v1/cloud_slam_pb2.py +1 -1
- viam/gen/app/data/v1/data_grpc.py +74 -2
- viam/gen/app/data/v1/data_pb2.py +198 -104
- viam/gen/app/data/v1/data_pb2.pyi +563 -31
- viam/gen/app/datapipelines/__init__.py +0 -0
- viam/gen/app/datapipelines/v1/__init__.py +0 -0
- viam/gen/app/datapipelines/v1/data_pipelines_grpc.py +84 -0
- viam/gen/app/datapipelines/v1/data_pipelines_pb2.py +57 -0
- viam/gen/app/datapipelines/v1/data_pipelines_pb2.pyi +387 -0
- viam/gen/app/dataset/v1/dataset_grpc.py +10 -2
- viam/gen/app/dataset/v1/dataset_pb2.py +8 -4
- viam/gen/app/dataset/v1/dataset_pb2.pyi +36 -1
- viam/gen/app/datasync/v1/data_sync_pb2.py +39 -35
- viam/gen/app/datasync/v1/data_sync_pb2.pyi +21 -8
- viam/gen/app/mlinference/v1/ml_inference_pb2.py +7 -7
- viam/gen/app/mlinference/v1/ml_inference_pb2.pyi +4 -2
- viam/gen/app/mltraining/v1/ml_training_grpc.py +10 -2
- viam/gen/app/mltraining/v1/ml_training_pb2.py +63 -43
- viam/gen/app/mltraining/v1/ml_training_pb2.pyi +112 -7
- viam/gen/app/packages/v1/packages_pb2.py +1 -1
- viam/gen/app/v1/app_grpc.py +74 -3
- viam/gen/app/v1/app_pb2.py +600 -545
- viam/gen/app/v1/app_pb2.pyi +1108 -258
- viam/gen/app/v1/billing_grpc.py +26 -2
- viam/gen/app/v1/billing_pb2.py +52 -36
- viam/gen/app/v1/billing_pb2.pyi +158 -4
- viam/gen/app/v1/end_user_pb2.py +1 -1
- viam/gen/app/v1/robot_pb2.py +95 -89
- viam/gen/app/v1/robot_pb2.pyi +121 -9
- viam/gen/common/v1/common_pb2.py +76 -58
- viam/gen/common/v1/common_pb2.pyi +186 -17
- viam/gen/component/arm/v1/arm_grpc.py +10 -2
- viam/gen/component/arm/v1/arm_pb2.py +5 -3
- viam/gen/component/audioin/__init__.py +0 -0
- viam/gen/component/audioin/v1/__init__.py +0 -0
- viam/gen/component/audioin/v1/audioin_grpc.py +54 -0
- viam/gen/component/audioin/v1/audioin_pb2.py +34 -0
- viam/gen/component/audioin/v1/audioin_pb2.pyi +94 -0
- viam/gen/component/audioinput/v1/audioinput_pb2.py +1 -1
- viam/gen/component/audioout/__init__.py +0 -0
- viam/gen/component/audioout/v1/__init__.py +0 -0
- viam/gen/component/audioout/v1/audioout_grpc.py +54 -0
- viam/gen/component/audioout/v1/audioout_pb2.py +32 -0
- viam/gen/component/audioout/v1/audioout_pb2.pyi +47 -0
- viam/gen/component/base/v1/base_pb2.py +1 -1
- viam/gen/component/board/v1/board_pb2.py +1 -1
- viam/gen/component/button/v1/button_pb2.py +1 -1
- viam/gen/component/camera/v1/camera_grpc.py +1 -0
- viam/gen/component/camera/v1/camera_pb2.py +37 -36
- viam/gen/component/camera/v1/camera_pb2.pyi +31 -4
- viam/gen/component/encoder/v1/encoder_pb2.py +1 -1
- viam/gen/component/gantry/v1/gantry_grpc.py +9 -1
- viam/gen/component/gantry/v1/gantry_pb2.py +5 -3
- viam/gen/component/generic/v1/generic_pb2.py +1 -1
- viam/gen/component/gripper/v1/gripper_grpc.py +18 -2
- viam/gen/component/gripper/v1/gripper_pb2.py +12 -4
- viam/gen/component/gripper/v1/gripper_pb2.pyi +43 -1
- viam/gen/component/inputcontroller/v1/input_controller_pb2.py +1 -1
- viam/gen/component/motor/v1/motor_pb2.py +1 -1
- viam/gen/component/movementsensor/v1/movementsensor_pb2.py +1 -1
- viam/gen/component/posetracker/v1/pose_tracker_pb2.py +1 -1
- viam/gen/component/powersensor/v1/powersensor_pb2.py +1 -1
- viam/gen/component/sensor/v1/sensor_pb2.py +1 -1
- viam/gen/component/servo/v1/servo_pb2.py +1 -1
- viam/gen/component/switch/v1/switch_pb2.py +5 -5
- viam/gen/component/switch/v1/switch_pb2.pyi +9 -2
- viam/gen/component/testecho/v1/testecho_pb2.py +1 -1
- viam/gen/module/v1/module_pb2.py +5 -5
- viam/gen/module/v1/module_pb2.pyi +7 -2
- viam/gen/opentelemetry/__init__.py +0 -0
- viam/gen/opentelemetry/proto/__init__.py +0 -0
- viam/gen/opentelemetry/proto/common/__init__.py +0 -0
- viam/gen/opentelemetry/proto/common/v1/__init__.py +0 -0
- viam/gen/opentelemetry/proto/common/v1/common_grpc.py +0 -0
- viam/gen/opentelemetry/proto/common/v1/common_pb2.py +27 -0
- viam/gen/opentelemetry/proto/common/v1/common_pb2.pyi +208 -0
- viam/gen/opentelemetry/proto/resource/__init__.py +0 -0
- viam/gen/opentelemetry/proto/resource/v1/__init__.py +0 -0
- viam/gen/opentelemetry/proto/resource/v1/resource_grpc.py +0 -0
- viam/gen/opentelemetry/proto/resource/v1/resource_pb2.py +18 -0
- viam/gen/opentelemetry/proto/resource/v1/resource_pb2.pyi +59 -0
- viam/gen/opentelemetry/proto/trace/__init__.py +0 -0
- viam/gen/opentelemetry/proto/trace/v1/__init__.py +0 -0
- viam/gen/opentelemetry/proto/trace/v1/trace_grpc.py +0 -0
- viam/gen/opentelemetry/proto/trace/v1/trace_pb2.py +37 -0
- viam/gen/opentelemetry/proto/trace/v1/trace_pb2.pyi +402 -0
- viam/gen/proto/rpc/examples/echo/v1/echo_pb2.py +1 -1
- viam/gen/proto/rpc/examples/echoresource/v1/echoresource_pb2.py +1 -1
- viam/gen/proto/rpc/v1/auth_pb2.py +1 -1
- viam/gen/proto/rpc/webrtc/v1/grpc_pb2.py +1 -1
- viam/gen/proto/rpc/webrtc/v1/signaling_pb2.py +1 -1
- viam/gen/provisioning/v1/provisioning_grpc.py +10 -2
- viam/gen/provisioning/v1/provisioning_pb2.py +32 -26
- viam/gen/provisioning/v1/provisioning_pb2.pyi +46 -5
- viam/gen/robot/v1/robot_grpc.py +51 -34
- viam/gen/robot/v1/robot_pb2.py +147 -142
- viam/gen/robot/v1/robot_pb2.pyi +153 -86
- viam/gen/service/datamanager/v1/data_manager_grpc.py +11 -2
- viam/gen/service/datamanager/v1/data_manager_pb2.py +15 -8
- viam/gen/service/datamanager/v1/data_manager_pb2.pyi +47 -1
- viam/gen/service/discovery/v1/discovery_pb2.py +1 -1
- viam/gen/service/generic/v1/generic_pb2.py +1 -1
- viam/gen/service/mlmodel/v1/mlmodel_pb2.py +1 -1
- viam/gen/service/motion/v1/motion_pb2.py +92 -62
- viam/gen/service/motion/v1/motion_pb2.pyi +130 -68
- viam/gen/service/navigation/v1/navigation_pb2.py +1 -1
- viam/gen/service/sensors/v1/sensors_pb2.py +1 -1
- viam/gen/service/shell/v1/shell_pb2.py +1 -1
- viam/gen/service/slam/v1/slam_pb2.py +1 -1
- viam/gen/service/slam/v1/slam_pb2.pyi +1 -1
- viam/gen/service/video/__init__.py +0 -0
- viam/gen/service/video/v1/__init__.py +0 -0
- viam/gen/service/video/v1/video_grpc.py +39 -0
- viam/gen/service/video/v1/video_pb2.py +29 -0
- viam/gen/service/video/v1/video_pb2.pyi +72 -0
- viam/gen/service/vision/v1/vision_pb2.py +27 -27
- viam/gen/service/vision/v1/vision_pb2.pyi +28 -3
- viam/gen/service/worldstatestore/__init__.py +0 -0
- viam/gen/service/worldstatestore/v1/__init__.py +0 -0
- viam/gen/service/worldstatestore/v1/world_state_store_grpc.py +55 -0
- viam/gen/service/worldstatestore/v1/world_state_store_pb2.py +39 -0
- viam/gen/service/worldstatestore/v1/world_state_store_pb2.pyi +171 -0
- viam/gen/stream/v1/stream_pb2.py +1 -1
- viam/gen/tagger/v1/tagger_pb2.py +1 -1
- viam/logging.py +9 -8
- viam/media/audio.py +22 -10
- viam/media/utils/pil/__init__.py +5 -1
- viam/media/video.py +54 -40
- viam/module/module.py +85 -16
- viam/module/resource_data_consumer.py +41 -0
- viam/module/service.py +9 -1
- viam/proto/app/__init__.py +68 -0
- viam/proto/app/billing.py +16 -0
- viam/proto/app/data/__init__.py +48 -0
- viam/proto/app/datapipelines/__init__.py +56 -0
- viam/proto/app/dataset/__init__.py +4 -0
- viam/proto/app/mltraining/__init__.py +6 -0
- viam/proto/app/robot.py +6 -0
- viam/proto/common/__init__.py +14 -0
- viam/proto/component/audioin/__init__.py +16 -0
- viam/proto/component/audioout/__init__.py +15 -0
- viam/proto/component/camera/__init__.py +0 -2
- viam/proto/component/gripper/__init__.py +4 -0
- viam/proto/opentelemetry/__init__.py +0 -0
- viam/proto/opentelemetry/proto/__init__.py +0 -0
- viam/proto/opentelemetry/proto/common/__init__.py +15 -0
- viam/proto/opentelemetry/proto/resource/__init__.py +10 -0
- viam/proto/opentelemetry/proto/trace/__init__.py +15 -0
- viam/proto/provisioning/__init__.py +6 -0
- viam/proto/robot/__init__.py +16 -8
- viam/proto/service/datamanager/__init__.py +8 -1
- viam/proto/service/motion/__init__.py +2 -0
- viam/proto/service/video/__init__.py +15 -0
- viam/proto/service/worldstatestore/__init__.py +32 -0
- viam/resource/easy_resource.py +5 -9
- viam/resource/manager.py +4 -3
- viam/resource/registry.py +2 -2
- viam/resource/types.py +2 -2
- viam/robot/client.py +38 -59
- viam/rpc/dial.py +48 -5
- viam/rpc/libviam_rust_utils.so +0 -0
- viam/rpc/server.py +24 -10
- viam/services/motion/client.py +8 -9
- viam/services/motion/motion.py +48 -46
- viam/services/navigation/navigation.py +2 -2
- viam/services/vision/client.py +1 -1
- viam/services/vision/service.py +5 -8
- viam/services/vision/vision.py +5 -3
- viam/services/worldstatestore/__init__.py +18 -0
- viam/services/worldstatestore/client.py +94 -0
- viam/services/worldstatestore/service.py +55 -0
- viam/services/worldstatestore/worldstatestore.py +90 -0
- viam/sessions_client.py +115 -46
- viam/version_metadata.py +2 -2
- {viam_sdk-0.41.1.dist-info → viam_sdk-0.66.0.dist-info}/METADATA +10 -6
- {viam_sdk-0.41.1.dist-info → viam_sdk-0.66.0.dist-info}/RECORD +221 -152
- {viam_sdk-0.41.1.dist-info → viam_sdk-0.66.0.dist-info}/WHEEL +1 -1
- viam/components/audio_input/__init__.py +0 -18
- viam/components/audio_input/audio_input.py +0 -81
- viam/components/audio_input/client.py +0 -70
- viam/components/audio_input/service.py +0 -114
- {viam_sdk-0.41.1.dist-info → viam_sdk-0.66.0.dist-info}/licenses/LICENSE +0 -0
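Most of the API surface changes in this release land in viam/app/data_client.py, whose full diff follows. Two patterns stand out in it: the binary-data methods now accept plain binary data ID strings (the BinaryID message is deprecated), and new dataset and data-pipeline helpers such as merge_datasets are added. The sketch below is adapted from the docstring examples visible in the diff; the API key, organization ID, and other placeholder values are assumptions, not values taken from this diff.

import asyncio

from viam.app.viam_client import ViamClient
from viam.rpc.dial import Credentials, DialOptions


async def connect() -> ViamClient:
    # Placeholder credentials; substitute your own API key and key ID.
    dial_options = DialOptions(
        credentials=Credentials(type="api-key", payload="<API-KEY>"),
        auth_entity="<API-KEY-ID>",
    )
    return await ViamClient.create_from_dial_options(dial_options)


async def main():
    viam_client = await connect()
    data_client = viam_client.data_client

    # 0.66.0: pass binary data IDs as plain strings; BinaryID objects are deprecated.
    binary_data = await data_client.binary_data_by_ids(["<YOUR-BINARY-DATA-ID>"])

    # 0.66.0: merge existing datasets into a new dataset.
    dataset_id = await data_client.merge_datasets(
        name="<DATASET-NAME>",
        organization_id="<YOUR-ORG-ID>",
        dataset_ids=["<YOUR-DATASET-ID-1>", "<YOUR-DATASET-ID-2>"],
    )
    print(binary_data, dataset_id)

    viam_client.close()


if __name__ == "__main__":
    asyncio.run(main())

Older code that builds BinaryID(file_id=..., organization_id=..., location_id=...) objects still works, but it is flagged as deprecated throughout the diff below.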
viam/app/data_client.py
CHANGED
|
@@ -2,11 +2,12 @@ import warnings
|
|
|
2
2
|
from dataclasses import dataclass
|
|
3
3
|
from datetime import datetime
|
|
4
4
|
from pathlib import Path
|
|
5
|
-
from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union
|
|
5
|
+
from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union, cast
|
|
6
6
|
|
|
7
7
|
import bson
|
|
8
8
|
from google.protobuf.struct_pb2 import Struct
|
|
9
9
|
from grpclib.client import Channel, Stream
|
|
10
|
+
from typing_extensions import Self
|
|
10
11
|
|
|
11
12
|
from viam import logging
|
|
12
13
|
from viam.proto.app.data import (
|
|
@@ -26,12 +27,16 @@ from viam.proto.app.data import (
|
|
|
26
27
|
CaptureInterval,
|
|
27
28
|
CaptureMetadata,
|
|
28
29
|
ConfigureDatabaseUserRequest,
|
|
30
|
+
CreateBinaryDataSignedURLRequest,
|
|
31
|
+
CreateBinaryDataSignedURLResponse,
|
|
32
|
+
CreateIndexRequest,
|
|
29
33
|
DataRequest,
|
|
30
34
|
DataServiceStub,
|
|
31
35
|
DeleteBinaryDataByFilterRequest,
|
|
32
36
|
DeleteBinaryDataByFilterResponse,
|
|
33
37
|
DeleteBinaryDataByIDsRequest,
|
|
34
38
|
DeleteBinaryDataByIDsResponse,
|
|
39
|
+
DeleteIndexRequest,
|
|
35
40
|
DeleteTabularDataRequest,
|
|
36
41
|
DeleteTabularDataResponse,
|
|
37
42
|
ExportTabularDataRequest,
|
|
@@ -41,6 +46,10 @@ from viam.proto.app.data import (
|
|
|
41
46
|
GetDatabaseConnectionResponse,
|
|
42
47
|
GetLatestTabularDataRequest,
|
|
43
48
|
GetLatestTabularDataResponse,
|
|
49
|
+
Index,
|
|
50
|
+
IndexableCollection,
|
|
51
|
+
ListIndexesRequest,
|
|
52
|
+
ListIndexesResponse,
|
|
44
53
|
Order,
|
|
45
54
|
RemoveBinaryDataFromDatasetByIDsRequest,
|
|
46
55
|
RemoveBoundingBoxFromImageByIDRequest,
|
|
@@ -54,9 +63,31 @@ from viam.proto.app.data import (
|
|
|
54
63
|
TabularDataByMQLResponse,
|
|
55
64
|
TabularDataBySQLRequest,
|
|
56
65
|
TabularDataBySQLResponse,
|
|
66
|
+
TabularDataSource,
|
|
67
|
+
TabularDataSourceType,
|
|
57
68
|
TagsByFilterRequest,
|
|
58
69
|
TagsByFilterResponse,
|
|
59
70
|
)
|
|
71
|
+
from viam.proto.app.datapipelines import (
|
|
72
|
+
CreateDataPipelineRequest,
|
|
73
|
+
CreateDataPipelineResponse,
|
|
74
|
+
DataPipelineRunStatus,
|
|
75
|
+
DataPipelinesServiceStub,
|
|
76
|
+
DeleteDataPipelineRequest,
|
|
77
|
+
GetDataPipelineRequest,
|
|
78
|
+
GetDataPipelineResponse,
|
|
79
|
+
ListDataPipelineRunsRequest,
|
|
80
|
+
ListDataPipelineRunsResponse,
|
|
81
|
+
ListDataPipelinesRequest,
|
|
82
|
+
ListDataPipelinesResponse,
|
|
83
|
+
RenameDataPipelineRequest,
|
|
84
|
+
)
|
|
85
|
+
from viam.proto.app.datapipelines import (
|
|
86
|
+
DataPipeline as ProtoDataPipeline,
|
|
87
|
+
)
|
|
88
|
+
from viam.proto.app.datapipelines import (
|
|
89
|
+
DataPipelineRun as ProtoDataPipelineRun,
|
|
90
|
+
)
|
|
60
91
|
from viam.proto.app.dataset import (
|
|
61
92
|
CreateDatasetRequest,
|
|
62
93
|
CreateDatasetResponse,
|
|
@@ -67,6 +98,8 @@ from viam.proto.app.dataset import (
|
|
|
67
98
|
ListDatasetsByIDsResponse,
|
|
68
99
|
ListDatasetsByOrganizationIDRequest,
|
|
69
100
|
ListDatasetsByOrganizationIDResponse,
|
|
101
|
+
MergeDatasetsRequest,
|
|
102
|
+
MergeDatasetsResponse,
|
|
70
103
|
RenameDatasetRequest,
|
|
71
104
|
)
|
|
72
105
|
from viam.proto.app.datasync import (
|
|
@@ -84,7 +117,7 @@ from viam.proto.app.datasync import (
|
|
|
84
117
|
StreamingDataCaptureUploadResponse,
|
|
85
118
|
UploadMetadata,
|
|
86
119
|
)
|
|
87
|
-
from viam.utils import ValueTypes, _alias_param, create_filter, datetime_to_timestamp, struct_to_dict
|
|
120
|
+
from viam.utils import ValueTypes, _alias_param, create_filter, datetime_to_timestamp, dict_to_struct, struct_to_dict
|
|
88
121
|
|
|
89
122
|
LOGGER = logging.getLogger(__name__)
|
|
90
123
|
|
|
@@ -92,10 +125,10 @@ LOGGER = logging.getLogger(__name__)
|
|
|
92
125
|
class DataClient:
|
|
93
126
|
"""gRPC client for uploading and retrieving data from app.
|
|
94
127
|
|
|
95
|
-
|
|
96
|
-
|
|
128
|
+
This class's constructor instantiates relevant service stubs. Always make :class:`DataClient` method calls through an instance of
|
|
129
|
+
:class:`ViamClient`.
|
|
97
130
|
|
|
98
|
-
Establish a
|
|
131
|
+
Establish a connection::
|
|
99
132
|
|
|
100
133
|
import asyncio
|
|
101
134
|
|
|
@@ -111,11 +144,9 @@ class DataClient:
|
|
|
111
144
|
|
|
112
145
|
async def main():
|
|
113
146
|
# Make a ViamClient
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
viam_client.close()
|
|
147
|
+
async with await connect() as viam_client:
|
|
148
|
+
# Instantiate a DataClient to run data client API methods on
|
|
149
|
+
data_client = viam_client.data_client
|
|
119
150
|
|
|
120
151
|
if __name__ == '__main__':
|
|
121
152
|
asyncio.run(main())
|
|
@@ -159,10 +190,10 @@ class DataClient:
|
|
|
159
190
|
"""The resource name"""
|
|
160
191
|
|
|
161
192
|
resource_api: str
|
|
162
|
-
"""The resource API.
|
|
193
|
+
"""The resource API. For example, rdk:component:sensor"""
|
|
163
194
|
|
|
164
195
|
method_name: str
|
|
165
|
-
"""The method used for data capture.
|
|
196
|
+
"""The method used for data capture. For example, Readings"""
|
|
166
197
|
|
|
167
198
|
time_captured: datetime
|
|
168
199
|
"""The time at which the data point was captured"""
|
|
@@ -220,8 +251,131 @@ class DataClient:
|
|
|
220
251
|
)
|
|
221
252
|
return self.resource_api
|
|
222
253
|
|
|
254
|
+
@dataclass
|
|
255
|
+
class DataPipeline:
|
|
256
|
+
"""Represents a data pipeline and its associated metadata."""
|
|
257
|
+
|
|
258
|
+
id: str
|
|
259
|
+
"""The ID of the data pipeline"""
|
|
260
|
+
|
|
261
|
+
organization_id: str
|
|
262
|
+
"""The organization ID"""
|
|
263
|
+
|
|
264
|
+
name: str
|
|
265
|
+
"""The name of the data pipeline"""
|
|
266
|
+
|
|
267
|
+
mql_binary: List[Dict[str, Any]]
|
|
268
|
+
"""The MQL binary of the data pipeline"""
|
|
269
|
+
|
|
270
|
+
schedule: str
|
|
271
|
+
"""The schedule of the data pipeline"""
|
|
272
|
+
|
|
273
|
+
created_on: datetime
|
|
274
|
+
"""The time the data pipeline was created"""
|
|
275
|
+
|
|
276
|
+
updated_at: datetime
|
|
277
|
+
"""The time the data pipeline was last updated"""
|
|
278
|
+
|
|
279
|
+
enabled: bool
|
|
280
|
+
"""Whether the data pipeline is enabled"""
|
|
281
|
+
|
|
282
|
+
data_source_type: TabularDataSourceType.ValueType
|
|
283
|
+
"""The type of data source for the data pipeline"""
|
|
284
|
+
|
|
285
|
+
@classmethod
|
|
286
|
+
def from_proto(cls, data_pipeline: ProtoDataPipeline) -> Self:
|
|
287
|
+
return cls(
|
|
288
|
+
id=data_pipeline.id,
|
|
289
|
+
organization_id=data_pipeline.organization_id,
|
|
290
|
+
name=data_pipeline.name,
|
|
291
|
+
mql_binary=[bson.decode(bson_bytes) for bson_bytes in data_pipeline.mql_binary],
|
|
292
|
+
schedule=data_pipeline.schedule,
|
|
293
|
+
created_on=data_pipeline.created_on.ToDatetime(),
|
|
294
|
+
updated_at=data_pipeline.updated_at.ToDatetime(),
|
|
295
|
+
enabled=data_pipeline.enabled,
|
|
296
|
+
data_source_type=data_pipeline.data_source_type,
|
|
297
|
+
)
|
|
298
|
+
|
|
299
|
+
@dataclass
|
|
300
|
+
class DataPipelineRun:
|
|
301
|
+
"""Represents a data pipeline run and its associated metadata."""
|
|
302
|
+
|
|
303
|
+
id: str
|
|
304
|
+
"""The ID of the data pipeline run"""
|
|
305
|
+
|
|
306
|
+
status: DataPipelineRunStatus.ValueType
|
|
307
|
+
"""The status of the data pipeline run"""
|
|
308
|
+
|
|
309
|
+
start_time: datetime
|
|
310
|
+
"""The time the data pipeline run started"""
|
|
311
|
+
|
|
312
|
+
end_time: datetime
|
|
313
|
+
"""The time the data pipeline run ended"""
|
|
314
|
+
|
|
315
|
+
data_start_time: datetime
|
|
316
|
+
"""The start time of the data that was processed in the run."""
|
|
317
|
+
data_end_time: datetime
|
|
318
|
+
"""The end time of the data that was processed in the run."""
|
|
319
|
+
|
|
320
|
+
error_message: str
|
|
321
|
+
"""The error message of the data pipeline run. Only set if the run failed."""
|
|
322
|
+
|
|
323
|
+
@classmethod
|
|
324
|
+
def from_proto(cls, data_pipeline_run: ProtoDataPipelineRun) -> Self:
|
|
325
|
+
return cls(
|
|
326
|
+
id=data_pipeline_run.id,
|
|
327
|
+
status=data_pipeline_run.status,
|
|
328
|
+
start_time=data_pipeline_run.start_time.ToDatetime(),
|
|
329
|
+
end_time=data_pipeline_run.end_time.ToDatetime(),
|
|
330
|
+
data_start_time=data_pipeline_run.data_start_time.ToDatetime(),
|
|
331
|
+
data_end_time=data_pipeline_run.data_end_time.ToDatetime(),
|
|
332
|
+
error_message=data_pipeline_run.error_message,
|
|
333
|
+
)
|
|
334
|
+
|
|
335
|
+
@dataclass
|
|
336
|
+
class DataPipelineRunsPage:
|
|
337
|
+
"""Represents a page of data pipeline runs and provides pagination functionality."""
|
|
338
|
+
|
|
339
|
+
_client: "DataClient"
|
|
340
|
+
"""The data client used to make API calls"""
|
|
341
|
+
|
|
342
|
+
pipeline_id: str
|
|
343
|
+
"""The ID of the pipeline these runs belong to"""
|
|
344
|
+
|
|
345
|
+
page_size: int
|
|
346
|
+
"""The number of runs per page"""
|
|
347
|
+
|
|
348
|
+
runs: List["DataClient.DataPipelineRun"]
|
|
349
|
+
"""The list of runs in this page"""
|
|
350
|
+
|
|
351
|
+
next_page_token: str
|
|
352
|
+
"""The token to use to get the next page of results"""
|
|
353
|
+
|
|
354
|
+
async def next_page(self) -> "DataClient.DataPipelineRunsPage":
|
|
355
|
+
"""Get the next page of data pipeline runs.
|
|
356
|
+
|
|
357
|
+
Returns:
|
|
358
|
+
DataPipelineRunsPage: The next page of runs, or an empty page if there are no more runs
|
|
359
|
+
"""
|
|
360
|
+
if not self.next_page_token:
|
|
361
|
+
# no token, return empty next page
|
|
362
|
+
return DataClient.DataPipelineRunsPage(
|
|
363
|
+
_client=self._client, pipeline_id=self.pipeline_id, page_size=self.page_size, runs=[], next_page_token=""
|
|
364
|
+
)
|
|
365
|
+
return await self._client._list_data_pipeline_runs(self.pipeline_id, self.page_size, self.next_page_token)
|
|
366
|
+
|
|
367
|
+
@classmethod
|
|
368
|
+
def from_proto(cls, data_pipeline_runs_page: ListDataPipelineRunsResponse, client: "DataClient", page_size: int) -> Self:
|
|
369
|
+
return cls(
|
|
370
|
+
_client=client,
|
|
371
|
+
pipeline_id=data_pipeline_runs_page.pipeline_id,
|
|
372
|
+
page_size=page_size,
|
|
373
|
+
runs=[DataClient.DataPipelineRun.from_proto(run) for run in data_pipeline_runs_page.runs],
|
|
374
|
+
next_page_token=data_pipeline_runs_page.next_page_token,
|
|
375
|
+
)
|
|
376
|
+
|
|
223
377
|
def __init__(self, channel: Channel, metadata: Mapping[str, str]):
|
|
224
|
-
"""Create a
|
|
378
|
+
"""Create a :class:`DataClient` that maintains a connection to app.
|
|
225
379
|
|
|
226
380
|
Args:
|
|
227
381
|
channel (grpclib.client.Channel): Connection to app.
|
|
@@ -231,11 +385,13 @@ class DataClient:
|
|
|
231
385
|
self._data_client = DataServiceStub(channel)
|
|
232
386
|
self._data_sync_client = DataSyncServiceStub(channel)
|
|
233
387
|
self._dataset_client = DatasetServiceStub(channel)
|
|
388
|
+
self._data_pipelines_client = DataPipelinesServiceStub(channel)
|
|
234
389
|
self._channel = channel
|
|
235
390
|
|
|
236
391
|
_data_client: DataServiceStub
|
|
237
392
|
_data_sync_client: DataSyncServiceStub
|
|
238
393
|
_dataset_client: DatasetServiceStub
|
|
394
|
+
_data_pipelines_client: DataPipelinesServiceStub
|
|
239
395
|
_metadata: Mapping[str, str]
|
|
240
396
|
_channel: Channel
|
|
241
397
|
|
|
@@ -249,9 +405,8 @@ class DataClient:
|
|
|
249
405
|
include_internal_data: bool = False,
|
|
250
406
|
dest: Optional[str] = None,
|
|
251
407
|
) -> Tuple[List[TabularData], int, str]:
|
|
252
|
-
"""Filter and download tabular data. The data will be paginated into pages of
|
|
253
|
-
|
|
254
|
-
If the file is not empty, it will be overwritten.
|
|
408
|
+
"""Filter and download tabular data. The data will be paginated into pages of ``limit`` items; the returned tuple will include
|
|
409
|
+
the pagination ID. If a destination is provided, this method saves returned data to that file, overwriting any existing file content.
|
|
255
410
|
|
|
256
411
|
::
|
|
257
412
|
|
|
@@ -269,23 +424,23 @@ class DataClient:
|
|
|
269
424
|
print(f"My data: {my_data}")
|
|
270
425
|
|
|
271
426
|
Args:
|
|
272
|
-
filter (viam.proto.app.data.Filter): Optional
|
|
273
|
-
data.
|
|
427
|
+
filter (~viam.proto.app.data.Filter): Optional, specifies tabular data to retrieve. If missing, matches all tabular data.
|
|
274
428
|
limit (int): The maximum number of entries to include in a page. Defaults to 50 if unspecified.
|
|
275
|
-
sort_order (viam.proto.app.data.Order): The desired sort order of the data.
|
|
429
|
+
sort_order (~viam.proto.app.data.Order): The desired sort order of the data.
|
|
276
430
|
last (str): Optional string indicating the object identifier of the last-returned data.
|
|
277
|
-
|
|
278
|
-
|
|
431
|
+
Returned by calls to :class:`TabularDataByFilter` as the ``last`` value.
|
|
432
|
+
If provided, the server returns the next data entries after the last object identifier.
|
|
279
433
|
count_only (bool): Whether to return only the total count of entries.
|
|
280
434
|
include_internal_data (bool): Whether to return the internal data. Internal data is used for Viam-specific data ingestion,
|
|
281
|
-
|
|
435
|
+
like cloud SLAM. Defaults to ``False``.
|
|
282
436
|
dest (str): Optional filepath for writing retrieved data.
|
|
283
437
|
|
|
284
438
|
Returns:
|
|
285
439
|
Tuple[List[TabularData], int, str]: A tuple containing the following:
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
440
|
+
|
|
441
|
+
- ``tabular_data`` (*List[TabularData]*): The tabular data.
|
|
442
|
+
- ``count`` (*int*): The count (number of entries).
|
|
443
|
+
- ``last`` (*str*): The last-returned page ID.
|
|
289
444
|
|
|
290
445
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#tabulardatabyfilter>`_.
|
|
291
446
|
"""
|
|
@@ -332,7 +487,7 @@ class DataClient:
|
|
|
332
487
|
|
|
333
488
|
Args:
|
|
334
489
|
organization_id (str): The ID of the organization that owns the data.
|
|
335
|
-
|
|
490
|
+
To find your organization ID, visit the organization settings page.
|
|
336
491
|
sql_query (str): The SQL query to run.
|
|
337
492
|
|
|
338
493
|
Returns:
|
|
@@ -346,7 +501,13 @@ class DataClient:
|
|
|
346
501
|
|
|
347
502
|
@_alias_param("query", param_alias="mql_binary")
|
|
348
503
|
async def tabular_data_by_mql(
|
|
349
|
-
self,
|
|
504
|
+
self,
|
|
505
|
+
organization_id: str,
|
|
506
|
+
query: Union[List[bytes], List[Dict[str, Any]]],
|
|
507
|
+
use_recent_data: Optional[bool] = None,
|
|
508
|
+
tabular_data_source_type: TabularDataSourceType.ValueType = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
|
|
509
|
+
pipeline_id: Optional[str] = None,
|
|
510
|
+
query_prefix_name: Optional[str] = None,
|
|
350
511
|
) -> List[Dict[str, Union[ValueTypes, datetime]]]:
|
|
351
512
|
"""Obtain unified tabular data and metadata, queried with MQL.
|
|
352
513
|
|
|
@@ -363,11 +524,17 @@ class DataClient:
|
|
|
363
524
|
|
|
364
525
|
Args:
|
|
365
526
|
organization_id (str): The ID of the organization that owns the data.
|
|
366
|
-
|
|
527
|
+
To find your organization ID, visit the organization settings page.
|
|
367
528
|
query (Union[List[bytes], List[Dict[str, Any]]]): The MQL query to run, as a list of MongoDB aggregation pipeline stages.
|
|
368
|
-
|
|
369
|
-
so
|
|
370
|
-
use_recent_data (bool): Whether to query blob storage or your recent data store. Defaults to
|
|
529
|
+
Each stage can be provided as either a dictionary or raw BSON bytes, but support for bytes will be removed in the
|
|
530
|
+
future, so prefer the dictionary option.
|
|
531
|
+
use_recent_data (bool): Whether to query blob storage or your recent data store. Defaults to ``False``..
|
|
532
|
+
Deprecated, use `tabular_data_source_type` instead.
|
|
533
|
+
tabular_data_source_type (viam.proto.app.data.TabularDataSourceType): The data source to query.
|
|
534
|
+
Defaults to `TABULAR_DATA_SOURCE_TYPE_STANDARD`.
|
|
535
|
+
pipeline_id (str): The ID of the data pipeline to query. Defaults to `None`.
|
|
536
|
+
Required if `tabular_data_source_type` is `TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK`.
|
|
537
|
+
query_prefix_name (str): Optional field that can be used to specify a saved query to run.
|
|
371
538
|
|
|
372
539
|
Returns:
|
|
373
540
|
List[Dict[str, Union[ValueTypes, datetime]]]: An array of decoded BSON data objects.
|
|
@@ -375,13 +542,26 @@ class DataClient:
|
|
|
375
542
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#tabulardatabymql>`_.
|
|
376
543
|
"""
|
|
377
544
|
binary: List[bytes] = [bson.encode(query) for query in query] if isinstance(query[0], dict) else query # type: ignore
|
|
378
|
-
|
|
545
|
+
data_source = TabularDataSource(type=tabular_data_source_type, pipeline_id=pipeline_id)
|
|
546
|
+
if use_recent_data:
|
|
547
|
+
data_source.type = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE
|
|
548
|
+
request = TabularDataByMQLRequest(
|
|
549
|
+
organization_id=organization_id,
|
|
550
|
+
mql_binary=binary,
|
|
551
|
+
data_source=data_source,
|
|
552
|
+
query_prefix_name=query_prefix_name,
|
|
553
|
+
)
|
|
379
554
|
response: TabularDataByMQLResponse = await self._data_client.TabularDataByMQL(request, metadata=self._metadata)
|
|
380
555
|
return [bson.decode(bson_bytes) for bson_bytes in response.raw_data]
|
|
381
556
|
|
|
382
557
|
@_alias_param("resource_api", param_alias="resource_subtype")
|
|
383
558
|
async def get_latest_tabular_data(
|
|
384
|
-
self,
|
|
559
|
+
self,
|
|
560
|
+
part_id: str,
|
|
561
|
+
resource_name: str,
|
|
562
|
+
resource_api: str,
|
|
563
|
+
method_name: str,
|
|
564
|
+
additional_params: Optional[Mapping[str, ValueTypes]] = None,
|
|
385
565
|
) -> Optional[Tuple[datetime, datetime, Dict[str, ValueTypes]]]:
|
|
386
566
|
"""Gets the most recent tabular data captured from the specified data source, as long as it was synced within the last year.
|
|
387
567
|
|
|
@@ -391,7 +571,8 @@ class DataClient:
|
|
|
391
571
|
part_id="77ae3145-7b91-123a-a234-e567cdca8910",
|
|
392
572
|
resource_name="camera-1",
|
|
393
573
|
resource_api="rdk:component:camera",
|
|
394
|
-
method_name="
|
|
574
|
+
method_name="GetImages",
|
|
575
|
+
additional_params={"docommand_input": {"test": "test"}}
|
|
395
576
|
)
|
|
396
577
|
|
|
397
578
|
if tabular_data:
|
|
@@ -404,22 +585,30 @@ class DataClient:
|
|
|
404
585
|
|
|
405
586
|
Args:
|
|
406
587
|
part_id (str): The ID of the part that owns the data.
|
|
407
|
-
resource_name (str): The name of the requested resource that captured the data.
|
|
408
|
-
resource_api (str): The API of the requested resource that captured the data.
|
|
409
|
-
method_name (str): The data capture method name.
|
|
588
|
+
resource_name (str): The name of the requested resource that captured the data. For example, "my-sensor".
|
|
589
|
+
resource_api (str): The API of the requested resource that captured the data. For example, "rdk:component:sensor".
|
|
590
|
+
method_name (str): The data capture method name. For exampe, "Readings".
|
|
591
|
+
additional_params (dict): Optional additional parameters of the resource that captured the data.
|
|
410
592
|
|
|
411
593
|
Returns:
|
|
412
|
-
Optional[Tuple[datetime, datetime, Dict[str, ValueTypes]]]:
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
594
|
+
Optional[Tuple[datetime, datetime, Dict[str, ValueTypes]]]:
|
|
595
|
+
A return value of ``None`` means that this data source
|
|
596
|
+
has not synced data in the last year. Otherwise, the data source has synced some data in the last year, so the returned
|
|
597
|
+
tuple contains the following:
|
|
598
|
+
|
|
599
|
+
- ``time_captured`` (*datetime*): The time captured.
|
|
600
|
+
- ``time_synced`` (*datetime*): The time synced.
|
|
601
|
+
- ``payload`` (*Dict[str, ValueTypes]*): The latest tabular data captured from the specified data source.
|
|
417
602
|
|
|
418
603
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#getlatesttabulardata>`_.
|
|
419
604
|
"""
|
|
420
605
|
|
|
421
606
|
request = GetLatestTabularDataRequest(
|
|
422
|
-
part_id=part_id,
|
|
607
|
+
part_id=part_id,
|
|
608
|
+
resource_name=resource_name,
|
|
609
|
+
resource_subtype=resource_api,
|
|
610
|
+
method_name=method_name,
|
|
611
|
+
additional_parameters=dict_to_struct(additional_params) if additional_params is not None else None,
|
|
423
612
|
)
|
|
424
613
|
response: GetLatestTabularDataResponse = await self._data_client.GetLatestTabularData(request, metadata=self._metadata)
|
|
425
614
|
if not response.payload:
|
|
@@ -435,6 +624,7 @@ class DataClient:
|
|
|
435
624
|
method_name: str,
|
|
436
625
|
start_time: Optional[datetime] = None,
|
|
437
626
|
end_time: Optional[datetime] = None,
|
|
627
|
+
additional_params: Optional[Mapping[str, ValueTypes]] = None,
|
|
438
628
|
) -> List[TabularDataPoint]:
|
|
439
629
|
"""Obtain unified tabular data and metadata from the specified data source.
|
|
440
630
|
|
|
@@ -447,6 +637,7 @@ class DataClient:
|
|
|
447
637
|
method_name="<METHOD-NAME>",
|
|
448
638
|
start_time="<START_TIME>"
|
|
449
639
|
end_time="<END_TIME>"
|
|
640
|
+
additional_params="<ADDITIONAL_PARAMETERS>"
|
|
450
641
|
)
|
|
451
642
|
|
|
452
643
|
print(f"My data: {tabular_data}")
|
|
@@ -458,6 +649,7 @@ class DataClient:
|
|
|
458
649
|
method_name (str): The data capture method name.
|
|
459
650
|
start_time (datetime): Optional start time for requesting a specific range of data.
|
|
460
651
|
end_time (datetime): Optional end time for requesting a specific range of data.
|
|
652
|
+
additional_params (dict): Optional additional parameters of the resource that captured the data.
|
|
461
653
|
|
|
462
654
|
Returns:
|
|
463
655
|
List[TabularDataPoint]: The unified tabular data and metadata.
|
|
@@ -467,7 +659,12 @@ class DataClient:
|
|
|
467
659
|
|
|
468
660
|
interval = CaptureInterval(start=datetime_to_timestamp(start_time), end=datetime_to_timestamp(end_time))
|
|
469
661
|
request = ExportTabularDataRequest(
|
|
470
|
-
part_id=part_id,
|
|
662
|
+
part_id=part_id,
|
|
663
|
+
resource_name=resource_name,
|
|
664
|
+
resource_subtype=resource_api,
|
|
665
|
+
method_name=method_name,
|
|
666
|
+
interval=interval,
|
|
667
|
+
additional_parameters=dict_to_struct(additional_params) if additional_params is not None else None,
|
|
471
668
|
)
|
|
472
669
|
response: List[ExportTabularDataResponse] = await self._data_client.ExportTabularData(request, metadata=self._metadata)
|
|
473
670
|
|
|
@@ -501,9 +698,9 @@ class DataClient:
|
|
|
501
698
|
include_internal_data: bool = False,
|
|
502
699
|
dest: Optional[str] = None,
|
|
503
700
|
) -> Tuple[List[BinaryData], int, str]:
|
|
504
|
-
"""Filter and download binary data. The data will be paginated into pages of
|
|
505
|
-
in the returned tuple
|
|
506
|
-
|
|
701
|
+
"""Filter and download binary data. The data will be paginated into pages of ``limit`` items, and the pagination ID will be included
|
|
702
|
+
in the returned tuple as ``last``. If a destination is provided, this method saves returned data to that file,
|
|
703
|
+
overwriting any existing file content.
|
|
507
704
|
|
|
508
705
|
::
|
|
509
706
|
|
|
@@ -542,25 +739,25 @@ class DataClient:
|
|
|
542
739
|
my_untagged_data.extend(data)
|
|
543
740
|
|
|
544
741
|
Args:
|
|
545
|
-
filter (viam.proto.app.data.Filter): Optional
|
|
546
|
-
data.
|
|
742
|
+
filter (~viam.proto.app.data.Filter): Optional, specifies tabular data to retrieve. An empty filter matches all binary data.
|
|
547
743
|
limit (int): The maximum number of entries to include in a page. Defaults to 50 if unspecified.
|
|
548
|
-
sort_order (viam.proto.app.data.Order): The desired sort order of the data.
|
|
744
|
+
sort_order (~viam.proto.app.data.Order): The desired sort order of the data.
|
|
549
745
|
last (str): Optional string indicating the object identifier of the last-returned data.
|
|
550
|
-
|
|
551
|
-
|
|
746
|
+
This object identifier is returned by calls to :meth:`binary_data_by_filter` as the ``last`` value.
|
|
747
|
+
If provided, the server will return the next data entries after the last object identifier.
|
|
552
748
|
include_binary_data (bool): Boolean specifying whether to actually include the binary file data with each retrieved file.
|
|
553
|
-
|
|
749
|
+
Defaults to true (that is, both the files' data and metadata are returned).
|
|
554
750
|
count_only (bool): Whether to return only the total count of entries.
|
|
555
751
|
include_internal_data (bool): Whether to return the internal data. Internal data is used for Viam-specific data ingestion,
|
|
556
|
-
|
|
752
|
+
like cloud SLAM. Defaults to ``False``.
|
|
557
753
|
dest (str): Optional filepath for writing retrieved data.
|
|
558
754
|
|
|
559
755
|
Returns:
|
|
560
|
-
Tuple[List[viam.proto.app.data.BinaryData], int, str]: A tuple containing the following:
|
|
561
|
-
|
|
562
|
-
|
|
563
|
-
|
|
756
|
+
Tuple[List[~viam.proto.app.data.BinaryData], int, str]: A tuple containing the following:
|
|
757
|
+
|
|
758
|
+
- ``data`` (*List[* :class:`~viam.proto.app.data.BinaryData` *]*): The binary data.
|
|
759
|
+
- ``count`` (*int*): The count (number of entries).
|
|
760
|
+
- ``last`` (*str*): The last-returned page ID.
|
|
564
761
|
|
|
565
762
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#binarydatabyfilter>`_.
|
|
566
763
|
"""
|
|
@@ -592,15 +789,14 @@ class DataClient:
|
|
|
592
789
|
|
|
593
790
|
async def binary_data_by_ids(
|
|
594
791
|
self,
|
|
595
|
-
binary_ids: List[BinaryID],
|
|
792
|
+
binary_ids: Union[List[BinaryID], List[str]],
|
|
793
|
+
include_binary_data: bool = True,
|
|
596
794
|
dest: Optional[str] = None,
|
|
597
795
|
) -> List[BinaryData]:
|
|
598
796
|
"""Filter and download binary data.
|
|
599
797
|
|
|
600
798
|
::
|
|
601
799
|
|
|
602
|
-
from viam.proto.app.data import BinaryID
|
|
603
|
-
|
|
604
800
|
binary_metadata, count, last = await data_client.binary_data_by_filter(
|
|
605
801
|
include_binary_data=False
|
|
606
802
|
)
|
|
@@ -608,29 +804,34 @@ class DataClient:
|
|
|
608
804
|
my_ids = []
|
|
609
805
|
|
|
610
806
|
for obj in binary_metadata:
|
|
611
|
-
my_ids.append(
|
|
612
|
-
BinaryID(
|
|
613
|
-
file_id=obj.metadata.id,
|
|
614
|
-
organization_id=obj.metadata.capture_metadata.organization_id,
|
|
615
|
-
location_id=obj.metadata.capture_metadata.location_id
|
|
616
|
-
)
|
|
617
|
-
)
|
|
807
|
+
my_ids.append(obj.metadata.binary_data_id)
|
|
618
808
|
|
|
619
809
|
binary_data = await data_client.binary_data_by_ids(my_ids)
|
|
620
810
|
|
|
621
811
|
Args:
|
|
622
|
-
binary_ids (List[viam.proto.app.data.BinaryID]):
|
|
812
|
+
binary_ids (Union[List[~viam.proto.app.data.BinaryID], List[str]]): Binary data ID strings specifying the desired data or
|
|
813
|
+
:class:`BinaryID` objects. Must be non-empty.
|
|
814
|
+
*DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
|
|
815
|
+
list of strings.*
|
|
816
|
+
include_binary_data (bool): Boolean specifying whether to actually include the binary file data with each retrieved file.
|
|
817
|
+
Defaults to true (that is, both the files' data and metadata are returned).
|
|
623
818
|
dest (str): Optional filepath for writing retrieved data.
|
|
624
819
|
|
|
625
820
|
Raises:
|
|
626
|
-
GRPCError: If no
|
|
821
|
+
GRPCError: If no binary data ID strings or :class:`BinaryID` objects are provided.
|
|
627
822
|
|
|
628
823
|
Returns:
|
|
629
|
-
List[viam.proto.app.data.BinaryData]: The binary data.
|
|
824
|
+
List[~viam.proto.app.data.BinaryData]: The binary data.
|
|
630
825
|
|
|
631
826
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#binarydatabyids>`_.
|
|
632
827
|
"""
|
|
633
|
-
request = BinaryDataByIDsRequest(
|
|
828
|
+
request = BinaryDataByIDsRequest()
|
|
829
|
+
if len(binary_ids) > 0 and isinstance(binary_ids[0], str):
|
|
830
|
+
binary_data_ids = cast(List[str], binary_ids)
|
|
831
|
+
request = BinaryDataByIDsRequest(binary_data_ids=binary_data_ids, include_binary=include_binary_data)
|
|
832
|
+
else:
|
|
833
|
+
bin_ids = cast(List[BinaryID], binary_ids)
|
|
834
|
+
request = BinaryDataByIDsRequest(binary_ids=bin_ids, include_binary=include_binary_data)
|
|
634
835
|
response: BinaryDataByIDsResponse = await self._data_client.BinaryDataByIDs(request, metadata=self._metadata)
|
|
635
836
|
if dest:
|
|
636
837
|
try:
|
|
@@ -652,10 +853,10 @@ class DataClient:
|
|
|
652
853
|
)
|
|
653
854
|
|
|
654
855
|
Args:
|
|
655
|
-
organization_id (str): ID of organization to delete data from.
|
|
656
|
-
|
|
657
|
-
delete_older_than_days (int): Delete data that was captured up to this many days ago. For example
|
|
658
|
-
|
|
856
|
+
organization_id (str): The ID of the organization to delete the data from.
|
|
857
|
+
To find your organization ID, visit the organization settings page.
|
|
858
|
+
delete_older_than_days (int): Delete data that was captured up to *this many* days ago. For example, a value of
|
|
859
|
+
10 deletes any data that was captured up to 10 days ago. A value of 0 deletes *all* existing data.
|
|
659
860
|
|
|
660
861
|
Returns:
|
|
661
862
|
int: The number of items deleted.
|
|
@@ -667,7 +868,7 @@ class DataClient:
|
|
|
667
868
|
return response.deleted_count
|
|
668
869
|
|
|
669
870
|
async def delete_tabular_data_by_filter(self, filter: Optional[Filter]) -> int:
|
|
670
|
-
"""Deprecated: use delete_tabular_data instead."""
|
|
871
|
+
"""Deprecated: use :meth:`delete_tabular_data` instead."""
|
|
671
872
|
raise NotImplementedError()
|
|
672
873
|
|
|
673
874
|
async def delete_binary_data_by_filter(self, filter: Optional[Filter]) -> int:
|
|
@@ -682,9 +883,10 @@ class DataClient:
|
|
|
682
883
|
res = await data_client.delete_binary_data_by_filter(my_filter)
|
|
683
884
|
|
|
684
885
|
Args:
|
|
685
|
-
filter (viam.proto.app.data.Filter): Optional
|
|
686
|
-
|
|
687
|
-
|
|
886
|
+
filter (~viam.proto.app.data.Filter): Optional, specifies binary data to delete.
|
|
887
|
+
**CAUTION: Passing an empty** ``Filter`` **deletes all binary data!**
|
|
888
|
+
You must specify an organization ID with ``organization_ids`` when using this option.
|
|
889
|
+
To find your organization ID, visit the organization settings page.
|
|
688
890
|
|
|
689
891
|
Returns:
|
|
690
892
|
int: The number of items deleted.
|
|
@@ -696,7 +898,7 @@ class DataClient:
|
|
|
696
898
|
response: DeleteBinaryDataByFilterResponse = await self._data_client.DeleteBinaryDataByFilter(request, metadata=self._metadata)
|
|
697
899
|
return response.deleted_count
|
|
698
900
|
|
|
699
|
-
async def delete_binary_data_by_ids(self, binary_ids: List[BinaryID]) -> int:
|
|
901
|
+
async def delete_binary_data_by_ids(self, binary_ids: Union[List[BinaryID], List[str]]) -> int:
|
|
700
902
|
"""Filter and delete binary data.
|
|
701
903
|
|
|
702
904
|
::
|
|
@@ -715,36 +917,40 @@ class DataClient:
|
|
|
715
917
|
|
|
716
918
|
for obj in binary_metadata:
|
|
717
919
|
my_ids.append(
|
|
718
|
-
|
|
719
|
-
file_id=obj.metadata.id,
|
|
720
|
-
organization_id=obj.metadata.capture_metadata.organization_id,
|
|
721
|
-
location_id=obj.metadata.capture_metadata.location_id
|
|
722
|
-
)
|
|
920
|
+
obj.metadata.binary_data_id
|
|
723
921
|
)
|
|
724
922
|
|
|
725
923
|
binary_data = await data_client.delete_binary_data_by_ids(my_ids)
|
|
726
924
|
|
|
727
925
|
Args:
|
|
728
|
-
binary_ids (List[viam.proto.app.data.BinaryID]):
|
|
926
|
+
binary_ids (Union[List[~viam.proto.app.data.BinaryID], List[str]]): Binary data ID strings specifying the data to be deleted or
|
|
927
|
+
:class:`BinaryID` objects. Must be non-empty.
|
|
928
|
+
*DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
|
|
929
|
+
list of strings.*
|
|
729
930
|
|
|
730
931
|
Raises:
|
|
731
|
-
GRPCError: If no
|
|
932
|
+
GRPCError: If no binary data ID strings or :class:`BinaryID` objects are provided.
|
|
732
933
|
|
|
733
934
|
Returns:
|
|
734
935
|
int: The number of items deleted.
|
|
735
936
|
|
|
736
937
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#deletebinarydatabyids>`_.
|
|
737
938
|
"""
|
|
738
|
-
request = DeleteBinaryDataByIDsRequest(
|
|
939
|
+
request = DeleteBinaryDataByIDsRequest()
|
|
940
|
+
if len(binary_ids) > 0 and isinstance(binary_ids[0], str):
|
|
941
|
+
binary_data_ids = cast(List[str], binary_ids)
|
|
942
|
+
request = DeleteBinaryDataByIDsRequest(binary_data_ids=binary_data_ids)
|
|
943
|
+
else:
|
|
944
|
+
bin_ids = cast(List[BinaryID], binary_ids)
|
|
945
|
+
request = DeleteBinaryDataByIDsRequest(binary_ids=bin_ids)
|
|
739
946
|
response: DeleteBinaryDataByIDsResponse = await self._data_client.DeleteBinaryDataByIDs(request, metadata=self._metadata)
|
|
740
947
|
return response.deleted_count
|
|
741
948
|
|
|
742
|
-
async def add_tags_to_binary_data_by_ids(self, tags: List[str], binary_ids: List[BinaryID]) -> None:
|
|
949
|
+
async def add_tags_to_binary_data_by_ids(self, tags: List[str], binary_ids: Union[List[BinaryID], List[str]]) -> None:
|
|
743
950
|
"""Add tags to binary data.
|
|
744
951
|
|
|
745
952
|
::
|
|
746
953
|
|
|
747
|
-
from viam.proto.app.data import BinaryID
|
|
748
954
|
from viam.utils import create_filter
|
|
749
955
|
|
|
750
956
|
tags = ["tag1", "tag2"]
|
|
@@ -760,25 +966,30 @@ class DataClient:
|
|
|
760
966
|
|
|
761
967
|
for obj in binary_metadata:
|
|
762
968
|
my_ids.append(
|
|
763
|
-
|
|
764
|
-
file_id=obj.metadata.id,
|
|
765
|
-
organization_id=obj.metadata.capture_metadata.organization_id,
|
|
766
|
-
location_id=obj.metadata.capture_metadata.location_id
|
|
767
|
-
)
|
|
969
|
+
obj.metadata.binary_data_id
|
|
768
970
|
)
|
|
769
971
|
|
|
770
972
|
binary_data = await data_client.add_tags_to_binary_data_by_ids(tags, my_ids)
|
|
771
973
|
|
|
772
974
|
Args:
|
|
773
975
|
tags (List[str]): List of tags to add to specified binary data. Must be non-empty.
|
|
774
|
-
binary_ids (List[viam.app.
|
|
976
|
+
binary_ids (Union[List[~viam.proto.app.data.BinaryID], List[str]]): Binary data ID strings specifying the data to be tagged or
|
|
977
|
+
:class:`BinaryID` objects. Must be non-empty.
|
|
978
|
+
*DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
|
|
979
|
+
list of strings.*
|
|
775
980
|
|
|
776
981
|
Raises:
|
|
777
|
-
GRPCError: If no
|
|
982
|
+
GRPCError: If no binary data ID strings or :class:`BinaryID` objects are provided.
|
|
778
983
|
|
|
779
984
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#addtagstobinarydatabyids>`_.
|
|
780
985
|
"""
|
|
781
|
-
request = AddTagsToBinaryDataByIDsRequest(
|
|
986
|
+
request = AddTagsToBinaryDataByIDsRequest()
|
|
987
|
+
if len(binary_ids) > 0 and isinstance(binary_ids[0], str):
|
|
988
|
+
binary_data_ids = cast(List[str], binary_ids)
|
|
989
|
+
request = AddTagsToBinaryDataByIDsRequest(binary_data_ids=binary_data_ids, tags=tags)
|
|
990
|
+
else:
|
|
991
|
+
bin_ids = cast(List[BinaryID], binary_ids)
|
|
992
|
+
request = AddTagsToBinaryDataByIDsRequest(binary_ids=bin_ids, tags=tags)
|
|
782
993
|
await self._data_client.AddTagsToBinaryDataByIDs(request, metadata=self._metadata)
|
|
783
994
|
|
|
784
995
|
async def add_tags_to_binary_data_by_filter(self, tags: List[str], filter: Optional[Filter] = None) -> None:
|
|
@@ -794,8 +1005,7 @@ class DataClient:
|
|
|
794
1005
|
|
|
795
1006
|
Args:
|
|
796
1007
|
tags (List[str]): List of tags to add to specified binary data. Must be non-empty.
|
|
797
|
-
filter (viam.proto.app.data.Filter):
|
|
798
|
-
tagged.
|
|
1008
|
+
filter (~viam.proto.app.data.Filter): Specifies binary data to tag. If none is provided, tags all data.
|
|
799
1009
|
|
|
800
1010
|
Raises:
|
|
801
1011
|
GRPCError: If no tags are provided.
|
|
@@ -806,12 +1016,11 @@ class DataClient:
|
|
|
806
1016
|
request = AddTagsToBinaryDataByFilterRequest(filter=filter, tags=tags)
|
|
807
1017
|
await self._data_client.AddTagsToBinaryDataByFilter(request, metadata=self._metadata)
|
|
808
1018
|
|
|
809
|
-
async def remove_tags_from_binary_data_by_ids(self, tags: List[str], binary_ids: List[BinaryID]) -> int:
|
|
1019
|
+
async def remove_tags_from_binary_data_by_ids(self, tags: List[str], binary_ids: Union[List[BinaryID], List[str]]) -> int:
|
|
810
1020
|
"""Remove tags from binary data by IDs.
|
|
811
1021
|
|
|
812
1022
|
::
|
|
813
1023
|
|
|
814
|
-
from viam.proto.app.data import BinaryID
|
|
815
1024
|
from viam.utils import create_filter
|
|
816
1025
|
|
|
817
1026
|
tags = ["tag1", "tag2"]
|
|
@@ -828,11 +1037,7 @@ class DataClient:
|
|
|
828
1037
|
|
|
829
1038
|
for obj in binary_metadata:
|
|
830
1039
|
my_ids.append(
|
|
831
|
-
|
|
832
|
-
file_id=obj.metadata.id,
|
|
833
|
-
organization_id=obj.metadata.capture_metadata.organization_id,
|
|
834
|
-
location_id=obj.metadata.capture_metadata.location_id
|
|
835
|
-
)
|
|
1040
|
+
obj.metadata.binary_data_id
|
|
836
1041
|
)
|
|
837
1042
|
|
|
838
1043
|
binary_data = await data_client.remove_tags_from_binary_data_by_ids(
|
|
@@ -840,17 +1045,26 @@ class DataClient:
|
|
|
840
1045
|
|
|
841
1046
|
Args:
|
|
842
1047
|
tags (List[str]): List of tags to remove from specified binary data. Must be non-empty.
|
|
843
|
-
binary_ids (List[BinaryID]):
|
|
1048
|
+
binary_ids (Union[List[~viam.proto.app.data.BinaryID], List[str]]): Binary data ID strings specifying the data to be untagged
|
|
1049
|
+
or `BinaryID` objects. Must be non-empty.
|
|
1050
|
+
*DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
|
|
1051
|
+
list of strings.*
|
|
844
1052
|
|
|
845
1053
|
Raises:
|
|
846
|
-
GRPCError: If no
|
|
1054
|
+
GRPCError: If no binary data ID strings, :class:`BinaryID` objects, or tags are provided.
|
|
847
1055
|
|
|
848
1056
|
Returns:
|
|
849
1057
|
int: The number of tags removed.
|
|
850
1058
|
|
|
851
1059
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#removetagsfrombinarydatabyids>`_.
|
|
852
1060
|
"""
|
|
853
|
-
request = RemoveTagsFromBinaryDataByIDsRequest(
|
|
1061
|
+
request = RemoveTagsFromBinaryDataByIDsRequest(tags=tags)
|
|
1062
|
+
if len(binary_ids) > 0 and isinstance(binary_ids[0], str):
|
|
1063
|
+
binary_data_ids = cast(List[str], binary_ids)
|
|
1064
|
+
request = RemoveTagsFromBinaryDataByIDsRequest(binary_data_ids=binary_data_ids, tags=tags)
|
|
1065
|
+
else:
|
|
1066
|
+
bin_ids = cast(List[BinaryID], binary_ids)
|
|
1067
|
+
request = RemoveTagsFromBinaryDataByIDsRequest(binary_ids=bin_ids, tags=tags)
|
|
854
1068
|
response: RemoveTagsFromBinaryDataByIDsResponse = await self._data_client.RemoveTagsFromBinaryDataByIDs(
|
|
855
1069
|
request, metadata=self._metadata
|
|
856
1070
|
)
|
|
@@ -869,8 +1083,7 @@ class DataClient:
|
|
|
869
1083
|
|
|
870
1084
|
Args:
|
|
871
1085
|
tags (List[str]): List of tags to remove from specified binary data.
|
|
872
|
-
filter (viam.proto.app.data.Filter):
|
|
873
|
-
untagged.
|
|
1086
|
+
filter (~viam.proto.app.data.Filter): Specifies binary data to untag. If none is provided, removes tags from all data.
|
|
874
1087
|
|
|
875
1088
|
Raises:
|
|
876
1089
|
GRPCError: If no tags are provided.
|
|
@@ -898,8 +1111,7 @@ class DataClient:
|
|
|
898
1111
|
tags = await data_client.tags_by_filter(my_filter)
|
|
899
1112
|
|
|
900
1113
|
Args:
|
|
901
|
-
filter (viam.proto.app.data.Filter):
|
|
902
|
-
return.
|
|
1114
|
+
filter (~viam.proto.app.data.Filter): Specifies subset ofdata to retrieve tags from. If none is provided, returns all tags.
|
|
903
1115
|
|
|
904
1116
|
Returns:
|
|
905
1117
|
List[str]: The list of tags.
|
|
@@ -913,7 +1125,7 @@ class DataClient:
|
|
|
913
1125
|
|
|
914
1126
|
async def add_bounding_box_to_image_by_id(
|
|
915
1127
|
self,
|
|
916
|
-
binary_id: BinaryID,
|
|
1128
|
+
binary_id: Union[BinaryID, str],
|
|
917
1129
|
label: str,
|
|
918
1130
|
x_min_normalized: float,
|
|
919
1131
|
y_min_normalized: float,
|
|
@@ -924,16 +1136,8 @@ class DataClient:
|
|
|
924
1136
|
|
|
925
1137
|
::
|
|
926
1138
|
|
|
927
|
-
from viam.proto.app.data import BinaryID
|
|
928
|
-
|
|
929
|
-
MY_BINARY_ID = BinaryID(
|
|
930
|
-
file_id="<YOUR-FILE-ID>",
|
|
931
|
-
organization_id="<YOUR-ORG-ID>",
|
|
932
|
-
location_id="<YOUR-LOCATION-ID>"
|
|
933
|
-
)
|
|
934
|
-
|
|
935
1139
|
bbox_id = await data_client.add_bounding_box_to_image_by_id(
|
|
936
|
-
binary_id=
|
|
1140
|
+
binary_id="<YOUR-BINARY-DATA-ID>",
|
|
937
1141
|
label="label",
|
|
938
1142
|
x_min_normalized=0,
|
|
939
1143
|
y_min_normalized=.1,
|
|
@@ -944,7 +1148,9 @@ class DataClient:
|
|
|
944
1148
|
print(bbox_id)
|
|
945
1149
|
|
|
946
1150
|
Args:
|
|
947
|
-
binary_id (viam.proto.app.data.BinaryID): The ID of the image to add the bounding
|
|
1151
|
+
binary_id (Union[~viam.proto.app.data.BinaryID, str]): The binary data ID or :class:`BinaryID` of the image to add the bounding
|
|
1152
|
+
box to. *DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
|
|
1153
|
+
list of strings.*
|
|
948
1154
|
label (str): A label for the bounding box.
|
|
949
1155
|
x_min_normalized (float): Min X value of the bounding box normalized from 0 to 1.
|
|
950
1156
|
y_min_normalized (float): Min Y value of the bounding box normalized from 0 to 1.
|
|
@@ -959,42 +1165,53 @@ class DataClient:
|
|
|
959
1165
|
|
|
960
1166
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#addboundingboxtoimagebyid>`_.
|
|
961
1167
|
"""
|
|
962
|
-
request = AddBoundingBoxToImageByIDRequest(
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
|
|
1168
|
+
request = AddBoundingBoxToImageByIDRequest()
|
|
1169
|
+
if isinstance(binary_id, str):
|
|
1170
|
+
request = AddBoundingBoxToImageByIDRequest(
|
|
1171
|
+
binary_data_id=binary_id,
|
|
1172
|
+
label=label,
|
|
1173
|
+
x_max_normalized=x_max_normalized,
|
|
1174
|
+
x_min_normalized=x_min_normalized,
|
|
1175
|
+
y_max_normalized=y_max_normalized,
|
|
1176
|
+
y_min_normalized=y_min_normalized,
|
|
1177
|
+
)
|
|
1178
|
+
else:
|
|
1179
|
+
request = AddBoundingBoxToImageByIDRequest(
|
|
1180
|
+
binary_id=binary_id,
|
|
1181
|
+
label=label,
|
|
1182
|
+
x_max_normalized=x_max_normalized,
|
|
1183
|
+
x_min_normalized=x_min_normalized,
|
|
1184
|
+
y_max_normalized=y_max_normalized,
|
|
1185
|
+
y_min_normalized=y_min_normalized,
|
|
1186
|
+
)
|
|
970
1187
|
response: AddBoundingBoxToImageByIDResponse = await self._data_client.AddBoundingBoxToImageByID(request, metadata=self._metadata)
|
|
971
1188
|
return response.bbox_id
|
|
972
1189
|
|
|
973
|
-
async def remove_bounding_box_from_image_by_id(self, bbox_id: str, binary_id: BinaryID) -> None:
|
|
1190
|
+
async def remove_bounding_box_from_image_by_id(self, bbox_id: str, binary_id: Union[BinaryID, str]) -> None:
|
|
974
1191
|
"""Removes a bounding box from an image.
|
|
975
1192
|
|
|
976
1193
|
::
|
|
977
1194
|
|
|
978
|
-
from viam.proto.app.data import BinaryID
|
|
979
|
-
|
|
980
|
-
MY_BINARY_ID = BinaryID(
|
|
981
|
-
file_id=your-file_id,
|
|
982
|
-
organization_id=your-org-id,
|
|
983
|
-
location_id=your-location-id
|
|
984
|
-
)
|
|
985
|
-
|
|
986
1195
|
await data_client.remove_bounding_box_from_image_by_id(
|
|
987
|
-
binary_id=
|
|
1196
|
+
binary_id="<YOUR-BINARY-DATA-ID>",
|
|
988
1197
|
bbox_id="your-bounding-box-id-to-delete"
|
|
989
1198
|
)
|
|
990
1199
|
|
|
991
1200
|
Args:
|
|
992
1201
|
bbox_id (str): The ID of the bounding box to remove.
|
|
993
|
-
binary_id (viam.proto.
|
|
1202
|
+
binary_id (Union[~viam.proto.app.data.BinaryID, str]): The binary data ID or :class:`BinaryID` of the image to remove the
|
|
1203
|
+
bounding box from.
|
|
1204
|
+
*DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
|
|
1205
|
+
list of strings.*
|
|
994
1206
|
|
|
995
1207
|
For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#removeboundingboxfromimagebyid>`_.
|
|
996
1208
|
"""
|
|
997
|
-
request = RemoveBoundingBoxFromImageByIDRequest(
|
|
1209
|
+
request = RemoveBoundingBoxFromImageByIDRequest()
|
|
1210
|
+
if isinstance(binary_id, str):
|
|
1211
|
+
request = RemoveBoundingBoxFromImageByIDRequest(binary_data_id=binary_id, bbox_id=bbox_id)
|
|
1212
|
+
else:
|
|
1213
|
+
request = RemoveBoundingBoxFromImageByIDRequest(binary_id=binary_id, bbox_id=bbox_id)
|
|
1214
|
+
|
|
998
1215
|
await self._data_client.RemoveBoundingBoxFromImageByID(request, metadata=self._metadata)
|
|
999
1216
|
|
|
1000
1217
|
async def bounding_box_labels_by_filter(self, filter: Optional[Filter] = None) -> List[str]:
|
|
@@ -1011,8 +1228,8 @@ class DataClient:
             print(bounding_box_labels)

         Args:
-            filter (viam.proto.app.data.Filter):
-
+            filter (~viam.proto.app.data.Filter): Specifies data to retrieve bounding box labels from. If none is provided, returns labels
+                from all data.

         Returns:
             List[str]: The list of bounding box labels.
@@ -1032,8 +1249,8 @@ class DataClient:
             hostname = await data_client.get_database_connection(organization_id="<YOUR-ORG-ID>")

         Args:
-            organization_id (str):
-
+            organization_id (str): The ID of the organization you'd like to connect to.
+                To find your organization ID, visit the organization settings page.

         Returns:
             str: The hostname of the federated database.
@@ -1056,8 +1273,8 @@ class DataClient:
             )

         Args:
-            organization_id (str): The ID of the organization.
-
+            organization_id (str): The ID of the organization you'd like to configure a database user for.
+                To find your organization ID, visit the organization settings page.
             password (str): The password of the user.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#configuredatabaseuser>`_.
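
The two hunks above only touch docstrings; as orientation, here is a hedged sketch of how the calls fit together (assuming an authenticated ``data_client``; the organization ID and password are placeholders).

```python
# Set or update the password for the organization's federated database user,
# then fetch the hostname that external MQL/SQL clients should connect to.
await data_client.configure_database_user(
    organization_id="<YOUR-ORG-ID>",
    password="<YOUR-NEW-DB-PASSWORD>",  # placeholder
)
hostname = await data_client.get_database_connection(organization_id="<YOUR-ORG-ID>")
print(hostname)  # hostname of the federated database
```
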
@@ -1079,7 +1296,7 @@ class DataClient:
         Args:
             name (str): The name of the dataset being created.
             organization_id (str): The ID of the organization where the dataset is being created.
-
+                To find your organization ID, visit the organization settings page.

         Returns:
             str: The dataset ID of the created dataset.
@@ -1090,6 +1307,32 @@ class DataClient:
         response: CreateDatasetResponse = await self._dataset_client.CreateDataset(request, metadata=self._metadata)
         return response.id

+    async def merge_datasets(self, name: str, organization_id: str, dataset_ids: List[str]) -> str:
+        """Merge multiple datasets into a new dataset.
+
+        ::
+
+            dataset_id = await data_client.merge_datasets(
+                name="<DATASET-NAME>",
+                organization_id="<YOUR-ORG-ID>",
+                dataset_ids=["<YOUR-DATASET-ID-1>", "<YOUR-DATASET-ID-2>"]
+            )
+            print(dataset_id)
+
+        Args:
+            name (str): The name of the dataset being created.
+            organization_id (str): The ID of the organization where the dataset is being created.
+                To find your organization ID, visit the organization settings page.
+            dataset_ids (List[str]): The IDs of the datasets that you would like to merge.
+        Returns:
+            str: The dataset ID of the created dataset.
+
+        For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#mergedatasets>`_.
+        """
+        request = MergeDatasetsRequest(name=name, organization_id=organization_id, dataset_ids=dataset_ids)
+        response: MergeDatasetsResponse = await self._dataset_client.MergeDatasets(request, metadata=self._metadata)
+        return response.dataset_id
+
     async def list_dataset_by_ids(self, ids: List[str]) -> Sequence[Dataset]:
         """Get a list of datasets using their IDs.

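
A short sketch exercising the new ``merge_datasets`` method shown above (assuming an authenticated ``data_client``; the organization ID and dataset names are placeholders).

```python
# Create two datasets and merge them into a third; merge_datasets returns the
# dataset ID of the newly created, merged dataset.
ds_a = await data_client.create_dataset(name="front-cam", organization_id="<YOUR-ORG-ID>")
ds_b = await data_client.create_dataset(name="rear-cam", organization_id="<YOUR-ORG-ID>")

merged_id = await data_client.merge_datasets(
    name="all-cams",
    organization_id="<YOUR-ORG-ID>",
    dataset_ids=[ds_a, ds_b],
)
print(merged_id)
```
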
@@ -1101,9 +1344,12 @@ class DataClient:
             print(datasets)

         Args:
-            ids (List[str]): The IDs of the datasets
-
-
+            ids (List[str]): The IDs of the datasets that you would like to retrieve information about. To retrieve a dataset ID:
+
+                - Navigate to the **DATASETS** tab of the **DATA** page.
+                - Click on the dataset.
+                - Click the **...** menu.
+                - Select **Copy dataset ID**.

         Returns:
             Sequence[Dataset]: The list of datasets.
@@ -1126,8 +1372,8 @@ class DataClient:
             print(datasets)

         Args:
-            organization_id (str): The ID of the organization.
-
+            organization_id (str): The ID of the organization you'd like to retrieve datasets from.
+                To find your organization ID, visit the organization settings page.

         Returns:
             Sequence[Dataset]: The list of datasets in the organization.
@@ -1152,8 +1398,12 @@ class DataClient:
             )

         Args:
-            id (str): The ID of the dataset.
-
+            id (str): The ID of the dataset. To retrieve the dataset ID:
+
+                - Navigate to the **DATASETS** tab of the **DATA** page.
+                - Click on the dataset.
+                - Click the **...** menu.
+                - Select **Copy dataset ID**.
             name (str): The new name of the dataset.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#renamedataset>`_.
@@ -1171,92 +1421,106 @@ class DataClient:
             )

         Args:
-            id (str): The ID of the dataset.
-
+            id (str): The ID of the dataset. To retrieve the dataset ID:
+
+                - Navigate to the **DATASETS** tab of the **DATA** page.
+                - Click on the dataset.
+                - Click the **...** menu.
+                - Select **Copy dataset ID**.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#deletedataset>`_.
         """
         request = DeleteDatasetRequest(id=id)
         await self._dataset_client.DeleteDataset(request, metadata=self._metadata)

-    async def add_binary_data_to_dataset_by_ids(self, binary_ids: List[BinaryID], dataset_id: str) -> None:
+    async def add_binary_data_to_dataset_by_ids(self, binary_ids: Union[List[BinaryID], List[str]], dataset_id: str) -> None:
         """Add the BinaryData to the provided dataset.

         This BinaryData will be tagged with the VIAM_DATASET_{id} label.

         ::

-            from viam.proto.app.data import BinaryID
-
             binary_metadata, count, last = await data_client.binary_data_by_filter(
                 include_binary_data=False
             )

-
+            my_binary_data_ids = []

             for obj in binary_metadata:
-
-
-                        file_id=obj.metadata.id,
-                        organization_id=obj.metadata.capture_metadata.organization_id,
-                        location_id=obj.metadata.capture_metadata.location_id
-                    )
+                my_binary_data_ids.append(
+                    obj.metadata.binary_data_id
                 )

             await data_client.add_binary_data_to_dataset_by_ids(
-                binary_ids=
+                binary_ids=my_binary_data_ids,
                 dataset_id="abcd-1234xyz-8765z-123abc"
             )

         Args:
-            binary_ids (List[BinaryID]):
-                navigate to
-
-
+            binary_ids (List[~viam.proto.app.data.BinaryID]): Unique identifiers for binary data to add to the dataset. To retrieve these IDs,
+                navigate to the DATA page, click on an image, and copy its Binary Data ID from the details tab.
+            dataset_id (str): The ID of the dataset to be added to. To retrieve the dataset ID:
+
+                - Navigate to the **DATASETS** tab of the **DATA** page.
+                - Click on the dataset.
+                - Click the **...** menu.
+                - Select **Copy dataset ID**.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#addbinarydatatodatasetbyids>`_.
         """
-        request = AddBinaryDataToDatasetByIDsRequest(
+        request = AddBinaryDataToDatasetByIDsRequest()
+        if len(binary_ids) > 0 and isinstance(binary_ids[0], str):
+            binary_data_ids = cast(List[str], binary_ids)
+            request = AddBinaryDataToDatasetByIDsRequest(binary_data_ids=binary_data_ids, dataset_id=dataset_id)
+        else:
+            bin_ids = cast(List[BinaryID], binary_ids)
+            request = AddBinaryDataToDatasetByIDsRequest(binary_ids=bin_ids, dataset_id=dataset_id)
         await self._data_client.AddBinaryDataToDatasetByIDs(request, metadata=self._metadata)

-    async def remove_binary_data_from_dataset_by_ids(self, binary_ids: List[BinaryID], dataset_id: str) -> None:
+    async def remove_binary_data_from_dataset_by_ids(self, binary_ids: Union[List[BinaryID], List[str]], dataset_id: str) -> None:
         """Remove the BinaryData from the provided dataset.

         This BinaryData will lose the VIAM_DATASET_{id} tag.

         ::

-            from viam.proto.app.data import BinaryID
-
             binary_metadata, count, last = await data_client.binary_data_by_filter(
                 include_binary_data=False
             )

-
+            my_binary_data_ids = []

             for obj in binary_metadata:
-
-
-                        file_id=obj.metadata.id,
-                        organization_id=obj.metadata.capture_metadata.organization_id,
-                        location_id=obj.metadata.capture_metadata.location_id
-                    )
+                my_binary_data_ids.append(
+                    obj.metadata.binary_data_id
                 )

             await data_client.remove_binary_data_from_dataset_by_ids(
-                binary_ids=
+                binary_ids=my_binary_data_ids,
                 dataset_id="abcd-1234xyz-8765z-123abc"
             )

         Args:
-            binary_ids (List[BinaryID]):
-                navigate to
-
-
+            binary_ids (Union[List[~viam.proto.app.data.BinaryID], List[str]]): Unique identifiers for the binary data to remove from the dataset. To retrieve these IDs,
+                navigate to the DATA page, click on an image and copy its Binary Data ID from the details tab.
+                *DEPRECATED:* :class:`BinaryID` *is deprecated and will be removed in a future release. Instead, pass binary data IDs as a
+                list of strings.*
+            dataset_id (str): The ID of the dataset to be removed from. To retrieve the dataset ID:
+
+                - Navigate to the **DATASETS** tab of the **DATA** page.
+                - Click on the dataset.
+                - Click the **...** menu.
+                - Select **Copy dataset ID**.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#removebinarydatafromdatasetbyids>`_.
         """
-        request = RemoveBinaryDataFromDatasetByIDsRequest(
+        request = RemoveBinaryDataFromDatasetByIDsRequest()
+        if len(binary_ids) > 0 and isinstance(binary_ids[0], str):
+            binary_data_ids = cast(List[str], binary_ids)
+            request = RemoveBinaryDataFromDatasetByIDsRequest(binary_data_ids=binary_data_ids, dataset_id=dataset_id)
+        else:
+            bin_ids = cast(List[BinaryID], binary_ids)
+            request = RemoveBinaryDataFromDatasetByIDsRequest(binary_ids=bin_ids, dataset_id=dataset_id)
         await self._data_client.RemoveBinaryDataFromDatasetByIDs(request, metadata=self._metadata)

     async def binary_data_capture_upload(
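
A sketch of the new string-based flow above: collect ``binary_data_id`` values from a filtered query, then move them in and out of a dataset (assuming an authenticated ``data_client``; the dataset ID is a placeholder).

```python
# Binary data IDs are now plain strings taken from obj.metadata.binary_data_id;
# passing BinaryID objects still works but is deprecated.
binary_metadata, _count, _last = await data_client.binary_data_by_filter(include_binary_data=False)
ids = [obj.metadata.binary_data_id for obj in binary_metadata]

await data_client.add_binary_data_to_dataset_by_ids(
    binary_ids=ids,
    dataset_id="<YOUR-DATASET-ID>",  # placeholder
)

# Later, the same IDs can be removed again.
await data_client.remove_binary_data_from_dataset_by_ids(
    binary_ids=ids,
    dataset_id="<YOUR-DATASET-ID>",
)
```
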
@@ -1269,12 +1533,13 @@ class DataClient:
         file_extension: str,
         method_parameters: Optional[Mapping[str, Any]] = None,
         tags: Optional[List[str]] = None,
+        dataset_ids: Optional[List[str]] = None,
         data_request_times: Optional[Tuple[datetime, datetime]] = None,
     ) -> str:
         """Upload binary sensor data.

-        Upload binary data collected on a robot through a specific component (for example, a motor) along with the relevant metadata
-
+        Upload binary data collected on a robot through a specific component (for example, a motor), along with the relevant metadata.
+        Binary data can be found on the **DATA** page.

         ::

@@ -1290,7 +1555,8 @@ class DataClient:
                 tags=["tag_1", "tag_2"],
                 data_request_times=[time_requested, time_received],
                 file_extension=".jpg",
-                binary_data=b"Encoded image bytes"
+                binary_data=b"Encoded image bytes",
+                dataset_ids=["dataset_1", "dataset_2"]
             )

         Args:
@@ -1299,19 +1565,20 @@ class DataClient:
             component_type (str): Type of the component used to capture the data (for example, "movement_sensor").
             component_name (str): Name of the component used to capture the data.
             method_name (str): Name of the method used to capture the data.
-            file_extension (str): The file extension of binary data including the period
-                The backend
-                or
+            file_extension (str): The file extension of binary data, *including the period*, for example ``.jpg``, ``.png``, ``.pcd``.
+                The backend routes the binary to its corresponding mime type based on this extension. Files with a ``.jpeg``, ``.jpg``,
+                or ``.png`` extension will appear in the **Images** tab.
             method_parameters (Optional[Mapping[str, Any]]): Optional dictionary of method parameters. No longer in active use.
             tags (Optional[List[str]]): Optional list of tags to allow for tag-based data filtering when retrieving data.
+            dataset_ids (Optional[List[str]]): Optional list of datasets to add the data to.
             data_request_times (Optional[Tuple[datetime.datetime, datetime.datetime]]): Optional tuple containing datetime objects
-                denoting the times this data was requested[0] by the robot and received[1] from the appropriate sensor.
+                denoting the times this data was requested ``[0]`` by the robot and received ``[1]`` from the appropriate sensor.

         Raises:
             GRPCError: If an invalid part ID is passed.

         Returns:
-            str: The
+            str: The binary data ID of the uploaded data.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#binarydatacaptureupload>`_.
         """
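
A hedged sketch of a binary capture upload using the new ``dataset_ids`` argument documented above (assuming an authenticated ``data_client``; part and dataset IDs are placeholders).

```python
from datetime import datetime

time_requested = datetime.now()
# ... capture the image here ...
time_received = datetime.now()

binary_data_id = await data_client.binary_data_capture_upload(
    part_id="<YOUR-PART-ID>",               # placeholder
    component_type="camera",
    component_name="front-cam",
    method_name="ReadImage",
    file_extension=".jpg",                  # period included; routed to the Images tab
    binary_data=b"<encoded image bytes>",
    data_request_times=(time_requested, time_received),
    tags=["example"],
    dataset_ids=["<YOUR-DATASET-ID>"],      # new in this release
)
print(binary_data_id)  # the call now returns the binary data ID
```
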
@@ -1335,11 +1602,12 @@ class DataClient:
             type=DataType.DATA_TYPE_BINARY_SENSOR,
             method_parameters=method_parameters,
             tags=tags,
+            dataset_ids=dataset_ids,
         )
         if file_extension:
             metadata.file_extension = file_extension if file_extension[0] == "." else f".{file_extension}"
         response = await self._data_capture_upload(metadata=metadata, sensor_contents=[sensor_contents])
-        return response.
+        return response.binary_data_id

     async def tabular_data_capture_upload(
         self,
@@ -1354,8 +1622,8 @@ class DataClient:
     ) -> str:
         """Upload tabular sensor data.

-        Upload tabular data collected on a robot through a specific component (for example, a motor) along with the relevant metadata
-
+        Upload tabular data collected on a robot through a specific component (for example, a motor), along with the relevant metadata.
+        Tabular data can be found under the **Sensors** tab of the **DATA** page.

         ::

@@ -1380,24 +1648,24 @@ class DataClient:

         Args:
             tabular_data (List[Mapping[str, Any]]): List of the data to be uploaded, represented tabularly as a collection of dictionaries.
-                Must include the key
+                Must include the key ``readings`` for sensors.
             part_id (str): Part ID of the component used to capture the data.
-            component_type (str): Type of the component used to capture the data (for example,
+            component_type (str): Type of the component used to capture the data (for example, ``rdk:component:movement_sensor``).
             component_name (str): Name of the component used to capture the data.
             method_name (str): Name of the method used to capture the data.
-            data_request_times (List[Tuple[datetime.datetime, datetime.datetime]]): List of tuples, each containing
-                denoting the times this data was requested[0] by the robot and received[1] from the appropriate sensor.
-                tabular data and
+            data_request_times (List[Tuple[datetime.datetime, datetime.datetime]]): List of tuples, each containing ``datetime`` objects
+                denoting the times this data was requested ``[0]`` by the robot and received ``[1]`` from the appropriate sensor.
+                Pass a list of tabular data and timestamps with length ``n > 1`` to upload ``n`` datapoints, all with the same metadata.
             method_parameters (Optional[Mapping[str, Any]]): Optional dictionary of method parameters. No longer in active use.
             tags (Optional[List[str]]): Optional list of tags to allow for tag-based data filtering when retrieving data.

         Raises:
             GRPCError: If an invalid part ID is passed.
-            ValueError: If
+            ValueError: If the provided list of `Timestamp` objects has a length that does not match the length of the list of tabular
                 data.

         Returns:
-            str: The
+            str: The file ID of the uploaded data.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#tabulardatacaptureupload>`_.
         """
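
A sketch of a tabular capture upload matching the Args above; note the ``readings`` key and the one-timestamp-tuple-per-row pairing (assuming an authenticated ``data_client``; the part ID is a placeholder).

```python
from datetime import datetime

now = datetime.now()
rows = [
    {"readings": {"temperature_c": 21.4}},
    {"readings": {"temperature_c": 21.6}},
]
# One (requested, received) tuple per row of tabular data.
times = [(now, now), (now, now)]

file_id = await data_client.tabular_data_capture_upload(
    tabular_data=rows,
    part_id="<YOUR-PART-ID>",                        # placeholder
    component_type="rdk:component:movement_sensor",  # example type string from the docstring
    component_name="my-sensor",
    method_name="Readings",
    data_request_times=times,
    tags=["example"],
)
print(file_id)  # file ID of the uploaded data
```
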
@@ -1452,6 +1720,7 @@ class DataClient:
         method_parameters: Optional[Mapping[str, Any]] = None,
         data_request_times: Optional[Tuple[datetime, datetime]] = None,
         tags: Optional[List[str]] = None,
+        dataset_ids: Optional[List[str]] = None,
     ) -> str:
         """Uploads the metadata and contents of streaming binary data.

@@ -1468,26 +1737,28 @@ class DataClient:
                 component_name='left_motor',
                 method_name='IsPowered',
                 data_request_times=[time_requested, time_received],
-                tags=["tag_1", "tag_2"]
+                tags=["tag_1", "tag_2"],
+                dataset_ids=["dataset_1", "dataset_2"]
             )

         Args:
-            data (bytes):
+            data (bytes): The data to be uploaded.
             part_id (str): Part ID of the resource associated with the file.
-            file_ext (str):
+            file_ext (str): File extension type for the data. required for determining MIME type.
             component_type (Optional[str]): Optional type of the component associated with the file (for example, "movement_sensor").
             component_name (Optional[str]): Optional name of the component associated with the file.
             method_name (Optional[str]): Optional name of the method associated with the file.
             method_parameters (Optional[str]): Optional dictionary of the method parameters. No longer in active use.
             data_request_times (Optional[Tuple[datetime.datetime, datetime.datetime]]): Optional tuple containing datetime objects
-                denoting the times this data was requested[0] by the robot and received[1] from the appropriate sensor.
+                denoting the times this data was requested ``[0]`` by the robot and received ``[1]`` from the appropriate sensor.
             tags (Optional[List[str]]): Optional list of tags to allow for tag-based filtering when retrieving data.
+            dataset_ids (Optional[List[str]]): Optional list of datasets to add the data to.

         Raises:
             GRPCError: If an invalid part ID is passed.

         Returns:
-            str: The
+            str: The binary data ID of the uploaded data.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#streamingdatacaptureupload>`_.
         """
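
A sketch of a streaming upload with the new ``dataset_ids`` field from the Args above (assuming an authenticated ``data_client`` and a local file; IDs are placeholders).

```python
# Stream a (potentially large) binary blob; the call returns its binary data ID.
with open("video.mp4", "rb") as f:
    payload = f.read()

binary_data_id = await data_client.streaming_data_capture_upload(
    data=payload,
    part_id="<YOUR-PART-ID>",           # placeholder
    file_ext="mp4",                     # extension drives MIME-type routing
    component_type="camera",
    component_name="front-cam",
    method_name="ReadImage",
    tags=["example"],
    dataset_ids=["<YOUR-DATASET-ID>"],
)
print(binary_data_id)
```
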
@@ -1501,6 +1772,7 @@ class DataClient:
             type=DataType.DATA_TYPE_BINARY_SENSOR,
             file_extension=file_ext if file_ext[0] == "." else f".{file_ext}",
             tags=tags,
+            dataset_ids=dataset_ids,
         )
         sensor_metadata = SensorMetadata(
             time_requested=datetime_to_timestamp(data_request_times[0]) if data_request_times else None,
@@ -1516,7 +1788,7 @@ class DataClient:
         if not response:
             await stream.recv_trailing_metadata() # causes us to throw appropriate gRPC error
             raise TypeError("Response cannot be empty")
-        return response.
+        return response.binary_data_id

     async def file_upload(
         self,
@@ -1529,11 +1801,12 @@ class DataClient:
         method_parameters: Optional[Mapping[str, Any]] = None,
         file_extension: Optional[str] = None,
         tags: Optional[List[str]] = None,
+        dataset_ids: Optional[List[str]] = None,
     ) -> str:
         """Upload arbitrary file data.

-        Upload file data that may be stored on a robot along with the relevant metadata
-
+        Upload file data that may be stored on a robot along with the relevant metadata. File data can be found in the
+        **Files** tab of the **DATA** page.

         ::

@@ -1542,7 +1815,8 @@ class DataClient:
                 part_id="INSERT YOUR PART ID",
                 tags=["tag_1", "tag_2"],
                 file_name="your-file",
-                file_extension=".txt"
+                file_extension=".txt",
+                dataset_ids=["dataset_1", "dataset_2"]
             )

         Args:
@@ -1551,18 +1825,19 @@ class DataClient:
             component_type (Optional[str]): Optional type of the component associated with the file (for example, "movement_sensor").
             component_name (Optional[str]): Optional name of the component associated with the file.
             method_name (Optional[str]): Optional name of the method associated with the file.
-            file_name (Optional[str]): Optional name of the file. The empty string "" will be assigned as the file name if one isn't
+            file_name (Optional[str]): Optional name of the file. The empty string ``""`` will be assigned as the file name if one isn't
                 provided.
             method_parameters (Optional[str]): Optional dictionary of the method parameters. No longer in active use.
-            file_extension (Optional[str]): Optional file extension. The empty string "" will be assigned as the file extension if one
-                provided. Files with a
+            file_extension (Optional[str]): Optional file extension. The empty string ``""`` will be assigned as the file extension if one
+                isn't provided. Files with a ``.jpeg``, ``.jpg``, or ``.png`` extension will be saved to the **Images** tab.
             tags (Optional[List[str]]): Optional list of tags to allow for tag-based filtering when retrieving data.
+            dataset_ids (Optional[List[str]]): Optional list of datasets to add the data to.

         Raises:
             GRPCError: If an invalid part ID is passed.

         Returns:
-            str: ID of the new file.
+            str: Binary data ID of the new file.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#fileupload>`_.
         """
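
A sketch of ``file_upload`` with the clarified arguments above (assuming an authenticated ``data_client``; the part and dataset IDs are placeholders).

```python
binary_data_id = await data_client.file_upload(
    data=b"hello from a robot log",      # raw bytes to store
    part_id="<YOUR-PART-ID>",            # placeholder
    file_name="boot-log",
    file_extension=".txt",               # "" is used if omitted
    tags=["example"],
    dataset_ids=["<YOUR-DATASET-ID>"],   # new optional argument
)
print(binary_data_id)  # binary data ID of the new file
```
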
@@ -1576,9 +1851,10 @@ class DataClient:
             method_parameters=method_parameters,
             file_extension=file_extension if file_extension else "",
             tags=tags,
+            dataset_ids=dataset_ids,
         )
         response: FileUploadResponse = await self._file_upload(metadata=metadata, file_contents=FileData(data=data))
-        return response.
+        return response.binary_data_id

     async def file_upload_from_path(
         self,
@@ -1589,17 +1865,19 @@ class DataClient:
         method_name: Optional[str] = None,
         method_parameters: Optional[Mapping[str, Any]] = None,
         tags: Optional[List[str]] = None,
+        dataset_ids: Optional[List[str]] = None,
     ) -> str:
         """Upload arbitrary file data.

-        Upload file data that may be stored on a robot along with the relevant metadata
-
+        Upload file data that may be stored on a robot along with the relevant metadata. File data can be found in the
+        **Files** tab of the **DATA** page.

         ::

             file_id = await data_client.file_upload_from_path(
                 part_id="INSERT YOUR PART ID",
                 tags=["tag_1", "tag_2"],
+                dataset_ids=["dataset_1", "dataset_2"],
                 filepath="/Users/<your-username>/<your-directory>/<your-file.txt>"
             )

@@ -1611,23 +1889,22 @@ class DataClient:
             method_name (Optional[str]): Optional name of the method associated with the file.
             method_parameters (Optional[str]): Optional dictionary of the method parameters. No longer in active use.
             tags (Optional[List[str]]): Optional list of tags to allow for tag-based filtering when retrieving data.
-
+            dataset_ids (Optional[List[str]]): Optional list of datasets to add the data to.

         Raises:
             GRPCError: If an invalid part ID is passed.
             FileNotFoundError: If the provided filepath is not found.

         Returns:
-            str: ID of the new file.
+            str: Binary data ID of the new file.

         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#fileuploadfrompath>`_.
         """
         path = Path(filepath)
         file_name = path.stem
         file_extension = path.suffix if path.suffix != "" else None
-
-
-        f.close()
+        with open(filepath, "rb") as f:
+            data = f.read()

         metadata = UploadMetadata(
             part_id=part_id,
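
A sketch of the path-based upload after the fix above (the file is now read through a ``with open(...)`` block), assuming an authenticated ``data_client``, a placeholder part ID, and a local text file.

```python
binary_data_id = await data_client.file_upload_from_path(
    part_id="<YOUR-PART-ID>",            # placeholder
    filepath="/tmp/notes.txt",           # file name and extension are derived from the path
    tags=["example"],
    dataset_ids=["<YOUR-DATASET-ID>"],   # new optional argument
)
print(binary_data_id)  # binary data ID of the new file
```
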
@@ -1639,9 +1916,10 @@ class DataClient:
             method_parameters=method_parameters,
             file_extension=file_extension if file_extension else "",
             tags=tags,
+            dataset_ids=dataset_ids,
         )
         response: FileUploadResponse = await self._file_upload(metadata=metadata, file_contents=FileData(data=data if data else bytes()))
-        return response.
+        return response.binary_data_id

     async def _file_upload(self, metadata: UploadMetadata, file_contents: FileData) -> FileUploadResponse:
         request_metadata = FileUploadRequest(metadata=metadata)
@@ -1656,6 +1934,265 @@ class DataClient:
             raise TypeError("Response cannot be empty")
         return response

+    async def get_data_pipeline(self, id: str) -> DataPipeline:
+        """Get a data pipeline by its ID.
+
+        ::
+
+            data_pipeline = await data_client.get_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>")
+
+        Args:
+            id (str): The ID of the data pipeline to get.
+
+        Returns:
+            DataPipeline: The data pipeline with the given ID.
+        """
+        request = GetDataPipelineRequest(id=id)
+        response: GetDataPipelineResponse = await self._data_pipelines_client.GetDataPipeline(request, metadata=self._metadata)
+        return DataClient.DataPipeline.from_proto(response.data_pipeline)
+
+    async def list_data_pipelines(self, organization_id: str) -> List[DataPipeline]:
+        """List all of the data pipelines for an organization.
+
+        ::
+
+            data_pipelines = await data_client.list_data_pipelines(organization_id="<YOUR-ORGANIZATION-ID>")
+
+        Args:
+            organization_id (str): The ID of the organization that owns the pipelines.
+                You can obtain your organization ID from the organization settings page.
+
+        Returns:
+            List[DataPipeline]: A list of all of the data pipelines for the given organization.
+        """
+        request = ListDataPipelinesRequest(organization_id=organization_id)
+        response: ListDataPipelinesResponse = await self._data_pipelines_client.ListDataPipelines(request, metadata=self._metadata)
+        return [DataClient.DataPipeline.from_proto(pipeline) for pipeline in response.data_pipelines]
+
+    async def create_data_pipeline(
+        self,
+        organization_id: str,
+        name: str,
+        mql_binary: List[Dict[str, Any]],
+        schedule: str,
+        enable_backfill: bool,
+        data_source_type: TabularDataSourceType.ValueType = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+    ) -> str:
+        """Create a new data pipeline.
+
+        ::
+
+            data_pipeline_id = await data_client.create_data_pipeline(
+                organization_id="<YOUR-ORGANIZATION-ID>",
+                name="<YOUR-PIPELINE-NAME>",
+                mql_binary=[<YOUR-MQL-PIPELINE-AGGREGATION>],
+                schedule="<YOUR-SCHEDULE>",
+                enable_backfill=False,
+                data_source_type=TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+            )
+
+        Args:
+            organization_id (str): The ID of the organization that will own the pipeline.
+                You can obtain your organization ID from the organization settings page.
+            name (str): The name of the pipeline.
+            mql_binary (List[Dict[str, Any]]):The MQL pipeline to run, as a list of MongoDB aggregation pipeline stages.
+            schedule (str): A cron expression representing the expected execution schedule in UTC (note this also
+                defines the input time window; an hourly schedule would process 1 hour of data at a time).
+            enable_backfill (bool): When true, pipeline runs will be scheduled for the organization's past data.
+            data_source_type (TabularDataSourceType): The type of data source to use for the pipeline.
+                Defaults to TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD.
+
+        Returns:
+            str: The ID of the newly created pipeline.
+        """
+        binary: List[bytes] = [bson.encode(query) for query in mql_binary]
+        request = CreateDataPipelineRequest(
+            organization_id=organization_id,
+            name=name,
+            mql_binary=binary,
+            schedule=schedule,
+            enable_backfill=enable_backfill,
+            data_source_type=data_source_type,
+        )
+        response: CreateDataPipelineResponse = await self._data_pipelines_client.CreateDataPipeline(request, metadata=self._metadata)
+        return response.id
+
+    async def rename_data_pipeline(self, id: str, name: str) -> None:
+        """Rename a data pipeline by its ID.
+        ::
+
+            await data_client.rename_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>", name="<YOUR-NEW-NAME>")
+
+        Args:
+            id (str): The ID of the data pipeline to rename.
+            name (str): The new name of the data pipeline.
+        """
+        if not id or not name:
+            raise ValueError("id and name are required")
+        request = RenameDataPipelineRequest(id=id, name=name)
+        await self._data_pipelines_client.RenameDataPipeline(request, metadata=self._metadata)
+
+    async def delete_data_pipeline(self, id: str) -> None:
+        """Delete a data pipeline by its ID.
+
+        ::
+
+            await data_client.delete_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>")
+
+        Args:
+            id (str): The ID of the data pipeline to delete.
+        """
+        request = DeleteDataPipelineRequest(id=id)
+        await self._data_pipelines_client.DeleteDataPipeline(request, metadata=self._metadata)
+
+    async def list_data_pipeline_runs(self, id: str, page_size: int = 10) -> DataPipelineRunsPage:
+        """List all of the data pipeline runs for a data pipeline.
+
+        ::
+
+            data_pipeline_runs = await data_client.list_data_pipeline_runs(id="<YOUR-DATA-PIPELINE-ID>")
+            while len(data_pipeline_runs.runs) > 0:
+                data_pipeline_runs = await data_pipeline_runs.next_page()
+
+        Args:
+            id (str): The ID of the pipeline to list runs for
+            page_size (int): The number of runs to return per page. Defaults to 10.
+
+        Returns:
+            DataPipelineRunsPage: A page of data pipeline runs with pagination support
+        """
+        return await self._list_data_pipeline_runs(id, page_size)
+
+    async def _list_data_pipeline_runs(self, id: str, page_size: int, page_token: str = "") -> DataPipelineRunsPage:
+        """Internal method to list data pipeline runs with pagination.
+
+        Args:
+            id (str): The ID of the pipeline to list runs for
+            page_size (int): The number of runs to return per page
+            page_token (str): The token to use to get the next page of results
+
+        Returns:
+            DataPipelineRunsPage: A page of data pipeline runs with pagination support
+        """
+        request = ListDataPipelineRunsRequest(id=id, page_size=page_size, page_token=page_token)
+        response: ListDataPipelineRunsResponse = await self._data_pipelines_client.ListDataPipelineRuns(request, metadata=self._metadata)
+        return DataClient.DataPipelineRunsPage.from_proto(response, self, page_size)
+
+    async def create_index(
+        self,
+        organization_id: str,
+        collection_type: IndexableCollection.ValueType,
+        index_spec: Dict[str, Any],
+        pipeline_name: Optional[str] = None,
+    ) -> None:
+        """Starts a custom index build.
+
+        Args:
+            organization_id (str): The ID of the organization that owns the data.
+                To find your organization ID, visit the organization settings page.
+            collection_type (IndexableCollection.ValueType): The type of collection the index is on.
+            index_spec (List[Dict[str, Any]]): The MongoDB index specification defined in JSON format.
+            pipeline_name (Optional[str]): The name of the pipeline if the collection type is PIPELINE_SINK.
+
+        For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#createindex>`_.
+        """
+        index_spec_bytes = [bson.encode(index_spec)]
+        request = CreateIndexRequest(
+            organization_id=organization_id,
+            collection_type=collection_type,
+            index_spec=index_spec_bytes,
+            pipeline_name=pipeline_name,
+        )
+        await self._data_client.CreateIndex(request, metadata=self._metadata)
+
+    async def list_indexes(
+        self,
+        organization_id: str,
+        collection_type: IndexableCollection.ValueType,
+        pipeline_name: Optional[str] = None,
+    ) -> Sequence[Index]:
+        """Returns all the indexes for a given collection.
+
+        Args:
+            organization_id (str): The ID of the organization that owns the data.
+                To find your organization ID, visit the organization settings page.
+            collection_type (IndexableCollection.ValueType): The type of collection the index is on.
+            pipeline_name (Optional[str]): The name of the pipeline if the collection type is PIPELINE_SINK.
+
+        Returns:
+            List[Index]: A list of indexes.
+
+        For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#listindexes>`_.
+        """
+        request = ListIndexesRequest(
+            organization_id=organization_id,
+            collection_type=collection_type,
+            pipeline_name=pipeline_name,
+        )
+        response: ListIndexesResponse = await self._data_client.ListIndexes(request, metadata=self._metadata)
+        return response.indexes
+
+    async def delete_index(
+        self,
+        organization_id: str,
+        collection_type: IndexableCollection.ValueType,
+        index_name: str,
+        pipeline_name: Optional[str] = None,
+    ) -> None:
+        """Drops the specified custom index from a collection.
+
+        Args:
+            organization_id (str): The ID of the organization that owns the data.
+                To find your organization ID, visit the organization settings page.
+            collection_type (IndexableCollection.ValueType): The type of collection the index is on.
+            index_name (str): The name of the index to delete.
+            pipeline_name (Optional[str]): The name of the pipeline if the collection type is PIPELINE_SINK.
+
+        For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#deleteindex>`_.
+        """
+        request = DeleteIndexRequest(
+            organization_id=organization_id,
+            collection_type=collection_type,
+            index_name=index_name,
+            pipeline_name=pipeline_name,
+        )
+        await self._data_client.DeleteIndex(request, metadata=self._metadata)
+
+    async def create_binary_data_signed_url(
+        self,
+        binary_data_id: str,
+        expiration_minutes: Optional[int] = None,
+    ) -> Tuple[str, datetime]:
+        """Create a signed URL for binary data.
+
+        ::
+
+            signed_url, expires_at = await data_client.create_binary_data_signed_url(
+                binary_data_id="<YOUR-BINARY-DATA-ID>",
+                expiration_minutes=60
+            )
+
+            print(f"Signed URL: {signed_url}")
+            print(f"Expires at: {expires_at}")
+
+        Args:
+            binary_data_id (str): The binary data ID of the file to create a signed URL for.
+            expiration_minutes (Optional[int]): Expiration time in minutes. Defaults to 15 minutes if not specified.
+                Maximum allowed is 10080 minutes (7 days).
+
+        Returns:
+            Tuple[str, datetime]: A tuple containing:
+                - ``signed_url`` (*str*): The signed URL for the binary data file.
+                - ``expires_at`` (*datetime*): The expiration time of the signed URL token.
+
+        For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#createbinarydatasignedurl>`_.
+        """
+        request = CreateBinaryDataSignedURLRequest(binary_data_id=binary_data_id)
+        if expiration_minutes is not None:
+            request.expiration_minutes = expiration_minutes
+        response: CreateBinaryDataSignedURLResponse = await self._data_client.CreateBinaryDataSignedURL(request, metadata=self._metadata)
+        return response.signed_url, response.expires_at.ToDatetime()
+
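
To tie the new methods above together, a hedged end-to-end sketch: create a pipeline from a list of MQL stages, page through its runs, and mint a short-lived download URL for one binary object (assuming an authenticated ``data_client``; all IDs, names, the cron schedule, and the aggregation are placeholders).

```python
# A placeholder MQL aggregation with a single $match stage; the client
# bson-encodes each stage before sending it, as shown in the diff above.
mql_stages = [{"$match": {"component_name": "front-cam"}}]

pipeline_id = await data_client.create_data_pipeline(
    organization_id="<YOUR-ORG-ID>",
    name="hourly-front-cam-summary",  # placeholder name
    mql_binary=mql_stages,            # plain dicts, one per aggregation stage
    schedule="0 * * * *",             # example cron expression: hourly, in UTC
    enable_backfill=False,
)

# Page through the runs of the new pipeline until an empty page is returned.
page = await data_client.list_data_pipeline_runs(id=pipeline_id, page_size=10)
while len(page.runs) > 0:
    for run in page.runs:
        print(run)
    page = await page.next_page()

# Mint a signed URL, valid for one hour, for a stored binary object.
signed_url, expires_at = await data_client.create_binary_data_signed_url(
    binary_data_id="<YOUR-BINARY-DATA-ID>",
    expiration_minutes=60,
)
print(signed_url, expires_at)
```
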
     @staticmethod
     def create_filter(
         component_name: Optional[str] = None,