viam-sdk 0.46.0__py3-none-linux_armv6l.whl → 0.48.0__py3-none-linux_armv6l.whl
This diff compares the publicly available content of the two package versions as published to their respective public registries; it is provided for informational purposes only.
Potentially problematic release: this version of viam-sdk might be problematic.
- viam/app/data_client.py +280 -3
- viam/components/arm/arm.py +1 -1
- viam/components/gripper/__init__.py +2 -0
- viam/components/gripper/client.py +15 -2
- viam/components/gripper/gripper.py +37 -1
- viam/components/gripper/service.py +19 -3
- viam/gen/app/datapipelines/v1/data_pipelines_grpc.py +1 -1
- viam/gen/app/datapipelines/v1/data_pipelines_pb2.py +42 -41
- viam/gen/app/datapipelines/v1/data_pipelines_pb2.pyi +40 -7
- viam/gen/app/mltraining/v1/ml_training_pb2.py +30 -24
- viam/gen/app/mltraining/v1/ml_training_pb2.pyi +21 -2
- viam/gen/app/v1/app_grpc.py +9 -1
- viam/gen/app/v1/app_pb2.py +523 -519
- viam/gen/app/v1/app_pb2.pyi +56 -6
- viam/gen/component/gripper/v1/gripper_grpc.py +10 -2
- viam/gen/component/gripper/v1/gripper_pb2.py +4 -2
- viam/gen/provisioning/v1/provisioning_grpc.py +10 -2
- viam/gen/provisioning/v1/provisioning_pb2.py +29 -25
- viam/gen/provisioning/v1/provisioning_pb2.pyi +20 -2
- viam/module/module.py +67 -7
- viam/proto/app/__init__.py +4 -0
- viam/proto/provisioning/__init__.py +4 -0
- viam/resource/easy_resource.py +4 -4
- viam/resource/registry.py +2 -2
- viam/resource/types.py +2 -2
- viam/rpc/server.py +24 -10
- viam/version_metadata.py +2 -2
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.48.0.dist-info}/METADATA +1 -1
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.48.0.dist-info}/RECORD +31 -31
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.48.0.dist-info}/WHEEL +0 -0
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.48.0.dist-info}/licenses/LICENSE +0 -0
viam/app/data_client.py
CHANGED
@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union, c
 import bson
 from google.protobuf.struct_pb2 import Struct
 from grpclib.client import Channel, Stream
+from typing_extensions import Self
 
 from viam import logging
 from viam.proto.app.data import (
@@ -54,9 +55,30 @@ from viam.proto.app.data import (
     TabularDataByMQLResponse,
     TabularDataBySQLRequest,
     TabularDataBySQLResponse,
+    TabularDataSource,
+    TabularDataSourceType,
     TagsByFilterRequest,
     TagsByFilterResponse,
 )
+from viam.proto.app.datapipelines import (
+    CreateDataPipelineRequest,
+    CreateDataPipelineResponse,
+    DataPipelineRunStatus,
+    DataPipelinesServiceStub,
+    DeleteDataPipelineRequest,
+    GetDataPipelineRequest,
+    GetDataPipelineResponse,
+    ListDataPipelineRunsRequest,
+    ListDataPipelineRunsResponse,
+    ListDataPipelinesRequest,
+    ListDataPipelinesResponse,
+)
+from viam.proto.app.datapipelines import (
+    DataPipeline as ProtoDataPipeline,
+)
+from viam.proto.app.datapipelines import (
+    DataPipelineRun as ProtoDataPipelineRun,
+)
 from viam.proto.app.dataset import (
     CreateDatasetRequest,
     CreateDatasetResponse,
@@ -220,6 +242,125 @@ class DataClient:
             )
             return self.resource_api
 
+    @dataclass
+    class DataPipeline:
+        """Represents a data pipeline and its associated metadata."""
+
+        id: str
+        """The ID of the data pipeline"""
+
+        organization_id: str
+        """The organization ID"""
+
+        name: str
+        """The name of the data pipeline"""
+
+        mql_binary: List[Dict[str, Any]]
+        """The MQL binary of the data pipeline"""
+
+        schedule: str
+        """The schedule of the data pipeline"""
+
+        created_on: datetime
+        """The time the data pipeline was created"""
+
+        updated_at: datetime
+        """The time the data pipeline was last updated"""
+
+        enabled: bool
+        """Whether the data pipeline is enabled"""
+
+        data_source_type: TabularDataSourceType.ValueType
+        """The type of data source for the data pipeline"""
+
+        @classmethod
+        def from_proto(cls, data_pipeline: ProtoDataPipeline) -> Self:
+            return cls(
+                id=data_pipeline.id,
+                organization_id=data_pipeline.organization_id,
+                name=data_pipeline.name,
+                mql_binary=[bson.decode(bson_bytes) for bson_bytes in data_pipeline.mql_binary],
+                schedule=data_pipeline.schedule,
+                created_on=data_pipeline.created_on.ToDatetime(),
+                updated_at=data_pipeline.updated_at.ToDatetime(),
+                enabled=data_pipeline.enabled,
+                data_source_type=data_pipeline.data_source_type,
+            )
+
+    @dataclass
+    class DataPipelineRun:
+        """Represents a data pipeline run and its associated metadata."""
+
+        id: str
+        """The ID of the data pipeline run"""
+
+        status: DataPipelineRunStatus.ValueType
+        """The status of the data pipeline run"""
+
+        start_time: datetime
+        """The time the data pipeline run started"""
+
+        end_time: datetime
+        """The time the data pipeline run ended"""
+
+        data_start_time: datetime
+        """The start time of the data that was processed in the run."""
+        data_end_time: datetime
+        """The end time of the data that was processed in the run."""
+
+        @classmethod
+        def from_proto(cls, data_pipeline_run: ProtoDataPipelineRun) -> Self:
+            return cls(
+                id=data_pipeline_run.id,
+                status=data_pipeline_run.status,
+                start_time=data_pipeline_run.start_time.ToDatetime(),
+                end_time=data_pipeline_run.end_time.ToDatetime(),
+                data_start_time=data_pipeline_run.data_start_time.ToDatetime(),
+                data_end_time=data_pipeline_run.data_end_time.ToDatetime(),
+            )
+
+    @dataclass
+    class DataPipelineRunsPage:
+        """Represents a page of data pipeline runs and provides pagination functionality."""
+
+        _client: "DataClient"
+        """The data client used to make API calls"""
+
+        pipeline_id: str
+        """The ID of the pipeline these runs belong to"""
+
+        page_size: int
+        """The number of runs per page"""
+
+        runs: List["DataClient.DataPipelineRun"]
+        """The list of runs in this page"""
+
+        next_page_token: str
+        """The token to use to get the next page of results"""
+
+        async def next_page(self) -> "DataClient.DataPipelineRunsPage":
+            """Get the next page of data pipeline runs.
+
+            Returns:
+                DataPipelineRunsPage: The next page of runs, or an empty page if there are no more runs
+            """
+            if not self.next_page_token:
+                # no token, return empty next page
+                return DataClient.DataPipelineRunsPage(
+                    _client=self._client, pipeline_id=self.pipeline_id, page_size=self.page_size, runs=[], next_page_token=""
+                )
+            return await self._client._list_data_pipeline_runs(self.pipeline_id, self.page_size, self.next_page_token)
+
+        @classmethod
+        def from_proto(cls, data_pipeline_runs_page: ListDataPipelineRunsResponse, client: "DataClient", page_size: int) -> Self:
+            return cls(
+                _client=client,
+                pipeline_id=data_pipeline_runs_page.pipeline_id,
+                page_size=page_size,
+                runs=[DataClient.DataPipelineRun.from_proto(run) for run in data_pipeline_runs_page.runs],
+                next_page_token=data_pipeline_runs_page.next_page_token,
+            )
+
     def __init__(self, channel: Channel, metadata: Mapping[str, str]):
         """Create a :class:`DataClient` that maintains a connection to app.
 
@@ -231,11 +372,13 @@ class DataClient:
         self._data_client = DataServiceStub(channel)
         self._data_sync_client = DataSyncServiceStub(channel)
         self._dataset_client = DatasetServiceStub(channel)
+        self._data_pipelines_client = DataPipelinesServiceStub(channel)
         self._channel = channel
 
     _data_client: DataServiceStub
     _data_sync_client: DataSyncServiceStub
     _dataset_client: DatasetServiceStub
+    _data_pipelines_client: DataPipelinesServiceStub
     _metadata: Mapping[str, str]
     _channel: Channel
 
@@ -345,7 +488,12 @@ class DataClient:
 
     @_alias_param("query", param_alias="mql_binary")
     async def tabular_data_by_mql(
-        self,
+        self,
+        organization_id: str,
+        query: Union[List[bytes], List[Dict[str, Any]]],
+        use_recent_data: Optional[bool] = None,
+        tabular_data_source_type: TabularDataSourceType.ValueType = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+        pipeline_id: Optional[str] = None,
     ) -> List[Dict[str, Union[ValueTypes, datetime]]]:
         """Obtain unified tabular data and metadata, queried with MQL.
 
@@ -366,7 +514,12 @@ class DataClient:
             query (Union[List[bytes], List[Dict[str, Any]]]): The MQL query to run, as a list of MongoDB aggregation pipeline stages.
                 Each stage can be provided as either a dictionary or raw BSON bytes, but support for bytes will be removed in the
                 future, so prefer the dictionary option.
-            use_recent_data (bool): Whether to query blob storage or your recent data store. Defaults to ``False
+            use_recent_data (bool): Whether to query blob storage or your recent data store. Defaults to ``False``..
+                Deprecated, use `tabular_data_source_type` instead.
+            tabular_data_source_type (viam.proto.app.data.TabularDataSourceType): The data source to query.
+                Defaults to `TABULAR_DATA_SOURCE_TYPE_STANDARD`.
+            pipeline_id (str): The ID of the data pipeline to query. Defaults to `None`.
+                Required if `tabular_data_source_type` is `TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK`.
 
         Returns:
             List[Dict[str, Union[ValueTypes, datetime]]]: An array of decoded BSON data objects.
@@ -374,7 +527,10 @@ class DataClient:
         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#tabulardatabymql>`_.
         """
         binary: List[bytes] = [bson.encode(query) for query in query] if isinstance(query[0], dict) else query  # type: ignore
-
+        data_source = TabularDataSource(type=tabular_data_source_type, pipeline_id=pipeline_id)
+        if use_recent_data:
+            data_source.type = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE
+        request = TabularDataByMQLRequest(organization_id=organization_id, mql_binary=binary, data_source=data_source)
         response: TabularDataByMQLResponse = await self._data_client.TabularDataByMQL(request, metadata=self._metadata)
         return [bson.decode(bson_bytes) for bson_bytes in response.raw_data]
 
@@ -1696,6 +1852,127 @@ class DataClient:
             raise TypeError("Response cannot be empty")
         return response
 
+    async def get_data_pipeline(self, id: str) -> DataPipeline:
+        """Get a data pipeline by its ID.
+
+        ::
+
+            data_pipeline = await data_client.get_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>")
+
+        Args:
+            id (str): The ID of the data pipeline to get.
+
+        Returns:
+            DataPipeline: The data pipeline with the given ID.
+        """
+        request = GetDataPipelineRequest(id=id)
+        response: GetDataPipelineResponse = await self._data_pipelines_client.GetDataPipeline(request, metadata=self._metadata)
+        return DataClient.DataPipeline.from_proto(response.data_pipeline)
+
+    async def list_data_pipelines(self, organization_id: str) -> List[DataPipeline]:
+        """List all of the data pipelines for an organization.
+
+        ::
+
+            data_pipelines = await data_client.list_data_pipelines(organization_id="<YOUR-ORGANIZATION-ID>")
+
+        Args:
+            organization_id (str): The ID of the organization that owns the pipelines.
+                You can obtain your organization ID from the Viam app's organization settings page.
+
+        Returns:
+            List[DataPipeline]: A list of all of the data pipelines for the given organization.
+        """
+        request = ListDataPipelinesRequest(organization_id=organization_id)
+        response: ListDataPipelinesResponse = await self._data_pipelines_client.ListDataPipelines(request, metadata=self._metadata)
+        return [DataClient.DataPipeline.from_proto(pipeline) for pipeline in response.data_pipelines]
+
+    async def create_data_pipeline(
+        self,
+        organization_id: str,
+        name: str,
+        mql_binary: List[Dict[str, Any]],
+        schedule: str,
+        data_source_type: TabularDataSourceType.ValueType = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+    ) -> str:
+        """Create a new data pipeline.
+
+        ::
+
+            data_pipeline_id = await data_client.create_data_pipeline(
+                organization_id="<YOUR-ORGANIZATION-ID>",
+                name="<YOUR-PIPELINE-NAME>",
+                mql_binary=[<YOUR-MQL-PIPELINE-AGGREGATION>],
+                schedule="<YOUR-SCHEDULE>",
+                data_source_type=TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+            )
+
+        Args:
+            organization_id (str): The ID of the organization that will own the pipeline.
+                You can obtain your organization ID from the Viam app's organization settings page.
+            name (str): The name of the pipeline.
+            mql_binary (List[Dict[str, Any]]):The MQL pipeline to run, as a list of MongoDB aggregation pipeline stages.
+            schedule (str): A cron expression representing the expected execution schedule in UTC (note this also
+                defines the input time window; an hourly schedule would process 1 hour of data at a time).
+            data_source_type (TabularDataSourceType): The type of data source to use for the pipeline.
+                Defaults to TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD.
+
+        Returns:
+            str: The ID of the newly created pipeline.
+        """
+        binary: List[bytes] = [bson.encode(query) for query in mql_binary]
+        request = CreateDataPipelineRequest(
+            organization_id=organization_id, name=name, mql_binary=binary, schedule=schedule, data_source_type=data_source_type
+        )
+        response: CreateDataPipelineResponse = await self._data_pipelines_client.CreateDataPipeline(request, metadata=self._metadata)
+        return response.id
+
+    async def delete_data_pipeline(self, id: str) -> None:
+        """Delete a data pipeline by its ID.
+
+        ::
+
+            await data_client.delete_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>")
+
+        Args:
+            id (str): The ID of the data pipeline to delete.
+        """
+        request = DeleteDataPipelineRequest(id=id)
+        await self._data_pipelines_client.DeleteDataPipeline(request, metadata=self._metadata)
+
+    async def list_data_pipeline_runs(self, id: str, page_size: int = 10) -> DataPipelineRunsPage:
+        """List all of the data pipeline runs for a data pipeline.
+
+        ::
+
+            data_pipeline_runs = await data_client.list_data_pipeline_runs(id="<YOUR-DATA-PIPELINE-ID>")
+            while len(data_pipeline_runs.runs) > 0:
+                data_pipeline_runs = await data_pipeline_runs.next_page()
+
+        Args:
+            id (str): The ID of the pipeline to list runs for
+            page_size (int): The number of runs to return per page. Defaults to 10.
+
+        Returns:
+            DataPipelineRunsPage: A page of data pipeline runs with pagination support
+        """
+        return await self._list_data_pipeline_runs(id, page_size)
+
+    async def _list_data_pipeline_runs(self, id: str, page_size: int, page_token: str = "") -> DataPipelineRunsPage:
+        """Internal method to list data pipeline runs with pagination.
+
+        Args:
+            id (str): The ID of the pipeline to list runs for
+            page_size (int): The number of runs to return per page
+            page_token (str): The token to use to get the next page of results
+
+        Returns:
+            DataPipelineRunsPage: A page of data pipeline runs with pagination support
+        """
+        request = ListDataPipelineRunsRequest(id=id, page_size=page_size, page_token=page_token)
+        response: ListDataPipelineRunsResponse = await self._data_pipelines_client.ListDataPipelineRuns(request, metadata=self._metadata)
+        return DataClient.DataPipelineRunsPage.from_proto(response, self, page_size)
+
     @staticmethod
     def create_filter(
         component_name: Optional[str] = None,
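For orientation, a minimal usage sketch of the data pipeline surface added above. The organization ID, MQL stages, cron schedule, and field names are illustrative assumptions, not part of this diff; `data_client` is assumed to be an already-connected DataClient.

from viam.app.data_client import DataClient
from viam.proto.app.data import TabularDataSourceType


async def demo(data_client: DataClient, org_id: str) -> None:
    # Create a pipeline that runs an MQL aggregation once an hour
    # (schedule and stages are made-up examples).
    pipeline_id = await data_client.create_data_pipeline(
        organization_id=org_id,
        name="hourly-averages",
        mql_binary=[
            {"$match": {"component_name": "temperature-sensor"}},
            {"$group": {"_id": None, "avg_temp": {"$avg": "$data.readings.temp"}}},
        ],
        schedule="0 * * * *",
    )

    # Query the pipeline's sink rather than standard storage.
    rows = await data_client.tabular_data_by_mql(
        organization_id=org_id,
        query=[{"$limit": 5}],
        tabular_data_source_type=TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK,
        pipeline_id=pipeline_id,
    )
    print(rows)

    # Walk the paginated run history, then clean up.
    page = await data_client.list_data_pipeline_runs(id=pipeline_id, page_size=10)
    while page.runs:
        for run in page.runs:
            print(run.id, run.status, run.data_start_time, run.data_end_time)
        page = await page.next_page()
    await data_client.delete_data_pipeline(id=pipeline_id)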
viam/components/arm/arm.py
CHANGED
@@ -194,7 +194,7 @@ class Arm(ComponentBase):
 
     @abc.abstractmethod
    async def get_kinematics(
-        self, *, extra: Optional[Dict[str, Any]] = None, timeout: Optional[float] = None
+        self, *, extra: Optional[Dict[str, Any]] = None, timeout: Optional[float] = None, **kwargs
     ) -> Tuple[KinematicsFileFormat.ValueType, bytes]:
         """
         Get the kinematics information associated with the arm.
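The arm change above only widens the abstract signature with **kwargs; implementations should accept and may ignore the extra keyword arguments. A minimal sketch of an updated override, where the class name and kinematics file path are assumptions:

from typing import Any, Dict, Optional, Tuple

from viam.components.arm import Arm
from viam.proto.common import KinematicsFileFormat


class MyArm(Arm):
    # Only the updated method is shown; the other abstract Arm methods still
    # need real implementations for the class to be instantiable.

    async def get_kinematics(
        self, *, extra: Optional[Dict[str, Any]] = None, timeout: Optional[float] = None, **kwargs
    ) -> Tuple[KinematicsFileFormat.ValueType, bytes]:
        # Serve an SVA kinematics file bundled with the module (path is an assumption).
        with open("my_arm_kinematics.json", "rb") as f:
            return (KinematicsFileFormat.KINEMATICS_FILE_FORMAT_SVA, f.read())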
viam/components/gripper/__init__.py
CHANGED
@@ -1,3 +1,4 @@
+from viam.proto.common import KinematicsFileFormat
 from viam.resource.registry import Registry, ResourceRegistration
 
 from .client import GripperClient
@@ -6,6 +7,7 @@ from .service import GripperRPCService
 
 __all__ = [
     "Gripper",
+    "KinematicsFileFormat",
 ]
 
 Registry.register_api(ResourceRegistration(Gripper, GripperRPCService, lambda name, channel: GripperClient(name, channel)))
viam/components/gripper/client.py
CHANGED
@@ -1,8 +1,8 @@
-from typing import Any, Dict, List, Mapping, Optional
+from typing import Any, Dict, List, Mapping, Optional, Tuple
 
 from grpclib.client import Channel
 
-from viam.proto.common import DoCommandRequest, DoCommandResponse, Geometry
+from viam.proto.common import DoCommandRequest, DoCommandResponse, Geometry, GetKinematicsRequest, GetKinematicsResponse
 from viam.proto.component.gripper import (
     GrabRequest,
     GrabResponse,
@@ -15,6 +15,7 @@ from viam.proto.component.gripper import (
 from viam.resource.rpc_client_base import ReconfigurableResourceRPCClientBase
 from viam.utils import ValueTypes, dict_to_struct, get_geometries, struct_to_dict
 
+from . import KinematicsFileFormat
 from .gripper import Gripper
 
 
@@ -83,3 +84,15 @@ class GripperClient(Gripper, ReconfigurableResourceRPCClientBase):
     async def get_geometries(self, *, extra: Optional[Dict[str, Any]] = None, timeout: Optional[float] = None, **kwargs) -> List[Geometry]:
         md = kwargs.get("metadata", self.Metadata())
         return await get_geometries(self.client, self.name, extra, timeout, md)
+
+    async def get_kinematics(
+        self,
+        *,
+        extra: Optional[Dict[str, Any]] = None,
+        timeout: Optional[float] = None,
+        **kwargs,
+    ) -> Tuple[KinematicsFileFormat.ValueType, bytes]:
+        md = kwargs.get("metadata", self.Metadata()).proto
+        request = GetKinematicsRequest(name=self.name, extra=dict_to_struct(extra))
+        response: GetKinematicsResponse = await self.client.GetKinematics(request, timeout=timeout, metadata=md)
+        return (response.format, response.kinematics_data)
viam/components/gripper/gripper.py
CHANGED
@@ -1,9 +1,11 @@
 import abc
-from typing import Any, Dict, Final, Optional
+from typing import Any, Dict, Final, Optional, Tuple
 
 from viam.components.component_base import ComponentBase
 from viam.resource.types import API, RESOURCE_NAMESPACE_RDK, RESOURCE_TYPE_COMPONENT
 
+from . import KinematicsFileFormat
+
 
 class Gripper(ComponentBase):
     """
@@ -112,3 +114,37 @@ class Gripper(ComponentBase):
         For more information, see `Gripper component <https://docs.viam.com/dev/reference/apis/components/gripper/#is_moving>`_.
         """
         ...
+
+    @abc.abstractmethod
+    async def get_kinematics(
+        self,
+        *,
+        extra: Optional[Dict[str, Any]] = None,
+        timeout: Optional[float] = None,
+        **kwargs,
+    ) -> Tuple[KinematicsFileFormat.ValueType, bytes]:
+        """
+        Get the kinematics information associated with the gripper.
+
+        ::
+
+            my_gripper = Gripper.from_robot(robot=machine, name="my_gripper")
+
+            # Get the kinematics information associated with the gripper.
+            kinematics = await my_gripper.get_kinematics()
+
+            # Get the format of the kinematics file.
+            k_file = kinematics[0]
+
+            # Get the byte contents of the file.
+            k_bytes = kinematics[1]
+
+        Returns:
+            Tuple[KinematicsFileFormat.ValueType, bytes]: A tuple containing two values; the first [0] value represents the format of the
+            file, either in URDF format (``KinematicsFileFormat.KINEMATICS_FILE_FORMAT_URDF``) or
+            Viam's kinematic parameter format (spatial vector algebra) (``KinematicsFileFormat.KINEMATICS_FILE_FORMAT_SVA``),
+            and the second [1] value represents the byte contents of the file.
+
+        For more information, see `Gripper component <https://docs.viam.com/dev/reference/apis/components/gripper/#getkinematics>`_.
+        """
+        ...
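Because get_kinematics is now abstract on Gripper, custom gripper models have to provide an implementation to stay instantiable. A minimal module-side sketch, with the model class, constructor, and URDF path as assumptions; the GetKinematics handler added in service.py (below) forwards the returned pair as GetKinematicsResponse.

from typing import Any, Dict, Optional, Tuple

from viam.components.gripper import Gripper, KinematicsFileFormat


class MyGripper(Gripper):
    # Only the new method is shown; open, grab, stop, is_moving, etc. still
    # need real implementations for the class to be instantiable.

    def __init__(self, name: str, kinematics_path: str = "my_gripper.urdf"):
        super().__init__(name)
        self._kinematics_path = kinematics_path  # assumed to ship alongside the module

    async def get_kinematics(
        self,
        *,
        extra: Optional[Dict[str, Any]] = None,
        timeout: Optional[float] = None,
        **kwargs,
    ) -> Tuple[KinematicsFileFormat.ValueType, bytes]:
        # Return the format marker and the raw bytes of the bundled URDF file.
        with open(self._kinematics_path, "rb") as f:
            return (KinematicsFileFormat.KINEMATICS_FILE_FORMAT_URDF, f.read())

On the wire, GripperClient.get_kinematics (added in client.py above) performs the matching GetKinematics call and returns the same (format, bytes) tuple.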
viam/components/gripper/service.py
CHANGED
@@ -1,6 +1,13 @@
 from grpclib.server import Stream
 
-from viam.proto.common import
+from viam.proto.common import (
+    DoCommandRequest,
+    DoCommandResponse,
+    GetGeometriesRequest,
+    GetGeometriesResponse,
+    GetKinematicsRequest,
+    GetKinematicsResponse,
+)
 from viam.proto.component.gripper import (
     GrabRequest,
     GrabResponse,
@@ -74,8 +81,17 @@ class GripperRPCService(GripperServiceBase, ResourceRPCServiceBase[Gripper]):
     async def GetGeometries(self, stream: Stream[GetGeometriesRequest, GetGeometriesResponse]) -> None:
         request = await stream.recv_message()
         assert request is not None
-
+        gripper = self.get_resource(request.name)
         timeout = stream.deadline.time_remaining() if stream.deadline else None
-        geometries = await
+        geometries = await gripper.get_geometries(extra=struct_to_dict(request.extra), timeout=timeout)
         response = GetGeometriesResponse(geometries=geometries)
         await stream.send_message(response)
+
+    async def GetKinematics(self, stream: Stream[GetKinematicsRequest, GetKinematicsResponse]) -> None:
+        request = await stream.recv_message()
+        assert request is not None
+        gripper = self.get_resource(request.name)
+        timeout = stream.deadline.time_remaining() if stream.deadline else None
+        format, kinematics_data = await gripper.get_kinematics(extra=struct_to_dict(request.extra), timeout=timeout)
+        response = GetKinematicsResponse(format=format, kinematics_data=kinematics_data)
+        await stream.send_message(response)
viam/gen/app/datapipelines/v1/data_pipelines_pb2.py
CHANGED
@@ -6,51 +6,52 @@ from google.protobuf import symbol_database as _symbol_database
 from google.protobuf.internal import builder as _builder
 _runtime_version.ValidateProtobufRuntimeVersion(_runtime_version.Domain.PUBLIC, 5, 29, 2, '', 'app/datapipelines/v1/data_pipelines.proto')
 _sym_db = _symbol_database.Default()
+from ....app.data.v1 import data_pb2 as app_dot_data_dot_v1_dot_data__pb2
 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)app/datapipelines/v1/data_pipelines.proto\x12\x19viam.app.datapipelines.v1\x1a\x1fgoogle/protobuf/timestamp.proto"\
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)app/datapipelines/v1/data_pipelines.proto\x12\x19viam.app.datapipelines.v1\x1a\x16app/data/v1/data.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x93\x03\n\x0cDataPipeline\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\'\n\x0forganization_id\x18\x02 \x01(\tR\x0eorganizationId\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12\x1d\n\nmql_binary\x18\x04 \x03(\x0cR\tmqlBinary\x12\x1a\n\x08schedule\x18\x05 \x01(\tR\x08schedule\x12\x18\n\x07enabled\x18\x06 \x01(\x08R\x07enabled\x129\n\ncreated_on\x18\x07 \x01(\x0b2\x1a.google.protobuf.TimestampR\tcreatedOn\x129\n\nupdated_at\x18\x08 \x01(\x0b2\x1a.google.protobuf.TimestampR\tupdatedAt\x12V\n\x10data_source_type\x18\t \x01(\x0e2\'.viam.app.data.v1.TabularDataSourceTypeH\x00R\x0edataSourceType\x88\x01\x01B\x13\n\x11_data_source_type"(\n\x16GetDataPipelineRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"g\n\x17GetDataPipelineResponse\x12L\n\rdata_pipeline\x18\x01 \x01(\x0b2\'.viam.app.datapipelines.v1.DataPipelineR\x0cdataPipeline"C\n\x18ListDataPipelinesRequest\x12\'\n\x0forganization_id\x18\x01 \x01(\tR\x0eorganizationId"k\n\x19ListDataPipelinesResponse\x12N\n\x0edata_pipelines\x18\x01 \x03(\x0b2\'.viam.app.datapipelines.v1.DataPipelineR\rdataPipelines"\xc2\x02\n\x19CreateDataPipelineRequest\x12\'\n\x0forganization_id\x18\x01 \x01(\tR\x0eorganizationId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1d\n\nmql_binary\x18\x03 \x03(\x0cR\tmqlBinary\x12\x1a\n\x08schedule\x18\x04 \x01(\tR\x08schedule\x12,\n\x0fenable_backfill\x18\x05 \x01(\x08H\x00R\x0eenableBackfill\x88\x01\x01\x12V\n\x10data_source_type\x18\x06 \x01(\x0e2\'.viam.app.data.v1.TabularDataSourceTypeH\x01R\x0edataSourceType\x88\x01\x01B\x12\n\x10_enable_backfillB\x13\n\x11_data_source_type",\n\x1aCreateDataPipelineResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\xe7\x01\n\x19UpdateDataPipelineRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12\x1d\n\nmql_binary\x18\x03 \x03(\x0cR\tmqlBinary\x12\x1a\n\x08schedule\x18\x04 \x01(\tR\x08schedule\x12V\n\x10data_source_type\x18\x05 \x01(\x0e2\'.viam.app.data.v1.TabularDataSourceTypeH\x00R\x0edataSourceType\x88\x01\x01B\x13\n\x11_data_source_type"\x1c\n\x1aUpdateDataPipelineResponse"+\n\x19DeleteDataPipelineRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\x1c\n\x1aDeleteDataPipelineResponse"+\n\x19EnableDataPipelineRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\x1c\n\x1aEnableDataPipelineResponse",\n\x1aDisableDataPipelineRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\x1d\n\x1bDisableDataPipelineResponse"i\n\x1bListDataPipelineRunsRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1b\n\tpage_size\x18\x02 \x01(\rR\x08pageSize\x12\x1d\n\npage_token\x18\x03 \x01(\tR\tpageToken"\xa7\x01\n\x1cListDataPipelineRunsResponse\x12\x1f\n\x0bpipeline_id\x18\x01 \x01(\tR\npipelineId\x12>\n\x04runs\x18\x02 \x03(\x0b2*.viam.app.datapipelines.v1.DataPipelineRunR\x04runs\x12&\n\x0fnext_page_token\x18\x03 \x01(\tR\rnextPageToken"\xe1\x02\n\x0fDataPipelineRun\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x129\n\nstart_time\x18\x02 \x01(\x0b2\x1a.google.protobuf.TimestampR\tstartTime\x125\n\x08end_time\x18\x03 \x01(\x0b2\x1a.google.protobuf.TimestampR\x07endTime\x12B\n\x0fdata_start_time\x18\x04 \x01(\x0b2\x1a.google.protobuf.TimestampR\rdataStartTime\x12>\n\rdata_end_time\x18\x05 \x01(\x0b2\x1a.google.protobuf.TimestampR\x0bdataEndTime\x12H\n\x06status\x18\x06 \x01(\x0e20.viam.app.datapipelines.v1.DataPipelineRunStatusR\x06status*\xdc\x01\n\x15DataPipelineRunStatus\x12(\n$DATA_PIPELINE_RUN_STATUS_UNSPECIFIED\x10\x00\x12&\n"DATA_PIPELINE_RUN_STATUS_SCHEDULED\x10\x01\x12$\n DATA_PIPELINE_RUN_STATUS_STARTED\x10\x02\x12&\n"DATA_PIPELINE_RUN_STATUS_COMPLETED\x10\x03\x12#\n\x1fDATA_PIPELINE_RUN_STATUS_FAILED\x10\x042\xb1\x08\n\x14DataPipelinesService\x12x\n\x0fGetDataPipeline\x121.viam.app.datapipelines.v1.GetDataPipelineRequest\x1a2.viam.app.datapipelines.v1.GetDataPipelineResponse\x12~\n\x11ListDataPipelines\x123.viam.app.datapipelines.v1.ListDataPipelinesRequest\x1a4.viam.app.datapipelines.v1.ListDataPipelinesResponse\x12\x81\x01\n\x12CreateDataPipeline\x124.viam.app.datapipelines.v1.CreateDataPipelineRequest\x1a5.viam.app.datapipelines.v1.CreateDataPipelineResponse\x12\x81\x01\n\x12UpdateDataPipeline\x124.viam.app.datapipelines.v1.UpdateDataPipelineRequest\x1a5.viam.app.datapipelines.v1.UpdateDataPipelineResponse\x12\x81\x01\n\x12DeleteDataPipeline\x124.viam.app.datapipelines.v1.DeleteDataPipelineRequest\x1a5.viam.app.datapipelines.v1.DeleteDataPipelineResponse\x12\x81\x01\n\x12EnableDataPipeline\x124.viam.app.datapipelines.v1.EnableDataPipelineRequest\x1a5.viam.app.datapipelines.v1.EnableDataPipelineResponse\x12\x84\x01\n\x13DisableDataPipeline\x125.viam.app.datapipelines.v1.DisableDataPipelineRequest\x1a6.viam.app.datapipelines.v1.DisableDataPipelineResponse\x12\x87\x01\n\x14ListDataPipelineRuns\x126.viam.app.datapipelines.v1.ListDataPipelineRunsRequest\x1a7.viam.app.datapipelines.v1.ListDataPipelineRunsResponseB&Z$go.viam.com/api/app/datapipelines/v1b\x06proto3')
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'app.datapipelines.v1.data_pipelines_pb2', _globals)
 if not _descriptor._USE_C_DESCRIPTORS:
     _globals['DESCRIPTOR']._loaded_options = None
     _globals['DESCRIPTOR']._serialized_options = b'Z$go.viam.com/api/app/datapipelines/v1'
-    _globals['_DATAPIPELINERUNSTATUS']._serialized_start =
-    _globals['_DATAPIPELINERUNSTATUS']._serialized_end =
-    _globals['_DATAPIPELINE']._serialized_start =
-    _globals['_DATAPIPELINE']._serialized_end =
-    _globals['_GETDATAPIPELINEREQUEST']._serialized_start =
-    _globals['_GETDATAPIPELINEREQUEST']._serialized_end =
-    _globals['_GETDATAPIPELINERESPONSE']._serialized_start =
-    _globals['_GETDATAPIPELINERESPONSE']._serialized_end =
-    _globals['_LISTDATAPIPELINESREQUEST']._serialized_start =
-    _globals['_LISTDATAPIPELINESREQUEST']._serialized_end =
-    _globals['_LISTDATAPIPELINESRESPONSE']._serialized_start =
-    _globals['_LISTDATAPIPELINESRESPONSE']._serialized_end =
-    _globals['_CREATEDATAPIPELINEREQUEST']._serialized_start =
-    _globals['_CREATEDATAPIPELINEREQUEST']._serialized_end =
-    _globals['_CREATEDATAPIPELINERESPONSE']._serialized_start =
-    _globals['_CREATEDATAPIPELINERESPONSE']._serialized_end =
-    _globals['_UPDATEDATAPIPELINEREQUEST']._serialized_start =
-    _globals['_UPDATEDATAPIPELINEREQUEST']._serialized_end =
-    _globals['_UPDATEDATAPIPELINERESPONSE']._serialized_start =
-    _globals['_UPDATEDATAPIPELINERESPONSE']._serialized_end =
-    _globals['_DELETEDATAPIPELINEREQUEST']._serialized_start =
-    _globals['_DELETEDATAPIPELINEREQUEST']._serialized_end =
-    _globals['_DELETEDATAPIPELINERESPONSE']._serialized_start =
-    _globals['_DELETEDATAPIPELINERESPONSE']._serialized_end =
-    _globals['_ENABLEDATAPIPELINEREQUEST']._serialized_start =
-    _globals['_ENABLEDATAPIPELINEREQUEST']._serialized_end =
-    _globals['_ENABLEDATAPIPELINERESPONSE']._serialized_start =
-    _globals['_ENABLEDATAPIPELINERESPONSE']._serialized_end =
-    _globals['_DISABLEDATAPIPELINEREQUEST']._serialized_start =
-    _globals['_DISABLEDATAPIPELINEREQUEST']._serialized_end =
-    _globals['_DISABLEDATAPIPELINERESPONSE']._serialized_start =
-    _globals['_DISABLEDATAPIPELINERESPONSE']._serialized_end =
-    _globals['_LISTDATAPIPELINERUNSREQUEST']._serialized_start =
-    _globals['_LISTDATAPIPELINERUNSREQUEST']._serialized_end =
-    _globals['_LISTDATAPIPELINERUNSRESPONSE']._serialized_start =
-    _globals['_LISTDATAPIPELINERUNSRESPONSE']._serialized_end =
-    _globals['_DATAPIPELINERUN']._serialized_start =
-    _globals['_DATAPIPELINERUN']._serialized_end =
-    _globals['_DATAPIPELINESSERVICE']._serialized_start =
-    _globals['_DATAPIPELINESSERVICE']._serialized_end =
+    _globals['_DATAPIPELINERUNSTATUS']._serialized_start = 2356
+    _globals['_DATAPIPELINERUNSTATUS']._serialized_end = 2576
+    _globals['_DATAPIPELINE']._serialized_start = 130
+    _globals['_DATAPIPELINE']._serialized_end = 533
+    _globals['_GETDATAPIPELINEREQUEST']._serialized_start = 535
+    _globals['_GETDATAPIPELINEREQUEST']._serialized_end = 575
+    _globals['_GETDATAPIPELINERESPONSE']._serialized_start = 577
+    _globals['_GETDATAPIPELINERESPONSE']._serialized_end = 680
+    _globals['_LISTDATAPIPELINESREQUEST']._serialized_start = 682
+    _globals['_LISTDATAPIPELINESREQUEST']._serialized_end = 749
+    _globals['_LISTDATAPIPELINESRESPONSE']._serialized_start = 751
+    _globals['_LISTDATAPIPELINESRESPONSE']._serialized_end = 858
+    _globals['_CREATEDATAPIPELINEREQUEST']._serialized_start = 861
+    _globals['_CREATEDATAPIPELINEREQUEST']._serialized_end = 1183
+    _globals['_CREATEDATAPIPELINERESPONSE']._serialized_start = 1185
+    _globals['_CREATEDATAPIPELINERESPONSE']._serialized_end = 1229
+    _globals['_UPDATEDATAPIPELINEREQUEST']._serialized_start = 1232
+    _globals['_UPDATEDATAPIPELINEREQUEST']._serialized_end = 1463
+    _globals['_UPDATEDATAPIPELINERESPONSE']._serialized_start = 1465
+    _globals['_UPDATEDATAPIPELINERESPONSE']._serialized_end = 1493
+    _globals['_DELETEDATAPIPELINEREQUEST']._serialized_start = 1495
+    _globals['_DELETEDATAPIPELINEREQUEST']._serialized_end = 1538
+    _globals['_DELETEDATAPIPELINERESPONSE']._serialized_start = 1540
+    _globals['_DELETEDATAPIPELINERESPONSE']._serialized_end = 1568
+    _globals['_ENABLEDATAPIPELINEREQUEST']._serialized_start = 1570
+    _globals['_ENABLEDATAPIPELINEREQUEST']._serialized_end = 1613
+    _globals['_ENABLEDATAPIPELINERESPONSE']._serialized_start = 1615
+    _globals['_ENABLEDATAPIPELINERESPONSE']._serialized_end = 1643
+    _globals['_DISABLEDATAPIPELINEREQUEST']._serialized_start = 1645
+    _globals['_DISABLEDATAPIPELINEREQUEST']._serialized_end = 1689
+    _globals['_DISABLEDATAPIPELINERESPONSE']._serialized_start = 1691
+    _globals['_DISABLEDATAPIPELINERESPONSE']._serialized_end = 1720
+    _globals['_LISTDATAPIPELINERUNSREQUEST']._serialized_start = 1722
+    _globals['_LISTDATAPIPELINERUNSREQUEST']._serialized_end = 1827
+    _globals['_LISTDATAPIPELINERUNSRESPONSE']._serialized_start = 1830
+    _globals['_LISTDATAPIPELINERUNSRESPONSE']._serialized_end = 1997
+    _globals['_DATAPIPELINERUN']._serialized_start = 2000
+    _globals['_DATAPIPELINERUN']._serialized_end = 2353
+    _globals['_DATAPIPELINESSERVICE']._serialized_start = 2579
+    _globals['_DATAPIPELINESSERVICE']._serialized_end = 3652