viam-sdk 0.46.0__py3-none-linux_armv7l.whl → 0.47.0__py3-none-linux_armv7l.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of viam-sdk might be problematic.
- viam/app/data_client.py +264 -3
- viam/gen/app/mltraining/v1/ml_training_pb2.py +30 -24
- viam/gen/app/mltraining/v1/ml_training_pb2.pyi +21 -2
- viam/module/module.py +24 -2
- viam/resource/easy_resource.py +4 -4
- viam/resource/registry.py +2 -2
- viam/resource/types.py +2 -2
- viam/version_metadata.py +2 -2
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/METADATA +1 -1
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/RECORD +12 -12
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/WHEEL +0 -0
- {viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/licenses/LICENSE +0 -0
viam/app/data_client.py
CHANGED
@@ -7,6 +7,7 @@ from typing import Any, Dict, List, Mapping, Optional, Sequence, Tuple, Union, c
 import bson
 from google.protobuf.struct_pb2 import Struct
 from grpclib.client import Channel, Stream
+from typing_extensions import Self

 from viam import logging
 from viam.proto.app.data import (
@@ -54,9 +55,30 @@ from viam.proto.app.data import (
     TabularDataByMQLResponse,
     TabularDataBySQLRequest,
     TabularDataBySQLResponse,
+    TabularDataSource,
+    TabularDataSourceType,
     TagsByFilterRequest,
     TagsByFilterResponse,
 )
+from viam.proto.app.datapipelines import (
+    CreateDataPipelineRequest,
+    CreateDataPipelineResponse,
+    DataPipelineRunStatus,
+    DataPipelinesServiceStub,
+    DeleteDataPipelineRequest,
+    GetDataPipelineRequest,
+    GetDataPipelineResponse,
+    ListDataPipelineRunsRequest,
+    ListDataPipelineRunsResponse,
+    ListDataPipelinesRequest,
+    ListDataPipelinesResponse,
+)
+from viam.proto.app.datapipelines import (
+    DataPipeline as ProtoDataPipeline,
+)
+from viam.proto.app.datapipelines import (
+    DataPipelineRun as ProtoDataPipelineRun,
+)
 from viam.proto.app.dataset import (
     CreateDatasetRequest,
     CreateDatasetResponse,
@@ -220,6 +242,121 @@ class DataClient:
         )
         return self.resource_api

+    @dataclass
+    class DataPipeline:
+        """Represents a data pipeline and its associated metadata."""
+
+        id: str
+        """The ID of the data pipeline"""
+
+        organization_id: str
+        """The organization ID"""
+
+        name: str
+        """The name of the data pipeline"""
+
+        mql_binary: List[Dict[str, Any]]
+        """The MQL binary of the data pipeline"""
+
+        schedule: str
+        """The schedule of the data pipeline"""
+
+        created_on: datetime
+        """The time the data pipeline was created"""
+
+        updated_at: datetime
+        """The time the data pipeline was last updated"""
+
+        enabled: bool
+        """Whether the data pipeline is enabled"""
+
+        @classmethod
+        def from_proto(cls, data_pipeline: ProtoDataPipeline) -> Self:
+            return cls(
+                id=data_pipeline.id,
+                organization_id=data_pipeline.organization_id,
+                name=data_pipeline.name,
+                mql_binary=[bson.decode(bson_bytes) for bson_bytes in data_pipeline.mql_binary],
+                schedule=data_pipeline.schedule,
+                created_on=data_pipeline.created_on.ToDatetime(),
+                updated_at=data_pipeline.updated_at.ToDatetime(),
+                enabled=data_pipeline.enabled,
+            )
+
+    @dataclass
+    class DataPipelineRun:
+        """Represents a data pipeline run and its associated metadata."""
+
+        id: str
+        """The ID of the data pipeline run"""
+
+        status: DataPipelineRunStatus.ValueType
+        """The status of the data pipeline run"""
+
+        start_time: datetime
+        """The time the data pipeline run started"""
+
+        end_time: datetime
+        """The time the data pipeline run ended"""
+
+        data_start_time: datetime
+        """The start time of the data that was processed in the run."""
+        data_end_time: datetime
+        """The end time of the data that was processed in the run."""
+
+        @classmethod
+        def from_proto(cls, data_pipeline_run: ProtoDataPipelineRun) -> Self:
+            return cls(
+                id=data_pipeline_run.id,
+                status=data_pipeline_run.status,
+                start_time=data_pipeline_run.start_time.ToDatetime(),
+                end_time=data_pipeline_run.end_time.ToDatetime(),
+                data_start_time=data_pipeline_run.data_start_time.ToDatetime(),
+                data_end_time=data_pipeline_run.data_end_time.ToDatetime(),
+            )
+
+    @dataclass
+    class DataPipelineRunsPage:
+        """Represents a page of data pipeline runs and provides pagination functionality."""
+
+        _client: "DataClient"
+        """The data client used to make API calls"""
+
+        pipeline_id: str
+        """The ID of the pipeline these runs belong to"""
+
+        page_size: int
+        """The number of runs per page"""
+
+        runs: List["DataClient.DataPipelineRun"]
+        """The list of runs in this page"""
+
+        next_page_token: str
+        """The token to use to get the next page of results"""
+
+        async def next_page(self) -> "DataClient.DataPipelineRunsPage":
+            """Get the next page of data pipeline runs.
+
+            Returns:
+                DataPipelineRunsPage: The next page of runs, or an empty page if there are no more runs
+            """
+            if not self.next_page_token:
+                # no token, return empty next page
+                return DataClient.DataPipelineRunsPage(
+                    _client=self._client, pipeline_id=self.pipeline_id, page_size=self.page_size, runs=[], next_page_token=""
+                )
+            return await self._client._list_data_pipeline_runs(self.pipeline_id, self.page_size, self.next_page_token)
+
+        @classmethod
+        def from_proto(cls, data_pipeline_runs_page: ListDataPipelineRunsResponse, client: "DataClient", page_size: int) -> Self:
+            return cls(
+                _client=client,
+                pipeline_id=data_pipeline_runs_page.pipeline_id,
+                page_size=page_size,
+                runs=[DataClient.DataPipelineRun.from_proto(run) for run in data_pipeline_runs_page.runs],
+                next_page_token=data_pipeline_runs_page.next_page_token,
+            )
+
     def __init__(self, channel: Channel, metadata: Mapping[str, str]):
         """Create a :class:`DataClient` that maintains a connection to app.

@@ -231,11 +368,13 @@
         self._data_client = DataServiceStub(channel)
         self._data_sync_client = DataSyncServiceStub(channel)
         self._dataset_client = DatasetServiceStub(channel)
+        self._data_pipelines_client = DataPipelinesServiceStub(channel)
         self._channel = channel

     _data_client: DataServiceStub
     _data_sync_client: DataSyncServiceStub
     _dataset_client: DatasetServiceStub
+    _data_pipelines_client: DataPipelinesServiceStub
     _metadata: Mapping[str, str]
     _channel: Channel

@@ -345,7 +484,12 @@

     @_alias_param("query", param_alias="mql_binary")
     async def tabular_data_by_mql(
-        self,
+        self,
+        organization_id: str,
+        query: Union[List[bytes], List[Dict[str, Any]]],
+        use_recent_data: Optional[bool] = None,
+        tabular_data_source_type: TabularDataSourceType.ValueType = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+        pipeline_id: Optional[str] = None,
     ) -> List[Dict[str, Union[ValueTypes, datetime]]]:
         """Obtain unified tabular data and metadata, queried with MQL.

@@ -366,7 +510,12 @@
             query (Union[List[bytes], List[Dict[str, Any]]]): The MQL query to run, as a list of MongoDB aggregation pipeline stages.
                 Each stage can be provided as either a dictionary or raw BSON bytes, but support for bytes will be removed in the
                 future, so prefer the dictionary option.
-            use_recent_data (bool): Whether to query blob storage or your recent data store. Defaults to ``False
+            use_recent_data (bool): Whether to query blob storage or your recent data store. Defaults to ``False``..
+                Deprecated, use `tabular_data_source_type` instead.
+            tabular_data_source_type (viam.proto.app.data.TabularDataSourceType): The data source to query.
+                Defaults to `TABULAR_DATA_SOURCE_TYPE_STANDARD`.
+            pipeline_id (str): The ID of the data pipeline to query. Defaults to `None`.
+                Required if `tabular_data_source_type` is `TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK`.

         Returns:
             List[Dict[str, Union[ValueTypes, datetime]]]: An array of decoded BSON data objects.
@@ -374,7 +523,10 @@
         For more information, see `Data Client API <https://docs.viam.com/dev/reference/apis/data-client/#tabulardatabymql>`_.
         """
         binary: List[bytes] = [bson.encode(query) for query in query] if isinstance(query[0], dict) else query  # type: ignore
-
+        data_source = TabularDataSource(type=tabular_data_source_type, pipeline_id=pipeline_id)
+        if use_recent_data:
+            data_source.type = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_HOT_STORAGE
+        request = TabularDataByMQLRequest(organization_id=organization_id, mql_binary=binary, data_source=data_source)
         response: TabularDataByMQLResponse = await self._data_client.TabularDataByMQL(request, metadata=self._metadata)
         return [bson.decode(bson_bytes) for bson_bytes in response.raw_data]

@@ -1696,6 +1848,115 @@
             raise TypeError("Response cannot be empty")
         return response

+    async def get_data_pipeline(self, id: str) -> DataPipeline:
+        """Get a data pipeline by its ID.
+
+        ::
+
+            data_pipeline = await data_client.get_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>")
+
+        Args:
+            id (str): The ID of the data pipeline to get.
+
+        Returns:
+            DataPipeline: The data pipeline with the given ID.
+        """
+        request = GetDataPipelineRequest(id=id)
+        response: GetDataPipelineResponse = await self._data_pipelines_client.GetDataPipeline(request, metadata=self._metadata)
+        return DataClient.DataPipeline.from_proto(response.data_pipeline)
+
+    async def list_data_pipelines(self, organization_id: str) -> List[DataPipeline]:
+        """List all of the data pipelines for an organization.
+
+        ::
+
+            data_pipelines = await data_client.list_data_pipelines(organization_id="<YOUR-ORGANIZATION-ID>")
+
+        Args:
+            organization_id (str): The ID of the organization that owns the pipelines.
+                You can obtain your organization ID from the Viam app's organization settings page.
+
+        Returns:
+            List[DataPipeline]: A list of all of the data pipelines for the given organization.
+        """
+        request = ListDataPipelinesRequest(organization_id=organization_id)
+        response: ListDataPipelinesResponse = await self._data_pipelines_client.ListDataPipelines(request, metadata=self._metadata)
+        return [DataClient.DataPipeline.from_proto(pipeline) for pipeline in response.data_pipelines]
+
+    async def create_data_pipeline(self, organization_id: str, name: str, mql_binary: List[Dict[str, Any]], schedule: str) -> str:
+        """Create a new data pipeline.
+
+        ::
+
+            data_pipeline_id = await data_client.create_data_pipeline(
+                organization_id="<YOUR-ORGANIZATION-ID>",
+                name="<YOUR-PIPELINE-NAME>",
+                mql_binary=[<YOUR-MQL-PIPELINE-AGGREGATION>],
+                schedule="<YOUR-SCHEDULE>"
+            )
+
+        Args:
+            organization_id (str): The ID of the organization that will own the pipeline.
+                You can obtain your organization ID from the Viam app's organization settings page.
+            name (str): The name of the pipeline.
+            mql_binary (List[Dict[str, Any]]):The MQL pipeline to run, as a list of MongoDB aggregation pipeline stages.
+            schedule (str): A cron expression representing the expected execution schedule in UTC (note this also
+                defines the input time window; an hourly schedule would process 1 hour of data at a time).
+
+        Returns:
+            str: The ID of the newly created pipeline.
+        """
+        binary: List[bytes] = [bson.encode(query) for query in mql_binary]
+        request = CreateDataPipelineRequest(organization_id=organization_id, name=name, mql_binary=binary, schedule=schedule)
+        response: CreateDataPipelineResponse = await self._data_pipelines_client.CreateDataPipeline(request, metadata=self._metadata)
+        return response.id
+
+    async def delete_data_pipeline(self, id: str) -> None:
+        """Delete a data pipeline by its ID.
+
+        ::
+
+            await data_client.delete_data_pipeline(id="<YOUR-DATA-PIPELINE-ID>")
+
+        Args:
+            id (str): The ID of the data pipeline to delete.
+        """
+        request = DeleteDataPipelineRequest(id=id)
+        await self._data_pipelines_client.DeleteDataPipeline(request, metadata=self._metadata)
+
+    async def list_data_pipeline_runs(self, id: str, page_size: int = 10) -> DataPipelineRunsPage:
+        """List all of the data pipeline runs for a data pipeline.
+
+        ::
+
+            data_pipeline_runs = await data_client.list_data_pipeline_runs(id="<YOUR-DATA-PIPELINE-ID>")
+            while len(data_pipeline_runs.runs) > 0:
+                data_pipeline_runs = await data_pipeline_runs.next_page()
+
+        Args:
+            id (str): The ID of the pipeline to list runs for
+            page_size (int): The number of runs to return per page. Defaults to 10.
+
+        Returns:
+            DataPipelineRunsPage: A page of data pipeline runs with pagination support
+        """
+        return await self._list_data_pipeline_runs(id, page_size)
+
+    async def _list_data_pipeline_runs(self, id: str, page_size: int, page_token: str = "") -> DataPipelineRunsPage:
+        """Internal method to list data pipeline runs with pagination.
+
+        Args:
+            id (str): The ID of the pipeline to list runs for
+            page_size (int): The number of runs to return per page
+            page_token (str): The token to use to get the next page of results
+
+        Returns:
+            DataPipelineRunsPage: A page of data pipeline runs with pagination support
+        """
+        request = ListDataPipelineRunsRequest(id=id, page_size=page_size, page_token=page_token)
+        response: ListDataPipelineRunsResponse = await self._data_pipelines_client.ListDataPipelineRuns(request, metadata=self._metadata)
+        return DataClient.DataPipelineRunsPage.from_proto(response, self, page_size)
+
     @staticmethod
     def create_filter(
         component_name: Optional[str] = None,
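The diff above wires a DataPipelinesServiceStub into DataClient and adds pipeline create/get/list/delete, paginated run listing, and a data-source selector for tabular_data_by_mql. A minimal usage sketch follows; the connection boilerplate (DialOptions, ViamClient) comes from the existing SDK rather than this diff, and the organization ID, cron schedule, and MQL stages are placeholders::

    import asyncio

    from viam.app.viam_client import ViamClient          # assumed: existing SDK entry point
    from viam.proto.app.data import TabularDataSourceType
    from viam.rpc.dial import DialOptions                # assumed: existing SDK dial helper


    async def main():
        # Assumed connection setup; only the data_client calls below come from this diff.
        dial_options = DialOptions.with_api_key(api_key="<API-KEY>", api_key_id="<API-KEY-ID>")
        viam_client = await ViamClient.create_from_dial_options(dial_options)
        data_client = viam_client.data_client

        # Create a pipeline that runs an MQL aggregation every hour (cron expression, UTC).
        pipeline_id = await data_client.create_data_pipeline(
            organization_id="<YOUR-ORGANIZATION-ID>",
            name="hourly-rollup",
            mql_binary=[{"$match": {"component_name": "sensor-1"}}, {"$count": "readings"}],
            schedule="0 * * * *",
        )

        # Page through the pipeline's runs using the new DataPipelineRunsPage helper.
        page = await data_client.list_data_pipeline_runs(id=pipeline_id, page_size=10)
        while page.runs:
            for run in page.runs:
                print(run.id, run.status, run.data_start_time, run.data_end_time)
            page = await page.next_page()

        # Query the pipeline's sink instead of standard storage via the new arguments.
        rows = await data_client.tabular_data_by_mql(
            organization_id="<YOUR-ORGANIZATION-ID>",
            query=[{"$limit": 5}],
            tabular_data_source_type=TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_PIPELINE_SINK,
            pipeline_id=pipeline_id,
        )
        print(rows)

        viam_client.close()


    asyncio.run(main())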
viam/gen/app/mltraining/v1/ml_training_pb2.py
CHANGED
@@ -9,7 +9,7 @@ _sym_db = _symbol_database.Default()
 from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
 from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
 from ....tagger.v1 import tagger_pb2 as tagger_dot_v1_dot_tagger__pb2
-
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#app/mltraining/v1/ml_training.proto\x12\x16viam.app.mltraining.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x16tagger/v1/tagger.proto"\x90\x05\n\x18SubmitTrainingJobRequest\x12G\n\ndataset_id\x18\x07 \x01(\tB(\x9a\x84\x9e\x03#bson:"dataset_id" json:"dataset_id"R\tdatasetId\x12[\n\x0forganization_id\x18\x02 \x01(\tB2\x9a\x84\x9e\x03-bson:"organization_id" json:"organization_id"R\x0eorganizationId\x12G\n\nmodel_name\x18\x03 \x01(\tB(\x9a\x84\x9e\x03#bson:"model_name" json:"model_name"R\tmodelName\x12S\n\rmodel_version\x18\x04 \x01(\tB.\x9a\x84\x9e\x03)bson:"model_version" json:"model_version"R\x0cmodelVersion\x12j\n\nmodel_type\x18\x05 \x01(\x0e2!.viam.app.mltraining.v1.ModelTypeB(\x9a\x84\x9e\x03#bson:"model_type" json:"model_type"R\tmodelType\x12\x83\x01\n\x0fmodel_framework\x18\x08 \x01(\x0e2&.viam.app.mltraining.v1.ModelFrameworkB2\x9a\x84\x9e\x03-bson:"model_framework" json:"model_framework"R\x0emodelFramework\x120\n\x04tags\x18\x06 \x03(\tB\x1c\x9a\x84\x9e\x03\x17bson:"tags" json:"tags"R\x04tagsJ\x04\x08\x01\x10\x02R\x06filter"+\n\x19SubmitTrainingJobResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\xdb\x05\n\x1eSubmitCustomTrainingJobRequest\x12G\n\ndataset_id\x18\x01 \x01(\tB(\x9a\x84\x9e\x03#bson:"dataset_id" json:"dataset_id"R\tdatasetId\x12^\n\x10registry_item_id\x18\x02 \x01(\tB4\x9a\x84\x9e\x03/bson:"registry_item_id" json:"registry_item_id"R\x0eregistryItemId\x12r\n\x15registry_item_version\x18\x06 \x01(\tB>\x9a\x84\x9e\x039bson:"registry_item_version" json:"registry_item_version"R\x13registryItemVersion\x12[\n\x0forganization_id\x18\x03 \x01(\tB2\x9a\x84\x9e\x03-bson:"organization_id" json:"organization_id"R\x0eorganizationId\x12G\n\nmodel_name\x18\x04 \x01(\tB(\x9a\x84\x9e\x03#bson:"model_name" json:"model_name"R\tmodelName\x12S\n\rmodel_version\x18\x05 \x01(\tB.\x9a\x84\x9e\x03)bson:"model_version" json:"model_version"R\x0cmodelVersion\x12c\n\targuments\x18\x07 \x03(\x0b2E.viam.app.mltraining.v1.SubmitCustomTrainingJobRequest.ArgumentsEntryR\targuments\x1a<\n\x0eArgumentsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x028\x01"1\n\x1fSubmitCustomTrainingJobResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\'\n\x15GetTrainingJobRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"a\n\x16GetTrainingJobResponse\x12G\n\x08metadata\x18\x01 \x01(\x0b2+.viam.app.mltraining.v1.TrainingJobMetadataR\x08metadata"\x82\x01\n\x17ListTrainingJobsRequest\x12\'\n\x0forganization_id\x18\x01 \x01(\tR\x0eorganizationId\x12>\n\x06status\x18\x02 \x01(\x0e2&.viam.app.mltraining.v1.TrainingStatusR\x06status"[\n\x18ListTrainingJobsResponse\x12?\n\x04jobs\x18\x01 \x03(\x0b2+.viam.app.mltraining.v1.TrainingJobMetadataR\x04jobs"\
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n#app/mltraining/v1/ml_training.proto\x12\x16viam.app.mltraining.v1\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x16tagger/v1/tagger.proto"\x90\x05\n\x18SubmitTrainingJobRequest\x12G\n\ndataset_id\x18\x07 \x01(\tB(\x9a\x84\x9e\x03#bson:"dataset_id" json:"dataset_id"R\tdatasetId\x12[\n\x0forganization_id\x18\x02 \x01(\tB2\x9a\x84\x9e\x03-bson:"organization_id" json:"organization_id"R\x0eorganizationId\x12G\n\nmodel_name\x18\x03 \x01(\tB(\x9a\x84\x9e\x03#bson:"model_name" json:"model_name"R\tmodelName\x12S\n\rmodel_version\x18\x04 \x01(\tB.\x9a\x84\x9e\x03)bson:"model_version" json:"model_version"R\x0cmodelVersion\x12j\n\nmodel_type\x18\x05 \x01(\x0e2!.viam.app.mltraining.v1.ModelTypeB(\x9a\x84\x9e\x03#bson:"model_type" json:"model_type"R\tmodelType\x12\x83\x01\n\x0fmodel_framework\x18\x08 \x01(\x0e2&.viam.app.mltraining.v1.ModelFrameworkB2\x9a\x84\x9e\x03-bson:"model_framework" json:"model_framework"R\x0emodelFramework\x120\n\x04tags\x18\x06 \x03(\tB\x1c\x9a\x84\x9e\x03\x17bson:"tags" json:"tags"R\x04tagsJ\x04\x08\x01\x10\x02R\x06filter"+\n\x19SubmitTrainingJobResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\xdb\x05\n\x1eSubmitCustomTrainingJobRequest\x12G\n\ndataset_id\x18\x01 \x01(\tB(\x9a\x84\x9e\x03#bson:"dataset_id" json:"dataset_id"R\tdatasetId\x12^\n\x10registry_item_id\x18\x02 \x01(\tB4\x9a\x84\x9e\x03/bson:"registry_item_id" json:"registry_item_id"R\x0eregistryItemId\x12r\n\x15registry_item_version\x18\x06 \x01(\tB>\x9a\x84\x9e\x039bson:"registry_item_version" json:"registry_item_version"R\x13registryItemVersion\x12[\n\x0forganization_id\x18\x03 \x01(\tB2\x9a\x84\x9e\x03-bson:"organization_id" json:"organization_id"R\x0eorganizationId\x12G\n\nmodel_name\x18\x04 \x01(\tB(\x9a\x84\x9e\x03#bson:"model_name" json:"model_name"R\tmodelName\x12S\n\rmodel_version\x18\x05 \x01(\tB.\x9a\x84\x9e\x03)bson:"model_version" json:"model_version"R\x0cmodelVersion\x12c\n\targuments\x18\x07 \x03(\x0b2E.viam.app.mltraining.v1.SubmitCustomTrainingJobRequest.ArgumentsEntryR\targuments\x1a<\n\x0eArgumentsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x028\x01"1\n\x1fSubmitCustomTrainingJobResponse\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\'\n\x15GetTrainingJobRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"a\n\x16GetTrainingJobResponse\x12G\n\x08metadata\x18\x01 \x01(\x0b2+.viam.app.mltraining.v1.TrainingJobMetadataR\x08metadata"\x82\x01\n\x17ListTrainingJobsRequest\x12\'\n\x0forganization_id\x18\x01 \x01(\tR\x0eorganizationId\x12>\n\x06status\x18\x02 \x01(\x0e2&.viam.app.mltraining.v1.TrainingStatusR\x06status"[\n\x18ListTrainingJobsResponse\x12?\n\x04jobs\x18\x01 \x03(\x0b2+.viam.app.mltraining.v1.TrainingJobMetadataR\x04jobs"\xa7\x0f\n\x13TrainingJobMetadata\x123\n\x02id\x18\x07 \x01(\tB#\x9a\x84\x9e\x03\x1ebson:"_id" json:"id,omitempty"R\x02id\x12G\n\ndataset_id\x18\x0b \x01(\tB(\x9a\x84\x9e\x03#bson:"dataset_id" json:"dataset_id"R\tdatasetId\x12[\n\x0forganization_id\x18\x0c \x01(\tB2\x9a\x84\x9e\x03-bson:"organization_id" json:"organization_id"R\x0eorganizationId\x12G\n\nmodel_name\x18\r \x01(\tB(\x9a\x84\x9e\x03#bson:"model_name" json:"model_name"R\tmodelName\x12S\n\rmodel_version\x18\x0e \x01(\tB.\x9a\x84\x9e\x03)bson:"model_version" json:"model_version"R\x0cmodelVersion\x12j\n\nmodel_type\x18\x0f \x01(\x0e2!.viam.app.mltraining.v1.ModelTypeB(\x9a\x84\x9e\x03#bson:"model_type" json:"model_type"R\tmodelType\x12\x83\x01\n\x0fmodel_framework\x18\x11 
\x01(\x0e2&.viam.app.mltraining.v1.ModelFrameworkB2\x9a\x84\x9e\x03-bson:"model_framework" json:"model_framework"R\x0emodelFramework\x12R\n\ris_custom_job\x18\x12 \x01(\x08B.\x9a\x84\x9e\x03)bson:"is_custom_job" json:"is_custom_job"R\x0bisCustomJob\x12^\n\x10registry_item_id\x18\x13 \x01(\tB4\x9a\x84\x9e\x03/bson:"registry_item_id" json:"registry_item_id"R\x0eregistryItemId\x12r\n\x15registry_item_version\x18\x14 \x01(\tB>\x9a\x84\x9e\x039bson:"registry_item_version" json:"registry_item_version"R\x13registryItemVersion\x12`\n\x06status\x18\x02 \x01(\x0e2&.viam.app.mltraining.v1.TrainingStatusB \x9a\x84\x9e\x03\x1bbson:"status" json:"status"R\x06status\x12c\n\x0cerror_status\x18\x08 \x01(\x0b2\x12.google.rpc.StatusB,\x9a\x84\x9e\x03\'bson:"error_status" json:"error_status"R\x0berrorStatus\x12c\n\ncreated_on\x18\x03 \x01(\x0b2\x1a.google.protobuf.TimestampB(\x9a\x84\x9e\x03#bson:"created_on" json:"created_on"R\tcreatedOn\x12o\n\rlast_modified\x18\x04 \x01(\x0b2\x1a.google.protobuf.TimestampB.\x9a\x84\x9e\x03)bson:"last_modified" json:"last_modified"R\x0clastModified\x12{\n\x10training_started\x18\t \x01(\x0b2\x1a.google.protobuf.TimestampB4\x9a\x84\x9e\x03/bson:"training_started" json:"training_started"R\x0ftrainingStarted\x12s\n\x0etraining_ended\x18\n \x01(\x0b2\x1a.google.protobuf.TimestampB0\x9a\x84\x9e\x03+bson:"training_ended" json:"training_ended"R\rtrainingEnded\x12Z\n\x0fsynced_model_id\x18\x05 \x01(\tB2\x9a\x84\x9e\x03-bson:"synced_model_id" json:"synced_model_id"R\rsyncedModelId\x120\n\x04tags\x18\x10 \x03(\tB\x1c\x9a\x84\x9e\x03\x17bson:"tags" json:"tags"R\x04tags\x12\x80\x01\n\targuments\x18\x15 \x03(\x0b2:.viam.app.mltraining.v1.TrainingJobMetadata.ArgumentsEntryB&\x9a\x84\x9e\x03!bson:"arguments" json:"arguments"R\targuments\x1a<\n\x0eArgumentsEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x028\x01J\x04\x08\x01\x10\x02J\x04\x08\x06\x10\x07R\x07requestR\nuser_email"*\n\x18CancelTrainingJobRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"\x1b\n\x19CancelTrainingJobResponse"3\n!DeleteCompletedTrainingJobRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id"$\n"DeleteCompletedTrainingJobResponse"u\n\x13TrainingJobLogEntry\x12\x14\n\x05level\x18\x01 \x01(\tR\x05level\x12.\n\x04time\x18\x02 \x01(\x0b2\x1a.google.protobuf.TimestampR\x04time\x12\x18\n\x07message\x18\x03 \x01(\tR\x07message"^\n\x19GetTrainingJobLogsRequest\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12"\n\npage_token\x18\x02 \x01(\tH\x00R\tpageToken\x88\x01\x01B\r\n\x0b_page_token"\x85\x01\n\x1aGetTrainingJobLogsResponse\x12?\n\x04logs\x18\x01 \x03(\x0b2+.viam.app.mltraining.v1.TrainingJobLogEntryR\x04logs\x12&\n\x0fnext_page_token\x18\x02 
\x01(\tR\rnextPageToken*\x9f\x01\n\tModelType\x12\x1a\n\x16MODEL_TYPE_UNSPECIFIED\x10\x00\x12*\n&MODEL_TYPE_SINGLE_LABEL_CLASSIFICATION\x10\x01\x12)\n%MODEL_TYPE_MULTI_LABEL_CLASSIFICATION\x10\x02\x12\x1f\n\x1bMODEL_TYPE_OBJECT_DETECTION\x10\x03*\xa4\x01\n\x0eModelFramework\x12\x1f\n\x1bMODEL_FRAMEWORK_UNSPECIFIED\x10\x00\x12\x1a\n\x16MODEL_FRAMEWORK_TFLITE\x10\x01\x12\x1e\n\x1aMODEL_FRAMEWORK_TENSORFLOW\x10\x02\x12\x1b\n\x17MODEL_FRAMEWORK_PYTORCH\x10\x03\x12\x18\n\x14MODEL_FRAMEWORK_ONNX\x10\x04*\xe7\x01\n\x0eTrainingStatus\x12\x1f\n\x1bTRAINING_STATUS_UNSPECIFIED\x10\x00\x12\x1b\n\x17TRAINING_STATUS_PENDING\x10\x01\x12\x1f\n\x1bTRAINING_STATUS_IN_PROGRESS\x10\x02\x12\x1d\n\x19TRAINING_STATUS_COMPLETED\x10\x03\x12\x1a\n\x16TRAINING_STATUS_FAILED\x10\x04\x12\x1c\n\x18TRAINING_STATUS_CANCELED\x10\x05\x12\x1d\n\x19TRAINING_STATUS_CANCELING\x10\x062\x8f\x07\n\x11MLTrainingService\x12x\n\x11SubmitTrainingJob\x120.viam.app.mltraining.v1.SubmitTrainingJobRequest\x1a1.viam.app.mltraining.v1.SubmitTrainingJobResponse\x12\x8a\x01\n\x17SubmitCustomTrainingJob\x126.viam.app.mltraining.v1.SubmitCustomTrainingJobRequest\x1a7.viam.app.mltraining.v1.SubmitCustomTrainingJobResponse\x12o\n\x0eGetTrainingJob\x12-.viam.app.mltraining.v1.GetTrainingJobRequest\x1a..viam.app.mltraining.v1.GetTrainingJobResponse\x12u\n\x10ListTrainingJobs\x12/.viam.app.mltraining.v1.ListTrainingJobsRequest\x1a0.viam.app.mltraining.v1.ListTrainingJobsResponse\x12x\n\x11CancelTrainingJob\x120.viam.app.mltraining.v1.CancelTrainingJobRequest\x1a1.viam.app.mltraining.v1.CancelTrainingJobResponse\x12\x93\x01\n\x1aDeleteCompletedTrainingJob\x129.viam.app.mltraining.v1.DeleteCompletedTrainingJobRequest\x1a:.viam.app.mltraining.v1.DeleteCompletedTrainingJobResponse\x12{\n\x12GetTrainingJobLogs\x121.viam.app.mltraining.v1.GetTrainingJobLogsRequest\x1a2.viam.app.mltraining.v1.GetTrainingJobLogsResponseB#Z!go.viam.com/api/app/mltraining/v1b\x06proto3')
 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
 _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'app.mltraining.v1.ml_training_pb2', _globals)
@@ -44,6 +44,8 @@ if not _descriptor._USE_C_DESCRIPTORS:
 _globals['_SUBMITCUSTOMTRAININGJOBREQUEST'].fields_by_name['model_name']._serialized_options = b'\x9a\x84\x9e\x03#bson:"model_name" json:"model_name"'
 _globals['_SUBMITCUSTOMTRAININGJOBREQUEST'].fields_by_name['model_version']._loaded_options = None
 _globals['_SUBMITCUSTOMTRAININGJOBREQUEST'].fields_by_name['model_version']._serialized_options = b'\x9a\x84\x9e\x03)bson:"model_version" json:"model_version"'
+_globals['_TRAININGJOBMETADATA_ARGUMENTSENTRY']._loaded_options = None
+_globals['_TRAININGJOBMETADATA_ARGUMENTSENTRY']._serialized_options = b'8\x01'
 _globals['_TRAININGJOBMETADATA'].fields_by_name['id']._loaded_options = None
 _globals['_TRAININGJOBMETADATA'].fields_by_name['id']._serialized_options = b'\x9a\x84\x9e\x03\x1ebson:"_id" json:"id,omitempty"'
 _globals['_TRAININGJOBMETADATA'].fields_by_name['dataset_id']._loaded_options = None
@@ -80,12 +82,14 @@ if not _descriptor._USE_C_DESCRIPTORS:
 _globals['_TRAININGJOBMETADATA'].fields_by_name['synced_model_id']._serialized_options = b'\x9a\x84\x9e\x03-bson:"synced_model_id" json:"synced_model_id"'
 _globals['_TRAININGJOBMETADATA'].fields_by_name['tags']._loaded_options = None
 _globals['_TRAININGJOBMETADATA'].fields_by_name['tags']._serialized_options = b'\x9a\x84\x9e\x03\x17bson:"tags" json:"tags"'
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
+_globals['_TRAININGJOBMETADATA'].fields_by_name['arguments']._loaded_options = None
+_globals['_TRAININGJOBMETADATA'].fields_by_name['arguments']._serialized_options = b'\x9a\x84\x9e\x03!bson:"arguments" json:"arguments"'
+_globals['_MODELTYPE']._serialized_start = 4478
+_globals['_MODELTYPE']._serialized_end = 4637
+_globals['_MODELFRAMEWORK']._serialized_start = 4640
+_globals['_MODELFRAMEWORK']._serialized_end = 4804
+_globals['_TRAININGSTATUS']._serialized_start = 4807
+_globals['_TRAININGSTATUS']._serialized_end = 5038
 _globals['_SUBMITTRAININGJOBREQUEST']._serialized_start = 146
 _globals['_SUBMITTRAININGJOBREQUEST']._serialized_end = 802
 _globals['_SUBMITTRAININGJOBRESPONSE']._serialized_start = 804
@@ -105,20 +109,22 @@ if not _descriptor._USE_C_DESCRIPTORS:
 _globals['_LISTTRAININGJOBSRESPONSE']._serialized_start = 1907
 _globals['_LISTTRAININGJOBSRESPONSE']._serialized_end = 1998
 _globals['_TRAININGJOBMETADATA']._serialized_start = 2001
-_globals['_TRAININGJOBMETADATA']._serialized_end =
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
-_globals['
+_globals['_TRAININGJOBMETADATA']._serialized_end = 3960
+_globals['_TRAININGJOBMETADATA_ARGUMENTSENTRY']._serialized_start = 1521
+_globals['_TRAININGJOBMETADATA_ARGUMENTSENTRY']._serialized_end = 1581
+_globals['_CANCELTRAININGJOBREQUEST']._serialized_start = 3962
+_globals['_CANCELTRAININGJOBREQUEST']._serialized_end = 4004
+_globals['_CANCELTRAININGJOBRESPONSE']._serialized_start = 4006
+_globals['_CANCELTRAININGJOBRESPONSE']._serialized_end = 4033
+_globals['_DELETECOMPLETEDTRAININGJOBREQUEST']._serialized_start = 4035
+_globals['_DELETECOMPLETEDTRAININGJOBREQUEST']._serialized_end = 4086
+_globals['_DELETECOMPLETEDTRAININGJOBRESPONSE']._serialized_start = 4088
+_globals['_DELETECOMPLETEDTRAININGJOBRESPONSE']._serialized_end = 4124
+_globals['_TRAININGJOBLOGENTRY']._serialized_start = 4126
+_globals['_TRAININGJOBLOGENTRY']._serialized_end = 4243
+_globals['_GETTRAININGJOBLOGSREQUEST']._serialized_start = 4245
+_globals['_GETTRAININGJOBLOGSREQUEST']._serialized_end = 4339
+_globals['_GETTRAININGJOBLOGSRESPONSE']._serialized_start = 4342
+_globals['_GETTRAININGJOBLOGSRESPONSE']._serialized_end = 4475
+_globals['_MLTRAININGSERVICE']._serialized_start = 5041
+_globals['_MLTRAININGSERVICE']._serialized_end = 5952
viam/gen/app/mltraining/v1/ml_training_pb2.pyi
CHANGED
@@ -245,6 +245,20 @@ global___ListTrainingJobsResponse = ListTrainingJobsResponse
 @typing.final
 class TrainingJobMetadata(google.protobuf.message.Message):
     DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    @typing.final
+    class ArgumentsEntry(google.protobuf.message.Message):
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+        KEY_FIELD_NUMBER: builtins.int
+        VALUE_FIELD_NUMBER: builtins.int
+        key: builtins.str
+        value: builtins.str
+
+        def __init__(self, *, key: builtins.str=..., value: builtins.str=...) -> None:
+            ...
+
+        def ClearField(self, field_name: typing.Literal['key', b'key', 'value', b'value']) -> None:
+            ...
     ID_FIELD_NUMBER: builtins.int
     DATASET_ID_FIELD_NUMBER: builtins.int
     ORGANIZATION_ID_FIELD_NUMBER: builtins.int
@@ -263,6 +277,7 @@ class TrainingJobMetadata(google.protobuf.message.Message):
     TRAINING_ENDED_FIELD_NUMBER: builtins.int
     SYNCED_MODEL_ID_FIELD_NUMBER: builtins.int
     TAGS_FIELD_NUMBER: builtins.int
+    ARGUMENTS_FIELD_NUMBER: builtins.int
     id: builtins.str
     dataset_id: builtins.str
     organization_id: builtins.str
@@ -300,13 +315,17 @@ class TrainingJobMetadata(google.protobuf.message.Message):
     def tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
         ...

-
+    @property
+    def arguments(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
+        ...
+
+    def __init__(self, *, id: builtins.str=..., dataset_id: builtins.str=..., organization_id: builtins.str=..., model_name: builtins.str=..., model_version: builtins.str=..., model_type: global___ModelType.ValueType=..., model_framework: global___ModelFramework.ValueType=..., is_custom_job: builtins.bool=..., registry_item_id: builtins.str=..., registry_item_version: builtins.str=..., status: global___TrainingStatus.ValueType=..., error_status: google.rpc.status_pb2.Status | None=..., created_on: google.protobuf.timestamp_pb2.Timestamp | None=..., last_modified: google.protobuf.timestamp_pb2.Timestamp | None=..., training_started: google.protobuf.timestamp_pb2.Timestamp | None=..., training_ended: google.protobuf.timestamp_pb2.Timestamp | None=..., synced_model_id: builtins.str=..., tags: collections.abc.Iterable[builtins.str] | None=..., arguments: collections.abc.Mapping[builtins.str, builtins.str] | None=...) -> None:
         ...

     def HasField(self, field_name: typing.Literal['created_on', b'created_on', 'error_status', b'error_status', 'last_modified', b'last_modified', 'training_ended', b'training_ended', 'training_started', b'training_started']) -> builtins.bool:
         ...

-    def ClearField(self, field_name: typing.Literal['created_on', b'created_on', 'dataset_id', b'dataset_id', 'error_status', b'error_status', 'id', b'id', 'is_custom_job', b'is_custom_job', 'last_modified', b'last_modified', 'model_framework', b'model_framework', 'model_name', b'model_name', 'model_type', b'model_type', 'model_version', b'model_version', 'organization_id', b'organization_id', 'registry_item_id', b'registry_item_id', 'registry_item_version', b'registry_item_version', 'status', b'status', 'synced_model_id', b'synced_model_id', 'tags', b'tags', 'training_ended', b'training_ended', 'training_started', b'training_started']) -> None:
+    def ClearField(self, field_name: typing.Literal['arguments', b'arguments', 'created_on', b'created_on', 'dataset_id', b'dataset_id', 'error_status', b'error_status', 'id', b'id', 'is_custom_job', b'is_custom_job', 'last_modified', b'last_modified', 'model_framework', b'model_framework', 'model_name', b'model_name', 'model_type', b'model_type', 'model_version', b'model_version', 'organization_id', b'organization_id', 'registry_item_id', b'registry_item_id', 'registry_item_version', b'registry_item_version', 'status', b'status', 'synced_model_id', b'synced_model_id', 'tags', b'tags', 'training_ended', b'training_ended', 'training_started', b'training_started']) -> None:
         ...
 global___TrainingJobMetadata = TrainingJobMetadata

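The regenerated stubs add an `arguments` string map to TrainingJobMetadata (with the nested ArgumentsEntry type). A small sketch of constructing and reading the field through the generated message, with placeholder values::

    from viam.gen.app.mltraining.v1.ml_training_pb2 import TrainingJobMetadata

    # The new map field behaves like a dict of str -> str on the message.
    meta = TrainingJobMetadata(id="job-123", arguments={"num_epochs": "50", "batch_size": "32"})
    print(dict(meta.arguments))            # {'num_epochs': '50', 'batch_size': '32'}
    meta.arguments["learning_rate"] = "0.001"
    meta.ClearField("arguments")           # now accepted by ClearField, per the updated .pyi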
viam/module/module.py
CHANGED
@@ -3,6 +3,7 @@ import io
 import logging as pylogging
 import os
 import sys
+from collections.abc import Iterable
 from inspect import iscoroutinefunction
 from threading import Lock
 from typing import List, Mapping, Optional, Sequence, Tuple
@@ -275,7 +276,28 @@
         model = Model.from_string(config.model)
         validator = Registry.lookup_validator(api, model)
         try:
-
-
+            # backwards compatibility. Support both ([], []) or [] with deprecation warning.
+            # If user's validate returns [str], it will be treated as required dependencies only.
+            # Incorect formats, e.g. int, will raise ValidationError.
+            _validator_return_test = validator(config)
+            if not (isinstance(_validator_return_test, tuple) and len(_validator_return_test) == 2):
+                msg = f"Your validate function {validator.__name__} did not return \
+type tuple[Sequence[str], Sequence[str]]. Got {_validator_return_test}."
+                self.logger.warning(msg)
+                if (isinstance(_validator_return_test, Iterable) and not isinstance(_validator_return_test, str)) and all(
+                    isinstance(e, str)
+                    for e in _validator_return_test  # type: ignore
+                ):
+                    self.logger.warning(
+                        f"Detected deprecated validate function signature. \
+Treating all dependencies {_validator_return_test} as required dependencies. \
+Please update to new signature Tuple[Sequence[str], Sequence[str]] soon."
+                    )
+                    return ValidateConfigResponse(dependencies=_validator_return_test)
+                else:
+                    raise ValidationError(msg)
+
+            dependencies, optional_dependencies = _validator_return_test
+            return ValidateConfigResponse(dependencies=dependencies, optional_dependencies=optional_dependencies)
         except Exception as e:
             raise ValidationError(f"{type(Exception)}: {e}").grpc_error
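The module.py change keeps older modules working: a validator may still return a bare list of dependency names (treated as all-required, with a deprecation warning), while the new contract is a (required, optional) tuple. A sketch of a new-style validator is below; the attribute names and the struct_to_dict helper are illustrative assumptions, not part of this diff::

    from typing import Sequence, Tuple

    from viam.proto.app.robot import ComponentConfig
    from viam.utils import struct_to_dict  # assumed helper from the existing SDK


    def validate_config(config: ComponentConfig) -> Tuple[Sequence[str], Sequence[str]]:
        """Return (required_dependencies, optional_dependencies) under the new contract."""
        attrs = struct_to_dict(config.attributes)
        camera = attrs.get("camera", "")
        if not isinstance(camera, str) or not camera:
            raise ValueError("a 'camera' attribute naming a camera resource is required")
        required = [camera]
        # An optional motion service, listed separately so the module can start without it.
        optional = [attrs["motion"]] if isinstance(attrs.get("motion"), str) else []
        return required, optional


    # A legacy validator returning only a list still passes Module validation,
    # but is logged as deprecated and all of its entries are treated as required:
    def legacy_validate(config: ComponentConfig) -> Sequence[str]:
        return ["some-required-resource"]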
viam/resource/easy_resource.py
CHANGED
@@ -1,7 +1,7 @@
 import inspect
 import re
 from abc import ABCMeta
-from typing import Callable, ClassVar, Mapping, Sequence, Union
+from typing import Callable, ClassVar, Mapping, Sequence, Tuple, Union

 from viam.proto.app.robot import ComponentConfig
 from viam.proto.common import ResourceName
@@ -122,7 +122,7 @@ class EasyResource:
         return self

     @classmethod
-    def validate_config(cls, config: ComponentConfig) -> Sequence[str]:
+    def validate_config(cls, config: ComponentConfig) -> Tuple[Sequence[str], Sequence[str]]:
         """This method allows you to validate the configuration object received from the machine,
         as well as to return any implicit dependencies based on that `config`.

@@ -130,9 +130,9 @@ class EasyResource:
             config (ComponentConfig): The configuration for this resource

         Returns:
-            Sequence[str]:
+            Tuple[Sequence[str], Sequence[str]]: One list of required implicit dependencies and one of optional deps.
         """
-        return []
+        return [], []

     @classmethod
     def register(cls):
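With the default now returning ([], []), subclasses that override validate_config should adopt the two-list return as well. A hedged sketch of an EasyResource-based model doing so follows; the Sensor base class, Model/ModelFamily naming, and the "board" attribute are assumptions for illustration, and the rest of the component implementation is omitted::

    from typing import ClassVar, Sequence, Tuple

    from viam.components.sensor import Sensor            # assumed component base from the SDK
    from viam.proto.app.robot import ComponentConfig
    from viam.resource.easy_resource import EasyResource
    from viam.resource.types import Model, ModelFamily   # assumed location of Model/ModelFamily


    class MySensor(Sensor, EasyResource):
        MODEL: ClassVar[Model] = Model(ModelFamily("acme", "demo"), "my-sensor")

        @classmethod
        def validate_config(cls, config: ComponentConfig) -> Tuple[Sequence[str], Sequence[str]]:
            # Require the board named in the config; no optional dependencies.
            board_name = config.attributes.fields["board"].string_value
            if not board_name:
                raise ValueError("'board' attribute is required")
            return [board_name], []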
viam/resource/registry.py
CHANGED
@@ -27,7 +27,7 @@ class ResourceCreatorRegistration:
    """A function that can create a resource given a mapping of dependencies (``ResourceName`` to ``ResourceBase``
    """

-    validator: "Validator" = lambda x: []
+    validator: "Validator" = lambda x: ([], [])
    """A function that can validate a resource and return implicit dependencies.

    If called without a validator function, default to a function returning an empty Sequence
@@ -170,7 +170,7 @@ class Registry:
         try:
             return cls._RESOURCES[f"{api}/{model}"].validator
         except AttributeError:
-            return lambda x: []
+            return lambda x: ([], [])
         except KeyError:
             raise ResourceNotFoundError(api.resource_type, api.resource_subtype)

viam/resource/types.py
CHANGED
@@ -1,6 +1,6 @@
 import re
 import sys
-from typing import TYPE_CHECKING, Callable, ClassVar, Mapping, Optional, Protocol, Sequence, runtime_checkable
+from typing import TYPE_CHECKING, Callable, ClassVar, Mapping, Optional, Protocol, Sequence, Tuple, runtime_checkable

 if sys.version_info >= (3, 10):
     from typing import TypeAlias
@@ -203,7 +203,7 @@ def resource_name_from_string(string: str) -> ResourceName:


 ResourceCreator: TypeAlias = Callable[[ComponentConfig, Mapping[ResourceName, "ResourceBase"]], "ResourceBase"]
-Validator: TypeAlias = Callable[[ComponentConfig], Sequence[str]]
+Validator: TypeAlias = Callable[[ComponentConfig], Tuple[Sequence[str], Sequence[str]]]


 @runtime_checkable
{viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/RECORD
CHANGED
@@ -7,12 +7,12 @@ viam/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/sessions_client.py,sha256=E4ThFCK1HCX_iAvEymvCdJ-H0ZlouzgoIwIE_nywfqc,9414
 viam/streams.py,sha256=VoM8FSMuGZmv4RPDHQy4FfHvJq36r4NY--gkQoaFkzs,1042
 viam/utils.py,sha256=xz7qb6bM-2qzOSQSsYHBheBidMbUAlQ2dHCi_GyPFnk,13579
-viam/version_metadata.py,sha256=
+viam/version_metadata.py,sha256=eukUREKq_2FPu6RinvsfKUpNmEuHXG_eSHPj7ptyrhM,75
 viam/app/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/app/_logs.py,sha256=hWxRELRJCux-fQIQtDwztD3odjkBm7Vo8HUQ7XGICek,806
 viam/app/app_client.py,sha256=kTQOyrN1LTRDEcozTcwAK-IDzmZ_YL0cCfbmVWKLiJ0,114986
 viam/app/billing_client.py,sha256=eZ0KOpKOVEcqPxzAhUfn3XXJdhwAPVv_jPbIg1sc0uk,5767
-viam/app/data_client.py,sha256=
+viam/app/data_client.py,sha256=f2kcr9bLWWbobwfjV4VzbNowPn9Io--n3Xfh5TM98sE,87278
 viam/app/ml_training_client.py,sha256=qcnVrYETdIRusfHLpIOG8Q-QmqDIU43dKmv0pTJ5mA8,9269
 viam/app/provisioning_client.py,sha256=Irh3waRpggZe-pPyrgk4D9fAQ48JjDE5spYyIFOJc3Y,3536
 viam/app/viam_client.py,sha256=fJYa79_WwHFRKcDBzJWzV7Hgo0U1CBwoaDuW8vhkWLU,10820
@@ -130,8 +130,8 @@ viam/gen/app/mlinference/v1/ml_inference_pb2.pyi,sha256=F7Rd9y3rZoM3KZ114xxF8Klb
 viam/gen/app/mltraining/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/gen/app/mltraining/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/gen/app/mltraining/v1/ml_training_grpc.py,sha256=pNG71h3FqXQBHsfEgRA8GOm_MxKjN-4lAjkDJINjrJU,8047
-viam/gen/app/mltraining/v1/ml_training_pb2.py,sha256=
-viam/gen/app/mltraining/v1/ml_training_pb2.pyi,sha256=
+viam/gen/app/mltraining/v1/ml_training_pb2.py,sha256=blCC52-wPG9SaJBfkBSTf5hr7NIKdw6PT5yJKYjJF2M,20456
+viam/gen/app/mltraining/v1/ml_training_pb2.pyi,sha256=hCmnEVRJvQIATIguh--GC4-xjv2gGVE_bUoarM3QHxc,18517
 viam/gen/app/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/gen/app/packages/v1/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/gen/app/packages/v1/packages_grpc.py,sha256=r_wO6tv0MEzVhRIA2R6ryJWCRkxzW6IUatpr8tkMJgI,4260
@@ -360,7 +360,7 @@ viam/media/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,
 viam/media/utils/pil/__init__.py,sha256=b8jSuxOzm1o9CMaJ186V5dMufw4_jXZG5g2U_EJ3bKQ,1466
 viam/media/utils/pil/viam_rgba_plugin.py,sha256=3vIevKK4pnB9EJV_n7bho3XEAzFyuMb3uRTedsvizxI,2563
 viam/module/__init__.py,sha256=BV5kJ-qjlGJzlHdTyR2fZGblymflEcuJU2bFVqPjyeU,56
-viam/module/module.py,sha256=
+viam/module/module.py,sha256=u1o-wRO-HSmEfCDFGpVCTXu13jqED4XhQsEwKm_uPfs,13436
 viam/module/service.py,sha256=QdMVFXWI1aylZOdnSeDUu-LuZ8qIWEs9QihMDyCqnjg,2383
 viam/module/types.py,sha256=SLcSFN99lon-9q0eqXt5vTELS1HaAyROh27XIzX2LlU,793
 viam/proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -424,12 +424,12 @@ viam/proto/stream/__init__.py,sha256=ZTjtPqIsjEiH5AkXj2yqgJesAkTx2Z4iJzKRZKZg7Bs
 viam/proto/tagger/__init__.py,sha256=8qjke19IIdi8HlxjlE9ntAS7GTPzR2SVla8lhslZNTs,64
 viam/resource/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/resource/base.py,sha256=3HGO5KQ6kgEeFlsyJWYr2Nlem7BYERPrYXyNyvydP2w,3694
-viam/resource/easy_resource.py,sha256=
+viam/resource/easy_resource.py,sha256=S8gVJi1FB0XiTrNpmGJC8O8CQLMM9ycBBrOyqz1rLWw,5621
 viam/resource/manager.py,sha256=aCJs-a0fHqvV-suiYkIn3CKUnvj-qgsI2FHK5CKj3M0,4913
-viam/resource/registry.py,sha256=
+viam/resource/registry.py,sha256=XAiJsf3y0C5VnJl-Iy77CqMzFdq1RBLOXqSiFXZ7QmE,7367
 viam/resource/rpc_client_base.py,sha256=2OqcRHZLShuJqcmKimzU4Wr3VuHGNc8fzV4HbtIOAXU,2054
 viam/resource/rpc_service_base.py,sha256=f2vGY4mfcdOPCgBVk7EVFKDIwGAPxu66e0XJ55VIen8,1612
-viam/resource/types.py,sha256=
+viam/resource/types.py,sha256=8r2RjTQN9hOg6Po9XHMgj7xfR5kTsFr9_KNNn_62qGk,6739
 viam/robot/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 viam/robot/client.py,sha256=eLxP_lpiQq7t8bRZCIzWFWMr83nCfqrA3Lf8H7CtzIo,33590
 viam/robot/service.py,sha256=4kn2px4YRkM7083CNgs01FO7xPRe7nhtuDst3IMXjo8,2764
@@ -471,7 +471,7 @@ viam/services/vision/__init__.py,sha256=g4Dnz6BFgVZpvE6j6sNwxAc2baZfAKLA6XS2GaGg
 viam/services/vision/client.py,sha256=uaFMlYwKE5N_Qp803aFGfk8k6KCWPKNwUxyV_fnlBp4,7776
 viam/services/vision/service.py,sha256=NWV3yY1ZyEbl4rrp28CtX9n-uIgyxSQbbKJTeoIVjCg,6942
 viam/services/vision/vision.py,sha256=MrQsTarahwGMEmqWX4ODCJT5wLOGHXNEuEyTQgOvsg4,12527
-viam_sdk-0.
-viam_sdk-0.
-viam_sdk-0.
-viam_sdk-0.
+viam_sdk-0.47.0.dist-info/METADATA,sha256=t1rIDXHknEldULZV8OjlcVSeG1IRqk5vEDrgpcqmzIs,10310
+viam_sdk-0.47.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+viam_sdk-0.47.0.dist-info/licenses/LICENSE,sha256=yVuuHRzgI17MzTVgt3LsHvuX80innw--CmNPDCzO_iw,11358
+viam_sdk-0.47.0.dist-info/RECORD,,
{viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/WHEEL
File without changes
{viam_sdk-0.46.0.dist-info → viam_sdk-0.47.0.dist-info}/licenses/LICENSE
File without changes