nominal-api 0.587.0__py3-none-any.whl → 0.589.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of nominal-api might be problematic; see the release details below for more information.

nominal_api/__init__.py CHANGED
@@ -74,5 +74,5 @@ __all__ = [
74
74
 
75
75
  __conjure_generator_version__ = "4.9.0"
76
76
 
77
- __version__ = "0.587.0"
77
+ __version__ = "0.589.0"
78
78
 
nominal_api/_impl.py CHANGED
@@ -2068,6 +2068,40 @@ user is authorized to access.
2068
2068
  _decoder = ConjureDecoder()
2069
2069
  return _decoder.decode(_response.json(), List[str], self._return_none_for_unknown_union_types)
2070
2070
 
2071
+ def batch_get_workspace_for_resource(self, auth_header: str, request: List[str] = None) -> Dict[str, str]:
2072
+ """
2073
+ Given a set of resources, returns the workspace that each resource belongs to. If a user
2074
+ is not authorized on the resource, will omit the resource from the response.
2075
+ """
2076
+ request = request if request is not None else []
2077
+
2078
+ _headers: Dict[str, Any] = {
2079
+ 'Accept': 'application/json',
2080
+ 'Content-Type': 'application/json',
2081
+ 'Authorization': auth_header,
2082
+ }
2083
+
2084
+ _params: Dict[str, Any] = {
2085
+ }
2086
+
2087
+ _path_params: Dict[str, Any] = {
2088
+ }
2089
+
2090
+ _json: Any = ConjureEncoder().default(request)
2091
+
2092
+ _path = '/authorization/v1/batch-get-workspace-for-resource'
2093
+ _path = _path.format(**_path_params)
2094
+
2095
+ _response: Response = self._request(
2096
+ 'POST',
2097
+ self._uri + _path,
2098
+ params=_params,
2099
+ headers=_headers,
2100
+ json=_json)
2101
+
2102
+ _decoder = ConjureDecoder()
2103
+ return _decoder.decode(_response.json(), Dict[str, api_rids_WorkspaceRid], self._return_none_for_unknown_union_types)
2104
+
2071
2105
  def register(self, auth_header: str, register_request: "authorization_RegistrationRequest") -> None:
2072
2106
  """
2073
2107
  Marks a resource as belonging to an organization.
@@ -4842,16 +4876,18 @@ class datasource_logset_api_CreateLogSetRequest(ConjureBeanType):
4842
4876
  'name': ConjureFieldDefinition('name', str),
4843
4877
  'description': ConjureFieldDefinition('description', OptionalTypeWrapper[str]),
4844
4878
  'origin_metadata': ConjureFieldDefinition('originMetadata', Dict[str, str]),
4845
- 'timestamp_type': ConjureFieldDefinition('timestampType', datasource_TimestampType)
4879
+ 'timestamp_type': ConjureFieldDefinition('timestampType', datasource_TimestampType),
4880
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
4846
4881
  }
4847
4882
 
4848
- __slots__: List[str] = ['_name', '_description', '_origin_metadata', '_timestamp_type']
4883
+ __slots__: List[str] = ['_name', '_description', '_origin_metadata', '_timestamp_type', '_workspace']
4849
4884
 
4850
- def __init__(self, name: str, origin_metadata: Dict[str, str], timestamp_type: "datasource_TimestampType", description: Optional[str] = None) -> None:
4885
+ def __init__(self, name: str, origin_metadata: Dict[str, str], timestamp_type: "datasource_TimestampType", description: Optional[str] = None, workspace: Optional[str] = None) -> None:
4851
4886
  self._name = name
4852
4887
  self._description = description
4853
4888
  self._origin_metadata = origin_metadata
4854
4889
  self._timestamp_type = timestamp_type
4890
+ self._workspace = workspace
4855
4891
 
4856
4892
  @builtins.property
4857
4893
  def name(self) -> str:
@@ -4869,6 +4905,14 @@ class datasource_logset_api_CreateLogSetRequest(ConjureBeanType):
4869
4905
  def timestamp_type(self) -> "datasource_TimestampType":
4870
4906
  return self._timestamp_type
4871
4907
 
4908
+ @builtins.property
4909
+ def workspace(self) -> Optional[str]:
4910
+ """
4911
+ The workspace in which to create the logset. If not provided, the logset will be created in the default workspace for
4912
+ the user's organization, if the default workspace for the organization is configured.
4913
+ """
4914
+ return self._workspace
4915
+
4872
4916
 
4873
4917
  datasource_logset_api_CreateLogSetRequest.__name__ = "CreateLogSetRequest"
4874
4918
  datasource_logset_api_CreateLogSetRequest.__qualname__ = "CreateLogSetRequest"
@@ -5138,14 +5182,16 @@ class datasource_logset_api_SearchLogSetsRequest(ConjureBeanType):
5138
5182
  def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
5139
5183
  return {
5140
5184
  'token': ConjureFieldDefinition('token', OptionalTypeWrapper[api_Token]),
5141
- 'page_size': ConjureFieldDefinition('pageSize', OptionalTypeWrapper[int])
5185
+ 'page_size': ConjureFieldDefinition('pageSize', OptionalTypeWrapper[int]),
5186
+ 'workspaces': ConjureFieldDefinition('workspaces', List[api_ids_WorkspaceId])
5142
5187
  }
5143
5188
 
5144
- __slots__: List[str] = ['_token', '_page_size']
5189
+ __slots__: List[str] = ['_token', '_page_size', '_workspaces']
5145
5190
 
5146
- def __init__(self, page_size: Optional[int] = None, token: Optional[str] = None) -> None:
5191
+ def __init__(self, workspaces: List[str], page_size: Optional[int] = None, token: Optional[str] = None) -> None:
5147
5192
  self._token = token
5148
5193
  self._page_size = page_size
5194
+ self._workspaces = workspaces
5149
5195
 
5150
5196
  @builtins.property
5151
5197
  def token(self) -> Optional[str]:
@@ -5158,6 +5204,14 @@ class datasource_logset_api_SearchLogSetsRequest(ConjureBeanType):
5158
5204
  """
5159
5205
  return self._page_size
5160
5206
 
5207
+ @builtins.property
5208
+ def workspaces(self) -> List[str]:
5209
+ """
5210
+ If supplied, will return only the log sets within the supplied workspaces. If empty
5211
+ will return all connections for workspaces that the user is permitted to see.
5212
+ """
5213
+ return self._workspaces
5214
+
5161
5215
 
5162
5216
  datasource_logset_api_SearchLogSetsRequest.__name__ = "SearchLogSetsRequest"
5163
5217
  datasource_logset_api_SearchLogSetsRequest.__qualname__ = "SearchLogSetsRequest"
@@ -6825,16 +6879,18 @@ class ingest_api_DeprecatedTriggerIngest(ConjureBeanType):
6825
6879
  'source': ConjureFieldDefinition('source', ingest_api_IngestSource),
6826
6880
  'properties': ConjureFieldDefinition('properties', Dict[str, str]),
6827
6881
  'dataset_name': ConjureFieldDefinition('datasetName', OptionalTypeWrapper[str]),
6828
- 'timestamp_metadata': ConjureFieldDefinition('timestampMetadata', OptionalTypeWrapper[ingest_api_DeprecatedTimestampMetadata])
6882
+ 'timestamp_metadata': ConjureFieldDefinition('timestampMetadata', OptionalTypeWrapper[ingest_api_DeprecatedTimestampMetadata]),
6883
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
6829
6884
  }
6830
6885
 
6831
- __slots__: List[str] = ['_source', '_properties', '_dataset_name', '_timestamp_metadata']
6886
+ __slots__: List[str] = ['_source', '_properties', '_dataset_name', '_timestamp_metadata', '_workspace']
6832
6887
 
6833
- def __init__(self, properties: Dict[str, str], source: "ingest_api_IngestSource", dataset_name: Optional[str] = None, timestamp_metadata: Optional["ingest_api_DeprecatedTimestampMetadata"] = None) -> None:
6888
+ def __init__(self, properties: Dict[str, str], source: "ingest_api_IngestSource", dataset_name: Optional[str] = None, timestamp_metadata: Optional["ingest_api_DeprecatedTimestampMetadata"] = None, workspace: Optional[str] = None) -> None:
6834
6889
  self._source = source
6835
6890
  self._properties = properties
6836
6891
  self._dataset_name = dataset_name
6837
6892
  self._timestamp_metadata = timestamp_metadata
6893
+ self._workspace = workspace
6838
6894
 
6839
6895
  @builtins.property
6840
6896
  def source(self) -> "ingest_api_IngestSource":
@@ -6852,6 +6908,14 @@ class ingest_api_DeprecatedTriggerIngest(ConjureBeanType):
6852
6908
  def timestamp_metadata(self) -> Optional["ingest_api_DeprecatedTimestampMetadata"]:
6853
6909
  return self._timestamp_metadata
6854
6910
 
6911
+ @builtins.property
6912
+ def workspace(self) -> Optional[str]:
6913
+ """
6914
+ The workspace in which to create the dataset. If not provided, the dataset will be created in the default workspace for
6915
+ the user's organization, if the default workspace for the organization is configured.
6916
+ """
6917
+ return self._workspace
6918
+
6855
6919
 
6856
6920
  ingest_api_DeprecatedTriggerIngest.__name__ = "DeprecatedTriggerIngest"
6857
6921
  ingest_api_DeprecatedTriggerIngest.__qualname__ = "DeprecatedTriggerIngest"
@@ -7254,12 +7318,13 @@ class ingest_api_IngestMcapRequest(ConjureBeanType):
7254
7318
  'properties': ConjureFieldDefinition('properties', Dict[api_PropertyName, api_PropertyValue]),
7255
7319
  'labels': ConjureFieldDefinition('labels', List[api_Label]),
7256
7320
  'title': ConjureFieldDefinition('title', OptionalTypeWrapper[str]),
7257
- 'description': ConjureFieldDefinition('description', OptionalTypeWrapper[str])
7321
+ 'description': ConjureFieldDefinition('description', OptionalTypeWrapper[str]),
7322
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
7258
7323
  }
7259
7324
 
7260
- __slots__: List[str] = ['_sources', '_channel_config', '_channels', '_properties', '_labels', '_title', '_description']
7325
+ __slots__: List[str] = ['_sources', '_channel_config', '_channels', '_properties', '_labels', '_title', '_description', '_workspace']
7261
7326
 
7262
- def __init__(self, channel_config: List["ingest_api_McapChannelConfig"], labels: List[str], properties: Dict[str, str], sources: List["ingest_api_IngestSource"], channels: Optional["ingest_api_McapChannels"] = None, description: Optional[str] = None, title: Optional[str] = None) -> None:
7327
+ def __init__(self, channel_config: List["ingest_api_McapChannelConfig"], labels: List[str], properties: Dict[str, str], sources: List["ingest_api_IngestSource"], channels: Optional["ingest_api_McapChannels"] = None, description: Optional[str] = None, title: Optional[str] = None, workspace: Optional[str] = None) -> None:
7263
7328
  self._sources = sources
7264
7329
  self._channel_config = channel_config
7265
7330
  self._channels = channels
@@ -7267,6 +7332,7 @@ class ingest_api_IngestMcapRequest(ConjureBeanType):
7267
7332
  self._labels = labels
7268
7333
  self._title = title
7269
7334
  self._description = description
7335
+ self._workspace = workspace
7270
7336
 
7271
7337
  @builtins.property
7272
7338
  def sources(self) -> List["ingest_api_IngestSource"]:
@@ -7305,6 +7371,14 @@ channels with config, otherwise the mcap may not be supported.
7305
7371
  def description(self) -> Optional[str]:
7306
7372
  return self._description
7307
7373
 
7374
+ @builtins.property
7375
+ def workspace(self) -> Optional[str]:
7376
+ """
7377
+ The workspace in which to create the dataset. If not provided, the dataset will be created in the default workspace for
7378
+ the user's organization, if the default workspace for the organization is configured.
7379
+ """
7380
+ return self._workspace
7381
+
7308
7382
 
7309
7383
  ingest_api_IngestMcapRequest.__name__ = "IngestMcapRequest"
7310
7384
  ingest_api_IngestMcapRequest.__qualname__ = "IngestMcapRequest"
@@ -7624,12 +7698,13 @@ class ingest_api_IngestRunRequest(ConjureBeanType):
7624
7698
  'properties': ConjureFieldDefinition('properties', Dict[api_PropertyName, api_PropertyValue]),
7625
7699
  'labels': ConjureFieldDefinition('labels', List[api_Label]),
7626
7700
  'run_prefix': ConjureFieldDefinition('runPrefix', OptionalTypeWrapper[str]),
7627
- 'data_sources': ConjureFieldDefinition('dataSources', Dict[ingest_api_DataSourceRefName, ingest_api_IngestRunDataSource])
7701
+ 'data_sources': ConjureFieldDefinition('dataSources', Dict[ingest_api_DataSourceRefName, ingest_api_IngestRunDataSource]),
7702
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
7628
7703
  }
7629
7704
 
7630
- __slots__: List[str] = ['_rid', '_title', '_description', '_start_time', '_end_time', '_properties', '_labels', '_run_prefix', '_data_sources']
7705
+ __slots__: List[str] = ['_rid', '_title', '_description', '_start_time', '_end_time', '_properties', '_labels', '_run_prefix', '_data_sources', '_workspace']
7631
7706
 
7632
- def __init__(self, data_sources: Dict[str, "ingest_api_IngestRunDataSource"], description: str, labels: List[str], properties: Dict[str, str], start_time: "ingest_api_UtcTimestamp", title: str, end_time: Optional["ingest_api_UtcTimestamp"] = None, rid: Optional[str] = None, run_prefix: Optional[str] = None) -> None:
7707
+ def __init__(self, data_sources: Dict[str, "ingest_api_IngestRunDataSource"], description: str, labels: List[str], properties: Dict[str, str], start_time: "ingest_api_UtcTimestamp", title: str, end_time: Optional["ingest_api_UtcTimestamp"] = None, rid: Optional[str] = None, run_prefix: Optional[str] = None, workspace: Optional[str] = None) -> None:
7633
7708
  self._rid = rid
7634
7709
  self._title = title
7635
7710
  self._description = description
@@ -7639,6 +7714,7 @@ class ingest_api_IngestRunRequest(ConjureBeanType):
7639
7714
  self._labels = labels
7640
7715
  self._run_prefix = run_prefix
7641
7716
  self._data_sources = data_sources
7717
+ self._workspace = workspace
7642
7718
 
7643
7719
  @builtins.property
7644
7720
  def rid(self) -> Optional[str]:
@@ -7682,6 +7758,14 @@ class ingest_api_IngestRunRequest(ConjureBeanType):
7682
7758
  def data_sources(self) -> Dict[str, "ingest_api_IngestRunDataSource"]:
7683
7759
  return self._data_sources
7684
7760
 
7761
+ @builtins.property
7762
+ def workspace(self) -> Optional[str]:
7763
+ """
7764
+ The workspace in which to create the dataset. If not provided, the dataset will be created in the default workspace for
7765
+ the user's organization, if the default workspace for the organization is configured.
7766
+ """
7767
+ return self._workspace
7768
+
7685
7769
 
7686
7770
  ingest_api_IngestRunRequest.__name__ = "IngestRunRequest"
7687
7771
  ingest_api_IngestRunRequest.__qualname__ = "IngestRunRequest"
@@ -7912,7 +7996,6 @@ existing one.
7912
7996
  def ingest_mcap(self, auth_header: str, ingest_video: "ingest_api_IngestMcapRequest") -> "ingest_api_IngestMcapResponse":
7913
7997
  """
7914
7998
  Ingests data from mcap files in the S3 Nominal upload bucket.
7915
- Currently only supports ingesting video channels.
7916
7999
  """
7917
8000
 
7918
8001
  _headers: Dict[str, Any] = {
@@ -8897,17 +8980,19 @@ class ingest_api_NewDatasetIngestDestination(ConjureBeanType):
8897
8980
  'dataset_description': ConjureFieldDefinition('datasetDescription', OptionalTypeWrapper[str]),
8898
8981
  'properties': ConjureFieldDefinition('properties', Dict[api_PropertyName, api_PropertyValue]),
8899
8982
  'labels': ConjureFieldDefinition('labels', List[api_Label]),
8900
- 'channel_config': ConjureFieldDefinition('channelConfig', OptionalTypeWrapper[ingest_api_ChannelConfig])
8983
+ 'channel_config': ConjureFieldDefinition('channelConfig', OptionalTypeWrapper[ingest_api_ChannelConfig]),
8984
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
8901
8985
  }
8902
8986
 
8903
- __slots__: List[str] = ['_dataset_name', '_dataset_description', '_properties', '_labels', '_channel_config']
8987
+ __slots__: List[str] = ['_dataset_name', '_dataset_description', '_properties', '_labels', '_channel_config', '_workspace']
8904
8988
 
8905
- def __init__(self, labels: List[str], properties: Dict[str, str], channel_config: Optional["ingest_api_ChannelConfig"] = None, dataset_description: Optional[str] = None, dataset_name: Optional[str] = None) -> None:
8989
+ def __init__(self, labels: List[str], properties: Dict[str, str], channel_config: Optional["ingest_api_ChannelConfig"] = None, dataset_description: Optional[str] = None, dataset_name: Optional[str] = None, workspace: Optional[str] = None) -> None:
8906
8990
  self._dataset_name = dataset_name
8907
8991
  self._dataset_description = dataset_description
8908
8992
  self._properties = properties
8909
8993
  self._labels = labels
8910
8994
  self._channel_config = channel_config
8995
+ self._workspace = workspace
8911
8996
 
8912
8997
  @builtins.property
8913
8998
  def dataset_name(self) -> Optional[str]:
@@ -8929,6 +9014,14 @@ class ingest_api_NewDatasetIngestDestination(ConjureBeanType):
8929
9014
  def channel_config(self) -> Optional["ingest_api_ChannelConfig"]:
8930
9015
  return self._channel_config
8931
9016
 
9017
+ @builtins.property
9018
+ def workspace(self) -> Optional[str]:
9019
+ """
9020
+ The workspace in which to create the dataset. If not provided, the dataset will be created in the default workspace for
9021
+ the user's organization, if the default workspace for the organization is configured.
9022
+ """
9023
+ return self._workspace
9024
+
8932
9025
 
8933
9026
  ingest_api_NewDatasetIngestDestination.__name__ = "NewDatasetIngestDestination"
8934
9027
  ingest_api_NewDatasetIngestDestination.__qualname__ = "NewDatasetIngestDestination"
@@ -9558,12 +9651,13 @@ class ingest_api_TriggerIngest(ConjureBeanType):
9558
9651
  'dataset_name': ConjureFieldDefinition('datasetName', OptionalTypeWrapper[str]),
9559
9652
  'dataset_description': ConjureFieldDefinition('datasetDescription', OptionalTypeWrapper[str]),
9560
9653
  'timestamp_metadata': ConjureFieldDefinition('timestampMetadata', OptionalTypeWrapper[ingest_api_TimestampMetadata]),
9561
- 'channel_config': ConjureFieldDefinition('channelConfig', OptionalTypeWrapper[ingest_api_ChannelConfig])
9654
+ 'channel_config': ConjureFieldDefinition('channelConfig', OptionalTypeWrapper[ingest_api_ChannelConfig]),
9655
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
9562
9656
  }
9563
9657
 
9564
- __slots__: List[str] = ['_source', '_properties', '_labels', '_dataset_name', '_dataset_description', '_timestamp_metadata', '_channel_config']
9658
+ __slots__: List[str] = ['_source', '_properties', '_labels', '_dataset_name', '_dataset_description', '_timestamp_metadata', '_channel_config', '_workspace']
9565
9659
 
9566
- def __init__(self, labels: List[str], properties: Dict[str, str], source: "ingest_api_IngestSource", channel_config: Optional["ingest_api_ChannelConfig"] = None, dataset_description: Optional[str] = None, dataset_name: Optional[str] = None, timestamp_metadata: Optional["ingest_api_TimestampMetadata"] = None) -> None:
9660
+ def __init__(self, labels: List[str], properties: Dict[str, str], source: "ingest_api_IngestSource", channel_config: Optional["ingest_api_ChannelConfig"] = None, dataset_description: Optional[str] = None, dataset_name: Optional[str] = None, timestamp_metadata: Optional["ingest_api_TimestampMetadata"] = None, workspace: Optional[str] = None) -> None:
9567
9661
  self._source = source
9568
9662
  self._properties = properties
9569
9663
  self._labels = labels
@@ -9571,6 +9665,7 @@ class ingest_api_TriggerIngest(ConjureBeanType):
9571
9665
  self._dataset_description = dataset_description
9572
9666
  self._timestamp_metadata = timestamp_metadata
9573
9667
  self._channel_config = channel_config
9668
+ self._workspace = workspace
9574
9669
 
9575
9670
  @builtins.property
9576
9671
  def source(self) -> "ingest_api_IngestSource":
@@ -9603,6 +9698,14 @@ class ingest_api_TriggerIngest(ConjureBeanType):
9603
9698
  """
9604
9699
  return self._channel_config
9605
9700
 
9701
+ @builtins.property
9702
+ def workspace(self) -> Optional[str]:
9703
+ """
9704
+ The workspace in which to create the dataset. If not provided, the dataset will be created in the default workspace for
9705
+ the user's organization, if the default workspace for the organization is configured.
9706
+ """
9707
+ return self._workspace
9708
+
9606
9709
 
9607
9710
  ingest_api_TriggerIngest.__name__ = "TriggerIngest"
9608
9711
  ingest_api_TriggerIngest.__qualname__ = "TriggerIngest"
@@ -14808,7 +14911,8 @@ dataset can still be directly accessed by its UUID/rid.
14808
14911
 
14809
14912
  return
14810
14913
 
14811
- def get_all_properties_and_labels(self, auth_header: str) -> "scout_catalog_AllPropertiesAndLabelsResponse":
14914
+ def get_all_properties_and_labels(self, auth_header: str, workspaces: List[str] = None) -> "scout_catalog_AllPropertiesAndLabelsResponse":
14915
+ workspaces = workspaces if workspaces is not None else []
14812
14916
 
14813
14917
  _headers: Dict[str, Any] = {
14814
14918
  'Accept': 'application/json',
@@ -14816,6 +14920,7 @@ dataset can still be directly accessed by its UUID/rid.
14816
14920
  }
14817
14921
 
14818
14922
  _params: Dict[str, Any] = {
14923
+ 'workspaces': workspaces,
14819
14924
  }
14820
14925
 
14821
14926
  _path_params: Dict[str, Any] = {
@@ -14969,12 +15074,13 @@ class scout_catalog_CreateDataset(ConjureBeanType):
14969
15074
  'properties': ConjureFieldDefinition('properties', Dict[api_PropertyName, api_PropertyValue]),
14970
15075
  'description': ConjureFieldDefinition('description', OptionalTypeWrapper[str]),
14971
15076
  'granularity': ConjureFieldDefinition('granularity', OptionalTypeWrapper[api_Granularity]),
14972
- 'is_v2_dataset': ConjureFieldDefinition('isV2Dataset', OptionalTypeWrapper[bool])
15077
+ 'is_v2_dataset': ConjureFieldDefinition('isV2Dataset', OptionalTypeWrapper[bool]),
15078
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
14973
15079
  }
14974
15080
 
14975
- __slots__: List[str] = ['_name', '_handle', '_metadata', '_origin_metadata', '_labels', '_properties', '_description', '_granularity', '_is_v2_dataset']
15081
+ __slots__: List[str] = ['_name', '_handle', '_metadata', '_origin_metadata', '_labels', '_properties', '_description', '_granularity', '_is_v2_dataset', '_workspace']
14976
15082
 
14977
- def __init__(self, labels: List[str], metadata: Dict[str, str], name: str, origin_metadata: "scout_catalog_DatasetOriginMetadata", properties: Dict[str, str], description: Optional[str] = None, granularity: Optional["api_Granularity"] = None, handle: Optional["scout_catalog_Handle"] = None, is_v2_dataset: Optional[bool] = None) -> None:
15083
+ def __init__(self, labels: List[str], metadata: Dict[str, str], name: str, origin_metadata: "scout_catalog_DatasetOriginMetadata", properties: Dict[str, str], description: Optional[str] = None, granularity: Optional["api_Granularity"] = None, handle: Optional["scout_catalog_Handle"] = None, is_v2_dataset: Optional[bool] = None, workspace: Optional[str] = None) -> None:
14978
15084
  self._name = name
14979
15085
  self._handle = handle
14980
15086
  self._metadata = metadata
@@ -14984,6 +15090,7 @@ class scout_catalog_CreateDataset(ConjureBeanType):
14984
15090
  self._description = description
14985
15091
  self._granularity = granularity
14986
15092
  self._is_v2_dataset = is_v2_dataset
15093
+ self._workspace = workspace
14987
15094
 
14988
15095
  @builtins.property
14989
15096
  def name(self) -> str:
@@ -15023,10 +15130,18 @@ class scout_catalog_CreateDataset(ConjureBeanType):
15023
15130
  @builtins.property
15024
15131
  def is_v2_dataset(self) -> Optional[bool]:
15025
15132
  """
15026
- If true, the dataset should be ingested to the v2 tables.
15133
+ If true, the dataset should be ingested to the v2 tables and is compatible with streaming.
15027
15134
  """
15028
15135
  return self._is_v2_dataset
15029
15136
 
15137
+ @builtins.property
15138
+ def workspace(self) -> Optional[str]:
15139
+ """
15140
+ The workspace in which to create the dataset. If not provided, the dataset will be created in the default workspace for
15141
+ the user's organization, if the default workspace for the organization is configured.
15142
+ """
15143
+ return self._workspace
15144
+
15030
15145
 
15031
15146
  scout_catalog_CreateDataset.__name__ = "CreateDataset"
15032
15147
  scout_catalog_CreateDataset.__qualname__ = "CreateDataset"
@@ -16216,6 +16331,7 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16216
16331
  _archive_status: Optional[bool] = None
16217
16332
  _and_: Optional[List["scout_catalog_SearchDatasetsQuery"]] = None
16218
16333
  _or_: Optional[List["scout_catalog_SearchDatasetsQuery"]] = None
16334
+ _workspace: Optional[str] = None
16219
16335
 
16220
16336
  @builtins.classmethod
16221
16337
  def _options(cls) -> Dict[str, ConjureFieldDefinition]:
@@ -16229,7 +16345,8 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16229
16345
  'ingested_after_inclusive': ConjureFieldDefinition('ingestedAfterInclusive', str),
16230
16346
  'archive_status': ConjureFieldDefinition('archiveStatus', bool),
16231
16347
  'and_': ConjureFieldDefinition('and', List[scout_catalog_SearchDatasetsQuery]),
16232
- 'or_': ConjureFieldDefinition('or', List[scout_catalog_SearchDatasetsQuery])
16348
+ 'or_': ConjureFieldDefinition('or', List[scout_catalog_SearchDatasetsQuery]),
16349
+ 'workspace': ConjureFieldDefinition('workspace', api_ids_WorkspaceId)
16233
16350
  }
16234
16351
 
16235
16352
  def __init__(
@@ -16244,10 +16361,11 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16244
16361
  archive_status: Optional[bool] = None,
16245
16362
  and_: Optional[List["scout_catalog_SearchDatasetsQuery"]] = None,
16246
16363
  or_: Optional[List["scout_catalog_SearchDatasetsQuery"]] = None,
16364
+ workspace: Optional[str] = None,
16247
16365
  type_of_union: Optional[str] = None
16248
16366
  ) -> None:
16249
16367
  if type_of_union is None:
16250
- if (search_text is not None) + (exact_match is not None) + (label is not None) + (properties is not None) + (ingest_status is not None) + (ingested_before_inclusive is not None) + (ingested_after_inclusive is not None) + (archive_status is not None) + (and_ is not None) + (or_ is not None) != 1:
16368
+ if (search_text is not None) + (exact_match is not None) + (label is not None) + (properties is not None) + (ingest_status is not None) + (ingested_before_inclusive is not None) + (ingested_after_inclusive is not None) + (archive_status is not None) + (and_ is not None) + (or_ is not None) + (workspace is not None) != 1:
16251
16369
  raise ValueError('a union must contain a single member')
16252
16370
 
16253
16371
  if search_text is not None:
@@ -16280,6 +16398,9 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16280
16398
  if or_ is not None:
16281
16399
  self._or_ = or_
16282
16400
  self._type = 'or'
16401
+ if workspace is not None:
16402
+ self._workspace = workspace
16403
+ self._type = 'workspace'
16283
16404
 
16284
16405
  elif type_of_union == 'searchText':
16285
16406
  if search_text is None:
@@ -16331,6 +16452,11 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16331
16452
  raise ValueError('a union value must not be None')
16332
16453
  self._or_ = or_
16333
16454
  self._type = 'or'
16455
+ elif type_of_union == 'workspace':
16456
+ if workspace is None:
16457
+ raise ValueError('a union value must not be None')
16458
+ self._workspace = workspace
16459
+ self._type = 'workspace'
16334
16460
 
16335
16461
  @builtins.property
16336
16462
  def search_text(self) -> Optional[str]:
@@ -16375,6 +16501,10 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16375
16501
  def or_(self) -> Optional[List["scout_catalog_SearchDatasetsQuery"]]:
16376
16502
  return self._or_
16377
16503
 
16504
+ @builtins.property
16505
+ def workspace(self) -> Optional[str]:
16506
+ return self._workspace
16507
+
16378
16508
  def accept(self, visitor) -> Any:
16379
16509
  if not isinstance(visitor, scout_catalog_SearchDatasetsQueryVisitor):
16380
16510
  raise ValueError('{} is not an instance of scout_catalog_SearchDatasetsQueryVisitor'.format(visitor.__class__.__name__))
@@ -16398,6 +16528,8 @@ class scout_catalog_SearchDatasetsQuery(ConjureUnionType):
16398
16528
  return visitor._and(self.and_)
16399
16529
  if self._type == 'or' and self.or_ is not None:
16400
16530
  return visitor._or(self.or_)
16531
+ if self._type == 'workspace' and self.workspace is not None:
16532
+ return visitor._workspace(self.workspace)
16401
16533
 
16402
16534
 
16403
16535
  scout_catalog_SearchDatasetsQuery.__name__ = "SearchDatasetsQuery"
@@ -16447,6 +16579,10 @@ class scout_catalog_SearchDatasetsQueryVisitor:
16447
16579
  def _or(self, or_: List["scout_catalog_SearchDatasetsQuery"]) -> Any:
16448
16580
  pass
16449
16581
 
16582
+ @abstractmethod
16583
+ def _workspace(self, workspace: str) -> Any:
16584
+ pass
16585
+
16450
16586
 
16451
16587
  scout_catalog_SearchDatasetsQueryVisitor.__name__ = "SearchDatasetsQueryVisitor"
16452
16588
  scout_catalog_SearchDatasetsQueryVisitor.__qualname__ = "SearchDatasetsQueryVisitor"
@@ -17890,6 +18026,27 @@ scout_chartdefinition_api_FrequencyPlot.__qualname__ = "FrequencyPlot"
17890
18026
  scout_chartdefinition_api_FrequencyPlot.__module__ = "nominal_api.scout_chartdefinition_api"
17891
18027
 
17892
18028
 
18029
+ class scout_chartdefinition_api_GeoAdditionalTileset(ConjureEnumType):
18030
+ """
18031
+ A standard tileset to overlay on the base map.
18032
+ """
18033
+
18034
+ VFR = 'VFR'
18035
+ '''VFR'''
18036
+ BATHYMETRY = 'BATHYMETRY'
18037
+ '''BATHYMETRY'''
18038
+ UNKNOWN = 'UNKNOWN'
18039
+ '''UNKNOWN'''
18040
+
18041
+ def __reduce_ex__(self, proto):
18042
+ return self.__class__, (self.name,)
18043
+
18044
+
18045
+ scout_chartdefinition_api_GeoAdditionalTileset.__name__ = "GeoAdditionalTileset"
18046
+ scout_chartdefinition_api_GeoAdditionalTileset.__qualname__ = "GeoAdditionalTileset"
18047
+ scout_chartdefinition_api_GeoAdditionalTileset.__module__ = "nominal_api.scout_chartdefinition_api"
18048
+
18049
+
17893
18050
  class scout_chartdefinition_api_GeoAdditionalVariable(ConjureBeanType):
17894
18051
 
17895
18052
  @builtins.classmethod
@@ -18297,15 +18454,17 @@ class scout_chartdefinition_api_GeoVizDefinitionV1(ConjureBeanType):
18297
18454
  return {
18298
18455
  'plots': ConjureFieldDefinition('plots', List[scout_chartdefinition_api_GeoPlotFromLatLong]),
18299
18456
  'title': ConjureFieldDefinition('title', OptionalTypeWrapper[str]),
18300
- 'custom_features': ConjureFieldDefinition('customFeatures', List[scout_chartdefinition_api_GeoCustomFeature])
18457
+ 'custom_features': ConjureFieldDefinition('customFeatures', List[scout_chartdefinition_api_GeoCustomFeature]),
18458
+ 'additional_tileset': ConjureFieldDefinition('additionalTileset', OptionalTypeWrapper[scout_chartdefinition_api_GeoAdditionalTileset])
18301
18459
  }
18302
18460
 
18303
- __slots__: List[str] = ['_plots', '_title', '_custom_features']
18461
+ __slots__: List[str] = ['_plots', '_title', '_custom_features', '_additional_tileset']
18304
18462
 
18305
- def __init__(self, custom_features: List["scout_chartdefinition_api_GeoCustomFeature"], plots: List["scout_chartdefinition_api_GeoPlotFromLatLong"], title: Optional[str] = None) -> None:
18463
+ def __init__(self, custom_features: List["scout_chartdefinition_api_GeoCustomFeature"], plots: List["scout_chartdefinition_api_GeoPlotFromLatLong"], additional_tileset: Optional["scout_chartdefinition_api_GeoAdditionalTileset"] = None, title: Optional[str] = None) -> None:
18306
18464
  self._plots = plots
18307
18465
  self._title = title
18308
18466
  self._custom_features = custom_features
18467
+ self._additional_tileset = additional_tileset
18309
18468
 
18310
18469
  @builtins.property
18311
18470
  def plots(self) -> List["scout_chartdefinition_api_GeoPlotFromLatLong"]:
@@ -18319,6 +18478,10 @@ class scout_chartdefinition_api_GeoVizDefinitionV1(ConjureBeanType):
18319
18478
  def custom_features(self) -> List["scout_chartdefinition_api_GeoCustomFeature"]:
18320
18479
  return self._custom_features
18321
18480
 
18481
+ @builtins.property
18482
+ def additional_tileset(self) -> Optional["scout_chartdefinition_api_GeoAdditionalTileset"]:
18483
+ return self._additional_tileset
18484
+
18322
18485
 
18323
18486
  scout_chartdefinition_api_GeoVizDefinitionV1.__name__ = "GeoVizDefinitionV1"
18324
18487
  scout_chartdefinition_api_GeoVizDefinitionV1.__qualname__ = "GeoVizDefinitionV1"
@@ -53259,10 +53422,11 @@ The Connection Service is responsible for creating, updating, and retrieving dat
53259
53422
  _decoder = ConjureDecoder()
53260
53423
  return _decoder.decode(_response.json(), List[scout_datasource_connection_api_Connection], self._return_none_for_unknown_union_types)
53261
53424
 
53262
- def list_connections(self, auth_header: str, include_archived: Optional[bool] = None) -> List["scout_datasource_connection_api_Connection"]:
53425
+ def list_connections(self, auth_header: str, workspaces: List[str] = None, include_archived: Optional[bool] = None) -> List["scout_datasource_connection_api_Connection"]:
53263
53426
  """
53264
53427
  Lists all connections.
53265
53428
  """
53429
+ workspaces = workspaces if workspaces is not None else []
53266
53430
 
53267
53431
  _headers: Dict[str, Any] = {
53268
53432
  'Accept': 'application/json',
@@ -53271,6 +53435,7 @@ The Connection Service is responsible for creating, updating, and retrieving dat
53271
53435
 
53272
53436
  _params: Dict[str, Any] = {
53273
53437
  'includeArchived': include_archived,
53438
+ 'workspaces': workspaces,
53274
53439
  }
53275
53440
 
53276
53441
  _path_params: Dict[str, Any] = {
@@ -53903,12 +54068,13 @@ class scout_datasource_connection_api_CreateConnection(ConjureBeanType):
53903
54068
  'available_tag_values': ConjureFieldDefinition('availableTagValues', Dict[api_TagName, List[api_TagValue]]),
53904
54069
  'scraping': ConjureFieldDefinition('scraping', OptionalTypeWrapper[scout_datasource_connection_api_ScrapingConfig]),
53905
54070
  'should_scrape': ConjureFieldDefinition('shouldScrape', bool),
53906
- 'limits': ConjureFieldDefinition('limits', OptionalTypeWrapper[scout_datasource_connection_api_LimitsConfig])
54071
+ 'limits': ConjureFieldDefinition('limits', OptionalTypeWrapper[scout_datasource_connection_api_LimitsConfig]),
54072
+ 'workspace': ConjureFieldDefinition('workspace', OptionalTypeWrapper[api_ids_WorkspaceId])
53907
54073
  }
53908
54074
 
53909
- __slots__: List[str] = ['_name', '_description', '_connection_details', '_metadata', '_required_tag_names', '_available_tag_values', '_scraping', '_should_scrape', '_limits']
54075
+ __slots__: List[str] = ['_name', '_description', '_connection_details', '_metadata', '_required_tag_names', '_available_tag_values', '_scraping', '_should_scrape', '_limits', '_workspace']
53910
54076
 
53911
- def __init__(self, available_tag_values: Dict[str, List[str]], connection_details: "scout_datasource_connection_api_ConnectionDetails", metadata: Dict[str, str], name: str, required_tag_names: List[str], should_scrape: bool, description: Optional[str] = None, limits: Optional["scout_datasource_connection_api_LimitsConfig"] = None, scraping: Optional["scout_datasource_connection_api_ScrapingConfig"] = None) -> None:
54077
+ def __init__(self, available_tag_values: Dict[str, List[str]], connection_details: "scout_datasource_connection_api_ConnectionDetails", metadata: Dict[str, str], name: str, required_tag_names: List[str], should_scrape: bool, description: Optional[str] = None, limits: Optional["scout_datasource_connection_api_LimitsConfig"] = None, scraping: Optional["scout_datasource_connection_api_ScrapingConfig"] = None, workspace: Optional[str] = None) -> None:
53912
54078
  self._name = name
53913
54079
  self._description = description
53914
54080
  self._connection_details = connection_details
@@ -53918,6 +54084,7 @@ class scout_datasource_connection_api_CreateConnection(ConjureBeanType):
53918
54084
  self._scraping = scraping
53919
54085
  self._should_scrape = should_scrape
53920
54086
  self._limits = limits
54087
+ self._workspace = workspace
53921
54088
 
53922
54089
  @builtins.property
53923
54090
  def name(self) -> str:
@@ -53961,6 +54128,14 @@ class scout_datasource_connection_api_CreateConnection(ConjureBeanType):
53961
54128
  def limits(self) -> Optional["scout_datasource_connection_api_LimitsConfig"]:
53962
54129
  return self._limits
53963
54130
 
54131
+ @builtins.property
54132
+ def workspace(self) -> Optional[str]:
54133
+ """
54134
+ The workspace in which to create the connection. If not provided, the connection will be created in the default workspace for
54135
+ the user's organization, if the default workspace for the organization is configured.
54136
+ """
54137
+ return self._workspace
54138
+
53964
54139
 
53965
54140
  scout_datasource_connection_api_CreateConnection.__name__ = "CreateConnection"
53966
54141
  scout_datasource_connection_api_CreateConnection.__qualname__ = "CreateConnection"
@@ -29,6 +29,7 @@ from .._impl import (
29
29
  scout_chartdefinition_api_FrequencyChartDefinitionV1 as FrequencyChartDefinitionV1,
30
30
  scout_chartdefinition_api_FrequencyChartDefinitionVisitor as FrequencyChartDefinitionVisitor,
31
31
  scout_chartdefinition_api_FrequencyPlot as FrequencyPlot,
32
+ scout_chartdefinition_api_GeoAdditionalTileset as GeoAdditionalTileset,
32
33
  scout_chartdefinition_api_GeoAdditionalVariable as GeoAdditionalVariable,
33
34
  scout_chartdefinition_api_GeoCustomFeature as GeoCustomFeature,
34
35
  scout_chartdefinition_api_GeoCustomFeatureVisitor as GeoCustomFeatureVisitor,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: nominal-api
3
- Version: 0.587.0
3
+ Version: 0.589.0
4
4
  Requires-Python: >=3.8
5
5
  Requires-Dist: requests
6
6
  Requires-Dist: conjure-python-client<3,>=2.8.0
@@ -1,5 +1,5 @@
1
- nominal_api/__init__.py,sha256=xfy0xArE8DRnqIlIndR_EuuA-XWpWS0mcOoeAqrNd_8,1938
2
- nominal_api/_impl.py,sha256=UfUbDGOUp7RT7PZ93HpBtPgEJ9f-GvIBavWiKaGvG1Y,2832333
1
+ nominal_api/__init__.py,sha256=45TV7DoAdFD5AV19HfDYyaIei3JncEArt0N93UU1ZzY,1938
2
+ nominal_api/_impl.py,sha256=LpL51F-FaXjVl3PP5vjl3KvZ7pJzdSCqJ79aiB8Hkwk,2840302
3
3
  nominal_api/py.typed,sha256=eoZ6GfifbqhMLNzjlqRDVil-yyBkOmVN9ujSgJWNBlY,15
4
4
  nominal_api/api/__init__.py,sha256=kJBEE_HLVpKYdLH12KyO-cSAVzwxYpBwaaDutCtT-LM,1236
5
5
  nominal_api/api_ids/__init__.py,sha256=CAtt44XgNZEEUDv-BbEbYtuxQ8y1wqSZU-STjBYdZv8,80
@@ -23,7 +23,7 @@ nominal_api/scout_assets/__init__.py,sha256=dT-b9HnbwVbI-fEalfskKSMGzhGRwZDZ2cdz
23
23
  nominal_api/scout_catalog/__init__.py,sha256=ZGm4w1YKd4B-3CNxijTpB-1B653nctA_R0u24oPS3To,3508
24
24
  nominal_api/scout_channelvariables_api/__init__.py,sha256=4OQV1O-M2MQE36yCGlyYftnqaXSddYTYTyGce_WC4JQ,466
25
25
  nominal_api/scout_chart_api/__init__.py,sha256=sw7WSYs6SarSW7x-3IBkSIrVea1cVFnQnpYiNKbCWnQ,184
26
- nominal_api/scout_chartdefinition_api/__init__.py,sha256=hUNlgtbvzeScfevv4tJKtb-CkHjvsp97SjdeoNuBI_0,8954
26
+ nominal_api/scout_chartdefinition_api/__init__.py,sha256=5KGggvdNP5_boatvY7B31c7ITvTb9xDMuIFYbMcF63E,9030
27
27
  nominal_api/scout_checklistexecution_api/__init__.py,sha256=lpBtfyRP-ReEwNzFTcynDgiMe8ahtEJb75pg7cKl-Q0,3266
28
28
  nominal_api/scout_checks_api/__init__.py,sha256=RJH7HsXjUhItC11V9C-hfv6lkIfiSXyxnB8slUpaT2g,5203
29
29
  nominal_api/scout_comparisonnotebook_api/__init__.py,sha256=8BL5jE9NDxqCj9DyvZWSPhq6zw2J7xp6aLsl3x9rpyw,4530
@@ -71,7 +71,7 @@ nominal_api/timeseries_logicalseries_api/__init__.py,sha256=7NlQhIzOKOcjwMNUI89f
71
71
  nominal_api/timeseries_seriescache/__init__.py,sha256=tFCkNuyrVMgtj-HIl1pOYPJHaL2VikI4C_x97bX_Lcs,109
72
72
  nominal_api/timeseries_seriescache_api/__init__.py,sha256=U9EhlqdF9qzD1O9al0vcvcdgS_C5lq-lN3Kmr0K3g84,1191
73
73
  nominal_api/upload_api/__init__.py,sha256=ZMudWMSqCrNozohbHaJKuxJnT9Edepe7nxxXMz_pT9k,87
74
- nominal_api-0.587.0.dist-info/METADATA,sha256=hHyMRymt_V2XOORDpafjLjMrTCGXy2hdikN2DV2rewE,199
75
- nominal_api-0.587.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
76
- nominal_api-0.587.0.dist-info/top_level.txt,sha256=gI1ZdNJbuHcJZeKtCzzBXsEtpU1GX6XJKs6ksi_gCRA,12
77
- nominal_api-0.587.0.dist-info/RECORD,,
74
+ nominal_api-0.589.0.dist-info/METADATA,sha256=JtFbvnLsuxEbrnuvuqbQW2yfG9yp-J5TGE7Bq7unt_I,199
75
+ nominal_api-0.589.0.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
76
+ nominal_api-0.589.0.dist-info/top_level.txt,sha256=gI1ZdNJbuHcJZeKtCzzBXsEtpU1GX6XJKs6ksi_gCRA,12
77
+ nominal_api-0.589.0.dist-info/RECORD,,