nominal_api-0.621.0-py3-none-any.whl → nominal_api-0.622.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- nominal_api/__init__.py +2 -1
- nominal_api/_impl.py +821 -13
- nominal_api/api/__init__.py +1 -0
- nominal_api/ingest_api/__init__.py +0 -1
- nominal_api/ingest_workflow_api/__init__.py +21 -0
- nominal_api/persistent_compute_api/__init__.py +3 -0
- {nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/METADATA +1 -1
- {nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/RECORD +10 -9
- {nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/WHEEL +0 -0
- {nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/top_level.txt +0 -0
nominal_api/__init__.py
CHANGED
@@ -15,6 +15,7 @@ __all__ = [
     'datasource_pagination_api',
     'event',
     'ingest_api',
+    'ingest_workflow_api',
     'persistent_compute_api',
     'scout',
     'scout_api',
@@ -75,5 +76,5 @@ __all__ = [
 
 __conjure_generator_version__ = "4.9.0"
 
-__version__ = "0.621.0"
+__version__ = "0.622.1"
 
nominal_api/_impl.py
CHANGED
@@ -7029,8 +7029,8 @@ class ingest_api_ContainerizedOpts(ConjureBeanType):
     @builtins.classmethod
     def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
         return {
-            'sources': ConjureFieldDefinition('sources', Dict[
-            'extractor': ConjureFieldDefinition('extractor',
+            'sources': ConjureFieldDefinition('sources', Dict[ingest_api_EnvironmentVariable, ingest_api_IngestSource]),
+            'extractor': ConjureFieldDefinition('extractor', ingest_api_ContainerizedExtractorRid),
             'timestamp_metadata': ConjureFieldDefinition('timestampMetadata', OptionalTypeWrapper[ingest_api_TimestampMetadata]),
             'target': ConjureFieldDefinition('target', ingest_api_DatasetIngestTarget)
         }
@@ -7078,17 +7078,19 @@ class ingest_api_CsvOpts(ConjureBeanType):
             'timestamp_metadata': ConjureFieldDefinition('timestampMetadata', ingest_api_TimestampMetadata),
             'channel_prefix': ConjureFieldDefinition('channelPrefix', ingest_api_ChannelPrefix),
             'tag_keys_from_columns': ConjureFieldDefinition('tagKeysFromColumns', OptionalTypeWrapper[List[api_TagName]]),
+            'tag_columns': ConjureFieldDefinition('tagColumns', OptionalTypeWrapper[Dict[api_TagName, api_ColumnName]]),
             'additional_file_tags': ConjureFieldDefinition('additionalFileTags', OptionalTypeWrapper[Dict[api_TagName, api_TagValue]])
         }
 
-    __slots__: List[str] = ['_source', '_target', '_timestamp_metadata', '_channel_prefix', '_tag_keys_from_columns', '_additional_file_tags']
+    __slots__: List[str] = ['_source', '_target', '_timestamp_metadata', '_channel_prefix', '_tag_keys_from_columns', '_tag_columns', '_additional_file_tags']
 
-    def __init__(self, source: "ingest_api_IngestSource", target: "ingest_api_DatasetIngestTarget", timestamp_metadata: "ingest_api_TimestampMetadata", additional_file_tags: Optional[Dict[str, str]] = None, channel_prefix: Optional[str] = None, tag_keys_from_columns: Optional[List[str]] = None) -> None:
+    def __init__(self, source: "ingest_api_IngestSource", target: "ingest_api_DatasetIngestTarget", timestamp_metadata: "ingest_api_TimestampMetadata", additional_file_tags: Optional[Dict[str, str]] = None, channel_prefix: Optional[str] = None, tag_columns: Optional[Dict[str, str]] = None, tag_keys_from_columns: Optional[List[str]] = None) -> None:
         self._source = source
         self._target = target
         self._timestamp_metadata = timestamp_metadata
         self._channel_prefix = channel_prefix
         self._tag_keys_from_columns = tag_keys_from_columns
+        self._tag_columns = tag_columns
         self._additional_file_tags = additional_file_tags
 
     @builtins.property
@@ -7111,6 +7113,13 @@ class ingest_api_CsvOpts(ConjureBeanType):
     def tag_keys_from_columns(self) -> Optional[List[str]]:
         return self._tag_keys_from_columns
 
+    @builtins.property
+    def tag_columns(self) -> Optional[Dict[str, str]]:
+        """
+        A map of tag names to column names to derive the tag values from.
+        """
+        return self._tag_columns
+
     @builtins.property
     def additional_file_tags(self) -> Optional[Dict[str, str]]:
         return self._additional_file_tags
@@ -9943,18 +9952,20 @@ and archives such as .tar, .tar.gz, and .zip (must set the isArchive flag).
             'timestamp_metadata': ConjureFieldDefinition('timestampMetadata', ingest_api_TimestampMetadata),
             'channel_prefix': ConjureFieldDefinition('channelPrefix', ingest_api_ChannelPrefix),
             'tag_keys_from_columns': ConjureFieldDefinition('tagKeysFromColumns', OptionalTypeWrapper[List[api_TagName]]),
+            'tag_columns': ConjureFieldDefinition('tagColumns', OptionalTypeWrapper[Dict[api_TagName, api_ColumnName]]),
             'additional_file_tags': ConjureFieldDefinition('additionalFileTags', OptionalTypeWrapper[Dict[api_TagName, api_TagValue]]),
             'is_archive': ConjureFieldDefinition('isArchive', OptionalTypeWrapper[bool])
         }
 
-    __slots__: List[str] = ['_source', '_target', '_timestamp_metadata', '_channel_prefix', '_tag_keys_from_columns', '_additional_file_tags', '_is_archive']
+    __slots__: List[str] = ['_source', '_target', '_timestamp_metadata', '_channel_prefix', '_tag_keys_from_columns', '_tag_columns', '_additional_file_tags', '_is_archive']
 
-    def __init__(self, source: "ingest_api_IngestSource", target: "ingest_api_DatasetIngestTarget", timestamp_metadata: "ingest_api_TimestampMetadata", additional_file_tags: Optional[Dict[str, str]] = None, channel_prefix: Optional[str] = None, is_archive: Optional[bool] = None, tag_keys_from_columns: Optional[List[str]] = None) -> None:
+    def __init__(self, source: "ingest_api_IngestSource", target: "ingest_api_DatasetIngestTarget", timestamp_metadata: "ingest_api_TimestampMetadata", additional_file_tags: Optional[Dict[str, str]] = None, channel_prefix: Optional[str] = None, is_archive: Optional[bool] = None, tag_columns: Optional[Dict[str, str]] = None, tag_keys_from_columns: Optional[List[str]] = None) -> None:
         self._source = source
         self._target = target
         self._timestamp_metadata = timestamp_metadata
         self._channel_prefix = channel_prefix
         self._tag_keys_from_columns = tag_keys_from_columns
+        self._tag_columns = tag_columns
         self._additional_file_tags = additional_file_tags
         self._is_archive = is_archive
 
@@ -9978,6 +9989,13 @@ and archives such as .tar, .tar.gz, and .zip (must set the isArchive flag).
     def tag_keys_from_columns(self) -> Optional[List[str]]:
         return self._tag_keys_from_columns
 
+    @builtins.property
+    def tag_columns(self) -> Optional[Dict[str, str]]:
+        """
+        A map of tag names to column names to derive the tag values from.
+        """
+        return self._tag_columns
+
     @builtins.property
     def additional_file_tags(self) -> Optional[Dict[str, str]]:
         return self._additional_file_tags
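
The `tagColumns` field added in the hunks above pairs each tag name with the CSV column that supplies that tag's value (per the generated docstring: "A map of tag names to column names to derive the tag values from"). A self-contained illustration of that mapping semantics, using made-up row, tag, and column names rather than the nominal-api classes themselves:

    # Illustration only (not nominal-api code): what a tagColumns mapping expresses.
    csv_row = {"vehicle_id": "truck-7", "site": "KSC", "temp_c": "21.4"}  # one parsed CSV row
    tag_columns = {"vehicle": "vehicle_id", "location": "site"}           # tag name -> column name

    # Each tag's value is read from the named column of the row.
    tags = {tag: csv_row[column] for tag, column in tag_columns.items()}
    assert tags == {"vehicle": "truck-7", "location": "KSC"}
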
@@ -11235,6 +11253,547 @@ ingest_api_VideoTimestampManifestVisitor.__qualname__ = "VideoTimestampManifestV
 ingest_api_VideoTimestampManifestVisitor.__module__ = "nominal_api.ingest_api"
 
 
+class ingest_workflow_api_Empty(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+        }
+
+    __slots__: List[str] = []
+
+
+
+ingest_workflow_api_Empty.__name__ = "Empty"
+ingest_workflow_api_Empty.__qualname__ = "Empty"
+ingest_workflow_api_Empty.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_EnsureExtractorJobCreatedRequest(ConjureBeanType):
+    """
+    Ensure that the extractor job exists in the control plane. Runs in the namespace given by workspaceRid.
+    """
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'workspace_rid': ConjureFieldDefinition('workspaceRid', api_rids_WorkspaceRid),
+            'ingest_job_uuid': ConjureFieldDefinition('ingestJobUuid', str),
+            'containerized_extractor': ConjureFieldDefinition('containerizedExtractor', ingest_api_ContainerizedExtractor),
+            'source_handles': ConjureFieldDefinition('sourceHandles', Dict[str, scout_catalog_S3Handle])
+        }
+
+    __slots__: List[str] = ['_workspace_rid', '_ingest_job_uuid', '_containerized_extractor', '_source_handles']
+
+    def __init__(self, containerized_extractor: "ingest_api_ContainerizedExtractor", ingest_job_uuid: str, source_handles: Dict[str, "scout_catalog_S3Handle"], workspace_rid: str) -> None:
+        self._workspace_rid = workspace_rid
+        self._ingest_job_uuid = ingest_job_uuid
+        self._containerized_extractor = containerized_extractor
+        self._source_handles = source_handles
+
+    @builtins.property
+    def workspace_rid(self) -> str:
+        return self._workspace_rid
+
+    @builtins.property
+    def ingest_job_uuid(self) -> str:
+        return self._ingest_job_uuid
+
+    @builtins.property
+    def containerized_extractor(self) -> "ingest_api_ContainerizedExtractor":
+        return self._containerized_extractor
+
+    @builtins.property
+    def source_handles(self) -> Dict[str, "scout_catalog_S3Handle"]:
+        return self._source_handles
+
+
+ingest_workflow_api_EnsureExtractorJobCreatedRequest.__name__ = "EnsureExtractorJobCreatedRequest"
+ingest_workflow_api_EnsureExtractorJobCreatedRequest.__qualname__ = "EnsureExtractorJobCreatedRequest"
+ingest_workflow_api_EnsureExtractorJobCreatedRequest.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_EnsureExtractorJobCreatedResponse(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+        }
+
+    __slots__: List[str] = []
+
+
+
+ingest_workflow_api_EnsureExtractorJobCreatedResponse.__name__ = "EnsureExtractorJobCreatedResponse"
+ingest_workflow_api_EnsureExtractorJobCreatedResponse.__qualname__ = "EnsureExtractorJobCreatedResponse"
+ingest_workflow_api_EnsureExtractorJobCreatedResponse.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_EnsureWorkspaceNamespaceCreatedRequest(ConjureBeanType):
+    """
+    For a given workspace rid, ensures that there is a corresponding K8s namespace created.
+    """
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'workspace_rid': ConjureFieldDefinition('workspaceRid', api_rids_WorkspaceRid)
+        }
+
+    __slots__: List[str] = ['_workspace_rid']
+
+    def __init__(self, workspace_rid: str) -> None:
+        self._workspace_rid = workspace_rid
+
+    @builtins.property
+    def workspace_rid(self) -> str:
+        return self._workspace_rid
+
+
+ingest_workflow_api_EnsureWorkspaceNamespaceCreatedRequest.__name__ = "EnsureWorkspaceNamespaceCreatedRequest"
+ingest_workflow_api_EnsureWorkspaceNamespaceCreatedRequest.__qualname__ = "EnsureWorkspaceNamespaceCreatedRequest"
+ingest_workflow_api_EnsureWorkspaceNamespaceCreatedRequest.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_EnsureWorkspaceServiceAccountCreatedRequest(ConjureBeanType):
+    """
+    For a given workspace rid, ensures that there is a service account in the proper K8s Namespace.
+    """
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'workspace_rid': ConjureFieldDefinition('workspaceRid', api_rids_WorkspaceRid)
+        }
+
+    __slots__: List[str] = ['_workspace_rid']
+
+    def __init__(self, workspace_rid: str) -> None:
+        self._workspace_rid = workspace_rid
+
+    @builtins.property
+    def workspace_rid(self) -> str:
+        return self._workspace_rid
+
+
+ingest_workflow_api_EnsureWorkspaceServiceAccountCreatedRequest.__name__ = "EnsureWorkspaceServiceAccountCreatedRequest"
+ingest_workflow_api_EnsureWorkspaceServiceAccountCreatedRequest.__qualname__ = "EnsureWorkspaceServiceAccountCreatedRequest"
+ingest_workflow_api_EnsureWorkspaceServiceAccountCreatedRequest.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_ExtractorJobState(ConjureEnumType):
+
+    PENDING = 'PENDING'
+    '''PENDING'''
+    RUNNING = 'RUNNING'
+    '''RUNNING'''
+    FAILED = 'FAILED'
+    '''FAILED'''
+    SUCCEEDED = 'SUCCEEDED'
+    '''SUCCEEDED'''
+    UNKNOWN = 'UNKNOWN'
+    '''UNKNOWN'''
+
+    def __reduce_ex__(self, proto):
+        return self.__class__, (self.name,)
+
+
+ingest_workflow_api_ExtractorJobState.__name__ = "ExtractorJobState"
+ingest_workflow_api_ExtractorJobState.__qualname__ = "ExtractorJobState"
+ingest_workflow_api_ExtractorJobState.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_GetExtractorJobStateRequest(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'workspace_rid': ConjureFieldDefinition('workspaceRid', api_rids_WorkspaceRid),
+            'ingest_job_uuid': ConjureFieldDefinition('ingestJobUuid', str)
+        }
+
+    __slots__: List[str] = ['_workspace_rid', '_ingest_job_uuid']
+
+    def __init__(self, ingest_job_uuid: str, workspace_rid: str) -> None:
+        self._workspace_rid = workspace_rid
+        self._ingest_job_uuid = ingest_job_uuid
+
+    @builtins.property
+    def workspace_rid(self) -> str:
+        return self._workspace_rid
+
+    @builtins.property
+    def ingest_job_uuid(self) -> str:
+        return self._ingest_job_uuid
+
+
+ingest_workflow_api_GetExtractorJobStateRequest.__name__ = "GetExtractorJobStateRequest"
+ingest_workflow_api_GetExtractorJobStateRequest.__qualname__ = "GetExtractorJobStateRequest"
+ingest_workflow_api_GetExtractorJobStateRequest.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_GetExtractorJobStateResponse(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'state': ConjureFieldDefinition('state', ingest_workflow_api_ExtractorJobState)
+        }
+
+    __slots__: List[str] = ['_state']
+
+    def __init__(self, state: "ingest_workflow_api_ExtractorJobState") -> None:
+        self._state = state
+
+    @builtins.property
+    def state(self) -> "ingest_workflow_api_ExtractorJobState":
+        return self._state
+
+
+ingest_workflow_api_GetExtractorJobStateResponse.__name__ = "GetExtractorJobStateResponse"
+ingest_workflow_api_GetExtractorJobStateResponse.__qualname__ = "GetExtractorJobStateResponse"
+ingest_workflow_api_GetExtractorJobStateResponse.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_IngestDataflashRequest(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'locator': ConjureFieldDefinition('locator', ingest_workflow_api_ObjectLocator)
+        }
+
+    __slots__: List[str] = ['_locator']
+
+    def __init__(self, locator: "ingest_workflow_api_ObjectLocator") -> None:
+        self._locator = locator
+
+    @builtins.property
+    def locator(self) -> "ingest_workflow_api_ObjectLocator":
+        return self._locator
+
+
+ingest_workflow_api_IngestDataflashRequest.__name__ = "IngestDataflashRequest"
+ingest_workflow_api_IngestDataflashRequest.__qualname__ = "IngestDataflashRequest"
+ingest_workflow_api_IngestDataflashRequest.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_IngestDataflashResponse(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'units': ConjureFieldDefinition('units', Dict[str, str]),
+            'parquet_object_locators': ConjureFieldDefinition('parquetObjectLocators', List[ingest_workflow_api_ObjectLocator]),
+            'timestamp_series_name': ConjureFieldDefinition('timestampSeriesName', str),
+            'time_unit': ConjureFieldDefinition('timeUnit', ingest_workflow_api_TimeUnitSeconds)
+        }
+
+    __slots__: List[str] = ['_units', '_parquet_object_locators', '_timestamp_series_name', '_time_unit']
+
+    def __init__(self, parquet_object_locators: List["ingest_workflow_api_ObjectLocator"], time_unit: "ingest_workflow_api_TimeUnitSeconds", timestamp_series_name: str, units: Dict[str, str]) -> None:
+        self._units = units
+        self._parquet_object_locators = parquet_object_locators
+        self._timestamp_series_name = timestamp_series_name
+        self._time_unit = time_unit
+
+    @builtins.property
+    def units(self) -> Dict[str, str]:
+        return self._units
+
+    @builtins.property
+    def parquet_object_locators(self) -> List["ingest_workflow_api_ObjectLocator"]:
+        """
+        Azure or S3-style blob locators of parquet files. Currently
+        only a single file is supported, the list type is used for future compatibility.
+        """
+        return self._parquet_object_locators
+
+    @builtins.property
+    def timestamp_series_name(self) -> str:
+        """
+        The name of the column in the generated parquet file that contains the timestamp.
+        """
+        return self._timestamp_series_name
+
+    @builtins.property
+    def time_unit(self) -> "ingest_workflow_api_TimeUnitSeconds":
+        """
+        The unit of time for the timestamp column. Can only be seconds.
+        """
+        return self._time_unit
+
+
+ingest_workflow_api_IngestDataflashResponse.__name__ = "IngestDataflashResponse"
+ingest_workflow_api_IngestDataflashResponse.__qualname__ = "IngestDataflashResponse"
+ingest_workflow_api_IngestDataflashResponse.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_IngestMcapProtobufRequest(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'locator': ConjureFieldDefinition('locator', ingest_workflow_api_ObjectLocator),
+            'channels': ConjureFieldDefinition('channels', ingest_workflow_api_McapProtoChannels)
+        }
+
+    __slots__: List[str] = ['_locator', '_channels']
+
+    def __init__(self, channels: "ingest_workflow_api_McapProtoChannels", locator: "ingest_workflow_api_ObjectLocator") -> None:
+        self._locator = locator
+        self._channels = channels
+
+    @builtins.property
+    def locator(self) -> "ingest_workflow_api_ObjectLocator":
+        return self._locator
+
+    @builtins.property
+    def channels(self) -> "ingest_workflow_api_McapProtoChannels":
+        return self._channels
+
+
+ingest_workflow_api_IngestMcapProtobufRequest.__name__ = "IngestMcapProtobufRequest"
+ingest_workflow_api_IngestMcapProtobufRequest.__qualname__ = "IngestMcapProtobufRequest"
+ingest_workflow_api_IngestMcapProtobufRequest.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_IngestMcapProtobufResponse(ConjureBeanType):
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'timestamp_column_name': ConjureFieldDefinition('timestampColumnName', str),
+            'parquet_object_locators': ConjureFieldDefinition('parquetObjectLocators', List[ingest_workflow_api_ObjectLocator])
+        }
+
+    __slots__: List[str] = ['_timestamp_column_name', '_parquet_object_locators']
+
+    def __init__(self, parquet_object_locators: List["ingest_workflow_api_ObjectLocator"], timestamp_column_name: str) -> None:
+        self._timestamp_column_name = timestamp_column_name
+        self._parquet_object_locators = parquet_object_locators
+
+    @builtins.property
+    def timestamp_column_name(self) -> str:
+        return self._timestamp_column_name
+
+    @builtins.property
+    def parquet_object_locators(self) -> List["ingest_workflow_api_ObjectLocator"]:
+        """
+        Azure or S3-style blob locators of parquet files. Currently
+        only a single file is supported, the list type is used for future compatibility.
+        """
+        return self._parquet_object_locators
+
+
+ingest_workflow_api_IngestMcapProtobufResponse.__name__ = "IngestMcapProtobufResponse"
+ingest_workflow_api_IngestMcapProtobufResponse.__qualname__ = "IngestMcapProtobufResponse"
+ingest_workflow_api_IngestMcapProtobufResponse.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_McapProtoChannels(ConjureUnionType):
+    _all: Optional["ingest_workflow_api_Empty"] = None
+    _include_topics: Optional[List[str]] = None
+    _exclude_topics: Optional[List[str]] = None
+
+    @builtins.classmethod
+    def _options(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'all': ConjureFieldDefinition('all', ingest_workflow_api_Empty),
+            'include_topics': ConjureFieldDefinition('includeTopics', List[ingest_workflow_api_McapTopicName]),
+            'exclude_topics': ConjureFieldDefinition('excludeTopics', List[ingest_workflow_api_McapTopicName])
+        }
+
+    def __init__(
+            self,
+            all: Optional["ingest_workflow_api_Empty"] = None,
+            include_topics: Optional[List[str]] = None,
+            exclude_topics: Optional[List[str]] = None,
+            type_of_union: Optional[str] = None
+            ) -> None:
+        if type_of_union is None:
+            if (all is not None) + (include_topics is not None) + (exclude_topics is not None) != 1:
+                raise ValueError('a union must contain a single member')
+
+            if all is not None:
+                self._all = all
+                self._type = 'all'
+            if include_topics is not None:
+                self._include_topics = include_topics
+                self._type = 'includeTopics'
+            if exclude_topics is not None:
+                self._exclude_topics = exclude_topics
+                self._type = 'excludeTopics'
+
+        elif type_of_union == 'all':
+            if all is None:
+                raise ValueError('a union value must not be None')
+            self._all = all
+            self._type = 'all'
+        elif type_of_union == 'includeTopics':
+            if include_topics is None:
+                raise ValueError('a union value must not be None')
+            self._include_topics = include_topics
+            self._type = 'includeTopics'
+        elif type_of_union == 'excludeTopics':
+            if exclude_topics is None:
+                raise ValueError('a union value must not be None')
+            self._exclude_topics = exclude_topics
+            self._type = 'excludeTopics'
+
+    @builtins.property
+    def all(self) -> Optional["ingest_workflow_api_Empty"]:
+        return self._all
+
+    @builtins.property
+    def include_topics(self) -> Optional[List[str]]:
+        return self._include_topics
+
+    @builtins.property
+    def exclude_topics(self) -> Optional[List[str]]:
+        return self._exclude_topics
+
+    def accept(self, visitor) -> Any:
+        if not isinstance(visitor, ingest_workflow_api_McapProtoChannelsVisitor):
+            raise ValueError('{} is not an instance of ingest_workflow_api_McapProtoChannelsVisitor'.format(visitor.__class__.__name__))
+        if self._type == 'all' and self.all is not None:
+            return visitor._all(self.all)
+        if self._type == 'includeTopics' and self.include_topics is not None:
+            return visitor._include_topics(self.include_topics)
+        if self._type == 'excludeTopics' and self.exclude_topics is not None:
+            return visitor._exclude_topics(self.exclude_topics)
+
+
+ingest_workflow_api_McapProtoChannels.__name__ = "McapProtoChannels"
+ingest_workflow_api_McapProtoChannels.__qualname__ = "McapProtoChannels"
+ingest_workflow_api_McapProtoChannels.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_McapProtoChannelsVisitor:
+
+    @abstractmethod
+    def _all(self, all: "ingest_workflow_api_Empty") -> Any:
+        pass
+
+    @abstractmethod
+    def _include_topics(self, include_topics: List[str]) -> Any:
+        pass
+
+    @abstractmethod
+    def _exclude_topics(self, exclude_topics: List[str]) -> Any:
+        pass
+
+
+ingest_workflow_api_McapProtoChannelsVisitor.__name__ = "McapProtoChannelsVisitor"
+ingest_workflow_api_McapProtoChannelsVisitor.__qualname__ = "McapProtoChannelsVisitor"
+ingest_workflow_api_McapProtoChannelsVisitor.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_ObjectLocator(ConjureBeanType):
+    """
+    Locator for files in an object store.
+    Clients are expected to have auth and origin/region configured independently.
+    """
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'bucket': ConjureFieldDefinition('bucket', str),
+            'object_name': ConjureFieldDefinition('objectName', str)
+        }
+
+    __slots__: List[str] = ['_bucket', '_object_name']
+
+    def __init__(self, bucket: str, object_name: str) -> None:
+        self._bucket = bucket
+        self._object_name = object_name
+
+    @builtins.property
+    def bucket(self) -> str:
+        return self._bucket
+
+    @builtins.property
+    def object_name(self) -> str:
+        return self._object_name
+
+
+ingest_workflow_api_ObjectLocator.__name__ = "ObjectLocator"
+ingest_workflow_api_ObjectLocator.__qualname__ = "ObjectLocator"
+ingest_workflow_api_ObjectLocator.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class ingest_workflow_api_TimeUnitSeconds(ConjureEnumType):
+
+    SECONDS = 'SECONDS'
+    '''SECONDS'''
+    UNKNOWN = 'UNKNOWN'
+    '''UNKNOWN'''
+
+    def __reduce_ex__(self, proto):
+        return self.__class__, (self.name,)
+
+
+ingest_workflow_api_TimeUnitSeconds.__name__ = "TimeUnitSeconds"
+ingest_workflow_api_TimeUnitSeconds.__qualname__ = "TimeUnitSeconds"
+ingest_workflow_api_TimeUnitSeconds.__module__ = "nominal_api.ingest_workflow_api"
+
+
+class persistent_compute_api_AppendResult(ConjureBeanType):
+    """
+    An append result won't cover the full `StreamingComputeNodeRequest#windowWidth` but rather just a smaller
+    window. The end of the window that the append covers is guaranteed to be later than previously sent results.
+    The start, however, can and most likely will overlap with previous results. That allows us to support
+    out-of-order points. The client will have to merge this new `AppendResult` with previous results.
+    Example of time windows that might be covered by results for a subscription:
+    We send a full result for window [0s, 120s] followed by an append result for [116s, 121s] and another
+    append result for [117s, 122s].
+    """
+
+    @builtins.classmethod
+    def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'subscription_id': ConjureFieldDefinition('subscriptionId', persistent_compute_api_SubscriptionId),
+            'start': ConjureFieldDefinition('start', api_Timestamp),
+            'end': ConjureFieldDefinition('end', api_Timestamp),
+            'result': ConjureFieldDefinition('result', persistent_compute_api_ComputeNodeAppendResponse)
+        }
+
+    __slots__: List[str] = ['_subscription_id', '_start', '_end', '_result']
+
+    def __init__(self, end: "api_Timestamp", result: "persistent_compute_api_ComputeNodeAppendResponse", start: "api_Timestamp", subscription_id: str) -> None:
+        self._subscription_id = subscription_id
+        self._start = start
+        self._end = end
+        self._result = result
+
+    @builtins.property
+    def subscription_id(self) -> str:
+        return self._subscription_id
+
+    @builtins.property
+    def start(self) -> "api_Timestamp":
+        """
+        The start of the time range that the append result covers
+        """
+        return self._start
+
+    @builtins.property
+    def end(self) -> "api_Timestamp":
+        """
+        The end of the time range that the append result covers
+        """
+        return self._end
+
+    @builtins.property
+    def result(self) -> "persistent_compute_api_ComputeNodeAppendResponse":
+        return self._result
+
+
+persistent_compute_api_AppendResult.__name__ = "AppendResult"
+persistent_compute_api_AppendResult.__qualname__ = "AppendResult"
+persistent_compute_api_AppendResult.__module__ = "nominal_api.persistent_compute_api"
+
+
 class persistent_compute_api_ClientMessage(ConjureUnionType):
     _subscribe: Optional[Dict[str, "persistent_compute_api_StreamingComputeNodeSubscription"]] = None
     _unsubscribe: Optional[List[str]] = None
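
The hunk above generates the new ingest_workflow_api value types into _impl.py. A hedged usage sketch of the MCAP-related ones, assuming the public re-exports added later in this diff (nominal_api/ingest_workflow_api/__init__.py); the bucket, object, and topic names are placeholders:

    from nominal_api.ingest_workflow_api import (
        IngestMcapProtobufRequest,
        McapProtoChannels,
        ObjectLocator,
    )

    # Placeholder bucket/object names; ObjectLocator carries no auth or region.
    locator = ObjectLocator(bucket="example-bucket", object_name="drops/flight-42.mcap")

    # McapProtoChannels is a Conjure union: exactly one of all / includeTopics / excludeTopics.
    channels = McapProtoChannels(include_topics=["/imu", "/gps"])
    # McapProtoChannels(all=Empty()) would select every topic (Empty is also re-exported).

    request = IngestMcapProtobufRequest(channels=channels, locator=locator)
    assert request.locator.bucket == "example-bucket"
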
@@ -11361,6 +11920,217 @@ persistent_compute_api_ClientMessageVisitor.__qualname__ = "ClientMessageVisitor
 persistent_compute_api_ClientMessageVisitor.__module__ = "nominal_api.persistent_compute_api"
 
 
+class persistent_compute_api_ComputeNodeAppendResponse(ConjureUnionType):
+    """These cover the subset of the union type in `ComputeWithUnitsResponse` for which we support append result.
+    Append results will have the same subtype as the previous full result for the same `SubscriptionId`.
+    If the subtype were to change (e.g., we start doing bucketing because data frequency increased) we will send
+    a new full result with that new type. The results will also have the same units as the previous `FullResult`.
+    Notably, we currently don't support appends for bucketed results as merging buckets is not trivial, especially
+    when accounting for out-of-order points."""
+    _range: Optional[List["scout_compute_api_Range"]] = None
+    _enum_point: Optional[Optional["scout_compute_api_EnumPoint"]] = None
+    _numeric_point: Optional[Optional["scout_compute_api_NumericPoint"]] = None
+    _log_point: Optional[Optional["scout_compute_api_LogPoint"]] = None
+    _range_value: Optional[Optional["scout_compute_api_Range"]] = None
+    _numeric: Optional["scout_compute_api_NumericPlot"] = None
+    _enum: Optional["scout_compute_api_EnumPlot"] = None
+
+    @builtins.classmethod
+    def _options(cls) -> Dict[str, ConjureFieldDefinition]:
+        return {
+            'range': ConjureFieldDefinition('range', List[scout_compute_api_Range]),
+            'enum_point': ConjureFieldDefinition('enumPoint', OptionalTypeWrapper[scout_compute_api_EnumPoint]),
+            'numeric_point': ConjureFieldDefinition('numericPoint', OptionalTypeWrapper[scout_compute_api_NumericPoint]),
+            'log_point': ConjureFieldDefinition('logPoint', OptionalTypeWrapper[scout_compute_api_LogPoint]),
+            'range_value': ConjureFieldDefinition('rangeValue', OptionalTypeWrapper[scout_compute_api_Range]),
+            'numeric': ConjureFieldDefinition('numeric', scout_compute_api_NumericPlot),
+            'enum': ConjureFieldDefinition('enum', scout_compute_api_EnumPlot)
+        }
+
+    def __init__(
+            self,
+            range: Optional[List["scout_compute_api_Range"]] = None,
+            enum_point: Optional[Optional["scout_compute_api_EnumPoint"]] = None,
+            numeric_point: Optional[Optional["scout_compute_api_NumericPoint"]] = None,
+            log_point: Optional[Optional["scout_compute_api_LogPoint"]] = None,
+            range_value: Optional[Optional["scout_compute_api_Range"]] = None,
+            numeric: Optional["scout_compute_api_NumericPlot"] = None,
+            enum: Optional["scout_compute_api_EnumPlot"] = None,
+            type_of_union: Optional[str] = None
+            ) -> None:
+        if type_of_union is None:
+            if (range is not None) + (enum_point is not None) + (numeric_point is not None) + (log_point is not None) + (range_value is not None) + (numeric is not None) + (enum is not None) != 1:
+                raise ValueError('a union must contain a single member')
+
+            if range is not None:
+                self._range = range
+                self._type = 'range'
+            if enum_point is not None:
+                self._enum_point = enum_point
+                self._type = 'enumPoint'
+            if numeric_point is not None:
+                self._numeric_point = numeric_point
+                self._type = 'numericPoint'
+            if log_point is not None:
+                self._log_point = log_point
+                self._type = 'logPoint'
+            if range_value is not None:
+                self._range_value = range_value
+                self._type = 'rangeValue'
+            if numeric is not None:
+                self._numeric = numeric
+                self._type = 'numeric'
+            if enum is not None:
+                self._enum = enum
+                self._type = 'enum'
+
+        elif type_of_union == 'range':
+            if range is None:
+                raise ValueError('a union value must not be None')
+            self._range = range
+            self._type = 'range'
+        elif type_of_union == 'enumPoint':
+            if enum_point is None:
+                raise ValueError('a union value must not be None')
+            self._enum_point = enum_point
+            self._type = 'enumPoint'
+        elif type_of_union == 'numericPoint':
+            if numeric_point is None:
+                raise ValueError('a union value must not be None')
+            self._numeric_point = numeric_point
+            self._type = 'numericPoint'
+        elif type_of_union == 'logPoint':
+            if log_point is None:
+                raise ValueError('a union value must not be None')
+            self._log_point = log_point
+            self._type = 'logPoint'
+        elif type_of_union == 'rangeValue':
+            if range_value is None:
+                raise ValueError('a union value must not be None')
+            self._range_value = range_value
+            self._type = 'rangeValue'
+        elif type_of_union == 'numeric':
+            if numeric is None:
+                raise ValueError('a union value must not be None')
+            self._numeric = numeric
+            self._type = 'numeric'
+        elif type_of_union == 'enum':
+            if enum is None:
+                raise ValueError('a union value must not be None')
+            self._enum = enum
+            self._type = 'enum'
+
+    @builtins.property
+    def range(self) -> Optional[List["scout_compute_api_Range"]]:
+        """
+        Merging can be done via dropping any old ranges (possibly truncating the last one) and adding these new
+        ranges, possibly merging them if they overlap or are adjacent.
+        """
+        return self._range
+
+    @builtins.property
+    def enum_point(self) -> Optional[Optional["scout_compute_api_EnumPoint"]]:
+        """
+        Merging can be done by keeping track of the applicable point present within the current window
+        """
+        return self._enum_point
+
+    @builtins.property
+    def numeric_point(self) -> Optional[Optional["scout_compute_api_NumericPoint"]]:
+        """
+        Merging can be done by keeping track of the applicable point present within the current window
+        """
+        return self._numeric_point
+
+    @builtins.property
+    def log_point(self) -> Optional[Optional["scout_compute_api_LogPoint"]]:
+        """
+        Merging can be done by keeping track of the applicable point present within the current window
+        """
+        return self._log_point
+
+    @builtins.property
+    def range_value(self) -> Optional[Optional["scout_compute_api_Range"]]:
+        """
+        Merging can be done by keeping track of the applicable range present within the current window, possibly
+        merging ranges if they are overlap or are adjacent
+        """
+        return self._range_value
+
+    @builtins.property
+    def numeric(self) -> Optional["scout_compute_api_NumericPlot"]:
+        """
+        Merging be be done by dropping any old points and adding the new ones, accounting for overlaps
+        """
+        return self._numeric
+
+    @builtins.property
+    def enum(self) -> Optional["scout_compute_api_EnumPlot"]:
+        """
+        Merging be be done by dropping any old points and adding the new ones, accounting for overlaps
+        """
+        return self._enum
+
+    def accept(self, visitor) -> Any:
+        if not isinstance(visitor, persistent_compute_api_ComputeNodeAppendResponseVisitor):
+            raise ValueError('{} is not an instance of persistent_compute_api_ComputeNodeAppendResponseVisitor'.format(visitor.__class__.__name__))
+        if self._type == 'range' and self.range is not None:
+            return visitor._range(self.range)
+        if self._type == 'enumPoint' and self.enum_point is not None:
+            return visitor._enum_point(self.enum_point)
+        if self._type == 'numericPoint' and self.numeric_point is not None:
+            return visitor._numeric_point(self.numeric_point)
+        if self._type == 'logPoint' and self.log_point is not None:
+            return visitor._log_point(self.log_point)
+        if self._type == 'rangeValue' and self.range_value is not None:
+            return visitor._range_value(self.range_value)
+        if self._type == 'numeric' and self.numeric is not None:
+            return visitor._numeric(self.numeric)
+        if self._type == 'enum' and self.enum is not None:
+            return visitor._enum(self.enum)
+
+
+persistent_compute_api_ComputeNodeAppendResponse.__name__ = "ComputeNodeAppendResponse"
+persistent_compute_api_ComputeNodeAppendResponse.__qualname__ = "ComputeNodeAppendResponse"
+persistent_compute_api_ComputeNodeAppendResponse.__module__ = "nominal_api.persistent_compute_api"
+
+
+class persistent_compute_api_ComputeNodeAppendResponseVisitor:
+
+    @abstractmethod
+    def _range(self, range: List["scout_compute_api_Range"]) -> Any:
+        pass
+
+    @abstractmethod
+    def _enum_point(self, enum_point: Optional["scout_compute_api_EnumPoint"]) -> Any:
+        pass
+
+    @abstractmethod
+    def _numeric_point(self, numeric_point: Optional["scout_compute_api_NumericPoint"]) -> Any:
+        pass
+
+    @abstractmethod
+    def _log_point(self, log_point: Optional["scout_compute_api_LogPoint"]) -> Any:
+        pass
+
+    @abstractmethod
+    def _range_value(self, range_value: Optional["scout_compute_api_Range"]) -> Any:
+        pass
+
+    @abstractmethod
+    def _numeric(self, numeric: "scout_compute_api_NumericPlot") -> Any:
+        pass
+
+    @abstractmethod
+    def _enum(self, enum: "scout_compute_api_EnumPlot") -> Any:
+        pass
+
+
+persistent_compute_api_ComputeNodeAppendResponseVisitor.__name__ = "ComputeNodeAppendResponseVisitor"
+persistent_compute_api_ComputeNodeAppendResponseVisitor.__qualname__ = "ComputeNodeAppendResponseVisitor"
+persistent_compute_api_ComputeNodeAppendResponseVisitor.__module__ = "nominal_api.persistent_compute_api"
+
+
 class persistent_compute_api_FullResult(ConjureBeanType):
 
     @builtins.classmethod
@@ -11677,13 +12447,15 @@ class persistent_compute_api_SubscriptionOptions(ConjureBeanType):
     @builtins.classmethod
     def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
         return {
-            'min_delay': ConjureFieldDefinition('minDelay', persistent_compute_api_Milliseconds)
+            'min_delay': ConjureFieldDefinition('minDelay', persistent_compute_api_Milliseconds),
+            'allow_appends': ConjureFieldDefinition('allowAppends', OptionalTypeWrapper[bool])
         }
 
-    __slots__: List[str] = ['_min_delay']
+    __slots__: List[str] = ['_min_delay', '_allow_appends']
 
-    def __init__(self, min_delay: int) -> None:
+    def __init__(self, min_delay: int, allow_appends: Optional[bool] = None) -> None:
         self._min_delay = min_delay
+        self._allow_appends = allow_appends
 
     @builtins.property
     def min_delay(self) -> int:
@@ -11692,6 +12464,19 @@ class persistent_compute_api_SubscriptionOptions(ConjureBeanType):
         """
         return self._min_delay
 
+    @builtins.property
+    def allow_appends(self) -> Optional[bool]:
+        """
+        Can be set to `false` by the client to indicate that it doesn't support appends for this subscription
+        and always wants to receive full results. Defaults to `false` if not set.
+        The expectation is that clients should implement support for appends for any of the results covered in
+        `ComputeNodeAppendResponse` and set this to `true` as quickly as possible. However, in order to support
+        adding new sub-types to `ComputeNodeAppendResponse` without breaking clients that haven't upgraded yet
+        and haven't yet added support for them, we default this to `false` and make clients opt-in as soon as they
+        implement support.
+        """
+        return self._allow_appends
+
 
 persistent_compute_api_SubscriptionOptions.__name__ = "SubscriptionOptions"
 persistent_compute_api_SubscriptionOptions.__qualname__ = "SubscriptionOptions"
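
The two hunks above add an optional `allowAppends` flag to SubscriptionOptions. A minimal sketch of opting in, assuming SubscriptionOptions is re-exported by nominal_api.persistent_compute_api like the other persistent_compute_api names in this diff (min_delay is in milliseconds):

    from nominal_api.persistent_compute_api import SubscriptionOptions

    # allow_appends defaults to false when unset, so clients opt in only once they
    # can merge ComputeNodeAppendResponse payloads into prior full results.
    options = SubscriptionOptions(min_delay=500, allow_appends=True)
    assert options.allow_appends is True
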
@@ -11700,41 +12485,58 @@ persistent_compute_api_SubscriptionOptions.__module__ = "nominal_api.persistent_
 
 class persistent_compute_api_SubscriptionUpdate(ConjureUnionType):
     _full: Optional["persistent_compute_api_FullResult"] = None
+    _append: Optional["persistent_compute_api_AppendResult"] = None
 
     @builtins.classmethod
     def _options(cls) -> Dict[str, ConjureFieldDefinition]:
         return {
-            'full': ConjureFieldDefinition('full', persistent_compute_api_FullResult)
+            'full': ConjureFieldDefinition('full', persistent_compute_api_FullResult),
+            'append': ConjureFieldDefinition('append', persistent_compute_api_AppendResult)
         }
 
     def __init__(
             self,
             full: Optional["persistent_compute_api_FullResult"] = None,
+            append: Optional["persistent_compute_api_AppendResult"] = None,
             type_of_union: Optional[str] = None
             ) -> None:
         if type_of_union is None:
-            if (full is not None) != 1:
+            if (full is not None) + (append is not None) != 1:
                 raise ValueError('a union must contain a single member')
 
             if full is not None:
                 self._full = full
                 self._type = 'full'
+            if append is not None:
+                self._append = append
+                self._type = 'append'
 
         elif type_of_union == 'full':
             if full is None:
                 raise ValueError('a union value must not be None')
             self._full = full
             self._type = 'full'
+        elif type_of_union == 'append':
+            if append is None:
+                raise ValueError('a union value must not be None')
+            self._append = append
+            self._type = 'append'
 
     @builtins.property
     def full(self) -> Optional["persistent_compute_api_FullResult"]:
         return self._full
 
+    @builtins.property
+    def append(self) -> Optional["persistent_compute_api_AppendResult"]:
+        return self._append
+
     def accept(self, visitor) -> Any:
         if not isinstance(visitor, persistent_compute_api_SubscriptionUpdateVisitor):
             raise ValueError('{} is not an instance of persistent_compute_api_SubscriptionUpdateVisitor'.format(visitor.__class__.__name__))
         if self._type == 'full' and self.full is not None:
             return visitor._full(self.full)
+        if self._type == 'append' and self.append is not None:
+            return visitor._append(self.append)
 
 
 persistent_compute_api_SubscriptionUpdate.__name__ = "SubscriptionUpdate"
@@ -11748,6 +12550,10 @@ class persistent_compute_api_SubscriptionUpdateVisitor:
     def _full(self, full: "persistent_compute_api_FullResult") -> Any:
         pass
 
+    @abstractmethod
+    def _append(self, append: "persistent_compute_api_AppendResult") -> Any:
+        pass
+
 
 persistent_compute_api_SubscriptionUpdateVisitor.__name__ = "SubscriptionUpdateVisitor"
 persistent_compute_api_SubscriptionUpdateVisitor.__qualname__ = "SubscriptionUpdateVisitor"
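
With the `append` variant added to SubscriptionUpdate above, clients dispatch on the union through its visitor. A hedged sketch of a visitor subclass, assuming SubscriptionUpdateVisitor is re-exported by nominal_api.persistent_compute_api alongside FullResult and AppendResult; the merge step is intentionally a placeholder, since the real rules are the ones described in the AppendResult and ComputeNodeAppendResponse docstrings:

    from typing import Any, List

    from nominal_api.persistent_compute_api import (
        AppendResult,
        FullResult,
        SubscriptionUpdateVisitor,
    )


    class CollectingVisitor(SubscriptionUpdateVisitor):
        """Collects updates; a real client would merge each append into its prior full result."""

        def __init__(self) -> None:
            self.full_results: List[FullResult] = []
            self.appends: List[AppendResult] = []

        def _full(self, full: "FullResult") -> Any:
            # A full result covers the whole subscription window and replaces earlier state.
            self.full_results.append(full)

        def _append(self, append: "AppendResult") -> Any:
            # Only covers [append.start, append.end]; its end is later than previous results,
            # but its start may overlap them, so it must be merged rather than substituted.
            self.appends.append(append)


    # update.accept(CollectingVisitor()) dispatches to _full or _append depending on the variant.
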
@@ -76237,6 +77043,8 @@ upload_api_UploadService.__qualname__ = "UploadService"
 upload_api_UploadService.__module__ = "nominal_api.upload_api"
 
 
+api_ColumnName = str
+
 api_Label = str
 
 timeseries_archetype_api_SeriesArchetypeName = str
@@ -76267,6 +77075,8 @@ api_rids_VideoFileRid = str
 
 api_Unit = str
 
+ingest_workflow_api_McapTopicName = str
+
 scout_versioning_api_BranchRid = str
 
 datasource_DatasetFileId = str
@@ -76433,8 +77243,6 @@ scout_datareview_api_AutomaticCheckEvaluationRid = str
 
 scout_compute_api_ErrorType = str
 
-ingest_api_ExtractorInputName = str
-
 comments_api_ResourceRid = str
 
 scout_rids_api_FunctionLineageRid = str
nominal_api/ingest_api/__init__.py
CHANGED
@@ -29,7 +29,6 @@ from .._impl import (
     ingest_api_EpochTimestamp as EpochTimestamp,
     ingest_api_ExistingDatasetIngestDestination as ExistingDatasetIngestDestination,
     ingest_api_ExistingVideoIngestDestination as ExistingVideoIngestDestination,
-    ingest_api_ExtractorInputName as ExtractorInputName,
     ingest_api_FileExtractionInput as FileExtractionInput,
     ingest_api_GcsIngestSource as GcsIngestSource,
     ingest_api_GetContainerizedExtractorsRequest as GetContainerizedExtractorsRequest,
nominal_api/ingest_workflow_api/__init__.py
ADDED
@@ -0,0 +1,21 @@
+# coding=utf-8
+from .._impl import (
+    ingest_workflow_api_Empty as Empty,
+    ingest_workflow_api_EnsureExtractorJobCreatedRequest as EnsureExtractorJobCreatedRequest,
+    ingest_workflow_api_EnsureExtractorJobCreatedResponse as EnsureExtractorJobCreatedResponse,
+    ingest_workflow_api_EnsureWorkspaceNamespaceCreatedRequest as EnsureWorkspaceNamespaceCreatedRequest,
+    ingest_workflow_api_EnsureWorkspaceServiceAccountCreatedRequest as EnsureWorkspaceServiceAccountCreatedRequest,
+    ingest_workflow_api_ExtractorJobState as ExtractorJobState,
+    ingest_workflow_api_GetExtractorJobStateRequest as GetExtractorJobStateRequest,
+    ingest_workflow_api_GetExtractorJobStateResponse as GetExtractorJobStateResponse,
+    ingest_workflow_api_IngestDataflashRequest as IngestDataflashRequest,
+    ingest_workflow_api_IngestDataflashResponse as IngestDataflashResponse,
+    ingest_workflow_api_IngestMcapProtobufRequest as IngestMcapProtobufRequest,
+    ingest_workflow_api_IngestMcapProtobufResponse as IngestMcapProtobufResponse,
+    ingest_workflow_api_McapProtoChannels as McapProtoChannels,
+    ingest_workflow_api_McapProtoChannelsVisitor as McapProtoChannelsVisitor,
+    ingest_workflow_api_McapTopicName as McapTopicName,
+    ingest_workflow_api_ObjectLocator as ObjectLocator,
+    ingest_workflow_api_TimeUnitSeconds as TimeUnitSeconds,
+)
+
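
The new module's re-exports make the workflow types importable without touching _impl. A hedged sketch of polling an extractor job's state, with placeholder UUID and workspace RID strings:

    from nominal_api.ingest_workflow_api import ExtractorJobState, GetExtractorJobStateRequest

    request = GetExtractorJobStateRequest(
        ingest_job_uuid="00000000-0000-0000-0000-000000000000",  # placeholder job UUID
        workspace_rid="ri.workspace.placeholder",                # placeholder workspace RID
    )

    # ExtractorJobState exposes PENDING / RUNNING / FAILED / SUCCEEDED / UNKNOWN members,
    # so a poller can stop once a terminal state is reported for the job.
    terminal_states = {ExtractorJobState.FAILED, ExtractorJobState.SUCCEEDED}
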
nominal_api/persistent_compute_api/__init__.py
CHANGED
@@ -1,7 +1,10 @@
 # coding=utf-8
 from .._impl import (
+    persistent_compute_api_AppendResult as AppendResult,
     persistent_compute_api_ClientMessage as ClientMessage,
     persistent_compute_api_ClientMessageVisitor as ClientMessageVisitor,
+    persistent_compute_api_ComputeNodeAppendResponse as ComputeNodeAppendResponse,
+    persistent_compute_api_ComputeNodeAppendResponseVisitor as ComputeNodeAppendResponseVisitor,
     persistent_compute_api_FullResult as FullResult,
     persistent_compute_api_Milliseconds as Milliseconds,
     persistent_compute_api_Ping as Ping,
{nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/RECORD
CHANGED
@@ -1,7 +1,7 @@
-nominal_api/__init__.py,sha256=
-nominal_api/_impl.py,sha256=
+nominal_api/__init__.py,sha256=uKJS2sUgc4qxPSumiLLjXotwfMdeT2_dUyyTXEUsJPU,1995
+nominal_api/_impl.py,sha256=9FkFuSqGbDLVdd22akS3dFrbi6cubdwoNLO_ogpZeDo,2992871
 nominal_api/py.typed,sha256=eoZ6GfifbqhMLNzjlqRDVil-yyBkOmVN9ujSgJWNBlY,15
-nominal_api/api/__init__.py,sha256=
+nominal_api/api/__init__.py,sha256=f6fvZACzXUPSIN-H855AjAYLwmisy_VrHy4Ie2HXiNE,1270
 nominal_api/api_ids/__init__.py,sha256=CAtt44XgNZEEUDv-BbEbYtuxQ8y1wqSZU-STjBYdZv8,80
 nominal_api/api_rids/__init__.py,sha256=qerno2fgGWLfokoSdCarKSXg2jDjTJpz45Wv8PnqI6Q,500
 nominal_api/attachments_api/__init__.py,sha256=eQBE8xVTFDaTItCZv-WJSZqSStpgdai192n23pmVeeQ,634
@@ -15,8 +15,9 @@ nominal_api/datasource_logset/__init__.py,sha256=H3fNxqyYC490MwvdWbt5BwhgWQUev7u
 nominal_api/datasource_logset_api/__init__.py,sha256=JyjO1tQmG-HZ7kYMi8lSfeaaYddBZdCMIyqc0IUJfWo,1006
 nominal_api/datasource_pagination_api/__init__.py,sha256=3GO8TAUavOe6dUEitOhje74aSZHjTKVI5N1MNuct1lI,212
 nominal_api/event/__init__.py,sha256=YUhvDFXtyAn08WNd7Xwnybz3PtflvtTcIOaunRS5-1I,836
-nominal_api/ingest_api/__init__.py,sha256=
-nominal_api/
+nominal_api/ingest_api/__init__.py,sha256=jpxmJO0dzcCEIv5iM7xvziQEs1x92XLTm-LmbHxtii4,6953
+nominal_api/ingest_workflow_api/__init__.py,sha256=1oJO78YCmXFkfN5LY7x0gOJwtijrOcXD8Sl0GJCOyrk,1352
+nominal_api/persistent_compute_api/__init__.py,sha256=ovp4TsiVPGgCFjHVo4L9s0xSr0t7lV6GRSn6-8m3wBE,1268
 nominal_api/scout/__init__.py,sha256=ip3XK_9jJKAoFiCifUVMTpDMiUE4mWIdGzMDu7LASus,324
 nominal_api/scout_api/__init__.py,sha256=biO4DEygbGcLwM6Dg2VuvMra3A5EW6NBjukbIemXoG8,178
 nominal_api/scout_asset_api/__init__.py,sha256=Ph-KlW-ki0JRejYQZDvbZ2jRzNAttVBux27lcEj7--Q,1910
@@ -72,7 +73,7 @@ nominal_api/timeseries_logicalseries_api/__init__.py,sha256=Q9iZHurmyDsJIFbUg-Eb
 nominal_api/timeseries_seriescache/__init__.py,sha256=tFCkNuyrVMgtj-HIl1pOYPJHaL2VikI4C_x97bX_Lcs,109
 nominal_api/timeseries_seriescache_api/__init__.py,sha256=U9EhlqdF9qzD1O9al0vcvcdgS_C5lq-lN3Kmr0K3g84,1191
 nominal_api/upload_api/__init__.py,sha256=ZMudWMSqCrNozohbHaJKuxJnT9Edepe7nxxXMz_pT9k,87
-nominal_api-0.
-nominal_api-0.
-nominal_api-0.
-nominal_api-0.
+nominal_api-0.622.1.dist-info/METADATA,sha256=RvEMFrZtb6YoXxuO9u49y3wJ2dHYizk6MkkIXzO2Zws,199
+nominal_api-0.622.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
+nominal_api-0.622.1.dist-info/top_level.txt,sha256=gI1ZdNJbuHcJZeKtCzzBXsEtpU1GX6XJKs6ksi_gCRA,12
+nominal_api-0.622.1.dist-info/RECORD,,
{nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/WHEEL
File without changes
{nominal_api-0.621.0.dist-info → nominal_api-0.622.1.dist-info}/top_level.txt
File without changes