nominal-api 0.502.0__tar.gz → 0.504.0__tar.gz

Files changed (74)
  1. {nominal_api-0.502.0 → nominal_api-0.504.0}/PKG-INFO +1 -1
  2. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/__init__.py +1 -1
  3. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/_impl.py +358 -21
  4. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_datasource_connection_api/__init__.py +4 -0
  5. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/timeseries_archetype_api/__init__.py +1 -0
  6. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/timeseries_logicalseries_api/__init__.py +3 -0
  7. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api.egg-info/PKG-INFO +1 -1
  8. {nominal_api-0.502.0 → nominal_api-0.504.0}/setup.py +1 -1
  9. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/api/__init__.py +0 -0
  10. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/api_rids/__init__.py +0 -0
  11. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/attachments_api/__init__.py +0 -0
  12. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/authentication/__init__.py +0 -0
  13. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/authentication_api/__init__.py +0 -0
  14. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/authorization/__init__.py +0 -0
  15. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/comments_api/__init__.py +0 -0
  16. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/datasource/__init__.py +0 -0
  17. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/datasource_api/__init__.py +0 -0
  18. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/datasource_logset/__init__.py +0 -0
  19. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/datasource_logset_api/__init__.py +0 -0
  20. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/datasource_pagination_api/__init__.py +0 -0
  21. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/event/__init__.py +0 -0
  22. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/ingest_api/__init__.py +0 -0
  23. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/py.typed +0 -0
  24. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout/__init__.py +0 -0
  25. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_api/__init__.py +0 -0
  26. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_asset_api/__init__.py +0 -0
  27. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_assets/__init__.py +0 -0
  28. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_backend/__init__.py +0 -0
  29. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_catalog/__init__.py +0 -0
  30. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_channelvariables_api/__init__.py +0 -0
  31. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_chart_api/__init__.py +0 -0
  32. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_chartdefinition_api/__init__.py +0 -0
  33. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_checklistexecution_api/__init__.py +0 -0
  34. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_checks_api/__init__.py +0 -0
  35. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_comparisonnotebook_api/__init__.py +0 -0
  36. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_comparisonrun_api/__init__.py +0 -0
  37. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_compute_api/__init__.py +0 -0
  38. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_compute_api_deprecated/__init__.py +0 -0
  39. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_compute_representation_api/__init__.py +0 -0
  40. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_compute_resolved_api/__init__.py +0 -0
  41. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_dataexport_api/__init__.py +0 -0
  42. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_datareview_api/__init__.py +0 -0
  43. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_datasource/__init__.py +0 -0
  44. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_datasource_connection/__init__.py +0 -0
  45. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_datasource_connection_api_influx/__init__.py +0 -0
  46. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_integrations_api/__init__.py +0 -0
  47. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_internal_search_api/__init__.py +0 -0
  48. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_jobs_api/__init__.py +0 -0
  49. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_layout_api/__init__.py +0 -0
  50. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_metadata/__init__.py +0 -0
  51. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_notebook_api/__init__.py +0 -0
  52. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_plotting/__init__.py +0 -0
  53. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_rids_api/__init__.py +0 -0
  54. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_run_api/__init__.py +0 -0
  55. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_template_api/__init__.py +0 -0
  56. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_units_api/__init__.py +0 -0
  57. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_versioning_api/__init__.py +0 -0
  58. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_video/__init__.py +0 -0
  59. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_video_api/__init__.py +0 -0
  60. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/scout_workbookcommon_api/__init__.py +0 -0
  61. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/secrets_api/__init__.py +0 -0
  62. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/storage_datasource_api/__init__.py +0 -0
  63. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/storage_series_api/__init__.py +0 -0
  64. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/storage_writer_api/__init__.py +0 -0
  65. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/timeseries_archetype/__init__.py +0 -0
  66. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/timeseries_logicalseries/__init__.py +0 -0
  67. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/timeseries_seriescache/__init__.py +0 -0
  68. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/timeseries_seriescache_api/__init__.py +0 -0
  69. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api/upload_api/__init__.py +0 -0
  70. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api.egg-info/SOURCES.txt +0 -0
  71. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api.egg-info/dependency_links.txt +0 -0
  72. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api.egg-info/requires.txt +0 -0
  73. {nominal_api-0.502.0 → nominal_api-0.504.0}/nominal_api.egg-info/top_level.txt +0 -0
  74. {nominal_api-0.502.0 → nominal_api-0.504.0}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: nominal-api
- Version: 0.502.0
+ Version: 0.504.0
  Requires-Python: >=3.8
  Requires-Dist: requests
  Requires-Dist: conjure-python-client<3,>=2.8.0
@@ -67,5 +67,5 @@ __all__ = [

  __conjure_generator_version__ = "4.9.0"

- __version__ = "0.502.0"
+ __version__ = "0.504.0"

@@ -9343,6 +9343,36 @@ a RunNotFound error will be thrown.
  _decoder = ConjureDecoder()
  return _decoder.decode(_response.json(), List[scout_notebook_api_Notebook], self._return_none_for_unknown_union_types)

+ def batch_get_metadata(self, auth_header: str, rids: List[str] = None) -> List["scout_notebook_api_NotebookMetadataWithRid"]:
+ rids = rids if rids is not None else []
+
+ _headers: Dict[str, Any] = {
+ 'Accept': 'application/json',
+ 'Content-Type': 'application/json',
+ 'Authorization': auth_header,
+ }
+
+ _params: Dict[str, Any] = {
+ }
+
+ _path_params: Dict[str, Any] = {
+ }
+
+ _json: Any = ConjureEncoder().default(rids)
+
+ _path = '/scout/v2/notebook/batch-get-metadata'
+ _path = _path.format(**_path_params)
+
+ _response: Response = self._request(
+ 'POST',
+ self._uri + _path,
+ params=_params,
+ headers=_headers,
+ json=_json)
+
+ _decoder = ConjureDecoder()
+ return _decoder.decode(_response.json(), List[scout_notebook_api_NotebookMetadataWithRid], self._return_none_for_unknown_union_types)
+
  def update_metadata(self, auth_header: str, request: "scout_notebook_api_UpdateNotebookMetadataRequest", rid: str) -> "scout_notebook_api_NotebookMetadata":
  """
  Updates metadata about a workbook, but not its contents.
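
The new batch_get_metadata endpoint follows the same pattern as the other generated Conjure endpoints: it POSTs a Conjure-encoded list of notebook RIDs to /scout/v2/notebook/batch-get-metadata and decodes a List[NotebookMetadataWithRid]. A minimal usage sketch follows; the NotebookService class name, the base URI, the auth token, and the RID value are placeholders/assumptions, and the client is wired up with conjure-python-client the same way as any other generated service.

    from conjure_python_client import RequestsClient, ServiceConfiguration

    from nominal_api import scout

    # Hypothetical wiring: substitute your real base URI, user agent, and token.
    config = ServiceConfiguration()
    config.uris = ["https://api.nominal.example.com"]
    notebook_service = RequestsClient.create(
        scout.NotebookService, user_agent="my-tool/0.0.1", service_config=config
    )

    auth_header = "Bearer <token>"
    # rids defaults to an empty list when omitted (see the signature above).
    results = notebook_service.batch_get_metadata(
        auth_header, rids=["ri.scout.main.notebook.0000"]  # placeholder RID
    )
    for item in results:
        print(item)  # each item is a NotebookMetadataWithRid
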
@@ -44289,12 +44319,13 @@ class scout_datareview_api_CheckAlertsHistogramRequest(ConjureBeanType):
  'pinned_checklist_refs': ConjureFieldDefinition('pinnedChecklistRefs', OptionalTypeWrapper[List[scout_checks_api_PinnedChecklistRef]]),
  'chart_rids': ConjureFieldDefinition('chartRids', OptionalTypeWrapper[List[scout_rids_api_VersionedVizId]]),
  'notebook_rids': ConjureFieldDefinition('notebookRids', OptionalTypeWrapper[List[scout_rids_api_NotebookRid]]),
- 'show_archived': ConjureFieldDefinition('showArchived', OptionalTypeWrapper[bool])
+ 'show_archived': ConjureFieldDefinition('showArchived', OptionalTypeWrapper[bool]),
+ 'archived_statuses': ConjureFieldDefinition('archivedStatuses', OptionalTypeWrapper[List[scout_rids_api_ArchivedStatus]])
  }

- __slots__: List[str] = ['_num_bins', '_search_text', '_distribution_variable', '_sub_group_variable', '_start_time_after', '_start_time_before', '_status', '_check_rids', '_data_review_rids', '_assignee_rids', '_priorities', '_run_rids', '_asset_rids', '_pinned_checklist_refs', '_chart_rids', '_notebook_rids', '_show_archived']
+ __slots__: List[str] = ['_num_bins', '_search_text', '_distribution_variable', '_sub_group_variable', '_start_time_after', '_start_time_before', '_status', '_check_rids', '_data_review_rids', '_assignee_rids', '_priorities', '_run_rids', '_asset_rids', '_pinned_checklist_refs', '_chart_rids', '_notebook_rids', '_show_archived', '_archived_statuses']

- def __init__(self, asset_rids: List[str], distribution_variable: "scout_datareview_api_HistogramDistributionVariable", run_rids: List[str], start_time_after: "api_Timestamp", start_time_before: "api_Timestamp", assignee_rids: Optional[List[str]] = None, chart_rids: Optional[List["scout_rids_api_VersionedVizId"]] = None, check_rids: Optional[List[str]] = None, data_review_rids: Optional[List[str]] = None, notebook_rids: Optional[List[str]] = None, num_bins: Optional[int] = None, pinned_checklist_refs: Optional[List["scout_checks_api_PinnedChecklistRef"]] = None, priorities: Optional[List["scout_checks_api_Priority"]] = None, search_text: Optional[str] = None, show_archived: Optional[bool] = None, status: Optional[List["scout_datareview_api_CheckAlertStatus"]] = None, sub_group_variable: Optional["scout_datareview_api_HistogramSubGroupVariable"] = None) -> None:
+ def __init__(self, asset_rids: List[str], distribution_variable: "scout_datareview_api_HistogramDistributionVariable", run_rids: List[str], start_time_after: "api_Timestamp", start_time_before: "api_Timestamp", archived_statuses: Optional[List["scout_rids_api_ArchivedStatus"]] = None, assignee_rids: Optional[List[str]] = None, chart_rids: Optional[List["scout_rids_api_VersionedVizId"]] = None, check_rids: Optional[List[str]] = None, data_review_rids: Optional[List[str]] = None, notebook_rids: Optional[List[str]] = None, num_bins: Optional[int] = None, pinned_checklist_refs: Optional[List["scout_checks_api_PinnedChecklistRef"]] = None, priorities: Optional[List["scout_checks_api_Priority"]] = None, search_text: Optional[str] = None, show_archived: Optional[bool] = None, status: Optional[List["scout_datareview_api_CheckAlertStatus"]] = None, sub_group_variable: Optional["scout_datareview_api_HistogramSubGroupVariable"] = None) -> None:
  self._num_bins = num_bins
  self._search_text = search_text
  self._distribution_variable = distribution_variable
@@ -44312,6 +44343,7 @@ class scout_datareview_api_CheckAlertsHistogramRequest(ConjureBeanType):
  self._chart_rids = chart_rids
  self._notebook_rids = notebook_rids
  self._show_archived = show_archived
+ self._archived_statuses = archived_statuses

  @builtins.property
  def num_bins(self) -> Optional[int]:
@@ -44398,10 +44430,19 @@ class scout_datareview_api_CheckAlertsHistogramRequest(ConjureBeanType):
  @builtins.property
  def show_archived(self) -> Optional[bool]:
  """
- If not present, will not show archived data reviews in search results
+ To be deprecated. Use archivedStatuses instead. If not present, will not show archived data reviews
+ in search results.
  """
  return self._show_archived

+ @builtins.property
+ def archived_statuses(self) -> Optional[List["scout_rids_api_ArchivedStatus"]]:
+ """
+ Filters search on check alerts based on the archived statuses provided.
+ Default is NOT_ARCHIVED only if none are provided.
+ """
+ return self._archived_statuses
+

  scout_datareview_api_CheckAlertsHistogramRequest.__name__ = "CheckAlertsHistogramRequest"
  scout_datareview_api_CheckAlertsHistogramRequest.__qualname__ = "CheckAlertsHistogramRequest"
@@ -45804,18 +45845,20 @@ If commitId is omitted from a ChecklistRef, it will match all commits.
  'checklist_refs': ConjureFieldDefinition('checklistRefs', List[scout_checks_api_ChecklistRef]),
  'next_page_token': ConjureFieldDefinition('nextPageToken', OptionalTypeWrapper[scout_api_Token]),
  'page_size': ConjureFieldDefinition('pageSize', OptionalTypeWrapper[int]),
- 'show_archived': ConjureFieldDefinition('showArchived', OptionalTypeWrapper[bool])
+ 'show_archived': ConjureFieldDefinition('showArchived', OptionalTypeWrapper[bool]),
+ 'archived_statuses': ConjureFieldDefinition('archivedStatuses', OptionalTypeWrapper[List[scout_rids_api_ArchivedStatus]])
  }

- __slots__: List[str] = ['_run_rids', '_asset_rids', '_checklist_refs', '_next_page_token', '_page_size', '_show_archived']
+ __slots__: List[str] = ['_run_rids', '_asset_rids', '_checklist_refs', '_next_page_token', '_page_size', '_show_archived', '_archived_statuses']

- def __init__(self, asset_rids: List[str], checklist_refs: List["scout_checks_api_ChecklistRef"], run_rids: List[str], next_page_token: Optional[str] = None, page_size: Optional[int] = None, show_archived: Optional[bool] = None) -> None:
+ def __init__(self, asset_rids: List[str], checklist_refs: List["scout_checks_api_ChecklistRef"], run_rids: List[str], archived_statuses: Optional[List["scout_rids_api_ArchivedStatus"]] = None, next_page_token: Optional[str] = None, page_size: Optional[int] = None, show_archived: Optional[bool] = None) -> None:
  self._run_rids = run_rids
  self._asset_rids = asset_rids
  self._checklist_refs = checklist_refs
  self._next_page_token = next_page_token
  self._page_size = page_size
  self._show_archived = show_archived
+ self._archived_statuses = archived_statuses

  @builtins.property
  def run_rids(self) -> List[str]:
@@ -45843,10 +45886,19 @@ If commitId is omitted from a ChecklistRef, it will match all commits.
  @builtins.property
  def show_archived(self) -> Optional[bool]:
  """
- Defaults to false if not specified.
+ To be deprecated. Use archivedStatuses instead. Allows for inclusion of archived data reviews
+ in search results alongside non-archived ones. Defaults to false if not specified.
  """
  return self._show_archived

+ @builtins.property
+ def archived_statuses(self) -> Optional[List["scout_rids_api_ArchivedStatus"]]:
+ """
+ Filters search on data reviews based on the archived statuses provided.
+ Default is NOT_ARCHIVED only if none are provided.
+ """
+ return self._archived_statuses
+

  scout_datareview_api_FindDataReviewsRequest.__name__ = "FindDataReviewsRequest"
  scout_datareview_api_FindDataReviewsRequest.__qualname__ = "FindDataReviewsRequest"
@@ -46200,7 +46252,7 @@ class scout_datareview_api_LinkNotebook(ConjureBeanType):
  @builtins.property
  def strategy(self) -> Optional["scout_datareview_api_LinkNotebookStrategy"]:
  """
- Defines the strategy for reconciling the alert with any linked alerts via the associated notebook. If not
+ Defines the strategy for reconciling the alert with any linked alerts via the associated notebook. If not
  provided, the alert will be linked naively and throw if it leaves a linked notebook in an invalid state.
  """
  return self._strategy
@@ -47082,12 +47134,13 @@ class scout_datareview_api_SearchCheckAlertsRequest(ConjureBeanType):
  'pinned_checklist_refs': ConjureFieldDefinition('pinnedChecklistRefs', OptionalTypeWrapper[List[scout_checks_api_PinnedChecklistRef]]),
  'chart_rids': ConjureFieldDefinition('chartRids', OptionalTypeWrapper[List[scout_rids_api_VersionedVizId]]),
  'notebook_rids': ConjureFieldDefinition('notebookRids', OptionalTypeWrapper[List[scout_rids_api_NotebookRid]]),
- 'show_archived': ConjureFieldDefinition('showArchived', OptionalTypeWrapper[bool])
+ 'show_archived': ConjureFieldDefinition('showArchived', OptionalTypeWrapper[bool]),
+ 'archived_statuses': ConjureFieldDefinition('archivedStatuses', OptionalTypeWrapper[List[scout_rids_api_ArchivedStatus]])
  }

- __slots__: List[str] = ['_next_page_token', '_page_size', '_sort_by', '_search_text', '_after', '_before', '_status', '_check_rids', '_data_review_rids', '_assignee_rids', '_priorities', '_run_rids', '_asset_rids', '_pinned_checklist_refs', '_chart_rids', '_notebook_rids', '_show_archived']
+ __slots__: List[str] = ['_next_page_token', '_page_size', '_sort_by', '_search_text', '_after', '_before', '_status', '_check_rids', '_data_review_rids', '_assignee_rids', '_priorities', '_run_rids', '_asset_rids', '_pinned_checklist_refs', '_chart_rids', '_notebook_rids', '_show_archived', '_archived_statuses']

- def __init__(self, asset_rids: List[str], run_rids: List[str], after: Optional["api_Timestamp"] = None, assignee_rids: Optional[List[str]] = None, before: Optional["api_Timestamp"] = None, chart_rids: Optional[List["scout_rids_api_VersionedVizId"]] = None, check_rids: Optional[List[str]] = None, data_review_rids: Optional[List[str]] = None, next_page_token: Optional[str] = None, notebook_rids: Optional[List[str]] = None, page_size: Optional[int] = None, pinned_checklist_refs: Optional[List["scout_checks_api_PinnedChecklistRef"]] = None, priorities: Optional[List["scout_checks_api_Priority"]] = None, search_text: Optional[str] = None, show_archived: Optional[bool] = None, sort_by: Optional["scout_datareview_api_SearchCheckAlertsSortOptions"] = None, status: Optional[List["scout_datareview_api_CheckAlertStatus"]] = None) -> None:
+ def __init__(self, asset_rids: List[str], run_rids: List[str], after: Optional["api_Timestamp"] = None, archived_statuses: Optional[List["scout_rids_api_ArchivedStatus"]] = None, assignee_rids: Optional[List[str]] = None, before: Optional["api_Timestamp"] = None, chart_rids: Optional[List["scout_rids_api_VersionedVizId"]] = None, check_rids: Optional[List[str]] = None, data_review_rids: Optional[List[str]] = None, next_page_token: Optional[str] = None, notebook_rids: Optional[List[str]] = None, page_size: Optional[int] = None, pinned_checklist_refs: Optional[List["scout_checks_api_PinnedChecklistRef"]] = None, priorities: Optional[List["scout_checks_api_Priority"]] = None, search_text: Optional[str] = None, show_archived: Optional[bool] = None, sort_by: Optional["scout_datareview_api_SearchCheckAlertsSortOptions"] = None, status: Optional[List["scout_datareview_api_CheckAlertStatus"]] = None) -> None:
  self._next_page_token = next_page_token
  self._page_size = page_size
  self._sort_by = sort_by
@@ -47105,6 +47158,7 @@ class scout_datareview_api_SearchCheckAlertsRequest(ConjureBeanType):
  self._chart_rids = chart_rids
  self._notebook_rids = notebook_rids
  self._show_archived = show_archived
+ self._archived_statuses = archived_statuses

  @builtins.property
  def next_page_token(self) -> Optional[str]:
@@ -47188,10 +47242,19 @@ class scout_datareview_api_SearchCheckAlertsRequest(ConjureBeanType):
  @builtins.property
  def show_archived(self) -> Optional[bool]:
  """
- If not present, will not show archived data reviews in search results
+ To be deprecated. Use archivedStatuses instead. If true, includes archived check alerts in the search
+ results. If not present or false, will not show archived data reviews in search results
  """
  return self._show_archived

+ @builtins.property
+ def archived_statuses(self) -> Optional[List["scout_rids_api_ArchivedStatus"]]:
+ """
+ Filters search on check alerts based on the archived statuses provided.
+ Default is NOT_ARCHIVED only if none are provided.
+ """
+ return self._archived_statuses
+

  scout_datareview_api_SearchCheckAlertsRequest.__name__ = "SearchCheckAlertsRequest"
  scout_datareview_api_SearchCheckAlertsRequest.__qualname__ = "SearchCheckAlertsRequest"
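
The three request beans above (CheckAlertsHistogramRequest, FindDataReviewsRequest, SearchCheckAlertsRequest) all gain the same optional archivedStatuses filter alongside the soon-to-be-deprecated showArchived flag. A hedged construction sketch for the search request; the enum member names and the public export paths are assumed from the docstrings and the package's usual re-export convention, not verified here:

    from nominal_api import scout_datareview_api, scout_rids_api

    # Prefer archived_statuses over the soon-to-be-deprecated show_archived boolean.
    request = scout_datareview_api.SearchCheckAlertsRequest(
        asset_rids=["ri.scout.main.asset.0000"],  # placeholder RID
        run_rids=[],
        archived_statuses=[
            scout_rids_api.ArchivedStatus.NOT_ARCHIVED,  # assumed enum members
            scout_rids_api.ArchivedStatus.ARCHIVED,
        ],
    )

If archived_statuses is omitted, the service defaults to NOT_ARCHIVED only, per the docstrings above.
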
@@ -47912,6 +47975,103 @@ scout_datasource_connection_ConnectionService.__qualname__ = "ConnectionService"
  scout_datasource_connection_ConnectionService.__module__ = "nominal_api.scout_datasource_connection"


+ class scout_datasource_connection_api_BigQueryConnectionDetails(ConjureBeanType):
+
+ @builtins.classmethod
+ def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+ return {
+ 'region': ConjureFieldDefinition('region', scout_datasource_connection_api_LocationName),
+ 'project': ConjureFieldDefinition('project', scout_datasource_connection_api_ProjectName),
+ 'dataset': ConjureFieldDefinition('dataset', scout_datasource_connection_api_DatasetName),
+ 'table': ConjureFieldDefinition('table', scout_datasource_connection_api_TableName),
+ 'service_account_key_secret_rid': ConjureFieldDefinition('serviceAccountKeySecretRid', scout_datasource_connection_api_SecretRid)
+ }
+
+ __slots__: List[str] = ['_region', '_project', '_dataset', '_table', '_service_account_key_secret_rid']
+
+ def __init__(self, dataset: str, project: str, region: str, service_account_key_secret_rid: str, table: str) -> None:
+ self._region = region
+ self._project = project
+ self._dataset = dataset
+ self._table = table
+ self._service_account_key_secret_rid = service_account_key_secret_rid
+
+ @builtins.property
+ def region(self) -> str:
+ """
+ The region of the BigQuery Project (e.g. "us-east1")
+ """
+ return self._region
+
+ @builtins.property
+ def project(self) -> str:
+ """
+ The name of the BigQuery Project
+ """
+ return self._project
+
+ @builtins.property
+ def dataset(self) -> str:
+ """
+ The name of the dataset within the project
+ """
+ return self._dataset
+
+ @builtins.property
+ def table(self) -> str:
+ """
+ The name of the table within the dataset
+ """
+ return self._table
+
+ @builtins.property
+ def service_account_key_secret_rid(self) -> str:
+ """
+ Secret Rid of service account key stored in Secrets Service.
+ """
+ return self._service_account_key_secret_rid
+
+
+ scout_datasource_connection_api_BigQueryConnectionDetails.__name__ = "BigQueryConnectionDetails"
+ scout_datasource_connection_api_BigQueryConnectionDetails.__qualname__ = "BigQueryConnectionDetails"
+ scout_datasource_connection_api_BigQueryConnectionDetails.__module__ = "nominal_api.scout_datasource_connection_api"
+
+
+ class scout_datasource_connection_api_BigQueryScrapingConfig(ConjureBeanType):
+
+ @builtins.classmethod
+ def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+ return {
+ 'time_column': ConjureFieldDefinition('timeColumn', scout_datasource_connection_api_ColumnName),
+ 'tag_columns': ConjureFieldDefinition('tagColumns', List[scout_datasource_connection_api_ColumnName])
+ }
+
+ __slots__: List[str] = ['_time_column', '_tag_columns']
+
+ def __init__(self, tag_columns: List[str], time_column: str) -> None:
+ self._time_column = time_column
+ self._tag_columns = tag_columns
+
+ @builtins.property
+ def time_column(self) -> str:
+ """
+ The name of the column that holds the timestamp.
+ """
+ return self._time_column
+
+ @builtins.property
+ def tag_columns(self) -> List[str]:
+ """
+ The name of the columns that should be interpreted as tag columns
+ """
+ return self._tag_columns
+
+
+ scout_datasource_connection_api_BigQueryScrapingConfig.__name__ = "BigQueryScrapingConfig"
+ scout_datasource_connection_api_BigQueryScrapingConfig.__qualname__ = "BigQueryScrapingConfig"
+ scout_datasource_connection_api_BigQueryScrapingConfig.__module__ = "nominal_api.scout_datasource_connection_api"
+
+
  class scout_datasource_connection_api_Connection(ConjureBeanType):

  @builtins.classmethod
@@ -48021,6 +48181,7 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  _nominal: Optional["scout_datasource_connection_api_NominalConnectionDetails"] = None
  _timestream: Optional["scout_datasource_connection_api_TimestreamConnectionDetails"] = None
  _visual_crossing: Optional["scout_datasource_connection_api_VisualCrossingConnectionDetails"] = None
+ _big_query: Optional["scout_datasource_connection_api_BigQueryConnectionDetails"] = None

  @builtins.classmethod
  def _options(cls) -> Dict[str, ConjureFieldDefinition]:
@@ -48030,7 +48191,8 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  'influx1': ConjureFieldDefinition('influx1', scout_datasource_connection_api_Influx1ConnectionDetails),
  'nominal': ConjureFieldDefinition('nominal', scout_datasource_connection_api_NominalConnectionDetails),
  'timestream': ConjureFieldDefinition('timestream', scout_datasource_connection_api_TimestreamConnectionDetails),
- 'visual_crossing': ConjureFieldDefinition('visualCrossing', scout_datasource_connection_api_VisualCrossingConnectionDetails)
+ 'visual_crossing': ConjureFieldDefinition('visualCrossing', scout_datasource_connection_api_VisualCrossingConnectionDetails),
+ 'big_query': ConjureFieldDefinition('bigQuery', scout_datasource_connection_api_BigQueryConnectionDetails)
  }

  def __init__(
@@ -48041,10 +48203,11 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  nominal: Optional["scout_datasource_connection_api_NominalConnectionDetails"] = None,
  timestream: Optional["scout_datasource_connection_api_TimestreamConnectionDetails"] = None,
  visual_crossing: Optional["scout_datasource_connection_api_VisualCrossingConnectionDetails"] = None,
+ big_query: Optional["scout_datasource_connection_api_BigQueryConnectionDetails"] = None,
  type_of_union: Optional[str] = None
  ) -> None:
  if type_of_union is None:
- if (timescale is not None) + (influx is not None) + (influx1 is not None) + (nominal is not None) + (timestream is not None) + (visual_crossing is not None) != 1:
+ if (timescale is not None) + (influx is not None) + (influx1 is not None) + (nominal is not None) + (timestream is not None) + (visual_crossing is not None) + (big_query is not None) != 1:
  raise ValueError('a union must contain a single member')

  if timescale is not None:
@@ -48065,6 +48228,9 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  if visual_crossing is not None:
  self._visual_crossing = visual_crossing
  self._type = 'visualCrossing'
+ if big_query is not None:
+ self._big_query = big_query
+ self._type = 'bigQuery'

  elif type_of_union == 'timescale':
  if timescale is None:
@@ -48096,6 +48262,11 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  raise ValueError('a union value must not be None')
  self._visual_crossing = visual_crossing
  self._type = 'visualCrossing'
+ elif type_of_union == 'bigQuery':
+ if big_query is None:
+ raise ValueError('a union value must not be None')
+ self._big_query = big_query
+ self._type = 'bigQuery'

  @builtins.property
  def timescale(self) -> Optional["scout_datasource_connection_api_TimescaleConnectionDetails"]:
@@ -48121,6 +48292,10 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  def visual_crossing(self) -> Optional["scout_datasource_connection_api_VisualCrossingConnectionDetails"]:
  return self._visual_crossing

+ @builtins.property
+ def big_query(self) -> Optional["scout_datasource_connection_api_BigQueryConnectionDetails"]:
+ return self._big_query
+
  def accept(self, visitor) -> Any:
  if not isinstance(visitor, scout_datasource_connection_api_ConnectionDetailsVisitor):
  raise ValueError('{} is not an instance of scout_datasource_connection_api_ConnectionDetailsVisitor'.format(visitor.__class__.__name__))
@@ -48136,6 +48311,8 @@ class scout_datasource_connection_api_ConnectionDetails(ConjureUnionType):
  return visitor._timestream(self.timestream)
  if self._type == 'visualCrossing' and self.visual_crossing is not None:
  return visitor._visual_crossing(self.visual_crossing)
+ if self._type == 'bigQuery' and self.big_query is not None:
+ return visitor._big_query(self.big_query)


  scout_datasource_connection_api_ConnectionDetails.__name__ = "ConnectionDetails"
@@ -48169,6 +48346,10 @@ class scout_datasource_connection_api_ConnectionDetailsVisitor:
  def _visual_crossing(self, visual_crossing: "scout_datasource_connection_api_VisualCrossingConnectionDetails") -> Any:
  pass

+ @abstractmethod
+ def _big_query(self, big_query: "scout_datasource_connection_api_BigQueryConnectionDetails") -> Any:
+ pass
+

  scout_datasource_connection_api_ConnectionDetailsVisitor.__name__ = "ConnectionDetailsVisitor"
  scout_datasource_connection_api_ConnectionDetailsVisitor.__qualname__ = "ConnectionDetailsVisitor"
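
The ConnectionDetails union gains a bigQuery member backed by the new BigQueryConnectionDetails bean. A minimal construction sketch; the names and RIDs are placeholders, and the public export path under nominal_api.scout_datasource_connection_api is assumed to follow the package's usual aliasing (see the __init__ changes further down):

    from nominal_api import scout_datasource_connection_api as conn_api

    big_query = conn_api.BigQueryConnectionDetails(
        region="us-east1",
        project="my-gcp-project",        # placeholder project
        dataset="telemetry",             # placeholder dataset
        table="measurements",            # placeholder table
        service_account_key_secret_rid="ri.secrets.main.secret.0000",  # placeholder secret RID
    )

    # Exactly one union member may be set; the union records which one was chosen.
    details = conn_api.ConnectionDetails(big_query=big_query)
    assert details.type == "bigQuery"
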
@@ -49239,6 +49420,7 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  _timestream: Optional["scout_datasource_connection_api_TimestreamScrapingConfig"] = None
  _timescale: Optional["scout_datasource_connection_api_PivotedTimescaleScrapingConfig"] = None
  _visual_crossing: Optional["scout_datasource_connection_api_VisualCrossingScrapingConfig"] = None
+ _big_query: Optional["scout_datasource_connection_api_BigQueryScrapingConfig"] = None

  @builtins.classmethod
  def _options(cls) -> Dict[str, ConjureFieldDefinition]:
@@ -49247,7 +49429,8 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  'nominal': ConjureFieldDefinition('nominal', scout_datasource_connection_api_NominalScrapingConfig),
  'timestream': ConjureFieldDefinition('timestream', scout_datasource_connection_api_TimestreamScrapingConfig),
  'timescale': ConjureFieldDefinition('timescale', scout_datasource_connection_api_PivotedTimescaleScrapingConfig),
- 'visual_crossing': ConjureFieldDefinition('visualCrossing', scout_datasource_connection_api_VisualCrossingScrapingConfig)
+ 'visual_crossing': ConjureFieldDefinition('visualCrossing', scout_datasource_connection_api_VisualCrossingScrapingConfig),
+ 'big_query': ConjureFieldDefinition('bigQuery', scout_datasource_connection_api_BigQueryScrapingConfig)
  }

  def __init__(
@@ -49257,10 +49440,11 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  timestream: Optional["scout_datasource_connection_api_TimestreamScrapingConfig"] = None,
  timescale: Optional["scout_datasource_connection_api_PivotedTimescaleScrapingConfig"] = None,
  visual_crossing: Optional["scout_datasource_connection_api_VisualCrossingScrapingConfig"] = None,
+ big_query: Optional["scout_datasource_connection_api_BigQueryScrapingConfig"] = None,
  type_of_union: Optional[str] = None
  ) -> None:
  if type_of_union is None:
- if (influx is not None) + (nominal is not None) + (timestream is not None) + (timescale is not None) + (visual_crossing is not None) != 1:
+ if (influx is not None) + (nominal is not None) + (timestream is not None) + (timescale is not None) + (visual_crossing is not None) + (big_query is not None) != 1:
  raise ValueError('a union must contain a single member')

  if influx is not None:
@@ -49278,6 +49462,9 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  if visual_crossing is not None:
  self._visual_crossing = visual_crossing
  self._type = 'visualCrossing'
+ if big_query is not None:
+ self._big_query = big_query
+ self._type = 'bigQuery'

  elif type_of_union == 'influx':
  if influx is None:
@@ -49304,6 +49491,11 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  raise ValueError('a union value must not be None')
  self._visual_crossing = visual_crossing
  self._type = 'visualCrossing'
+ elif type_of_union == 'bigQuery':
+ if big_query is None:
+ raise ValueError('a union value must not be None')
+ self._big_query = big_query
+ self._type = 'bigQuery'

  @builtins.property
  def influx(self) -> Optional["scout_datasource_connection_api_InfluxScrapingConfig"]:
@@ -49325,6 +49517,10 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  def visual_crossing(self) -> Optional["scout_datasource_connection_api_VisualCrossingScrapingConfig"]:
  return self._visual_crossing

+ @builtins.property
+ def big_query(self) -> Optional["scout_datasource_connection_api_BigQueryScrapingConfig"]:
+ return self._big_query
+
  def accept(self, visitor) -> Any:
  if not isinstance(visitor, scout_datasource_connection_api_ScrapingConfigVisitor):
  raise ValueError('{} is not an instance of scout_datasource_connection_api_ScrapingConfigVisitor'.format(visitor.__class__.__name__))
@@ -49338,6 +49534,8 @@ class scout_datasource_connection_api_ScrapingConfig(ConjureUnionType):
  return visitor._timescale(self.timescale)
  if self._type == 'visualCrossing' and self.visual_crossing is not None:
  return visitor._visual_crossing(self.visual_crossing)
+ if self._type == 'bigQuery' and self.big_query is not None:
+ return visitor._big_query(self.big_query)


  scout_datasource_connection_api_ScrapingConfig.__name__ = "ScrapingConfig"
@@ -49367,6 +49565,10 @@ class scout_datasource_connection_api_ScrapingConfigVisitor:
  def _visual_crossing(self, visual_crossing: "scout_datasource_connection_api_VisualCrossingScrapingConfig") -> Any:
  pass

+ @abstractmethod
+ def _big_query(self, big_query: "scout_datasource_connection_api_BigQueryScrapingConfig") -> Any:
+ pass
+

  scout_datasource_connection_api_ScrapingConfigVisitor.__name__ = "ScrapingConfigVisitor"
  scout_datasource_connection_api_ScrapingConfigVisitor.__qualname__ = "ScrapingConfigVisitor"
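
Because ScrapingConfigVisitor (like the other visitors touched in this release) gains an abstract _big_query method, visitor implementations need one more override. A sketch, with the pre-existing overrides omitted for brevity; the column names are placeholders:

    from typing import Any

    from nominal_api import scout_datasource_connection_api as conn_api

    class ScrapingConfigDescriber(conn_api.ScrapingConfigVisitor):
        # The pre-existing overrides (_influx, _nominal, _timestream, _timescale,
        # _visual_crossing) are unchanged and omitted from this sketch.

        def _big_query(self, big_query: conn_api.BigQueryScrapingConfig) -> Any:
            # New in this release: handle BigQuery scraping configs.
            return "BigQuery scrape keyed on {} with tag columns {}".format(
                big_query.time_column, big_query.tag_columns
            )

    config = conn_api.ScrapingConfig(
        big_query=conn_api.BigQueryScrapingConfig(
            time_column="timestamp",       # placeholder column names
            tag_columns=["vehicle_id"],
        )
    )
    print(config.accept(ScrapingConfigDescriber()))
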
@@ -61690,6 +61892,41 @@ timeseries_archetype_api_BatchGetSeriesArchetypeResponse.__qualname__ = "BatchGe
  timeseries_archetype_api_BatchGetSeriesArchetypeResponse.__module__ = "nominal_api.timeseries_archetype_api"


+ class timeseries_archetype_api_BigQueryLocatorTemplate(ConjureBeanType):
+
+ @builtins.classmethod
+ def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+ return {
+ 'value_column': ConjureFieldDefinition('valueColumn', timeseries_logicalseries_api_ColumnName),
+ 'time_column': ConjureFieldDefinition('timeColumn', timeseries_logicalseries_api_ColumnName),
+ 'type': ConjureFieldDefinition('type', timeseries_logicalseries_api_NominalType)
+ }
+
+ __slots__: List[str] = ['_value_column', '_time_column', '_type']
+
+ def __init__(self, time_column: str, type: "timeseries_logicalseries_api_NominalType", value_column: str) -> None:
+ self._value_column = value_column
+ self._time_column = time_column
+ self._type = type
+
+ @builtins.property
+ def value_column(self) -> str:
+ return self._value_column
+
+ @builtins.property
+ def time_column(self) -> str:
+ return self._time_column
+
+ @builtins.property
+ def type(self) -> "timeseries_logicalseries_api_NominalType":
+ return self._type
+
+
+ timeseries_archetype_api_BigQueryLocatorTemplate.__name__ = "BigQueryLocatorTemplate"
+ timeseries_archetype_api_BigQueryLocatorTemplate.__qualname__ = "BigQueryLocatorTemplate"
+ timeseries_archetype_api_BigQueryLocatorTemplate.__module__ = "nominal_api.timeseries_archetype_api"
+
+
  class timeseries_archetype_api_CreateSeriesArchetypeRequest(ConjureBeanType):

  @builtins.classmethod
@@ -61848,6 +62085,7 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  _nominal: Optional["timeseries_archetype_api_NominalLocatorTemplate"] = None
  _timestream: Optional["timeseries_archetype_api_TimestreamLocatorTemplate"] = None
  _visual_crossing: Optional["timeseries_archetype_api_VisualCrossingLocatorTemplate"] = None
+ _big_query: Optional["timeseries_archetype_api_BigQueryLocatorTemplate"] = None

  @builtins.classmethod
  def _options(cls) -> Dict[str, ConjureFieldDefinition]:
@@ -61857,7 +62095,8 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  'influx1': ConjureFieldDefinition('influx1', timeseries_archetype_api_Influx1LocatorTemplate),
  'nominal': ConjureFieldDefinition('nominal', timeseries_archetype_api_NominalLocatorTemplate),
  'timestream': ConjureFieldDefinition('timestream', timeseries_archetype_api_TimestreamLocatorTemplate),
- 'visual_crossing': ConjureFieldDefinition('visualCrossing', timeseries_archetype_api_VisualCrossingLocatorTemplate)
+ 'visual_crossing': ConjureFieldDefinition('visualCrossing', timeseries_archetype_api_VisualCrossingLocatorTemplate),
+ 'big_query': ConjureFieldDefinition('bigQuery', timeseries_archetype_api_BigQueryLocatorTemplate)
  }

  def __init__(
@@ -61868,10 +62107,11 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  nominal: Optional["timeseries_archetype_api_NominalLocatorTemplate"] = None,
  timestream: Optional["timeseries_archetype_api_TimestreamLocatorTemplate"] = None,
  visual_crossing: Optional["timeseries_archetype_api_VisualCrossingLocatorTemplate"] = None,
+ big_query: Optional["timeseries_archetype_api_BigQueryLocatorTemplate"] = None,
  type_of_union: Optional[str] = None
  ) -> None:
  if type_of_union is None:
- if (timescale_db is not None) + (influx is not None) + (influx1 is not None) + (nominal is not None) + (timestream is not None) + (visual_crossing is not None) != 1:
+ if (timescale_db is not None) + (influx is not None) + (influx1 is not None) + (nominal is not None) + (timestream is not None) + (visual_crossing is not None) + (big_query is not None) != 1:
  raise ValueError('a union must contain a single member')

  if timescale_db is not None:
@@ -61892,6 +62132,9 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  if visual_crossing is not None:
  self._visual_crossing = visual_crossing
  self._type = 'visualCrossing'
+ if big_query is not None:
+ self._big_query = big_query
+ self._type = 'bigQuery'

  elif type_of_union == 'timescaleDb':
  if timescale_db is None:
@@ -61923,6 +62166,11 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  raise ValueError('a union value must not be None')
  self._visual_crossing = visual_crossing
  self._type = 'visualCrossing'
+ elif type_of_union == 'bigQuery':
+ if big_query is None:
+ raise ValueError('a union value must not be None')
+ self._big_query = big_query
+ self._type = 'bigQuery'

  @builtins.property
  def timescale_db(self) -> Optional["timeseries_archetype_api_TimescaleDbLocatorTemplate"]:
@@ -61948,6 +62196,10 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  def visual_crossing(self) -> Optional["timeseries_archetype_api_VisualCrossingLocatorTemplate"]:
  return self._visual_crossing

+ @builtins.property
+ def big_query(self) -> Optional["timeseries_archetype_api_BigQueryLocatorTemplate"]:
+ return self._big_query
+
  def accept(self, visitor) -> Any:
  if not isinstance(visitor, timeseries_archetype_api_LocatorTemplateVisitor):
  raise ValueError('{} is not an instance of timeseries_archetype_api_LocatorTemplateVisitor'.format(visitor.__class__.__name__))
@@ -61963,6 +62215,8 @@ class timeseries_archetype_api_LocatorTemplate(ConjureUnionType):
  return visitor._timestream(self.timestream)
  if self._type == 'visualCrossing' and self.visual_crossing is not None:
  return visitor._visual_crossing(self.visual_crossing)
+ if self._type == 'bigQuery' and self.big_query is not None:
+ return visitor._big_query(self.big_query)


  timeseries_archetype_api_LocatorTemplate.__name__ = "LocatorTemplate"
@@ -61996,6 +62250,10 @@ class timeseries_archetype_api_LocatorTemplateVisitor:
  def _visual_crossing(self, visual_crossing: "timeseries_archetype_api_VisualCrossingLocatorTemplate") -> Any:
  pass

+ @abstractmethod
+ def _big_query(self, big_query: "timeseries_archetype_api_BigQueryLocatorTemplate") -> Any:
+ pass
+

  timeseries_archetype_api_LocatorTemplateVisitor.__name__ = "LocatorTemplateVisitor"
  timeseries_archetype_api_LocatorTemplateVisitor.__qualname__ = "LocatorTemplateVisitor"
@@ -62606,6 +62864,56 @@ timeseries_logicalseries_api_BatchUpdateLogicalSeriesResponse.__qualname__ = "Ba
  timeseries_logicalseries_api_BatchUpdateLogicalSeriesResponse.__module__ = "nominal_api.timeseries_logicalseries_api"


+ class timeseries_logicalseries_api_BigQueryLocator(ConjureBeanType):
+
+ @builtins.classmethod
+ def _fields(cls) -> Dict[str, ConjureFieldDefinition]:
+ return {
+ 'value_column': ConjureFieldDefinition('valueColumn', timeseries_logicalseries_api_ColumnName),
+ 'time_column': ConjureFieldDefinition('timeColumn', timeseries_logicalseries_api_ColumnName),
+ 'tag_values': ConjureFieldDefinition('tagValues', Dict[api_TagName, api_TagValue]),
+ 'type': ConjureFieldDefinition('type', timeseries_logicalseries_api_NominalType)
+ }
+
+ __slots__: List[str] = ['_value_column', '_time_column', '_tag_values', '_type']
+
+ def __init__(self, tag_values: Dict[str, str], time_column: str, type: "timeseries_logicalseries_api_NominalType", value_column: str) -> None:
+ self._value_column = value_column
+ self._time_column = time_column
+ self._tag_values = tag_values
+ self._type = type
+
+ @builtins.property
+ def value_column(self) -> str:
+ """
+ The name of the column which has the values for this series
+ """
+ return self._value_column
+
+ @builtins.property
+ def time_column(self) -> str:
+ """
+ The name of the column which has the timestamps for this series
+ """
+ return self._time_column
+
+ @builtins.property
+ def tag_values(self) -> Dict[str, str]:
+ """
+ The mapping of columns to column values to filter on
+ """
+ return self._tag_values
+
+ @builtins.property
+ def type(self) -> "timeseries_logicalseries_api_NominalType":
+ return self._type
+
+
+ timeseries_logicalseries_api_BigQueryLocator.__name__ = "BigQueryLocator"
+ timeseries_logicalseries_api_BigQueryLocator.__qualname__ = "BigQueryLocator"
+ timeseries_logicalseries_api_BigQueryLocator.__module__ = "nominal_api.timeseries_logicalseries_api"
+
+
  class timeseries_logicalseries_api_Confidence(ConjureEnumType):

  LOW = 'LOW'
@@ -63063,6 +63371,7 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  _nominal_locator: Optional["timeseries_logicalseries_api_NominalLocator"] = None
  _timestream_locator: Optional["timeseries_logicalseries_api_TimestreamLocator"] = None
  _visual_crossing_locator: Optional["timeseries_logicalseries_api_VisualCrossingLocator"] = None
+ _big_query_locator: Optional["timeseries_logicalseries_api_BigQueryLocator"] = None

  @builtins.classmethod
  def _options(cls) -> Dict[str, ConjureFieldDefinition]:
@@ -63074,7 +63383,8 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  'influx1_locator': ConjureFieldDefinition('influx1Locator', timeseries_logicalseries_api_Influx1Locator),
  'nominal_locator': ConjureFieldDefinition('nominalLocator', timeseries_logicalseries_api_NominalLocator),
  'timestream_locator': ConjureFieldDefinition('timestreamLocator', timeseries_logicalseries_api_TimestreamLocator),
- 'visual_crossing_locator': ConjureFieldDefinition('visualCrossingLocator', timeseries_logicalseries_api_VisualCrossingLocator)
+ 'visual_crossing_locator': ConjureFieldDefinition('visualCrossingLocator', timeseries_logicalseries_api_VisualCrossingLocator),
+ 'big_query_locator': ConjureFieldDefinition('bigQueryLocator', timeseries_logicalseries_api_BigQueryLocator)
  }

  def __init__(
@@ -63087,10 +63397,11 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  nominal_locator: Optional["timeseries_logicalseries_api_NominalLocator"] = None,
  timestream_locator: Optional["timeseries_logicalseries_api_TimestreamLocator"] = None,
  visual_crossing_locator: Optional["timeseries_logicalseries_api_VisualCrossingLocator"] = None,
+ big_query_locator: Optional["timeseries_logicalseries_api_BigQueryLocator"] = None,
  type_of_union: Optional[str] = None
  ) -> None:
  if type_of_union is None:
- if (csv_locator is not None) + (csv_v2 is not None) + (timescale_db_locator is not None) + (influx_locator is not None) + (influx1_locator is not None) + (nominal_locator is not None) + (timestream_locator is not None) + (visual_crossing_locator is not None) != 1:
+ if (csv_locator is not None) + (csv_v2 is not None) + (timescale_db_locator is not None) + (influx_locator is not None) + (influx1_locator is not None) + (nominal_locator is not None) + (timestream_locator is not None) + (visual_crossing_locator is not None) + (big_query_locator is not None) != 1:
  raise ValueError('a union must contain a single member')

  if csv_locator is not None:
@@ -63117,6 +63428,9 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  if visual_crossing_locator is not None:
  self._visual_crossing_locator = visual_crossing_locator
  self._type = 'visualCrossingLocator'
+ if big_query_locator is not None:
+ self._big_query_locator = big_query_locator
+ self._type = 'bigQueryLocator'

  elif type_of_union == 'csvLocator':
  if csv_locator is None:
@@ -63158,6 +63472,11 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  raise ValueError('a union value must not be None')
  self._visual_crossing_locator = visual_crossing_locator
  self._type = 'visualCrossingLocator'
+ elif type_of_union == 'bigQueryLocator':
+ if big_query_locator is None:
+ raise ValueError('a union value must not be None')
+ self._big_query_locator = big_query_locator
+ self._type = 'bigQueryLocator'

  @builtins.property
  def csv_locator(self) -> Optional["timeseries_logicalseries_api_CsvLocator"]:
@@ -63191,6 +63510,10 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  def visual_crossing_locator(self) -> Optional["timeseries_logicalseries_api_VisualCrossingLocator"]:
  return self._visual_crossing_locator

+ @builtins.property
+ def big_query_locator(self) -> Optional["timeseries_logicalseries_api_BigQueryLocator"]:
+ return self._big_query_locator
+
  def accept(self, visitor) -> Any:
  if not isinstance(visitor, timeseries_logicalseries_api_LocatorVisitor):
  raise ValueError('{} is not an instance of timeseries_logicalseries_api_LocatorVisitor'.format(visitor.__class__.__name__))
@@ -63210,6 +63533,8 @@ class timeseries_logicalseries_api_Locator(ConjureUnionType):
  return visitor._timestream_locator(self.timestream_locator)
  if self._type == 'visualCrossingLocator' and self.visual_crossing_locator is not None:
  return visitor._visual_crossing_locator(self.visual_crossing_locator)
+ if self._type == 'bigQueryLocator' and self.big_query_locator is not None:
+ return visitor._big_query_locator(self.big_query_locator)


  timeseries_logicalseries_api_Locator.__name__ = "Locator"
@@ -63251,6 +63576,10 @@ class timeseries_logicalseries_api_LocatorVisitor:
  def _visual_crossing_locator(self, visual_crossing_locator: "timeseries_logicalseries_api_VisualCrossingLocator") -> Any:
  pass

+ @abstractmethod
+ def _big_query_locator(self, big_query_locator: "timeseries_logicalseries_api_BigQueryLocator") -> Any:
+ pass
+

  timeseries_logicalseries_api_LocatorVisitor.__name__ = "LocatorVisitor"
  timeseries_logicalseries_api_LocatorVisitor.__qualname__ = "LocatorVisitor"
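
The logical-series Locator union gains the matching bigQueryLocator member. A sketch of building one for an existing series type; the column names and tag filter are placeholders, and the helper function itself is hypothetical (the NominalType value is assumed to come from wherever such values are already constructed):

    from nominal_api import timeseries_logicalseries_api as ls_api

    def big_query_locator_for(series_type: ls_api.NominalType) -> ls_api.Locator:
        # series_type is supplied by the caller; this sketch does not construct it.
        return ls_api.Locator(
            big_query_locator=ls_api.BigQueryLocator(
                value_column="value",                  # placeholder column names
                time_column="timestamp",
                tag_values={"vehicle_id": "V-001"},    # placeholder tag filter
                type=series_type,
            )
        )
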
@@ -64869,12 +65198,16 @@ api_rids_AttachmentRid = str

  datasource_pagination_api_PageToken = str

+ scout_datasource_connection_api_ProjectName = str
+
  datasource_PropertyValue = str

  scout_comparisonnotebook_api_VariableName = str

  scout_run_api_RunRid = str

+ scout_datasource_connection_api_DatasetName = str
+
  scout_backend_Token = str

  api_TagValue = str
@@ -65007,6 +65340,8 @@ scout_versioning_api_CommitId = str

  scout_run_api_ConnectionRid = str

+ timeseries_logicalseries_api_DatasetName = str
+
  api_SeriesArchetypeRid = str

  scout_versioning_api_TagRid = str
@@ -65045,6 +65380,8 @@ scout_rids_api_CheckRid = str

  secrets_api_PropertyName = str

+ timeseries_logicalseries_api_ProjectName = str
+
  api_Channel = str

  attachments_api_PropertyValue = str
@@ -1,5 +1,7 @@
  # coding=utf-8
  from .._impl import (
+ scout_datasource_connection_api_BigQueryConnectionDetails as BigQueryConnectionDetails,
+ scout_datasource_connection_api_BigQueryScrapingConfig as BigQueryScrapingConfig,
  scout_datasource_connection_api_BucketName as BucketName,
  scout_datasource_connection_api_ColumnName as ColumnName,
  scout_datasource_connection_api_Connection as Connection,
@@ -9,6 +11,7 @@ from .._impl import (
  scout_datasource_connection_api_ConnectionRid as ConnectionRid,
  scout_datasource_connection_api_ConnectionStatus as ConnectionStatus,
  scout_datasource_connection_api_CreateConnection as CreateConnection,
+ scout_datasource_connection_api_DatasetName as DatasetName,
  scout_datasource_connection_api_Duration as Duration,
  scout_datasource_connection_api_Empty as Empty,
  scout_datasource_connection_api_HeaderValue as HeaderValue,
@@ -34,6 +37,7 @@ from .._impl import (
  scout_datasource_connection_api_PivotedTimescaleChannelNameComponentVisitor as PivotedTimescaleChannelNameComponentVisitor,
  scout_datasource_connection_api_PivotedTimescaleScrapingConfig as PivotedTimescaleScrapingConfig,
  scout_datasource_connection_api_PopulateSeriesRequest as PopulateSeriesRequest,
+ scout_datasource_connection_api_ProjectName as ProjectName,
  scout_datasource_connection_api_SchemaName as SchemaName,
  scout_datasource_connection_api_ScrapingConfig as ScrapingConfig,
  scout_datasource_connection_api_ScrapingConfigVisitor as ScrapingConfigVisitor,
@@ -3,6 +3,7 @@ from .._impl import (
  timeseries_archetype_api_BatchCreateSeriesArchetypeRequest as BatchCreateSeriesArchetypeRequest,
  timeseries_archetype_api_BatchGetSeriesArchetypeRequest as BatchGetSeriesArchetypeRequest,
  timeseries_archetype_api_BatchGetSeriesArchetypeResponse as BatchGetSeriesArchetypeResponse,
+ timeseries_archetype_api_BigQueryLocatorTemplate as BigQueryLocatorTemplate,
  timeseries_archetype_api_CreateSeriesArchetypeRequest as CreateSeriesArchetypeRequest,
  timeseries_archetype_api_Influx1LocatorTemplate as Influx1LocatorTemplate,
  timeseries_archetype_api_Influx2LocatorTemplate as Influx2LocatorTemplate,
@@ -7,6 +7,7 @@ from .._impl import (
  timeseries_logicalseries_api_BatchResolveSeriesResponse as BatchResolveSeriesResponse,
  timeseries_logicalseries_api_BatchUpdateLogicalSeriesRequest as BatchUpdateLogicalSeriesRequest,
  timeseries_logicalseries_api_BatchUpdateLogicalSeriesResponse as BatchUpdateLogicalSeriesResponse,
+ timeseries_logicalseries_api_BigQueryLocator as BigQueryLocator,
  timeseries_logicalseries_api_BucketName as BucketName,
  timeseries_logicalseries_api_ColumnName as ColumnName,
  timeseries_logicalseries_api_Confidence as Confidence,
@@ -17,6 +18,7 @@ from .._impl import (
  timeseries_logicalseries_api_CsvLocator as CsvLocator,
  timeseries_logicalseries_api_CsvLocatorV2 as CsvLocatorV2,
  timeseries_logicalseries_api_DatabaseName as DatabaseName,
+ timeseries_logicalseries_api_DatasetName as DatasetName,
  timeseries_logicalseries_api_Empty as Empty,
  timeseries_logicalseries_api_FieldName as FieldName,
  timeseries_logicalseries_api_GetSuggestedTagsRequest as GetSuggestedTagsRequest,
@@ -32,6 +34,7 @@ from .._impl import (
  timeseries_logicalseries_api_MeasurementName as MeasurementName,
  timeseries_logicalseries_api_NominalLocator as NominalLocator,
  timeseries_logicalseries_api_NominalType as NominalType,
+ timeseries_logicalseries_api_ProjectName as ProjectName,
  timeseries_logicalseries_api_ResolveSeriesError as ResolveSeriesError,
  timeseries_logicalseries_api_ResolveSeriesRequest as ResolveSeriesRequest,
  timeseries_logicalseries_api_ResolveSeriesResponse as ResolveSeriesResponse,
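
The __init__ changes above re-export the new symbols, so they are importable from the public subpackages rather than from _impl:

    from nominal_api.scout_datasource_connection_api import (
        BigQueryConnectionDetails,
        BigQueryScrapingConfig,
        DatasetName,
        ProjectName,
    )
    from nominal_api.timeseries_archetype_api import BigQueryLocatorTemplate
    from nominal_api.timeseries_logicalseries_api import BigQueryLocator
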
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: nominal-api
- Version: 0.502.0
+ Version: 0.504.0
  Requires-Python: >=3.8
  Requires-Dist: requests
  Requires-Dist: conjure-python-client<3,>=2.8.0
@@ -6,7 +6,7 @@ from setuptools import (

  setup(
  name='nominal-api',
- version='0.502.0',
+ version='0.504.0',
  python_requires='>=3.8',
  package_data={"": ["py.typed"]},
  packages=find_packages(),