databricks-sdk 0.48.0__py3-none-any.whl → 0.49.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk has been flagged as potentially problematic; consult the package registry's advisory page for more details.

@@ -529,6 +529,49 @@ class GenieCreateConversationMessageRequest:
529
529
  )
530
530
 
531
531
 
532
+ @dataclass
533
+ class GenieGenerateDownloadFullQueryResultResponse:
534
+ error: Optional[str] = None
535
+ """Error message if Genie failed to download the result"""
536
+
537
+ status: Optional[MessageStatus] = None
538
+ """Download result status"""
539
+
540
+ transient_statement_id: Optional[str] = None
541
+ """Transient Statement ID. Use this ID to track the download request in subsequent polling calls"""
542
+
543
+ def as_dict(self) -> dict:
544
+ """Serializes the GenieGenerateDownloadFullQueryResultResponse into a dictionary suitable for use as a JSON request body."""
545
+ body = {}
546
+ if self.error is not None:
547
+ body["error"] = self.error
548
+ if self.status is not None:
549
+ body["status"] = self.status.value
550
+ if self.transient_statement_id is not None:
551
+ body["transient_statement_id"] = self.transient_statement_id
552
+ return body
553
+
554
+ def as_shallow_dict(self) -> dict:
555
+ """Serializes the GenieGenerateDownloadFullQueryResultResponse into a shallow dictionary of its immediate attributes."""
556
+ body = {}
557
+ if self.error is not None:
558
+ body["error"] = self.error
559
+ if self.status is not None:
560
+ body["status"] = self.status
561
+ if self.transient_statement_id is not None:
562
+ body["transient_statement_id"] = self.transient_statement_id
563
+ return body
564
+
565
+ @classmethod
566
+ def from_dict(cls, d: Dict[str, Any]) -> GenieGenerateDownloadFullQueryResultResponse:
567
+ """Deserializes the GenieGenerateDownloadFullQueryResultResponse from a dictionary."""
568
+ return cls(
569
+ error=d.get("error", None),
570
+ status=_enum(d, "status", MessageStatus),
571
+ transient_statement_id=d.get("transient_statement_id", None),
572
+ )
573
+
574
+
532
575
  @dataclass
533
576
  class GenieGetMessageQueryResultResponse:
534
577
  statement_response: Optional[sql.StatementResponse] = None
@@ -1082,6 +1125,7 @@ class MessageErrorType(Enum):
1082
1125
  FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_JSON_EXCEPTION"
1083
1126
  FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION = "FUNCTION_ARGUMENTS_INVALID_TYPE_EXCEPTION"
1084
1127
  FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION = "FUNCTION_CALL_MISSING_PARAMETER_EXCEPTION"
1128
+ GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION = "GENERATED_SQL_QUERY_TOO_LONG_EXCEPTION"
1085
1129
  GENERIC_CHAT_COMPLETION_EXCEPTION = "GENERIC_CHAT_COMPLETION_EXCEPTION"
1086
1130
  GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION = "GENERIC_CHAT_COMPLETION_SERVICE_EXCEPTION"
1087
1131
  GENERIC_SQL_EXEC_API_CALL_EXCEPTION = "GENERIC_SQL_EXEC_API_CALL_EXCEPTION"
@@ -1096,6 +1140,7 @@ class MessageErrorType(Enum):
1096
1140
  MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_CANCELLED_WHILE_EXECUTING_EXCEPTION"
1097
1141
  MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION"
1098
1142
  MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = "MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION"
1143
+ MISSING_SQL_QUERY_EXCEPTION = "MISSING_SQL_QUERY_EXCEPTION"
1099
1144
  NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE = "NO_DEPLOYMENTS_AVAILABLE_TO_WORKSPACE"
1100
1145
  NO_QUERY_TO_VISUALIZE_EXCEPTION = "NO_QUERY_TO_VISUALIZE_EXCEPTION"
1101
1146
  NO_TABLES_TO_QUERY_EXCEPTION = "NO_TABLES_TO_QUERY_EXCEPTION"
@@ -1987,6 +2032,37 @@ class GenieAPI:
1987
2032
  )
1988
2033
  return GenieGetMessageQueryResultResponse.from_dict(res)
1989
2034
 
2035
+ def generate_download_full_query_result(
2036
+ self, space_id: str, conversation_id: str, message_id: str, attachment_id: str
2037
+ ) -> GenieGenerateDownloadFullQueryResultResponse:
2038
+ """Generate full query result download.
2039
+
2040
+ Initiate full SQL query result download and obtain a transient ID for tracking the download progress.
2041
+ This call initiates a new SQL execution to generate the query result.
2042
+
2043
+ :param space_id: str
2044
+ Space ID
2045
+ :param conversation_id: str
2046
+ Conversation ID
2047
+ :param message_id: str
2048
+ Message ID
2049
+ :param attachment_id: str
2050
+ Attachment ID
2051
+
2052
+ :returns: :class:`GenieGenerateDownloadFullQueryResultResponse`
2053
+ """
2054
+
2055
+ headers = {
2056
+ "Accept": "application/json",
2057
+ }
2058
+
2059
+ res = self._api.do(
2060
+ "POST",
2061
+ f"/api/2.0/genie/spaces/{space_id}/conversations/{conversation_id}/messages/{message_id}/attachments/{attachment_id}/generate-download",
2062
+ headers=headers,
2063
+ )
2064
+ return GenieGenerateDownloadFullQueryResultResponse.from_dict(res)
2065
+
1990
2066
  def get_message(self, space_id: str, conversation_id: str, message_id: str) -> GenieMessage:
1991
2067
  """Get conversation message.
1992
2068
 
@@ -846,7 +846,7 @@ class ObjectPermissions:
846
846
  @dataclass
847
847
  class PartialUpdate:
848
848
  id: Optional[str] = None
849
- """Unique ID for a user in the Databricks workspace."""
849
+ """Unique ID in the Databricks workspace."""
850
850
 
851
851
  operations: Optional[List[Patch]] = None
852
852
 
@@ -1918,8 +1918,7 @@ class User:
1918
1918
  groups: Optional[List[ComplexValue]] = None
1919
1919
 
1920
1920
  id: Optional[str] = None
1921
- """Databricks user ID. This is automatically set by Databricks. Any value provided by the client
1922
- will be ignored."""
1921
+ """Databricks user ID."""
1923
1922
 
1924
1923
  name: Optional[Name] = None
1925
1924
 
@@ -2480,7 +2479,7 @@ class AccountGroupsAPI:
2480
2479
  Partially updates the details of a group.
2481
2480
 
2482
2481
  :param id: str
2483
- Unique ID for a group in the Databricks account.
2482
+ Unique ID in the Databricks workspace.
2484
2483
  :param operations: List[:class:`Patch`] (optional)
2485
2484
  :param schemas: List[:class:`PatchSchema`] (optional)
2486
2485
  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
@@ -2493,7 +2492,6 @@ class AccountGroupsAPI:
2493
2492
  if schemas is not None:
2494
2493
  body["schemas"] = [v.value for v in schemas]
2495
2494
  headers = {
2496
- "Accept": "application/json",
2497
2495
  "Content-Type": "application/json",
2498
2496
  }
2499
2497
 
@@ -2557,7 +2555,6 @@ class AccountGroupsAPI:
2557
2555
  if schemas is not None:
2558
2556
  body["schemas"] = [v.value for v in schemas]
2559
2557
  headers = {
2560
- "Accept": "application/json",
2561
2558
  "Content-Type": "application/json",
2562
2559
  }
2563
2560
 
@@ -2765,7 +2762,7 @@ class AccountServicePrincipalsAPI:
2765
2762
  Partially updates the details of a single service principal in the Databricks account.
2766
2763
 
2767
2764
  :param id: str
2768
- Unique ID for a service principal in the Databricks account.
2765
+ Unique ID in the Databricks workspace.
2769
2766
  :param operations: List[:class:`Patch`] (optional)
2770
2767
  :param schemas: List[:class:`PatchSchema`] (optional)
2771
2768
  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
@@ -2778,7 +2775,6 @@ class AccountServicePrincipalsAPI:
2778
2775
  if schemas is not None:
2779
2776
  body["schemas"] = [v.value for v in schemas]
2780
2777
  headers = {
2781
- "Accept": "application/json",
2782
2778
  "Content-Type": "application/json",
2783
2779
  }
2784
2780
 
@@ -2848,7 +2844,6 @@ class AccountServicePrincipalsAPI:
2848
2844
  if schemas is not None:
2849
2845
  body["schemas"] = [v.value for v in schemas]
2850
2846
  headers = {
2851
- "Accept": "application/json",
2852
2847
  "Content-Type": "application/json",
2853
2848
  }
2854
2849
 
@@ -2912,8 +2907,7 @@ class AccountUsersAPI:
2912
2907
  External ID is not currently supported. It is reserved for future use.
2913
2908
  :param groups: List[:class:`ComplexValue`] (optional)
2914
2909
  :param id: str (optional)
2915
- Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
2916
- be ignored.
2910
+ Databricks user ID.
2917
2911
  :param name: :class:`Name` (optional)
2918
2912
  :param roles: List[:class:`ComplexValue`] (optional)
2919
2913
  Corresponds to AWS instance profile/arn role.
@@ -3123,7 +3117,7 @@ class AccountUsersAPI:
3123
3117
  Partially updates a user resource by applying the supplied operations on specific user attributes.
3124
3118
 
3125
3119
  :param id: str
3126
- Unique ID for a user in the Databricks account.
3120
+ Unique ID in the Databricks workspace.
3127
3121
  :param operations: List[:class:`Patch`] (optional)
3128
3122
  :param schemas: List[:class:`PatchSchema`] (optional)
3129
3123
  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
@@ -3136,7 +3130,6 @@ class AccountUsersAPI:
3136
3130
  if schemas is not None:
3137
3131
  body["schemas"] = [v.value for v in schemas]
3138
3132
  headers = {
3139
- "Accept": "application/json",
3140
3133
  "Content-Type": "application/json",
3141
3134
  }
3142
3135
 
@@ -3164,8 +3157,7 @@ class AccountUsersAPI:
3164
3157
  Replaces a user's information with the data supplied in request.
3165
3158
 
3166
3159
  :param id: str
3167
- Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
3168
- be ignored.
3160
+ Databricks user ID.
3169
3161
  :param active: bool (optional)
3170
3162
  If this user is active
3171
3163
  :param display_name: str (optional)
@@ -3215,7 +3207,6 @@ class AccountUsersAPI:
3215
3207
  if user_name is not None:
3216
3208
  body["userName"] = user_name
3217
3209
  headers = {
3218
- "Accept": "application/json",
3219
3210
  "Content-Type": "application/json",
3220
3211
  }
3221
3212
 
@@ -3434,7 +3425,7 @@ class GroupsAPI:
3434
3425
  Partially updates the details of a group.
3435
3426
 
3436
3427
  :param id: str
3437
- Unique ID for a group in the Databricks workspace.
3428
+ Unique ID in the Databricks workspace.
3438
3429
  :param operations: List[:class:`Patch`] (optional)
3439
3430
  :param schemas: List[:class:`PatchSchema`] (optional)
3440
3431
  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
@@ -3447,7 +3438,6 @@ class GroupsAPI:
3447
3438
  if schemas is not None:
3448
3439
  body["schemas"] = [v.value for v in schemas]
3449
3440
  headers = {
3450
- "Accept": "application/json",
3451
3441
  "Content-Type": "application/json",
3452
3442
  }
3453
3443
 
@@ -3509,7 +3499,6 @@ class GroupsAPI:
3509
3499
  if schemas is not None:
3510
3500
  body["schemas"] = [v.value for v in schemas]
3511
3501
  headers = {
3512
- "Accept": "application/json",
3513
3502
  "Content-Type": "application/json",
3514
3503
  }
3515
3504
 
@@ -3922,7 +3911,7 @@ class ServicePrincipalsAPI:
3922
3911
  Partially updates the details of a single service principal in the Databricks workspace.
3923
3912
 
3924
3913
  :param id: str
3925
- Unique ID for a service principal in the Databricks workspace.
3914
+ Unique ID in the Databricks workspace.
3926
3915
  :param operations: List[:class:`Patch`] (optional)
3927
3916
  :param schemas: List[:class:`PatchSchema`] (optional)
3928
3917
  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
@@ -3935,7 +3924,6 @@ class ServicePrincipalsAPI:
3935
3924
  if schemas is not None:
3936
3925
  body["schemas"] = [v.value for v in schemas]
3937
3926
  headers = {
3938
- "Accept": "application/json",
3939
3927
  "Content-Type": "application/json",
3940
3928
  }
3941
3929
 
@@ -4000,7 +3988,6 @@ class ServicePrincipalsAPI:
4000
3988
  if schemas is not None:
4001
3989
  body["schemas"] = [v.value for v in schemas]
4002
3990
  headers = {
4003
- "Accept": "application/json",
4004
3991
  "Content-Type": "application/json",
4005
3992
  }
4006
3993
 
@@ -4059,8 +4046,7 @@ class UsersAPI:
4059
4046
  External ID is not currently supported. It is reserved for future use.
4060
4047
  :param groups: List[:class:`ComplexValue`] (optional)
4061
4048
  :param id: str (optional)
4062
- Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
4063
- be ignored.
4049
+ Databricks user ID.
4064
4050
  :param name: :class:`Name` (optional)
4065
4051
  :param roles: List[:class:`ComplexValue`] (optional)
4066
4052
  Corresponds to AWS instance profile/arn role.
@@ -4294,7 +4280,7 @@ class UsersAPI:
4294
4280
  Partially updates a user resource by applying the supplied operations on specific user attributes.
4295
4281
 
4296
4282
  :param id: str
4297
- Unique ID for a user in the Databricks workspace.
4283
+ Unique ID in the Databricks workspace.
4298
4284
  :param operations: List[:class:`Patch`] (optional)
4299
4285
  :param schemas: List[:class:`PatchSchema`] (optional)
4300
4286
  The schema of the patch request. Must be ["urn:ietf:params:scim:api:messages:2.0:PatchOp"].
@@ -4307,7 +4293,6 @@ class UsersAPI:
4307
4293
  if schemas is not None:
4308
4294
  body["schemas"] = [v.value for v in schemas]
4309
4295
  headers = {
4310
- "Accept": "application/json",
4311
4296
  "Content-Type": "application/json",
4312
4297
  }
4313
4298
 
@@ -4356,8 +4341,7 @@ class UsersAPI:
4356
4341
  Replaces a user's information with the data supplied in request.
4357
4342
 
4358
4343
  :param id: str
4359
- Databricks user ID. This is automatically set by Databricks. Any value provided by the client will
4360
- be ignored.
4344
+ Databricks user ID.
4361
4345
  :param active: bool (optional)
4362
4346
  If this user is active
4363
4347
  :param display_name: str (optional)
@@ -4407,7 +4391,6 @@ class UsersAPI:
4407
4391
  if user_name is not None:
4408
4392
  body["userName"] = user_name
4409
4393
  headers = {
4410
- "Accept": "application/json",
4411
4394
  "Content-Type": "application/json",
4412
4395
  }
4413
4396
 
@@ -3659,6 +3659,7 @@ class PerformanceTarget(Enum):
3659
3659
  on serverless compute should be. The performance mode on the job or pipeline should map to a
3660
3660
  performance setting that is passed to Cluster Manager (see cluster-common PerformanceTarget)."""
3661
3661
 
3662
+ BALANCED = "BALANCED"
3662
3663
  COST_OPTIMIZED = "COST_OPTIMIZED"
3663
3664
  PERFORMANCE_OPTIMIZED = "PERFORMANCE_OPTIMIZED"
3664
3665
 
@@ -1192,6 +1192,7 @@ class FileParent:
1192
1192
  class FileParentType(Enum):
1193
1193
 
1194
1194
  LISTING = "LISTING"
1195
+ LISTING_RESOURCE = "LISTING_RESOURCE"
1195
1196
  PROVIDER = "PROVIDER"
1196
1197
 
1197
1198
 
@@ -1947,7 +1948,6 @@ class ListProvidersResponse:
1947
1948
  @dataclass
1948
1949
  class Listing:
1949
1950
  summary: ListingSummary
1950
- """Next Number: 26"""
1951
1951
 
1952
1952
  detail: Optional[ListingDetail] = None
1953
1953
 
@@ -2251,8 +2251,6 @@ class ListingStatus(Enum):
2251
2251
 
2252
2252
  @dataclass
2253
2253
  class ListingSummary:
2254
- """Next Number: 26"""
2255
-
2256
2254
  name: str
2257
2255
 
2258
2256
  listing_type: ListingType
@@ -2452,6 +2450,7 @@ class ListingType(Enum):
2452
2450
 
2453
2451
  class MarketplaceFileType(Enum):
2454
2452
 
2453
+ APP = "APP"
2455
2454
  EMBEDDED_NOTEBOOK = "EMBEDDED_NOTEBOOK"
2456
2455
  PROVIDER_ICON = "PROVIDER_ICON"
2457
2456
 
@@ -499,27 +499,19 @@ class CreateForecastingExperimentRequest:
499
499
  time_column: str
500
500
  """Name of the column in the input training table that represents the timestamp of each row."""
501
501
 
502
- data_granularity_unit: str
503
- """The time unit of the input data granularity. Together with data_granularity_quantity field, this
504
- defines the time interval between consecutive rows in the time series data. Possible values: *
505
- 'W' (weeks) * 'D' / 'days' / 'day' * 'hours' / 'hour' / 'hr' / 'h' * 'm' / 'minute' / 'min' /
506
- 'minutes' / 'T' * 'S' / 'seconds' / 'sec' / 'second' * 'M' / 'month' / 'months' * 'Q' /
507
- 'quarter' / 'quarters' * 'Y' / 'year' / 'years'"""
502
+ forecast_granularity: str
503
+ """The granularity of the forecast. This defines the time interval between consecutive rows in the
504
+ time series data. Possible values: '1 second', '1 minute', '5 minutes', '10 minutes', '15
505
+ minutes', '30 minutes', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Quarterly', 'Yearly'."""
508
506
 
509
507
  forecast_horizon: int
510
508
  """The number of time steps into the future for which predictions should be made. This value
511
- represents a multiple of data_granularity_unit and data_granularity_quantity determining how far
512
- ahead the model will forecast."""
509
+ represents a multiple of forecast_granularity determining how far ahead the model will forecast."""
513
510
 
514
511
  custom_weights_column: Optional[str] = None
515
512
  """Name of the column in the input training table used to customize the weight for each time series
516
513
  to calculate weighted metrics."""
517
514
 
518
- data_granularity_quantity: Optional[int] = None
519
- """The quantity of the input data granularity. Together with data_granularity_unit field, this
520
- defines the time interval between consecutive rows in the time series data. For now, only 1
521
- second, 1/5/10/15/30 minutes, 1 hour, 1 day, 1 week, 1 month, 1 quarter, 1 year are supported."""
522
-
523
515
  experiment_path: Optional[str] = None
524
516
  """The path to the created experiment. This is the path where the experiment will be stored in the
525
517
  workspace."""
@@ -560,12 +552,10 @@ class CreateForecastingExperimentRequest:
560
552
  body = {}
561
553
  if self.custom_weights_column is not None:
562
554
  body["custom_weights_column"] = self.custom_weights_column
563
- if self.data_granularity_quantity is not None:
564
- body["data_granularity_quantity"] = self.data_granularity_quantity
565
- if self.data_granularity_unit is not None:
566
- body["data_granularity_unit"] = self.data_granularity_unit
567
555
  if self.experiment_path is not None:
568
556
  body["experiment_path"] = self.experiment_path
557
+ if self.forecast_granularity is not None:
558
+ body["forecast_granularity"] = self.forecast_granularity
569
559
  if self.forecast_horizon is not None:
570
560
  body["forecast_horizon"] = self.forecast_horizon
571
561
  if self.holiday_regions:
@@ -597,12 +587,10 @@ class CreateForecastingExperimentRequest:
597
587
  body = {}
598
588
  if self.custom_weights_column is not None:
599
589
  body["custom_weights_column"] = self.custom_weights_column
600
- if self.data_granularity_quantity is not None:
601
- body["data_granularity_quantity"] = self.data_granularity_quantity
602
- if self.data_granularity_unit is not None:
603
- body["data_granularity_unit"] = self.data_granularity_unit
604
590
  if self.experiment_path is not None:
605
591
  body["experiment_path"] = self.experiment_path
592
+ if self.forecast_granularity is not None:
593
+ body["forecast_granularity"] = self.forecast_granularity
606
594
  if self.forecast_horizon is not None:
607
595
  body["forecast_horizon"] = self.forecast_horizon
608
596
  if self.holiday_regions:
@@ -634,9 +622,8 @@ class CreateForecastingExperimentRequest:
634
622
  """Deserializes the CreateForecastingExperimentRequest from a dictionary."""
635
623
  return cls(
636
624
  custom_weights_column=d.get("custom_weights_column", None),
637
- data_granularity_quantity=d.get("data_granularity_quantity", None),
638
- data_granularity_unit=d.get("data_granularity_unit", None),
639
625
  experiment_path=d.get("experiment_path", None),
626
+ forecast_granularity=d.get("forecast_granularity", None),
640
627
  forecast_horizon=d.get("forecast_horizon", None),
641
628
  holiday_regions=d.get("holiday_regions", None),
642
629
  max_runtime=d.get("max_runtime", None),
@@ -7000,11 +6987,10 @@ class ForecastingAPI:
7000
6987
  train_data_path: str,
7001
6988
  target_column: str,
7002
6989
  time_column: str,
7003
- data_granularity_unit: str,
6990
+ forecast_granularity: str,
7004
6991
  forecast_horizon: int,
7005
6992
  *,
7006
6993
  custom_weights_column: Optional[str] = None,
7007
- data_granularity_quantity: Optional[int] = None,
7008
6994
  experiment_path: Optional[str] = None,
7009
6995
  holiday_regions: Optional[List[str]] = None,
7010
6996
  max_runtime: Optional[int] = None,
@@ -7027,23 +7013,16 @@ class ForecastingAPI:
7027
7013
  this column will be used as the ground truth for model training.
7028
7014
  :param time_column: str
7029
7015
  Name of the column in the input training table that represents the timestamp of each row.
7030
- :param data_granularity_unit: str
7031
- The time unit of the input data granularity. Together with data_granularity_quantity field, this
7032
- defines the time interval between consecutive rows in the time series data. Possible values: * 'W'
7033
- (weeks) * 'D' / 'days' / 'day' * 'hours' / 'hour' / 'hr' / 'h' * 'm' / 'minute' / 'min' / 'minutes'
7034
- / 'T' * 'S' / 'seconds' / 'sec' / 'second' * 'M' / 'month' / 'months' * 'Q' / 'quarter' / 'quarters'
7035
- * 'Y' / 'year' / 'years'
7016
+ :param forecast_granularity: str
7017
+ The granularity of the forecast. This defines the time interval between consecutive rows in the time
7018
+ series data. Possible values: '1 second', '1 minute', '5 minutes', '10 minutes', '15 minutes', '30
7019
+ minutes', 'Hourly', 'Daily', 'Weekly', 'Monthly', 'Quarterly', 'Yearly'.
7036
7020
  :param forecast_horizon: int
7037
7021
  The number of time steps into the future for which predictions should be made. This value represents
7038
- a multiple of data_granularity_unit and data_granularity_quantity determining how far ahead the
7039
- model will forecast.
7022
+ a multiple of forecast_granularity determining how far ahead the model will forecast.
7040
7023
  :param custom_weights_column: str (optional)
7041
7024
  Name of the column in the input training table used to customize the weight for each time series to
7042
7025
  calculate weighted metrics.
7043
- :param data_granularity_quantity: int (optional)
7044
- The quantity of the input data granularity. Together with data_granularity_unit field, this defines
7045
- the time interval between consecutive rows in the time series data. For now, only 1 second,
7046
- 1/5/10/15/30 minutes, 1 hour, 1 day, 1 week, 1 month, 1 quarter, 1 year are supported.
7047
7026
  :param experiment_path: str (optional)
7048
7027
  The path to the created experiment. This is the path where the experiment will be stored in the
7049
7028
  workspace.
@@ -7078,12 +7057,10 @@ class ForecastingAPI:
7078
7057
  body = {}
7079
7058
  if custom_weights_column is not None:
7080
7059
  body["custom_weights_column"] = custom_weights_column
7081
- if data_granularity_quantity is not None:
7082
- body["data_granularity_quantity"] = data_granularity_quantity
7083
- if data_granularity_unit is not None:
7084
- body["data_granularity_unit"] = data_granularity_unit
7085
7060
  if experiment_path is not None:
7086
7061
  body["experiment_path"] = experiment_path
7062
+ if forecast_granularity is not None:
7063
+ body["forecast_granularity"] = forecast_granularity
7087
7064
  if forecast_horizon is not None:
7088
7065
  body["forecast_horizon"] = forecast_horizon
7089
7066
  if holiday_regions is not None:
@@ -7125,11 +7102,10 @@ class ForecastingAPI:
7125
7102
  train_data_path: str,
7126
7103
  target_column: str,
7127
7104
  time_column: str,
7128
- data_granularity_unit: str,
7105
+ forecast_granularity: str,
7129
7106
  forecast_horizon: int,
7130
7107
  *,
7131
7108
  custom_weights_column: Optional[str] = None,
7132
- data_granularity_quantity: Optional[int] = None,
7133
7109
  experiment_path: Optional[str] = None,
7134
7110
  holiday_regions: Optional[List[str]] = None,
7135
7111
  max_runtime: Optional[int] = None,
@@ -7143,9 +7119,8 @@ class ForecastingAPI:
7143
7119
  ) -> ForecastingExperiment:
7144
7120
  return self.create_experiment(
7145
7121
  custom_weights_column=custom_weights_column,
7146
- data_granularity_quantity=data_granularity_quantity,
7147
- data_granularity_unit=data_granularity_unit,
7148
7122
  experiment_path=experiment_path,
7123
+ forecast_granularity=forecast_granularity,
7149
7124
  forecast_horizon=forecast_horizon,
7150
7125
  holiday_regions=holiday_regions,
7151
7126
  max_runtime=max_runtime,
@@ -776,6 +776,13 @@ class OidcFederationPolicy:
776
776
  endpoint. Databricks strongly recommends relying on your issuer's well known endpoint for
777
777
  discovering public keys."""
778
778
 
779
+ jwks_uri: Optional[str] = None
780
+ """URL of the public keys used to validate the signature of federated tokens, in JWKS format. Most
781
+ use cases should not need to specify this field. If jwks_uri and jwks_json are both unspecified
782
+ (recommended), Databricks automatically fetches the public keys from your issuer’s well known
783
+ endpoint. Databricks strongly recommends relying on your issuer’s well known endpoint for
784
+ discovering public keys."""
785
+
779
786
  subject: Optional[str] = None
780
787
  """The required token subject, as specified in the subject claim of federated tokens. Must be
781
788
  specified for service principal federation policies. Must not be specified for account
@@ -793,6 +800,8 @@ class OidcFederationPolicy:
793
800
  body["issuer"] = self.issuer
794
801
  if self.jwks_json is not None:
795
802
  body["jwks_json"] = self.jwks_json
803
+ if self.jwks_uri is not None:
804
+ body["jwks_uri"] = self.jwks_uri
796
805
  if self.subject is not None:
797
806
  body["subject"] = self.subject
798
807
  if self.subject_claim is not None:
@@ -808,6 +817,8 @@ class OidcFederationPolicy:
808
817
  body["issuer"] = self.issuer
809
818
  if self.jwks_json is not None:
810
819
  body["jwks_json"] = self.jwks_json
820
+ if self.jwks_uri is not None:
821
+ body["jwks_uri"] = self.jwks_uri
811
822
  if self.subject is not None:
812
823
  body["subject"] = self.subject
813
824
  if self.subject_claim is not None:
@@ -821,6 +832,7 @@ class OidcFederationPolicy:
821
832
  audiences=d.get("audiences", None),
822
833
  issuer=d.get("issuer", None),
823
834
  jwks_json=d.get("jwks_json", None),
835
+ jwks_uri=d.get("jwks_uri", None),
824
836
  subject=d.get("subject", None),
825
837
  subject_claim=d.get("subject_claim", None),
826
838
  )