@pulumi/databricks 1.79.0-alpha.1766174506 → 1.79.0

Files changed (98)
  1. package/accessControlRuleSet.d.ts +46 -0
  2. package/accessControlRuleSet.js +43 -0
  3. package/accessControlRuleSet.js.map +1 -1
  4. package/accountSettingV2.d.ts +123 -6
  5. package/accountSettingV2.js.map +1 -1
  6. package/alertV2.d.ts +1 -1
  7. package/alertV2.js +1 -1
  8. package/app.d.ts +3 -3
  9. package/catalog.d.ts +1 -1
  10. package/credential.d.ts +3 -3
  11. package/dashboard.d.ts +24 -0
  12. package/dashboard.js +4 -0
  13. package/dashboard.js.map +1 -1
  14. package/dataQualityMonitor.d.ts +1 -1
  15. package/dataQualityMonitor.js +1 -1
  16. package/dataQualityRefresh.d.ts +1 -1
  17. package/dataQualityRefresh.js +1 -1
  18. package/featureEngineeringFeature.d.ts +5 -2
  19. package/featureEngineeringFeature.js +2 -3
  20. package/featureEngineeringFeature.js.map +1 -1
  21. package/featureEngineeringKafkaConfig.d.ts +149 -0
  22. package/featureEngineeringKafkaConfig.js +93 -0
  23. package/featureEngineeringKafkaConfig.js.map +1 -0
  24. package/featureEngineeringMaterializedFeature.d.ts +12 -0
  25. package/featureEngineeringMaterializedFeature.js +2 -0
  26. package/featureEngineeringMaterializedFeature.js.map +1 -1
  27. package/getAccountSettingV2.d.ts +16 -16
  28. package/getAlertV2.d.ts +2 -2
  29. package/getAlertV2.js +2 -2
  30. package/getAlertsV2.d.ts +2 -2
  31. package/getAlertsV2.js +2 -2
  32. package/getDataQualityMonitor.d.ts +2 -2
  33. package/getDataQualityMonitor.js +2 -2
  34. package/getDataQualityMonitors.d.ts +2 -2
  35. package/getDataQualityMonitors.js +2 -2
  36. package/getDataQualityRefresh.d.ts +2 -2
  37. package/getDataQualityRefresh.js +2 -2
  38. package/getDataQualityRefreshes.d.ts +2 -2
  39. package/getDataQualityRefreshes.js +2 -2
  40. package/getFeatureEngineeringFeature.d.ts +1 -0
  41. package/getFeatureEngineeringFeature.js.map +1 -1
  42. package/getFeatureEngineeringKafkaConfig.d.ts +70 -0
  43. package/getFeatureEngineeringKafkaConfig.js +28 -0
  44. package/getFeatureEngineeringKafkaConfig.js.map +1 -0
  45. package/getFeatureEngineeringKafkaConfigs.d.ts +39 -0
  46. package/getFeatureEngineeringKafkaConfigs.js +30 -0
  47. package/getFeatureEngineeringKafkaConfigs.js.map +1 -0
  48. package/getFeatureEngineeringMaterializedFeature.d.ts +5 -1
  49. package/getFeatureEngineeringMaterializedFeature.js.map +1 -1
  50. package/getPolicyInfo.d.ts +40 -2
  51. package/getPolicyInfo.js +40 -2
  52. package/getPolicyInfo.js.map +1 -1
  53. package/getPolicyInfos.d.ts +38 -2
  54. package/getPolicyInfos.js +38 -2
  55. package/getPolicyInfos.js.map +1 -1
  56. package/getServicePrincipals.d.ts +17 -3
  57. package/getServicePrincipals.js +2 -0
  58. package/getServicePrincipals.js.map +1 -1
  59. package/getSparkVersion.d.ts +2 -2
  60. package/getTagPolicies.d.ts +2 -2
  61. package/getTagPolicies.js +2 -2
  62. package/getTagPolicy.d.ts +2 -2
  63. package/getTagPolicy.js +2 -2
  64. package/getUsers.d.ts +136 -0
  65. package/getUsers.js +104 -0
  66. package/getUsers.js.map +1 -0
  67. package/getWorkspaceEntityTagAssignment.d.ts +108 -0
  68. package/getWorkspaceEntityTagAssignment.js +72 -0
  69. package/getWorkspaceEntityTagAssignment.js.map +1 -0
  70. package/getWorkspaceEntityTagAssignments.d.ts +99 -0
  71. package/getWorkspaceEntityTagAssignments.js +68 -0
  72. package/getWorkspaceEntityTagAssignments.js.map +1 -0
  73. package/getWorkspaceSettingV2.d.ts +16 -16
  74. package/index.d.ts +21 -0
  75. package/index.js +33 -7
  76. package/index.js.map +1 -1
  77. package/lakehouseMonitor.d.ts +1 -1
  78. package/lakehouseMonitor.js +1 -1
  79. package/mwsWorkspaces.d.ts +19 -5
  80. package/mwsWorkspaces.js +16 -2
  81. package/mwsWorkspaces.js.map +1 -1
  82. package/package.json +2 -2
  83. package/pipeline.d.ts +3 -3
  84. package/policyInfo.d.ts +63 -1
  85. package/policyInfo.js +63 -1
  86. package/policyInfo.js.map +1 -1
  87. package/rfaAccessRequestDestinations.d.ts +2 -2
  88. package/rfaAccessRequestDestinations.js +0 -3
  89. package/rfaAccessRequestDestinations.js.map +1 -1
  90. package/tagPolicy.d.ts +1 -1
  91. package/tagPolicy.js +1 -1
  92. package/types/input.d.ts +357 -35
  93. package/types/output.d.ts +512 -78
  94. package/workspaceEntityTagAssignment.d.ts +129 -0
  95. package/workspaceEntityTagAssignment.js +109 -0
  96. package/workspaceEntityTagAssignment.js.map +1 -0
  97. package/workspaceSettingV2.d.ts +123 -6
  98. package/workspaceSettingV2.js.map +1 -1
package/types/output.d.ts CHANGED
@@ -12,6 +12,9 @@ export interface AccessControlRuleSetGrantRule {
  * * `accounts/{account_id}/ruleSets/default`
  * * `roles/marketplace.admin` - Databricks Marketplace administrator.
  * * `roles/billing.admin` - Billing administrator.
+ * * `roles/tagPolicy.creator` - Creator of tag policies.
+ * * `roles/tagPolicy.manager` - Manager of tag policies.
+ * * `roles/tagPolicy.assigner` - Assigner of tag policies.
  * * `accounts/{account_id}/servicePrincipals/{service_principal_application_id}/ruleSets/default`
  * * `roles/servicePrincipal.manager` - Manager of a service principal.
  * * `roles/servicePrincipal.user` - User of a service principal.
@@ -20,6 +23,9 @@ export interface AccessControlRuleSetGrantRule {
  * * `accounts/{account_id}/budgetPolicies/{budget_policy_id}/ruleSets/default`
  * * `roles/budgetPolicy.manager` - Manager of a budget policy.
  * * `roles/budgetPolicy.user` - User of a budget policy.
+ * * `accounts/{account_id}/tagPolicies/{tag_policy_id}/ruleSets/default`
+ * * `roles/tagPolicy.manager` - Manager of a specific tag policy.
+ * * `roles/tagPolicy.assigner` - Assigner of a specific tag policy.
  */
  role: string;
  }
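The new tagPolicy roles are grantable through the existing AccessControlRuleSet resource. A minimal sketch in TypeScript; the account ID and group name are placeholders:

import * as databricks from "@pulumi/databricks";

const accountId = "00000000-0000-0000-0000-000000000000"; // placeholder account ID

// Grant the account-level tag policy manager role to a group.
const tagPolicyRules = new databricks.AccessControlRuleSet("tag-policy-rules", {
    name: `accounts/${accountId}/ruleSets/default`,
    grantRules: [{
        principals: ["groups/Data Governance"], // placeholder group principal
        role: "roles/tagPolicy.manager",
    }],
});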
@@ -370,7 +376,9 @@ export interface AlertV2EvaluationNotification {
  */
  notifyOnOk?: boolean;
  /**
- * Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again
+ * Number of seconds an alert waits after being triggered before it is allowed to send another notification.
+ * If set to 0 or omitted, the alert will not send any further notifications after the first trigger
+ * Setting this value to 1 allows the alert to send a notification on every evaluation where the condition is met, effectively making it always retrigger for notification purposes
  */
  retriggerSeconds?: number;
  subscriptions?: outputs.AlertV2EvaluationNotificationSubscription[];
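The clarified retriggerSeconds contract in practice. A sketch that assumes the AlertV2 resource's evaluation input mirrors the shapes above; the query and warehouse ID are placeholders:

import * as databricks from "@pulumi/databricks";

const alert = new databricks.AlertV2("error-rate-alert", {
    displayName: "Error rate too high",
    queryText: "SELECT error_rate FROM metrics", // placeholder query
    warehouseId: "1234567890abcdef",             // placeholder warehouse ID
    evaluation: {
        comparisonOperator: "GREATER_THAN",
        source: { name: "error_rate", aggregation: "MAX" },
        threshold: { value: { doubleValue: 0.05 } },
        notification: {
            // 0 or omitted: one notification after the first trigger, then silence.
            // 1: a notification on every evaluation where the condition holds.
            retriggerSeconds: 1,
        },
    },
});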
@@ -381,7 +389,7 @@ export interface AlertV2EvaluationNotificationSubscription {
  }
  export interface AlertV2EvaluationSource {
  /**
- * Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
+ * If not set, the behavior is equivalent to using `First row` in the UI. Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
  */
  aggregation?: string;
  display?: string;
@@ -393,7 +401,7 @@ export interface AlertV2EvaluationThreshold {
  }
  export interface AlertV2EvaluationThresholdColumn {
  /**
- * Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
+ * If not set, the behavior is equivalent to using `First row` in the UI. Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
  */
  aggregation?: string;
  display?: string;
@@ -862,11 +870,17 @@ export interface ClusterAwsAttributes {
  * The number of volumes launched for each instance. You can choose up to 10 volumes. This feature is only enabled for supported node types. Legacy node types cannot specify custom EBS volumes. For node types with no instance store, at least one EBS volume needs to be specified; otherwise, cluster creation will fail. These EBS volumes will be mounted at /ebs0, /ebs1, and etc. Instance store volumes will be mounted at /local_disk0, /local_disk1, and etc. If EBS volumes are attached, Databricks will configure Spark to use only the EBS volumes for scratch storage because heterogeneously sized scratch devices can lead to inefficient disk utilization. If no EBS volumes are attached, Databricks will configure Spark to use instance store volumes. If EBS volumes are specified, then the Spark configuration spark.local.dir will be overridden.
  */
  ebsVolumeCount?: number;
+ /**
+ * If using gp3 volumes, what IOPS to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.
+ */
  ebsVolumeIops?: number;
  /**
  * The size of each EBS volume (in GiB) launched for each instance. For general purpose SSD, this value must be within the range 100 - 4096. For throughput optimized HDD, this value must be within the range 500 - 4096. Custom EBS volumes cannot be specified for the legacy node types (memory-optimized and compute-optimized).
  */
  ebsVolumeSize?: number;
+ /**
+ * If using gp3 volumes, what throughput to use for the disk. If this is not set, the maximum performance of a gp2 volume with the same volume size will be used.
+ */
  ebsVolumeThroughput?: number;
  /**
  * The type of EBS volumes that will be launched with this cluster. Valid values are `GENERAL_PURPOSE_SSD` or `THROUGHPUT_OPTIMIZED_HDD`. Use this option only if you're not picking *Delta Optimized `i3.*`* node types.
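The new gp3 knobs slot into the existing awsAttributes block on a cluster. A sketch with placeholder node type and sizing:

import * as databricks from "@pulumi/databricks";

const cluster = new databricks.Cluster("gp3-cluster", {
    clusterName: "gp3-example",
    sparkVersion: "14.3.x-scala2.12", // placeholder runtime
    nodeTypeId: "m5.xlarge",          // placeholder node type
    numWorkers: 2,
    awsAttributes: {
        ebsVolumeType: "GENERAL_PURPOSE_SSD",
        ebsVolumeCount: 1,
        ebsVolumeSize: 100,
        // New in this release: explicit gp3 performance. When omitted, the
        // maximum performance of a gp2 volume of the same size is used.
        ebsVolumeIops: 3000,
        ebsVolumeThroughput: 125,
    },
});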
@@ -1782,6 +1796,10 @@ export interface DatabaseSyncedDatabaseTableSpec {
  timeseriesKey?: string;
  }
  export interface DatabaseSyncedDatabaseTableSpecNewPipelineSpec {
+ /**
+ * Budget policy to set on the newly created pipeline
+ */
+ budgetPolicyId?: string;
  /**
  * This field needs to be specified if the destination catalog is a managed postgres catalog.
  *
@@ -1947,8 +1965,29 @@ export interface FeatureEngineeringFeatureFunctionExtraParameter {
  */
  value: string;
  }
+ export interface FeatureEngineeringFeatureLineageContext {
+ /**
+ * Job context information including job ID and run ID
+ */
+ jobContext?: outputs.FeatureEngineeringFeatureLineageContextJobContext;
+ /**
+ * The notebook ID where this API was invoked
+ */
+ notebookId?: number;
+ }
+ export interface FeatureEngineeringFeatureLineageContextJobContext {
+ /**
+ * The job ID where this API was invoked
+ */
+ jobId?: number;
+ /**
+ * The job run ID where this API was invoked
+ */
+ jobRunId?: number;
+ }
  export interface FeatureEngineeringFeatureSource {
  deltaTableSource?: outputs.FeatureEngineeringFeatureSourceDeltaTableSource;
+ kafkaSource?: outputs.FeatureEngineeringFeatureSourceKafkaSource;
  }
  export interface FeatureEngineeringFeatureSourceDeltaTableSource {
  /**
@@ -1964,6 +2003,34 @@ export interface FeatureEngineeringFeatureSourceDeltaTableSource {
  */
  timeseriesColumn: string;
  }
+ export interface FeatureEngineeringFeatureSourceKafkaSource {
+ /**
+ * The entity column identifiers of the Kafka source
+ */
+ entityColumnIdentifiers: outputs.FeatureEngineeringFeatureSourceKafkaSourceEntityColumnIdentifier[];
+ /**
+ * Name of the Kafka source, used to identify it. This is used to look up the corresponding KafkaConfig object. Can be distinct from topic name
+ */
+ name: string;
+ /**
+ * The timeseries column identifier of the Kafka source
+ */
+ timeseriesColumnIdentifier: outputs.FeatureEngineeringFeatureSourceKafkaSourceTimeseriesColumnIdentifier;
+ }
+ export interface FeatureEngineeringFeatureSourceKafkaSourceEntityColumnIdentifier {
+ /**
+ * String representation of the column name or variant expression path. For nested fields, the leaf value is what will be present in materialized tables
+ * and expected to match at query time. For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip
+ */
+ variantExprPath: string;
+ }
+ export interface FeatureEngineeringFeatureSourceKafkaSourceTimeseriesColumnIdentifier {
+ /**
+ * String representation of the column name or variant expression path. For nested fields, the leaf value is what will be present in materialized tables
+ * and expected to match at query time. For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip
+ */
+ variantExprPath: string;
+ }
  export interface FeatureEngineeringFeatureTimeWindow {
  continuous?: outputs.FeatureEngineeringFeatureTimeWindowContinuous;
  sliding?: outputs.FeatureEngineeringFeatureTimeWindowSliding;
@@ -1986,46 +2053,52 @@ export interface FeatureEngineeringFeatureTimeWindowSliding {
  export interface FeatureEngineeringFeatureTimeWindowTumbling {
  windowDuration: string;
  }
- export interface FeatureEngineeringMaterializedFeatureOfflineStoreConfig {
- /**
- * The Unity Catalog catalog name
- */
- catalogName: string;
- /**
- * The Unity Catalog schema name
- */
- schemaName: string;
+ export interface FeatureEngineeringKafkaConfigAuthConfig {
  /**
- * Prefix for Unity Catalog table name.
- * The materialized feature will be stored in a table with this prefix and a generated postfix
+ * Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
  */
- tableNamePrefix: string;
+ ucServiceCredentialName?: string;
  }
- export interface FeatureEngineeringMaterializedFeatureOnlineStoreConfig {
+ export interface FeatureEngineeringKafkaConfigKeySchema {
  /**
- * The capacity of the online store. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"
+ * Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
  */
- capacity: string;
+ jsonSchema?: string;
+ }
+ export interface FeatureEngineeringKafkaConfigSubscriptionMode {
  /**
- * (string) - The timestamp when the online store was created
+ * A JSON string that contains the specific topic-partitions to consume from.
+ * For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
  */
- creationTime: string;
+ assign?: string;
  /**
- * (string) - The email of the creator of the online store
+ * A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
  */
- creator: string;
+ subscribe?: string;
  /**
- * The name of the online store. This is the unique identifier for the online store
+ * A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
  */
- name: string;
+ subscribePattern?: string;
+ }
+ export interface FeatureEngineeringKafkaConfigValueSchema {
  /**
- * The number of read replicas for the online store. Defaults to 0
+ * Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
  */
- readReplicaCount?: number;
+ jsonSchema?: string;
+ }
+ export interface FeatureEngineeringMaterializedFeatureOfflineStoreConfig {
+ catalogName: string;
+ schemaName: string;
+ tableNamePrefix: string;
+ }
+ export interface FeatureEngineeringMaterializedFeatureOnlineStoreConfig {
+ catalogName: string;
  /**
- * (string) - The current state of the online store. Possible values are: `AVAILABLE`, `DELETING`, `FAILING_OVER`, `STARTING`, `STOPPED`, `UPDATING`
+ * The name of the target online store
  */
- state: string;
+ onlineStoreName: string;
+ schemaName: string;
+ tableNamePrefix: string;
  }
  export interface GetAccountFederationPoliciesPolicy {
  /**
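A sketch of the new Kafka config resource; its input names are assumed to mirror the FeatureEngineeringKafkaConfig* interfaces above (the resource's input side is not shown in this diff), and the servers, credential, topic, and schema are placeholders:

import * as databricks from "@pulumi/databricks";

// Registered once per metastore; features reference it by name via kafkaSource.
const tripsKafka = new databricks.FeatureEngineeringKafkaConfig("trips", {
    name: "trips",
    bootstrapServers: "broker-1:9092,broker-2:9092",       // placeholder brokers
    authConfig: { ucServiceCredentialName: "kafka-cred" }, // placeholder credential
    subscriptionMode: { subscribe: "trips-topic" },        // placeholder topic
    valueSchema: {
        jsonSchema: JSON.stringify({
            type: "object",
            properties: {
                pickup_zip: { type: "string" },
                event_time: { type: "string" },
            },
        }),
    },
});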
@@ -2566,7 +2639,9 @@ export interface GetAlertV2EvaluationNotification {
  */
  notifyOnOk?: boolean;
  /**
- * (integer) - Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again
+ * (integer) - Number of seconds an alert waits after being triggered before it is allowed to send another notification.
+ * If set to 0 or omitted, the alert will not send any further notifications after the first trigger
+ * Setting this value to 1 allows the alert to send a notification on every evaluation where the condition is met, effectively making it always retrigger for notification purposes
  */
  retriggerSeconds?: number;
  /**
@@ -2586,7 +2661,7 @@ export interface GetAlertV2EvaluationNotificationSubscription {
  }
  export interface GetAlertV2EvaluationSource {
  /**
- * (string) - Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
+ * (string) - If not set, the behavior is equivalent to using `First row` in the UI. Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
  */
  aggregation?: string;
  /**
@@ -2610,7 +2685,7 @@ export interface GetAlertV2EvaluationThreshold {
  }
  export interface GetAlertV2EvaluationThresholdColumn {
  /**
- * (string) - Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
+ * (string) - If not set, the behavior is equivalent to using `First row` in the UI. Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
  */
  aggregation?: string;
  /**
@@ -2785,7 +2860,9 @@ export interface GetAlertsV2AlertEvaluationNotification {
  */
  notifyOnOk?: boolean;
  /**
- * (integer) - Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again
+ * (integer) - Number of seconds an alert waits after being triggered before it is allowed to send another notification.
+ * If set to 0 or omitted, the alert will not send any further notifications after the first trigger
+ * Setting this value to 1 allows the alert to send a notification on every evaluation where the condition is met, effectively making it always retrigger for notification purposes
  */
  retriggerSeconds?: number;
  /**
@@ -2805,7 +2882,7 @@ export interface GetAlertsV2AlertEvaluationNotificationSubscription {
  }
  export interface GetAlertsV2AlertEvaluationSource {
  /**
- * (string) - Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
+ * (string) - If not set, the behavior is equivalent to using `First row` in the UI. Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
  */
  aggregation?: string;
  /**
@@ -2829,7 +2906,7 @@ export interface GetAlertsV2AlertEvaluationThreshold {
  }
  export interface GetAlertsV2AlertEvaluationThresholdColumn {
  /**
- * (string) - Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
+ * (string) - If not set, the behavior is equivalent to using `First row` in the UI. Possible values are: `AVG`, `COUNT`, `COUNT_DISTINCT`, `MAX`, `MEDIAN`, `MIN`, `STDDEV`, `SUM`
  */
  aggregation?: string;
  /**
@@ -5467,6 +5544,10 @@ export interface GetDatabaseSyncedDatabaseTableSpec {
  timeseriesKey?: string;
  }
  export interface GetDatabaseSyncedDatabaseTableSpecNewPipelineSpec {
+ /**
+ * (string) - Budget policy to set on the newly created pipeline
+ */
+ budgetPolicyId?: string;
  /**
  * (string) - This field needs to be specified if the destination catalog is a managed postgres catalog.
  */
@@ -5742,6 +5823,10 @@ export interface GetDatabaseSyncedDatabaseTablesSyncedTableSpec {
  timeseriesKey?: string;
  }
  export interface GetDatabaseSyncedDatabaseTablesSyncedTableSpecNewPipelineSpec {
+ /**
+ * (string) - Budget policy to set on the newly created pipeline
+ */
+ budgetPolicyId?: string;
  /**
  * (string) - This field needs to be specified if the destination catalog is a managed postgres catalog.
  */
@@ -5985,11 +6070,35 @@ export interface GetFeatureEngineeringFeatureFunctionExtraParameter {
  */
  value: string;
  }
+ export interface GetFeatureEngineeringFeatureLineageContext {
+ /**
+ * (JobContext) - Job context information including job ID and run ID
+ */
+ jobContext?: outputs.GetFeatureEngineeringFeatureLineageContextJobContext;
+ /**
+ * (integer) - The notebook ID where this API was invoked
+ */
+ notebookId?: number;
+ }
+ export interface GetFeatureEngineeringFeatureLineageContextJobContext {
+ /**
+ * (integer) - The job ID where this API was invoked
+ */
+ jobId?: number;
+ /**
+ * (integer) - The job run ID where this API was invoked
+ */
+ jobRunId?: number;
+ }
  export interface GetFeatureEngineeringFeatureSource {
  /**
  * (DeltaTableSource)
  */
  deltaTableSource?: outputs.GetFeatureEngineeringFeatureSourceDeltaTableSource;
+ /**
+ * (KafkaSource)
+ */
+ kafkaSource?: outputs.GetFeatureEngineeringFeatureSourceKafkaSource;
  }
  export interface GetFeatureEngineeringFeatureSourceDeltaTableSource {
  /**
@@ -6005,6 +6114,34 @@ export interface GetFeatureEngineeringFeatureSourceDeltaTableSource {
  */
  timeseriesColumn: string;
  }
+ export interface GetFeatureEngineeringFeatureSourceKafkaSource {
+ /**
+ * (list of ColumnIdentifier) - The entity column identifiers of the Kafka source
+ */
+ entityColumnIdentifiers: outputs.GetFeatureEngineeringFeatureSourceKafkaSourceEntityColumnIdentifier[];
+ /**
+ * (string) - Name of the Kafka source, used to identify it. This is used to look up the corresponding KafkaConfig object. Can be distinct from topic name
+ */
+ name: string;
+ /**
+ * (ColumnIdentifier) - The timeseries column identifier of the Kafka source
+ */
+ timeseriesColumnIdentifier: outputs.GetFeatureEngineeringFeatureSourceKafkaSourceTimeseriesColumnIdentifier;
+ }
+ export interface GetFeatureEngineeringFeatureSourceKafkaSourceEntityColumnIdentifier {
+ /**
+ * (string) - String representation of the column name or variant expression path. For nested fields, the leaf value is what will be present in materialized tables
+ * and expected to match at query time. For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip
+ */
+ variantExprPath: string;
+ }
+ export interface GetFeatureEngineeringFeatureSourceKafkaSourceTimeseriesColumnIdentifier {
+ /**
+ * (string) - String representation of the column name or variant expression path. For nested fields, the leaf value is what will be present in materialized tables
+ * and expected to match at query time. For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip
+ */
+ variantExprPath: string;
+ }
  export interface GetFeatureEngineeringFeatureTimeWindow {
  /**
  * (ContinuousWindow)
@@ -6066,6 +6203,7 @@ export interface GetFeatureEngineeringFeaturesFeature {
  * (list of string) - The input columns from which the feature is computed
  */
  inputs: string[];
+ lineageContext: outputs.GetFeatureEngineeringFeaturesFeatureLineageContext;
  /**
  * (DataSource) - The data source of the feature
  */
@@ -6095,11 +6233,35 @@ export interface GetFeatureEngineeringFeaturesFeatureFunctionExtraParameter {
  */
  value: string;
  }
+ export interface GetFeatureEngineeringFeaturesFeatureLineageContext {
+ /**
+ * (JobContext) - Job context information including job ID and run ID
+ */
+ jobContext?: outputs.GetFeatureEngineeringFeaturesFeatureLineageContextJobContext;
+ /**
+ * (integer) - The notebook ID where this API was invoked
+ */
+ notebookId?: number;
+ }
+ export interface GetFeatureEngineeringFeaturesFeatureLineageContextJobContext {
+ /**
+ * (integer) - The job ID where this API was invoked
+ */
+ jobId?: number;
+ /**
+ * (integer) - The job run ID where this API was invoked
+ */
+ jobRunId?: number;
+ }
  export interface GetFeatureEngineeringFeaturesFeatureSource {
  /**
  * (DeltaTableSource)
  */
  deltaTableSource?: outputs.GetFeatureEngineeringFeaturesFeatureSourceDeltaTableSource;
+ /**
+ * (KafkaSource)
+ */
+ kafkaSource?: outputs.GetFeatureEngineeringFeaturesFeatureSourceKafkaSource;
  }
  export interface GetFeatureEngineeringFeaturesFeatureSourceDeltaTableSource {
  /**
@@ -6115,6 +6277,34 @@ export interface GetFeatureEngineeringFeaturesFeatureSourceDeltaTableSource {
  */
  timeseriesColumn: string;
  }
+ export interface GetFeatureEngineeringFeaturesFeatureSourceKafkaSource {
+ /**
+ * (list of ColumnIdentifier) - The entity column identifiers of the Kafka source
+ */
+ entityColumnIdentifiers: outputs.GetFeatureEngineeringFeaturesFeatureSourceKafkaSourceEntityColumnIdentifier[];
+ /**
+ * (string) - Name of the Kafka source, used to identify it. This is used to look up the corresponding KafkaConfig object. Can be distinct from topic name
+ */
+ name: string;
+ /**
+ * (ColumnIdentifier) - The timeseries column identifier of the Kafka source
+ */
+ timeseriesColumnIdentifier: outputs.GetFeatureEngineeringFeaturesFeatureSourceKafkaSourceTimeseriesColumnIdentifier;
+ }
+ export interface GetFeatureEngineeringFeaturesFeatureSourceKafkaSourceEntityColumnIdentifier {
+ /**
+ * (string) - String representation of the column name or variant expression path. For nested fields, the leaf value is what will be present in materialized tables
+ * and expected to match at query time. For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip
+ */
+ variantExprPath: string;
+ }
+ export interface GetFeatureEngineeringFeaturesFeatureSourceKafkaSourceTimeseriesColumnIdentifier {
+ /**
+ * (string) - String representation of the column name or variant expression path. For nested fields, the leaf value is what will be present in materialized tables
+ * and expected to match at query time. For example, the leaf node of value:trip_details.location_details.pickup_zip is pickup_zip
+ */
+ variantExprPath: string;
+ }
  export interface GetFeatureEngineeringFeaturesFeatureTimeWindow {
  /**
  * (ContinuousWindow)
@@ -6155,9 +6345,108 @@ export interface GetFeatureEngineeringFeaturesFeatureTimeWindowTumbling {
  */
  windowDuration: string;
  }
+ export interface GetFeatureEngineeringKafkaConfigAuthConfig {
+ /**
+ * (string) - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
+ */
+ ucServiceCredentialName?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigKeySchema {
+ /**
+ * (string) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
+ */
+ jsonSchema?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigSubscriptionMode {
+ /**
+ * (string) - A JSON string that contains the specific topic-partitions to consume from.
+ * For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
+ */
+ assign?: string;
+ /**
+ * (string) - A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
+ */
+ subscribe?: string;
+ /**
+ * (string) - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
+ */
+ subscribePattern?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigValueSchema {
+ /**
+ * (string) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
+ */
+ jsonSchema?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigsKafkaConfig {
+ /**
+ * (AuthConfig) - Authentication configuration for connection to topics
+ */
+ authConfig: outputs.GetFeatureEngineeringKafkaConfigsKafkaConfigAuthConfig;
+ /**
+ * (string) - A comma-separated list of host/port pairs pointing to Kafka cluster
+ */
+ bootstrapServers: string;
+ /**
+ * (object) - Catch-all for miscellaneous options. Keys should be source options or Kafka consumer options (kafka.*)
+ */
+ extraOptions: {
+ [key: string]: string;
+ };
+ /**
+ * (SchemaConfig) - Schema configuration for extracting message keys from topics. At least one of keySchema and valueSchema must be provided
+ */
+ keySchema: outputs.GetFeatureEngineeringKafkaConfigsKafkaConfigKeySchema;
+ /**
+ * (string) - Name that uniquely identifies this Kafka config within the metastore. This will be the identifier used from the Feature object to reference these configs for a feature.
+ * Can be distinct from topic name
+ */
+ name: string;
+ /**
+ * (SubscriptionMode) - Options to configure which Kafka topics to pull data from
+ */
+ subscriptionMode: outputs.GetFeatureEngineeringKafkaConfigsKafkaConfigSubscriptionMode;
+ /**
+ * (SchemaConfig) - Schema configuration for extracting message values from topics. At least one of keySchema and valueSchema must be provided
+ */
+ valueSchema: outputs.GetFeatureEngineeringKafkaConfigsKafkaConfigValueSchema;
+ }
+ export interface GetFeatureEngineeringKafkaConfigsKafkaConfigAuthConfig {
+ /**
+ * (string) - Name of the Unity Catalog service credential. This value will be set under the option databricks.serviceCredential
+ */
+ ucServiceCredentialName?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigsKafkaConfigKeySchema {
+ /**
+ * (string) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
+ */
+ jsonSchema?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigsKafkaConfigSubscriptionMode {
+ /**
+ * (string) - A JSON string that contains the specific topic-partitions to consume from.
+ * For example, for '{"topicA":[0,1],"topicB":[2,4]}', topicA's 0'th and 1st partitions will be consumed from
+ */
+ assign?: string;
+ /**
+ * (string) - A comma-separated list of Kafka topics to read from. For example, 'topicA,topicB,topicC'
+ */
+ subscribe?: string;
+ /**
+ * (string) - A regular expression matching topics to subscribe to. For example, 'topic.*' will subscribe to all topics starting with 'topic'
+ */
+ subscribePattern?: string;
+ }
+ export interface GetFeatureEngineeringKafkaConfigsKafkaConfigValueSchema {
+ /**
+ * (string) - Schema of the JSON object in standard IETF JSON schema format (https://json-schema.org/)
+ */
+ jsonSchema?: string;
+ }
  export interface GetFeatureEngineeringMaterializedFeatureOfflineStoreConfig {
  /**
- * (string) - The Unity Catalog catalog name
+ * (string) - The Unity Catalog catalog name. This name is also used as the Lakebase logical database name
  */
  catalogName: string;
  /**
@@ -6166,37 +6455,34 @@ export interface GetFeatureEngineeringMaterializedFeatureOfflineStoreConfig {
  schemaName: string;
  /**
  * (string) - Prefix for Unity Catalog table name.
- * The materialized feature will be stored in a table with this prefix and a generated postfix
+ * The materialized feature will be stored in a Lakebase table with this prefix and a generated postfix
  */
  tableNamePrefix: string;
  }
  export interface GetFeatureEngineeringMaterializedFeatureOnlineStoreConfig {
  /**
- * (string) - The capacity of the online store. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"
- */
- capacity: string;
- /**
- * (string) - The timestamp when the online store was created
- */
- creationTime: string;
- /**
- * (string) - The email of the creator of the online store
+ * (string) - The Unity Catalog catalog name. This name is also used as the Lakebase logical database name
  */
- creator: string;
+ catalogName: string;
  /**
- * (string) - The name of the online store. This is the unique identifier for the online store
+ * (string) - The name of the target online store
  */
- name: string;
+ onlineStoreName: string;
  /**
- * (integer) - The number of read replicas for the online store. Defaults to 0
+ * (string) - The Unity Catalog schema name
  */
- readReplicaCount?: number;
+ schemaName: string;
  /**
- * (string) - The current state of the online store. Possible values are: `AVAILABLE`, `DELETING`, `FAILING_OVER`, `STARTING`, `STOPPED`, `UPDATING`
+ * (string) - Prefix for Unity Catalog table name.
+ * The materialized feature will be stored in a Lakebase table with this prefix and a generated postfix
  */
- state: string;
+ tableNamePrefix: string;
  }
  export interface GetFeatureEngineeringMaterializedFeaturesMaterializedFeature {
+ /**
+ * (string) - The quartz cron expression that defines the schedule of the materialization pipeline. The schedule is evaluated in the UTC timezone
+ */
+ cronSchedule: string;
  /**
  * Filter by feature name. If specified, only materialized features materialized from this feature will be returned
  */
@@ -6215,7 +6501,7 @@ export interface GetFeatureEngineeringMaterializedFeaturesMaterializedFeature {
  */
  offlineStoreConfig: outputs.GetFeatureEngineeringMaterializedFeaturesMaterializedFeatureOfflineStoreConfig;
  /**
- * (OnlineStore)
+ * (OnlineStoreConfig)
  */
  onlineStoreConfig: outputs.GetFeatureEngineeringMaterializedFeaturesMaterializedFeatureOnlineStoreConfig;
  /**
@@ -6229,7 +6515,7 @@ export interface GetFeatureEngineeringMaterializedFeaturesMaterializedFeature {
  }
  export interface GetFeatureEngineeringMaterializedFeaturesMaterializedFeatureOfflineStoreConfig {
  /**
- * (string) - The Unity Catalog catalog name
+ * (string) - The Unity Catalog catalog name. This name is also used as the Lakebase logical database name
  */
  catalogName: string;
  /**
@@ -6238,35 +6524,28 @@ export interface GetFeatureEngineeringMaterializedFeaturesMaterializedFeatureOff
  schemaName: string;
  /**
  * (string) - Prefix for Unity Catalog table name.
- * The materialized feature will be stored in a table with this prefix and a generated postfix
+ * The materialized feature will be stored in a Lakebase table with this prefix and a generated postfix
  */
  tableNamePrefix: string;
  }
  export interface GetFeatureEngineeringMaterializedFeaturesMaterializedFeatureOnlineStoreConfig {
  /**
- * (string) - The capacity of the online store. Valid values are "CU_1", "CU_2", "CU_4", "CU_8"
- */
- capacity: string;
- /**
- * (string) - The timestamp when the online store was created
+ * (string) - The Unity Catalog catalog name. This name is also used as the Lakebase logical database name
  */
- creationTime: string;
+ catalogName: string;
  /**
- * (string) - The email of the creator of the online store
+ * (string) - The name of the target online store
  */
- creator: string;
+ onlineStoreName: string;
  /**
- * (string) - The name of the online store. This is the unique identifier for the online store
- */
- name: string;
- /**
- * (integer) - The number of read replicas for the online store. Defaults to 0
+ * (string) - The Unity Catalog schema name
  */
- readReplicaCount?: number;
+ schemaName: string;
  /**
- * (string) - The current state of the online store. Possible values are: `AVAILABLE`, `DELETING`, `FAILING_OVER`, `STARTING`, `STOPPED`, `UPDATING`
+ * (string) - Prefix for Unity Catalog table name.
+ * The materialized feature will be stored in a Lakebase table with this prefix and a generated postfix
  */
- state: string;
+ tableNamePrefix: string;
  }
  export interface GetFunctionsFunction {
  /**
@@ -8972,6 +9251,45 @@ export interface GetServicePrincipalFederationPolicyOidcPolicy {
  */
  subjectClaim?: string;
  }
+ export interface GetServicePrincipalsServicePrincipal {
+ /**
+ * identifier for use in databricks_access_control_rule_set, e.g. `servicePrincipals/00000000-0000-0000-0000-000000000000`.
+ */
+ aclPrincipalId: string;
+ /**
+ * Whether service principal is active or not.
+ */
+ active: boolean;
+ /**
+ * Application ID of the service principal.
+ */
+ applicationId: string;
+ /**
+ * Display name of the service principal, e.g. `Foo SPN`.
+ */
+ displayName: string;
+ /**
+ * ID of the service principal in an external identity provider.
+ */
+ externalId: string;
+ /**
+ * Home folder of the service principal, e.g. `/Users/11111111-2222-3333-4444-555666777888`.
+ */
+ home: string;
+ /**
+ * The id of the service principal (SCIM ID).
+ */
+ id: string;
+ /**
+ * Repos location of the service principal, e.g. `/Repos/11111111-2222-3333-4444-555666777888`.
+ */
+ repos: string;
+ /**
+ * same as `id`.
+ */
+ scimId: string;
+ spId: string;
+ }
  export interface GetServingEndpointsEndpoint {
  /**
  * A block with AI Gateway configuration for the serving endpoint.
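The data source now exposes full records under servicePrincipals instead of only application IDs. A sketch using the existing displayNameContains filter; the filter value is a placeholder:

import * as databricks from "@pulumi/databricks";

export const automationSps = databricks.getServicePrincipals({
    displayNameContains: "automation", // placeholder filter
}).then(result =>
    (result.servicePrincipals ?? []).map(sp => `${sp.displayName}: ${sp.applicationId}`));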
@@ -9636,6 +9954,79 @@ export interface GetTagPolicyValue {
  */
  name: string;
  }
+ export interface GetUsersUser {
+ /**
+ * Boolean that represents if this user is active.
+ */
+ active?: boolean;
+ displayName?: string;
+ /**
+ * All the emails associated with the Databricks user.
+ */
+ emails?: outputs.GetUsersUserEmail[];
+ /**
+ * Entitlements assigned to the user.
+ */
+ entitlements?: outputs.GetUsersUserEntitlement[];
+ externalId?: string;
+ /**
+ * Indicates if the user is part of any groups.
+ */
+ groups?: outputs.GetUsersUserGroup[];
+ /**
+ * The ID of the user.
+ * - `userName` - The username of the user.
+ */
+ id?: string;
+ /**
+ * - `givenName` - Given name of the Databricks user.
+ * - `familyName` - Family name of the Databricks user.
+ * - `displayName` - The display name of the user.
+ */
+ name?: outputs.GetUsersUserName;
+ /**
+ * Indicates if the user has any associated roles.
+ */
+ roles?: outputs.GetUsersUserRole[];
+ /**
+ * The schema of the user.
+ * - `externalId` - Reserved for future use.
+ */
+ schemas?: string[];
+ userName?: string;
+ }
+ export interface GetUsersUserEmail {
+ display?: string;
+ primary?: boolean;
+ ref?: string;
+ type?: string;
+ value?: string;
+ }
+ export interface GetUsersUserEntitlement {
+ display?: string;
+ primary?: boolean;
+ ref?: string;
+ type?: string;
+ value?: string;
+ }
+ export interface GetUsersUserGroup {
+ display?: string;
+ primary?: boolean;
+ ref?: string;
+ type?: string;
+ value?: string;
+ }
+ export interface GetUsersUserName {
+ familyName?: string;
+ givenName?: string;
+ }
+ export interface GetUsersUserRole {
+ display?: string;
+ primary?: boolean;
+ ref?: string;
+ type?: string;
+ value?: string;
+ }
  export interface GetViewsProviderConfig {
  /**
  * Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with.
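The new getUsers data source returns SCIM-style user records. A minimal sketch; it assumes the invoke takes no required arguments and returns the matches under a users property (both assumptions, since only the output types appear in this diff):

import * as databricks from "@pulumi/databricks";

export const activeUserNames = databricks.getUsers({}).then(result =>
    (result.users ?? [])
        .filter(user => user.active)
        .map(user => user.userName));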
@@ -9731,6 +10122,24 @@ export interface GetVolumesProviderConfig {
  */
  workspaceId: string;
  }
+ export interface GetWorkspaceEntityTagAssignmentsTagAssignment {
+ /**
+ * The identifier of the entity to which the tag is assigned
+ */
+ entityId: string;
+ /**
+ * The type of entity to which the tag is assigned. Allowed values are dashboards, geniespaces
+ */
+ entityType: string;
+ /**
+ * (string) - The key of the tag. The characters , . : / - = and leading/trailing spaces are not allowed
+ */
+ tagKey: string;
+ /**
+ * (string) - The value of the tag
+ */
+ tagValue: string;
+ }
  export interface GetWorkspaceSettingV2AibiDashboardEmbeddingAccessPolicy {
  /**
  * (string) - Possible values are: `ALLOW_ALL_DOMAINS`, `ALLOW_APPROVED_DOMAINS`, `DENY_ALL_DOMAINS`
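A sketch of the companion workspaceEntityTagAssignment resource, assuming its inputs mirror the tag assignment shape above; the dashboard ID is a placeholder:

import * as databricks from "@pulumi/databricks";

const costCenterTag = new databricks.WorkspaceEntityTagAssignment("cost-center", {
    entityType: "dashboards", // or "geniespaces"
    entityId: "01ef0000000000000000000000000000", // placeholder dashboard ID
    tagKey: "cost_center", // keys may not contain , . : / - = or leading/trailing spaces
    tagValue: "analytics",
});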
@@ -12750,6 +13159,7 @@ export interface JobTrigger {
  * configuration block to define a trigger for [File Arrival events](https://learn.microsoft.com/en-us/azure/databricks/workflows/jobs/file-arrival-triggers) consisting of following attributes:
  */
  fileArrival?: outputs.JobTriggerFileArrival;
+ model?: outputs.JobTriggerModel;
  /**
  * Indicate whether this trigger is paused or not. Either `PAUSED` or `UNPAUSED`. When the `pauseStatus` field is omitted in the block, the server will default to using `UNPAUSED` as a value for `pauseStatus`.
  */
@@ -12777,6 +13187,16 @@ export interface JobTriggerFileArrival {
  */
  waitAfterLastChangeSeconds?: number;
  }
+ export interface JobTriggerModel {
+ aliases?: string[];
+ /**
+ * The table(s) condition based on which to trigger a job run. Possible values are `ANY_UPDATED`, `ALL_UPDATED`.
+ */
+ condition: string;
+ minTimeBetweenTriggersSeconds?: number;
+ securableName?: string;
+ waitAfterLastChangeSeconds?: number;
+ }
  export interface JobTriggerPeriodic {
  /**
  * Specifies the interval at which the job should run.
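A sketch of the new model trigger, assuming the Job resource's trigger input mirrors JobTriggerModel above; the model name and notebook path are placeholders:

import * as databricks from "@pulumi/databricks";

const refreshJob = new databricks.Job("refresh-on-model-update", {
    name: "refresh-on-model-update",
    trigger: {
        pauseStatus: "UNPAUSED",
        model: {
            securableName: "main.ml.churn_model", // placeholder UC model
            condition: "ALL_UPDATED",
            minTimeBetweenTriggersSeconds: 3600,
        },
    },
    tasks: [{
        taskKey: "refresh",
        notebookTask: { notebookPath: "/Shared/refresh" }, // placeholder notebook
    }],
});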
@@ -13965,11 +14385,11 @@ export interface MwsNetworksGcpNetworkInfo {
  */
  networkProjectId: string;
  /**
- * @deprecated gcp_network_info.pod_ip_range_name is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.97.0/docs/guides/gcp-workspace#creating-a-vpc
+ * @deprecated gcp_network_info.pod_ip_range_name is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.100.0/docs/guides/gcp-workspace#creating-a-vpc
  */
  podIpRangeName?: string;
  /**
- * @deprecated gcp_network_info.service_ip_range_name is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.97.0/docs/guides/gcp-workspace#creating-a-vpc
+ * @deprecated gcp_network_info.service_ip_range_name is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.100.0/docs/guides/gcp-workspace#creating-a-vpc
  */
  serviceIpRangeName?: string;
  /**
@@ -14030,11 +14450,11 @@ export interface MwsWorkspacesExternalCustomerInfo {
  }
  export interface MwsWorkspacesGcpManagedNetworkConfig {
  /**
- * @deprecated gcp_managed_network_config.gke_cluster_pod_ip_range is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.97.0/docs/guides/gcp-workspace#creating-a-databricks-workspace
+ * @deprecated gcp_managed_network_config.gke_cluster_pod_ip_range is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.100.0/docs/guides/gcp-workspace#creating-a-databricks-workspace
  */
  gkeClusterPodIpRange?: string;
  /**
- * @deprecated gcp_managed_network_config.gke_cluster_service_ip_range is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.97.0/docs/guides/gcp-workspace#creating-a-databricks-workspace
+ * @deprecated gcp_managed_network_config.gke_cluster_service_ip_range is deprecated and will be removed in a future release. For more information, review the documentation at https://registry.terraform.io/providers/databricks/databricks/1.100.0/docs/guides/gcp-workspace#creating-a-databricks-workspace
  */
  gkeClusterServiceIpRange?: string;
  subnetCidr: string;
@@ -14457,10 +14877,14 @@ export interface PipelineFilters {
  }
  export interface PipelineGatewayDefinition {
  /**
- * Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source.
+ * Deprecated, Immutable. The Unity Catalog connection this gateway pipeline uses to communicate with the source. *Use `connectionName` instead!*
  */
  connectionId?: string;
+ /**
+ * Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+ */
  connectionName: string;
+ connectionParameters?: outputs.PipelineGatewayDefinitionConnectionParameters;
  /**
  * Required, Immutable. The name of the catalog for the gateway pipeline's storage location.
  */
@@ -14474,8 +14898,12 @@ export interface PipelineGatewayDefinition {
  */
  gatewayStorageSchema: string;
  }
+ export interface PipelineGatewayDefinitionConnectionParameters {
+ sourceCatalog?: string;
+ }
  export interface PipelineIngestionDefinition {
  connectionName?: string;
+ ingestFromUcForeignCatalog?: boolean;
  ingestionGatewayId?: string;
  netsuiteJarPath?: string;
  objects?: outputs.PipelineIngestionDefinitionObject[];
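With connectionId deprecated in favor of connectionName, a gateway pipeline would now be declared like this sketch; the connection and storage names are placeholders:

import * as databricks from "@pulumi/databricks";

const gateway = new databricks.Pipeline("ingestion-gateway", {
    name: "ingestion-gateway",
    gatewayDefinition: {
        connectionName: "sql-server-connection", // placeholder UC connection
        gatewayStorageCatalog: "main",
        gatewayStorageName: "sqlserver_gateway",
        gatewayStorageSchema: "ingest",
    },
});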
@@ -14594,7 +15022,7 @@ export interface PipelineIngestionDefinitionObjectTableTableConfigurationWorkday
  }
  export interface PipelineIngestionDefinitionSourceConfiguration {
  /**
- * The name of catalog in Unity Catalog. *Change of this parameter forces recreation of the pipeline.* (Conflicts with `storage`).
+ * The name of the default catalog in Unity Catalog. *Changing this parameter forces recreation of the pipeline if you switch from `storage` to `catalog` or vice versa. If the pipeline was already created with `catalog` set, the value can be changed.* (Conflicts with `storage`).
  */
  catalog?: outputs.PipelineIngestionDefinitionSourceConfigurationCatalog;
  }
@@ -15169,9 +15597,12 @@ export interface ShareObject {
  * Description about the object.
  */
  comment?: string;
+ /**
+ * The content of the notebook file when the data object type is NOTEBOOK_FILE. This should be base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other types.
+ */
  content?: string;
  /**
- * Type of the data object, currently `TABLE`, `VIEW`, `SCHEMA`, `VOLUME`, and `MODEL` are supported.
+ * Type of the data object. Supported types: `TABLE`, `FOREIGN_TABLE`, `SCHEMA`, `VIEW`, `MATERIALIZED_VIEW`, `STREAMING_TABLE`, `MODEL`, `NOTEBOOK_FILE`, `FUNCTION`, `FEATURE_SPEC`, and `VOLUME`.
  */
  dataObjectType: string;
  effectiveCdfEnabled: boolean;
@@ -15189,6 +15620,9 @@ export interface ShareObject {
  * Full name of the object, e.g. `catalog.schema.name` for a tables, views, volumes and models, or `catalog.schema` for schemas.
  */
  name: string;
+ /**
+ * Array of partitions for the shared data.
+ */
  partitions?: outputs.ShareObjectPartition[];
  /**
  * A user-provided new name for the data object within the share. If this new name is not provided, the object's original name will be used as the `sharedAs` name. The `sharedAs` name must be unique within a Share. Change forces creation of a new resource.
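The expanded dataObjectType list makes notebook sharing declarable. A sketch that assumes a local notebook export; the share and object names are placeholders:

import * as databricks from "@pulumi/databricks";
import * as fs from "fs";

const share = new databricks.Share("analytics-share", {
    name: "analytics-share",
    objects: [{
        name: "main.reporting.intro_notebook", // placeholder object name
        dataObjectType: "NOTEBOOK_FILE",
        // Required when adding a NOTEBOOK_FILE; must be base64 encoded.
        content: fs.readFileSync("intro_notebook.ipynb").toString("base64"),
    }],
});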