@pulumi/databricks 1.48.0 → 1.49.0-alpha.1723819798

This diff compares the publicly released contents of the two package versions as they appear in their respective public registries. It is provided for informational purposes only.
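The release narrows every free-form map in `package/types/input.d.ts` from `{[key: string]: any}` to `{[key: string]: string}` on plain interfaces, and to `pulumi.Input<{[key: string]: pulumi.Input<string>}>` on the `*Args` and resource input interfaces: `customTags`, `defaultTags`, `sparkConf`, `sparkEnvVars`, `tags`, `baseParameters`, `namedParameters`, `jobParameters`, `parameters`, `properties`, `environmentVars`, and similar fields. For TypeScript consumers this is a compile-time breaking change wherever non-string values were passed into these maps. A minimal sketch of the impact (the resource name and cluster fields below are illustrative, not taken from this diff):

```typescript
import * as databricks from "@pulumi/databricks";

const job = new databricks.Job("example-job", {
    newCluster: {
        sparkVersion: "14.3.x-scala2.12",
        numWorkers: 2,
        // 1.48.0 typed these maps as {[key: string]: any}, so numbers compiled;
        // 1.49.0-alpha requires pulumi.Input<string> values:
        customTags: { CostCenter: "1234" },                   // was: CostCenter: 1234
        sparkConf: { "spark.sql.shuffle.partitions": "200" }, // was: 200
    },
});
```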
package/types/input.d.ts CHANGED
@@ -514,7 +514,7 @@ export interface GetCatalogCatalogInfo {
      * A map of key-value properties attached to the securable.
      */
     options?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Current owner of the catalog
@@ -524,7 +524,7 @@ export interface GetCatalogCatalogInfo {
      * A map of key-value properties attached to the securable.
      */
     properties?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * The name of delta sharing provider.
@@ -610,7 +610,7 @@ export interface GetCatalogCatalogInfoArgs {
      * A map of key-value properties attached to the securable.
      */
     options?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Current owner of the catalog
@@ -620,7 +620,7 @@ export interface GetCatalogCatalogInfoArgs {
      * A map of key-value properties attached to the securable.
      */
     properties?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * The name of delta sharing provider.
@@ -698,14 +698,14 @@ export interface GetClusterClusterInfo {
      * Additional tags for cluster resources.
      */
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Security features of the cluster. Unity Catalog requires `SINGLE_USER` or `USER_ISOLATION` mode. `LEGACY_PASSTHROUGH` for passthrough cluster and `LEGACY_TABLE_ACL` for Table ACL cluster. Default to `NONE`, i.e. no security feature enabled.
      */
     dataSecurityMode?: string;
     defaultTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dockerImage?: inputs.GetClusterClusterInfoDockerImage;
     driver?: inputs.GetClusterClusterInfoDriver;
@@ -756,14 +756,14 @@ export interface GetClusterClusterInfo {
      * Map with key-value pairs to fine-tune Spark clusters.
      */
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkContextId?: number;
     /**
      * Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers.
      */
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.
@@ -807,14 +807,14 @@ export interface GetClusterClusterInfoArgs {
      * Additional tags for cluster resources.
      */
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Security features of the cluster. Unity Catalog requires `SINGLE_USER` or `USER_ISOLATION` mode. `LEGACY_PASSTHROUGH` for passthrough cluster and `LEGACY_TABLE_ACL` for Table ACL cluster. Default to `NONE`, i.e. no security feature enabled.
      */
     dataSecurityMode?: pulumi.Input<string>;
     defaultTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dockerImage?: pulumi.Input<inputs.GetClusterClusterInfoDockerImageArgs>;
     driver?: pulumi.Input<inputs.GetClusterClusterInfoDriverArgs>;
@@ -865,14 +865,14 @@ export interface GetClusterClusterInfoArgs {
      * Map with key-value pairs to fine-tune Spark clusters.
      */
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkContextId?: pulumi.Input<number>;
     /**
      * Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers.
      */
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.
@@ -1153,7 +1153,7 @@ export interface GetClusterClusterInfoSpec {
      * Additional tags for cluster resources.
      */
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Security features of the cluster. Unity Catalog requires `SINGLE_USER` or `USER_ISOLATION` mode. `LEGACY_PASSTHROUGH` for passthrough cluster and `LEGACY_TABLE_ACL` for Table ACL cluster. Default to `NONE`, i.e. no security feature enabled.
@@ -1208,13 +1208,13 @@ export interface GetClusterClusterInfoSpec {
      * Map with key-value pairs to fine-tune Spark clusters.
      */
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers.
      */
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.
@@ -1245,7 +1245,7 @@ export interface GetClusterClusterInfoSpecArgs {
      * Additional tags for cluster resources.
      */
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Security features of the cluster. Unity Catalog requires `SINGLE_USER` or `USER_ISOLATION` mode. `LEGACY_PASSTHROUGH` for passthrough cluster and `LEGACY_TABLE_ACL` for Table ACL cluster. Default to `NONE`, i.e. no security feature enabled.
@@ -1300,13 +1300,13 @@ export interface GetClusterClusterInfoSpecArgs {
      * Map with key-value pairs to fine-tune Spark clusters.
      */
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers.
      */
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.
@@ -1591,14 +1591,14 @@ export interface GetClusterClusterInfoSpecWorkloadTypeClientsArgs {
 export interface GetClusterClusterInfoTerminationReason {
     code?: string;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     type?: string;
 }
 export interface GetClusterClusterInfoTerminationReasonArgs {
     code?: pulumi.Input<string>;
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     type?: pulumi.Input<string>;
 }
@@ -1896,10 +1896,10 @@ export interface GetInstancePoolPoolInfo {
     awsAttributes?: inputs.GetInstancePoolPoolInfoAwsAttributes;
     azureAttributes?: inputs.GetInstancePoolPoolInfoAzureAttributes;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     defaultTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     diskSpec?: inputs.GetInstancePoolPoolInfoDiskSpec;
     enableElasticDisk?: boolean;
@@ -1920,10 +1920,10 @@ export interface GetInstancePoolPoolInfoArgs {
     awsAttributes?: pulumi.Input<inputs.GetInstancePoolPoolInfoAwsAttributesArgs>;
     azureAttributes?: pulumi.Input<inputs.GetInstancePoolPoolInfoAzureAttributesArgs>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     defaultTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     diskSpec?: pulumi.Input<inputs.GetInstancePoolPoolInfoDiskSpecArgs>;
     enableElasticDisk?: pulumi.Input<boolean>;
@@ -2134,7 +2134,7 @@ export interface GetJobJobSettingsSettings {
     sparkPythonTask?: inputs.GetJobJobSettingsSettingsSparkPythonTask;
     sparkSubmitTask?: inputs.GetJobJobSettingsSettingsSparkSubmitTask;
     tags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     tasks?: inputs.GetJobJobSettingsSettingsTask[];
     timeoutSeconds?: number;
@@ -2177,7 +2177,7 @@ export interface GetJobJobSettingsSettingsArgs {
     sparkPythonTask?: pulumi.Input<inputs.GetJobJobSettingsSettingsSparkPythonTaskArgs>;
     sparkSubmitTask?: pulumi.Input<inputs.GetJobJobSettingsSettingsSparkSubmitTaskArgs>;
     tags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     tasks?: pulumi.Input<pulumi.Input<inputs.GetJobJobSettingsSettingsTaskArgs>[]>;
     timeoutSeconds?: pulumi.Input<number>;
@@ -2309,7 +2309,7 @@ export interface GetJobJobSettingsSettingsJobClusterNewCluster {
     clusterMountInfos?: inputs.GetJobJobSettingsSettingsJobClusterNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: inputs.GetJobJobSettingsSettingsJobClusterNewClusterDockerImage;
@@ -2327,10 +2327,10 @@ export interface GetJobJobSettingsSettingsJobClusterNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -2347,7 +2347,7 @@ export interface GetJobJobSettingsSettingsJobClusterNewClusterArgs {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.GetJobJobSettingsSettingsJobClusterNewClusterClusterMountInfoArgs>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.GetJobJobSettingsSettingsJobClusterNewClusterDockerImageArgs>;
@@ -2365,10 +2365,10 @@ export interface GetJobJobSettingsSettingsJobClusterNewClusterArgs {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -2635,7 +2635,7 @@ export interface GetJobJobSettingsSettingsNewCluster {
     clusterMountInfos?: inputs.GetJobJobSettingsSettingsNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: inputs.GetJobJobSettingsSettingsNewClusterDockerImage;
@@ -2653,10 +2653,10 @@ export interface GetJobJobSettingsSettingsNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -2673,7 +2673,7 @@ export interface GetJobJobSettingsSettingsNewClusterArgs {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.GetJobJobSettingsSettingsNewClusterClusterMountInfoArgs>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.GetJobJobSettingsSettingsNewClusterDockerImageArgs>;
@@ -2691,10 +2691,10 @@ export interface GetJobJobSettingsSettingsNewClusterArgs {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -2908,7 +2908,7 @@ export interface GetJobJobSettingsSettingsNewClusterWorkloadTypeClientsArgs {
 }
 export interface GetJobJobSettingsSettingsNotebookTask {
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookPath: string;
     source?: string;
@@ -2916,7 +2916,7 @@ export interface GetJobJobSettingsSettingsNotebookTask {
 }
 export interface GetJobJobSettingsSettingsNotebookTaskArgs {
     baseParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     notebookPath: pulumi.Input<string>;
     source?: pulumi.Input<string>;
@@ -2955,7 +2955,7 @@ export interface GetJobJobSettingsSettingsPipelineTaskArgs {
 export interface GetJobJobSettingsSettingsPythonWheelTask {
     entryPoint?: string;
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     packageName?: string;
     parameters?: string[];
@@ -2963,7 +2963,7 @@ export interface GetJobJobSettingsSettingsPythonWheelTask {
 export interface GetJobJobSettingsSettingsPythonWheelTaskArgs {
     entryPoint?: pulumi.Input<string>;
     namedParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     packageName?: pulumi.Input<string>;
     parameters?: pulumi.Input<pulumi.Input<string>[]>;
@@ -2985,13 +2985,13 @@ export interface GetJobJobSettingsSettingsRunAsArgs {
 export interface GetJobJobSettingsSettingsRunJobTask {
     jobId: number;
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetJobJobSettingsSettingsRunJobTaskArgs {
     jobId: pulumi.Input<number>;
     jobParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface GetJobJobSettingsSettingsSchedule {
@@ -3333,7 +3333,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewCluster {
     clusterMountInfos?: inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImage;
@@ -3351,10 +3351,10 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -3371,7 +3371,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterArgs {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfoArgs>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImageArgs>;
@@ -3389,10 +3389,10 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterArgs {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -3606,7 +3606,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterWorkloadT
 }
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNotebookTask {
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookPath: string;
     source?: string;
@@ -3614,7 +3614,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNotebookTask {
 }
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNotebookTaskArgs {
     baseParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     notebookPath: pulumi.Input<string>;
     source?: pulumi.Input<string>;
@@ -3641,7 +3641,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPipelineTaskArgs {
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTask {
     entryPoint?: string;
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     packageName?: string;
     parameters?: string[];
@@ -3649,7 +3649,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTask {
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTaskArgs {
     entryPoint?: pulumi.Input<string>;
     namedParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     packageName?: pulumi.Input<string>;
     parameters?: pulumi.Input<pulumi.Input<string>[]>;
@@ -3657,13 +3657,13 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTaskArgs
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskRunJobTask {
     jobId: number;
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskRunJobTaskArgs {
     jobId: pulumi.Input<number>;
     jobParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskSparkJarTask {
@@ -3697,7 +3697,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTask {
     dashboard?: inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboard;
     file?: inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskFile;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     query?: inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskQuery;
     warehouseId: string;
@@ -3707,7 +3707,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskArgs {
     dashboard?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboardArgs>;
     file?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskFileArgs>;
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     query?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskQueryArgs>;
     warehouseId: pulumi.Input<string>;
@@ -3909,7 +3909,7 @@ export interface GetJobJobSettingsSettingsTaskNewCluster {
     clusterMountInfos?: inputs.GetJobJobSettingsSettingsTaskNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: inputs.GetJobJobSettingsSettingsTaskNewClusterDockerImage;
@@ -3927,10 +3927,10 @@ export interface GetJobJobSettingsSettingsTaskNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -3947,7 +3947,7 @@ export interface GetJobJobSettingsSettingsTaskNewClusterArgs {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.GetJobJobSettingsSettingsTaskNewClusterClusterMountInfoArgs>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskNewClusterDockerImageArgs>;
@@ -3965,10 +3965,10 @@ export interface GetJobJobSettingsSettingsTaskNewClusterArgs {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
    }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -4182,7 +4182,7 @@ export interface GetJobJobSettingsSettingsTaskNewClusterWorkloadTypeClientsArgs
 }
 export interface GetJobJobSettingsSettingsTaskNotebookTask {
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookPath: string;
     source?: string;
@@ -4190,7 +4190,7 @@ export interface GetJobJobSettingsSettingsTaskNotebookTask {
 }
 export interface GetJobJobSettingsSettingsTaskNotebookTaskArgs {
     baseParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     notebookPath: pulumi.Input<string>;
     source?: pulumi.Input<string>;
@@ -4217,7 +4217,7 @@ export interface GetJobJobSettingsSettingsTaskPipelineTaskArgs {
 export interface GetJobJobSettingsSettingsTaskPythonWheelTask {
     entryPoint?: string;
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     packageName?: string;
     parameters?: string[];
@@ -4225,7 +4225,7 @@ export interface GetJobJobSettingsSettingsTaskPythonWheelTask {
 export interface GetJobJobSettingsSettingsTaskPythonWheelTaskArgs {
     entryPoint?: pulumi.Input<string>;
     namedParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     packageName?: pulumi.Input<string>;
     parameters?: pulumi.Input<pulumi.Input<string>[]>;
@@ -4233,13 +4233,13 @@ export interface GetJobJobSettingsSettingsTaskPythonWheelTaskArgs {
 export interface GetJobJobSettingsSettingsTaskRunJobTask {
     jobId: number;
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetJobJobSettingsSettingsTaskRunJobTaskArgs {
     jobId: pulumi.Input<number>;
     jobParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface GetJobJobSettingsSettingsTaskSparkJarTask {
@@ -4273,7 +4273,7 @@ export interface GetJobJobSettingsSettingsTaskSqlTask {
     dashboard?: inputs.GetJobJobSettingsSettingsTaskSqlTaskDashboard;
     file?: inputs.GetJobJobSettingsSettingsTaskSqlTaskFile;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     query?: inputs.GetJobJobSettingsSettingsTaskSqlTaskQuery;
     warehouseId: string;
@@ -4283,7 +4283,7 @@ export interface GetJobJobSettingsSettingsTaskSqlTaskArgs {
     dashboard?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskSqlTaskDashboardArgs>;
     file?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskSqlTaskFileArgs>;
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     query?: pulumi.Input<inputs.GetJobJobSettingsSettingsTaskSqlTaskQueryArgs>;
     warehouseId: pulumi.Input<string>;
@@ -4749,7 +4749,7 @@ export interface GetSchemaSchemaInfo {
      * map of properties set on the schema
      */
     properties?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * the unique identifier of the volume
@@ -4825,7 +4825,7 @@ export interface GetSchemaSchemaInfoArgs {
      * map of properties set on the schema
      */
     properties?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * the unique identifier of the volume
@@ -4957,14 +4957,14 @@ export interface GetSqlWarehouseHealthArgs {
 export interface GetSqlWarehouseHealthFailureReason {
     code?: string;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     type?: string;
 }
 export interface GetSqlWarehouseHealthFailureReasonArgs {
     code?: pulumi.Input<string>;
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     type?: pulumi.Input<string>;
 }
@@ -5245,7 +5245,7 @@ export interface GetTableTableInfo {
     owner?: string;
     pipelineId?: string;
     properties?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     rowFilter?: inputs.GetTableTableInfoRowFilter;
     /**
@@ -5311,7 +5311,7 @@ export interface GetTableTableInfoArgs {
     owner?: pulumi.Input<string>;
     pipelineId?: pulumi.Input<string>;
     properties?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     rowFilter?: pulumi.Input<inputs.GetTableTableInfoRowFilterArgs>;
     /**
@@ -5388,12 +5388,12 @@ export interface GetTableTableInfoColumnMaskArgs {
 }
 export interface GetTableTableInfoDeltaRuntimePropertiesKvpairs {
     deltaRuntimeProperties: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetTableTableInfoDeltaRuntimePropertiesKvpairsArgs {
     deltaRuntimeProperties: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface GetTableTableInfoEffectivePredictiveOptimizationFlag {
@@ -5931,7 +5931,7 @@ export interface JobJobClusterNewCluster {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.JobJobClusterNewClusterClusterMountInfo>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.JobJobClusterNewClusterDockerImage>;
@@ -5953,10 +5953,10 @@ export interface JobJobClusterNewCluster {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -6138,7 +6138,7 @@ export interface JobNewCluster {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.JobNewClusterClusterMountInfo>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.JobNewClusterDockerImage>;
@@ -6160,10 +6160,10 @@ export interface JobNewCluster {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -6318,7 +6318,7 @@ export interface JobNotebookTask {
      * (Map) Base parameters to be used for each run of this job. If the run is initiated by a call to run-now with parameters specified, the two parameters maps will be merged. If the same key is specified in baseParameters and in run-now, the value from run-now will be used. If the notebook takes a parameter that is not specified in the job’s baseParameters or the run-now override parameters, the default value from the notebook will be used. Retrieve these parameters in a notebook using `dbutils.widgets.get`.
      */
     baseParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * The path of the databricks.Notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.
@@ -6378,7 +6378,7 @@ export interface JobPythonWheelTask {
      * Named parameters for the task
      */
     namedParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Name of Python package
@@ -6425,7 +6425,7 @@ export interface JobRunJobTask {
      * (Map) Job parameters for the task
      */
     jobParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface JobSchedule {
@@ -6888,7 +6888,7 @@ export interface JobTaskForEachTaskTaskNewCluster {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.JobTaskForEachTaskTaskNewClusterClusterMountInfo>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.JobTaskForEachTaskTaskNewClusterDockerImage>;
@@ -6910,10 +6910,10 @@ export interface JobTaskForEachTaskTaskNewCluster {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -7068,7 +7068,7 @@ export interface JobTaskForEachTaskTaskNotebookTask {
      * (Map) Base parameters to be used for each run of this job. If the run is initiated by a call to run-now with parameters specified, the two parameters maps will be merged. If the same key is specified in baseParameters and in run-now, the value from run-now will be used. If the notebook takes a parameter that is not specified in the job’s baseParameters or the run-now override parameters, the default value from the notebook will be used. Retrieve these parameters in a notebook using `dbutils.widgets.get`.
      */
     baseParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * The path of the databricks.Notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.
@@ -7120,7 +7120,7 @@ export interface JobTaskForEachTaskTaskPythonWheelTask {
      * Named parameters for the task
      */
     namedParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Name of Python package
@@ -7142,19 +7142,19 @@ export interface JobTaskForEachTaskTaskRunJobTask {
      * (Map) Job parameters for the task
      */
     jobParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     notebookParams?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     pipelineParams?: pulumi.Input<inputs.JobTaskForEachTaskTaskRunJobTaskPipelineParams>;
     pythonNamedParams?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     pythonParams?: pulumi.Input<pulumi.Input<string>[]>;
     sparkSubmitParams?: pulumi.Input<pulumi.Input<string>[]>;
     sqlParams?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface JobTaskForEachTaskTaskRunJobTaskPipelineParams {
@@ -7213,7 +7213,7 @@ export interface JobTaskForEachTaskTaskSqlTask {
      * (Map) parameters to be used for each run of this task. The SQL alert task does not support custom parameters.
      */
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * block consisting of single string field: `queryId` - identifier of the Databricks SQL Query (databricks_sql_query).
@@ -7434,7 +7434,7 @@ export interface JobTaskNewCluster {
     clusterMountInfos?: pulumi.Input<pulumi.Input<inputs.JobTaskNewClusterClusterMountInfo>[]>;
     clusterName?: pulumi.Input<string>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     dataSecurityMode?: pulumi.Input<string>;
     dockerImage?: pulumi.Input<inputs.JobTaskNewClusterDockerImage>;
@@ -7456,10 +7456,10 @@ export interface JobTaskNewCluster {
     runtimeEngine?: pulumi.Input<string>;
     singleUserName?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkVersion: pulumi.Input<string>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
@@ -7614,7 +7614,7 @@ export interface JobTaskNotebookTask {
      * (Map) Base parameters to be used for each run of this job. If the run is initiated by a call to run-now with parameters specified, the two parameters maps will be merged. If the same key is specified in baseParameters and in run-now, the value from run-now will be used. If the notebook takes a parameter that is not specified in the job’s baseParameters or the run-now override parameters, the default value from the notebook will be used. Retrieve these parameters in a notebook using `dbutils.widgets.get`.
      */
     baseParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * The path of the databricks.Notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.
@@ -7666,7 +7666,7 @@ export interface JobTaskPythonWheelTask {
      * Named parameters for the task
      */
     namedParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * Name of Python package
@@ -7688,19 +7688,19 @@ export interface JobTaskRunJobTask {
      * (Map) Job parameters for the task
      */
     jobParameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     notebookParams?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     pipelineParams?: pulumi.Input<inputs.JobTaskRunJobTaskPipelineParams>;
     pythonNamedParams?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     pythonParams?: pulumi.Input<pulumi.Input<string>[]>;
     sparkSubmitParams?: pulumi.Input<pulumi.Input<string>[]>;
     sqlParams?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface JobTaskRunJobTaskPipelineParams {
@@ -7759,7 +7759,7 @@ export interface JobTaskSqlTask {
      * (Map) parameters to be used for each run of this task. The SQL alert task does not support custom parameters.
      */
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * block consisting of single string field: `queryId` - identifier of the Databricks SQL Query (databricks_sql_query).
@@ -8271,7 +8271,7 @@ export interface ModelServingConfigServedEntity {
      * An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: ```{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}```
      */
     environmentVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
      * The external model to be served. NOTE: Only one of `externalModel` and (`entityName`, `entityVersion`, `workloadSize`, `workloadType`, and `scaleToZeroEnabled`) can be specified with the latter set being used for custom model serving for a Databricks registered model. When an `externalModel` is present, the served entities list can only have one `servedEntity` object. For an existing endpoint with `externalModel`, it can not be updated to an endpoint without `externalModel`. If the endpoint is created without `externalModel`, users cannot update it to add `externalModel` later.
@@ -8452,7 +8452,7 @@ export interface ModelServingConfigServedModel {
      * a map of environment variable name/values that will be used for serving this model. Environment variables may refer to Databricks secrets using the standard syntax: `{{secrets/secret_scope/secret_key}}`.
      */
     environmentVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     /**
     * ARN of the instance profile that the served model will use to access AWS resources.
@@ -8917,7 +8917,7 @@ export interface PipelineCluster {
     azureAttributes?: pulumi.Input<inputs.PipelineClusterAzureAttributes>;
     clusterLogConf?: pulumi.Input<inputs.PipelineClusterClusterLogConf>;
     customTags?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     driverInstancePoolId?: pulumi.Input<string>;
     driverNodeTypeId?: pulumi.Input<string>;
@@ -8930,10 +8930,10 @@ export interface PipelineCluster {
     numWorkers?: pulumi.Input<number>;
     policyId?: pulumi.Input<string>;
     sparkConf?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sparkEnvVars?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     sshPublicKeys?: pulumi.Input<pulumi.Input<string>[]>;
 }
@@ -9259,7 +9259,7 @@ export interface RecipientPropertiesKvpairs {
      * a map of string key-value pairs with recipient's properties. Properties with name starting with `databricks.` are reserved.
      */
     properties: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 export interface RecipientToken {
@@ -9420,7 +9420,7 @@ export interface SqlEndpointHealth {
 export interface SqlEndpointHealthFailureReason {
     code?: pulumi.Input<string>;
     parameters?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
     type?: pulumi.Input<string>;
 }
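One nuance of the `*Args` signatures above: `pulumi.Input<{[key: string]: pulumi.Input<string>}>` still accepts `Output<string>` values, since `Output<T>` is assignable to `pulumi.Input<T>`; only non-string primitives stop compiling. A hedged sketch (variable names are illustrative):

```typescript
import * as pulumi from "@pulumi/pulumi";

const stage = pulumi.output("production");
const tags: pulumi.Input<{ [key: string]: pulumi.Input<string> }> = {
    environment: stage,       // Output<string> satisfies pulumi.Input<string>
    costCenter: String(1234), // numbers must now be converted explicitly
    // debug: true,           // error under 1.49.0-alpha: boolean is not assignable
};
```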