@pulumi/databricks 1.48.0 → 1.49.0-alpha.1723819798

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
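Nearly every hunk below tightens a map-typed field in the generated declarations from `{ [key: string]: any }` to `{ [key: string]: string }`. A minimal sketch of the effect on consuming code, assuming a cluster named `my-cluster` exists in the workspace (the cluster name and the `CostCenter` tag key are placeholders):

```typescript
import * as pulumi from "@pulumi/pulumi";
import * as databricks from "@pulumi/databricks";

// Look up an existing cluster; "my-cluster" is a hypothetical name.
const found = databricks.getClusterOutput({ clusterName: "my-cluster" });

// In 1.48.0, customTags and sparkConf were typed { [key: string]: any };
// in 1.49.0-alpha they are { [key: string]: string }, so values read from
// these maps can be used as strings without a cast.
export const costCenter: pulumi.Output<string | undefined> =
    found.clusterInfo.apply(info => info.customTags?.["CostCenter"]);
```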
package/types/output.d.ts CHANGED
@@ -513,7 +513,7 @@ export interface GetCatalogCatalogInfo {
      * A map of key-value properties attached to the securable.
      */
     options?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Current owner of the catalog
@@ -523,7 +523,7 @@ export interface GetCatalogCatalogInfo {
      * A map of key-value properties attached to the securable.
      */
     properties?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * The name of delta sharing provider.
@@ -593,14 +593,14 @@ export interface GetClusterClusterInfo {
      * Additional tags for cluster resources.
      */
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Security features of the cluster. Unity Catalog requires `SINGLE_USER` or `USER_ISOLATION` mode. `LEGACY_PASSTHROUGH` for passthrough cluster and `LEGACY_TABLE_ACL` for Table ACL cluster. Default to `NONE`, i.e. no security feature enabled.
      */
     dataSecurityMode?: string;
     defaultTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dockerImage?: outputs.GetClusterClusterInfoDockerImage;
     driver?: outputs.GetClusterClusterInfoDriver;
@@ -651,14 +651,14 @@ export interface GetClusterClusterInfo {
      * Map with key-value pairs to fine-tune Spark clusters.
      */
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkContextId?: number;
     /**
      * Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers.
      */
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.
@@ -817,7 +817,7 @@ export interface GetClusterClusterInfoSpec {
      * Additional tags for cluster resources.
      */
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Security features of the cluster. Unity Catalog requires `SINGLE_USER` or `USER_ISOLATION` mode. `LEGACY_PASSTHROUGH` for passthrough cluster and `LEGACY_TABLE_ACL` for Table ACL cluster. Default to `NONE`, i.e. no security feature enabled.
@@ -872,13 +872,13 @@ export interface GetClusterClusterInfoSpec {
      * Map with key-value pairs to fine-tune Spark clusters.
      */
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Map with environment variable key-value pairs to fine-tune Spark clusters. Key-value pairs of the form (X,Y) are exported (i.e., X='Y') while launching the driver and workers.
      */
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.
@@ -1028,7 +1028,7 @@ export interface GetClusterClusterInfoSpecWorkloadTypeClients {
 export interface GetClusterClusterInfoTerminationReason {
     code?: string;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     type?: string;
 }
@@ -1188,10 +1188,10 @@ export interface GetInstancePoolPoolInfo {
     awsAttributes?: outputs.GetInstancePoolPoolInfoAwsAttributes;
     azureAttributes?: outputs.GetInstancePoolPoolInfoAzureAttributes;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     defaultTags: {
-        [key: string]: any;
+        [key: string]: string;
     };
     diskSpec?: outputs.GetInstancePoolPoolInfoDiskSpec;
     enableElasticDisk?: boolean;
@@ -1323,7 +1323,7 @@ export interface GetJobJobSettingsSettings {
     sparkPythonTask?: outputs.GetJobJobSettingsSettingsSparkPythonTask;
     sparkSubmitTask?: outputs.GetJobJobSettingsSettingsSparkSubmitTask;
     tags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     tasks?: outputs.GetJobJobSettingsSettingsTask[];
     timeoutSeconds?: number;
@@ -1398,7 +1398,7 @@ export interface GetJobJobSettingsSettingsJobClusterNewCluster {
     clusterMountInfos?: outputs.GetJobJobSettingsSettingsJobClusterNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.GetJobJobSettingsSettingsJobClusterNewClusterDockerImage;
@@ -1416,10 +1416,10 @@ export interface GetJobJobSettingsSettingsJobClusterNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -1561,7 +1561,7 @@ export interface GetJobJobSettingsSettingsNewCluster {
     clusterMountInfos?: outputs.GetJobJobSettingsSettingsNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.GetJobJobSettingsSettingsNewClusterDockerImage;
@@ -1579,10 +1579,10 @@ export interface GetJobJobSettingsSettingsNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -1693,7 +1693,7 @@ export interface GetJobJobSettingsSettingsNewClusterWorkloadTypeClients {
 }
 export interface GetJobJobSettingsSettingsNotebookTask {
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookPath: string;
     source?: string;
@@ -1717,7 +1717,7 @@ export interface GetJobJobSettingsSettingsPipelineTask {
 export interface GetJobJobSettingsSettingsPythonWheelTask {
     entryPoint?: string;
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     packageName?: string;
     parameters?: string[];
@@ -1732,7 +1732,7 @@ export interface GetJobJobSettingsSettingsRunAs {
 export interface GetJobJobSettingsSettingsRunJobTask {
     jobId: number;
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetJobJobSettingsSettingsSchedule {
@@ -1910,7 +1910,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewCluster {
     clusterMountInfos?: outputs.GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterDockerImage;
@@ -1928,10 +1928,10 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -2042,7 +2042,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNewClusterWorkloadT
 }
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskNotebookTask {
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookPath: string;
     source?: string;
@@ -2060,7 +2060,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPipelineTask {
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTask {
     entryPoint?: string;
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     packageName?: string;
     parameters?: string[];
@@ -2068,7 +2068,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskPythonWheelTask {
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskRunJobTask {
     jobId: number;
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetJobJobSettingsSettingsTaskForEachTaskTaskSparkJarTask {
@@ -2089,7 +2089,7 @@ export interface GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTask {
     dashboard?: outputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskDashboard;
     file?: outputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskFile;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     query?: outputs.GetJobJobSettingsSettingsTaskForEachTaskTaskSqlTaskQuery;
     warehouseId: string;
@@ -2198,7 +2198,7 @@ export interface GetJobJobSettingsSettingsTaskNewCluster {
     clusterMountInfos?: outputs.GetJobJobSettingsSettingsTaskNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.GetJobJobSettingsSettingsTaskNewClusterDockerImage;
@@ -2216,10 +2216,10 @@ export interface GetJobJobSettingsSettingsTaskNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -2330,7 +2330,7 @@ export interface GetJobJobSettingsSettingsTaskNewClusterWorkloadTypeClients {
 }
 export interface GetJobJobSettingsSettingsTaskNotebookTask {
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookPath: string;
     source?: string;
@@ -2348,7 +2348,7 @@ export interface GetJobJobSettingsSettingsTaskPipelineTask {
 export interface GetJobJobSettingsSettingsTaskPythonWheelTask {
     entryPoint?: string;
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     packageName?: string;
     parameters?: string[];
@@ -2356,7 +2356,7 @@ export interface GetJobJobSettingsSettingsTaskPythonWheelTask {
 export interface GetJobJobSettingsSettingsTaskRunJobTask {
     jobId: number;
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetJobJobSettingsSettingsTaskSparkJarTask {
@@ -2377,7 +2377,7 @@ export interface GetJobJobSettingsSettingsTaskSqlTask {
     dashboard?: outputs.GetJobJobSettingsSettingsTaskSqlTaskDashboard;
     file?: outputs.GetJobJobSettingsSettingsTaskSqlTaskFile;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     query?: outputs.GetJobJobSettingsSettingsTaskSqlTaskQuery;
     warehouseId: string;
@@ -2646,7 +2646,7 @@ export interface GetSchemaSchemaInfo {
      * map of properties set on the schema
      */
     properties?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * the unique identifier of the volume
@@ -2725,7 +2725,7 @@ export interface GetSqlWarehouseHealth {
 export interface GetSqlWarehouseHealthFailureReason {
     code?: string;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     type?: string;
 }
@@ -2887,7 +2887,7 @@ export interface GetTableTableInfo {
     owner?: string;
     pipelineId?: string;
     properties?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     rowFilter?: outputs.GetTableTableInfoRowFilter;
     /**
@@ -2940,7 +2940,7 @@ export interface GetTableTableInfoColumnMask {
 }
 export interface GetTableTableInfoDeltaRuntimePropertiesKvpairs {
     deltaRuntimeProperties: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface GetTableTableInfoEffectivePredictiveOptimizationFlag {
@@ -3345,7 +3345,7 @@ export interface JobJobClusterNewCluster {
     clusterMountInfos?: outputs.JobJobClusterNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.JobJobClusterNewClusterDockerImage;
@@ -3367,10 +3367,10 @@ export interface JobJobClusterNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -3552,7 +3552,7 @@ export interface JobNewCluster {
     clusterMountInfos?: outputs.JobNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.JobNewClusterDockerImage;
@@ -3574,10 +3574,10 @@ export interface JobNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -3732,7 +3732,7 @@ export interface JobNotebookTask {
      * (Map) Base parameters to be used for each run of this job. If the run is initiated by a call to run-now with parameters specified, the two parameters maps will be merged. If the same key is specified in baseParameters and in run-now, the value from run-now will be used. If the notebook takes a parameter that is not specified in the job’s baseParameters or the run-now override parameters, the default value from the notebook will be used. Retrieve these parameters in a notebook using `dbutils.widgets.get`.
      */
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * The path of the databricks.Notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.
@@ -3792,7 +3792,7 @@ export interface JobPythonWheelTask {
      * Named parameters for the task
      */
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Name of Python package
@@ -3839,7 +3839,7 @@ export interface JobRunJobTask {
      * (Map) Job parameters for the task
      */
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface JobSchedule {
@@ -4302,7 +4302,7 @@ export interface JobTaskForEachTaskTaskNewCluster {
     clusterMountInfos?: outputs.JobTaskForEachTaskTaskNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.JobTaskForEachTaskTaskNewClusterDockerImage;
@@ -4324,10 +4324,10 @@ export interface JobTaskForEachTaskTaskNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -4482,7 +4482,7 @@ export interface JobTaskForEachTaskTaskNotebookTask {
      * (Map) Base parameters to be used for each run of this job. If the run is initiated by a call to run-now with parameters specified, the two parameters maps will be merged. If the same key is specified in baseParameters and in run-now, the value from run-now will be used. If the notebook takes a parameter that is not specified in the job’s baseParameters or the run-now override parameters, the default value from the notebook will be used. Retrieve these parameters in a notebook using `dbutils.widgets.get`.
      */
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * The path of the databricks.Notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.
@@ -4534,7 +4534,7 @@ export interface JobTaskForEachTaskTaskPythonWheelTask {
      * Named parameters for the task
      */
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Name of Python package
@@ -4556,19 +4556,19 @@ export interface JobTaskForEachTaskTaskRunJobTask {
      * (Map) Job parameters for the task
      */
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookParams?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     pipelineParams?: outputs.JobTaskForEachTaskTaskRunJobTaskPipelineParams;
     pythonNamedParams?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     pythonParams?: string[];
     sparkSubmitParams?: string[];
     sqlParams?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface JobTaskForEachTaskTaskRunJobTaskPipelineParams {
@@ -4627,7 +4627,7 @@ export interface JobTaskForEachTaskTaskSqlTask {
      * (Map) parameters to be used for each run of this task. The SQL alert task does not support custom parameters.
      */
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * block consisting of single string field: `queryId` - identifier of the Databricks SQL Query (databricks_sql_query).
@@ -4848,7 +4848,7 @@ export interface JobTaskNewCluster {
     clusterMountInfos?: outputs.JobTaskNewClusterClusterMountInfo[];
     clusterName?: string;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     dataSecurityMode?: string;
     dockerImage?: outputs.JobTaskNewClusterDockerImage;
@@ -4870,10 +4870,10 @@ export interface JobTaskNewCluster {
     runtimeEngine?: string;
     singleUserName?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkVersion: string;
     sshPublicKeys?: string[];
@@ -5028,7 +5028,7 @@ export interface JobTaskNotebookTask {
      * (Map) Base parameters to be used for each run of this job. If the run is initiated by a call to run-now with parameters specified, the two parameters maps will be merged. If the same key is specified in baseParameters and in run-now, the value from run-now will be used. If the notebook takes a parameter that is not specified in the job’s baseParameters or the run-now override parameters, the default value from the notebook will be used. Retrieve these parameters in a notebook using `dbutils.widgets.get`.
      */
     baseParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * The path of the databricks.Notebook to be run in the Databricks workspace or remote repository. For notebooks stored in the Databricks workspace, the path must be absolute and begin with a slash. For notebooks stored in a remote repository, the path must be relative. This field is required.
@@ -5080,7 +5080,7 @@ export interface JobTaskPythonWheelTask {
      * Named parameters for the task
      */
     namedParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * Name of Python package
@@ -5102,19 +5102,19 @@ export interface JobTaskRunJobTask {
      * (Map) Job parameters for the task
      */
     jobParameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     notebookParams?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     pipelineParams?: outputs.JobTaskRunJobTaskPipelineParams;
     pythonNamedParams?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     pythonParams?: string[];
     sparkSubmitParams?: string[];
     sqlParams?: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface JobTaskRunJobTaskPipelineParams {
@@ -5173,7 +5173,7 @@ export interface JobTaskSqlTask {
      * (Map) parameters to be used for each run of this task. The SQL alert task does not support custom parameters.
      */
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * block consisting of single string field: `queryId` - identifier of the Databricks SQL Query (databricks_sql_query).
@@ -5685,7 +5685,7 @@ export interface ModelServingConfigServedEntity {
      * An object containing a set of optional, user-specified environment variable key-value pairs used for serving this entity. Note: this is an experimental feature and subject to change. Example entity environment variables that refer to Databricks secrets: ```{"OPENAI_API_KEY": "{{secrets/my_scope/my_key}}", "DATABRICKS_TOKEN": "{{secrets/my_scope2/my_key2}}"}```
      */
     environmentVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * The external model to be served. NOTE: Only one of `externalModel` and (`entityName`, `entityVersion`, `workloadSize`, `workloadType`, and `scaleToZeroEnabled`) can be specified with the latter set being used for custom model serving for a Databricks registered model. When an `externalModel` is present, the served entities list can only have one `servedEntity` object. For an existing endpoint with `externalModel`, it can not be updated to an endpoint without `externalModel`. If the endpoint is created without `externalModel`, users cannot update it to add `externalModel` later.
@@ -5866,7 +5866,7 @@ export interface ModelServingConfigServedModel {
      * a map of environment variable name/values that will be used for serving this model. Environment variables may refer to Databricks secrets using the standard syntax: `{{secrets/secret_scope/secret_key}}`.
      */
     environmentVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     /**
      * ARN of the instance profile that the served model will use to access AWS resources.
@@ -6331,7 +6331,7 @@ export interface PipelineCluster {
     azureAttributes?: outputs.PipelineClusterAzureAttributes;
     clusterLogConf?: outputs.PipelineClusterClusterLogConf;
     customTags?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     driverInstancePoolId?: string;
     driverNodeTypeId: string;
@@ -6344,10 +6344,10 @@ export interface PipelineCluster {
     numWorkers?: number;
     policyId?: string;
     sparkConf?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sparkEnvVars?: {
-        [key: string]: any;
+        [key: string]: string;
     };
     sshPublicKeys?: string[];
 }
@@ -6673,7 +6673,7 @@ export interface RecipientPropertiesKvpairs {
      * a map of string key-value pairs with recipient's properties. Properties with name starting with `databricks.` are reserved.
      */
     properties: {
-        [key: string]: any;
+        [key: string]: string;
     };
 }
 export interface RecipientToken {
@@ -6834,7 +6834,7 @@ export interface SqlEndpointHealth {
 export interface SqlEndpointHealthFailureReason {
     code?: string;
     parameters?: {
-        [key: string]: any;
+        [key: string]: string;
    };
     type?: string;
 }
package/workspaceConf.d.ts CHANGED
@@ -21,7 +21,7 @@ import * as pulumi from "@pulumi/pulumi";
  * import * as databricks from "@pulumi/databricks";
  *
  * const _this = new databricks.WorkspaceConf("this", {customConfig: {
- *     enableIpAccessLists: true,
+ *     enableIpAccessLists: "true",
  * }});
  * ```
  *
@@ -49,7 +49,7 @@ export declare class WorkspaceConf extends pulumi.CustomResource {
      * Key-value map of strings that represent workspace configuration. Upon resource deletion, properties that start with `enable` or `enforce` will be reset to `false` value, regardless of initial default one.
      */
     readonly customConfig: pulumi.Output<{
-        [key: string]: any;
+        [key: string]: string;
     } | undefined>;
     /**
      * Create a WorkspaceConf resource with the given unique name, arguments, and options.
@@ -68,7 +68,7 @@ export interface WorkspaceConfState {
      * Key-value map of strings that represent workspace configuration. Upon resource deletion, properties that start with `enable` or `enforce` will be reset to `false` value, regardless of initial default one.
      */
     customConfig?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
 /**
@@ -79,6 +79,6 @@ export interface WorkspaceConfArgs {
      * Key-value map of strings that represent workspace configuration. Upon resource deletion, properties that start with `enable` or `enforce` will be reset to `false` value, regardless of initial default one.
      */
     customConfig?: pulumi.Input<{
-        [key: string]: any;
+        [key: string]: pulumi.Input<string>;
     }>;
 }
package/workspaceConf.js CHANGED
@@ -27,7 +27,7 @@ const utilities = require("./utilities");
  * import * as databricks from "@pulumi/databricks";
  *
  * const _this = new databricks.WorkspaceConf("this", {customConfig: {
- *     enableIpAccessLists: true,
+ *     enableIpAccessLists: "true",
  * }});
  * ```
  *
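
Together with the doc-comment update above, `WorkspaceConf`'s `customConfig` input is now typed `pulumi.Input<{ [key: string]: pulumi.Input<string> }>`, so boolean-like settings are written as the strings `"true"` or `"false"`. A small usage sketch under the new typings:

```typescript
import * as databricks from "@pulumi/databricks";

// With the 1.49.0-alpha typings, customConfig values must be strings
// (or Inputs of strings); a bare boolean such as `true` no longer type-checks.
const conf = new databricks.WorkspaceConf("this", {
    customConfig: {
        enableIpAccessLists: "true",
    },
});
```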