@maxim_mazurok/gapi.client.dataproc-v1 0.0.20230830 → 0.0.20230919

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.d.ts +38 -35
  2. package/package.json +1 -1
  3. package/tests.ts +117 -9
package/index.d.ts CHANGED
@@ -9,7 +9,7 @@
 // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
 // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
 // Generated from: https://dataproc.googleapis.com/$discovery/rest?version=v1
-// Revision: 20230830
+// Revision: 20230919
 
 /// <reference types="gapi.client" />
 
@@ -531,9 +531,6 @@ declare namespace gapi.client {
     /** Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster. */
     gcePdKmsKeyName?:
       string;
-    /** Optional. The Cloud KMS key name to use for encrypting customer core content and cluster PD disk for all instances in the cluster. */
-    kmsKey?:
-      string;
   }
   interface EndpointConfig {
     /** Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false. */
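The removal of kmsKey from EncryptionConfig is a breaking change for callers that still set it: the payload will no longer type-check against 0.0.20230919. A minimal sketch, assuming the interfaces are declared directly under gapi.client as the hunk headers above show; the project and key names are hypothetical:

```ts
// Only gcePdKmsKeyName survives in EncryptionConfig as of this release.
const encryptionConfig: gapi.client.EncryptionConfig = {
  gcePdKmsKeyName:
    "projects/my-project/locations/global/keyRings/my-ring/cryptoKeys/my-key",
  // kmsKey: "...", // no longer compiles: the property was removed in 0.0.20230919
};
```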
@@ -583,11 +580,11 @@ declare namespace gapi.client {
     subnetworkUri?:
       string;
     /**
-     * Optional. The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work
-     * to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or runs forever without exiting). If ttl is not specified for
-     * an interactive session, it defaults to 24h. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4h. Minimum value is 10 minutes; maximum value is 14
-     * days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). If both ttl and idle_ttl are specified (for an interactive session),
-     * the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
+     * Optional. The duration after which the workload will be terminated, specified as the JSON representation for Duration (https://protobuf.dev/programming-guides/proto3/#json). When
+     * the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will
+     * be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for
+     * a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idle_ttl are specified (for an interactive
+     * session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
      */
     ttl?:
       string;
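The reworded doc pins down the wire format: ttl is a proto3 Duration in its JSON form, i.e. a decimal number of seconds with an "s" suffix. A minimal sketch, assuming this ttl belongs to the ExecutionConfig interface (its sibling subnetworkUri above matches that type); the field values are hypothetical:

```ts
// Duration's JSON representation is seconds with an "s" suffix.
const executionConfig: gapi.client.ExecutionConfig = {
  subnetworkUri: "default",  // hypothetical subnetwork
  ttl: "14400s",             // 4 hours, the documented default for 2.1+ batch runtimes
};
```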
@@ -608,8 +605,8 @@
   }
   interface FlinkJob {
     /**
-     * Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job
-     * submission.
+     * Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision might occur that causes an incorrect
+     * job submission.
      */
     args?:
       string[];
@@ -619,19 +616,19 @@
     /** Optional. The runtime log config for job execution. */
     loggingConfig?:
       LoggingConfig;
-    /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. */
+    /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jarFileUris. */
     mainClass?:
       string;
     /** The HCFS URI of the jar file that contains the main class. */
     mainJarFileUri?:
       string;
     /**
-     * Optional. A mapping of property names to values, used to configure Flink. Properties that conflict with values set by the Dataproc API may beoverwritten. Can include properties set
-     * in/etc/flink/conf/flink-defaults.conf and classes in user code.
+     * Optional. A mapping of property names to values, used to configure Flink. Properties that conflict with values set by the Dataproc API might beoverwritten. Can include properties
+     * set in/etc/flink/conf/flink-defaults.conf and classes in user code.
      */
     properties?:
       { [P in string]: string };
-    /** Optional. HCFS URI of the savepoint which contains the last saved progress for this job */
+    /** Optional. HCFS URI of the savepoint, which contains the last saved progress for starting the current job. */
     savepointUri?:
       string;
   }
@@ -831,8 +828,8 @@
     archiveUris?:
       string[];
     /**
-     * Optional. The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an
-     * incorrect job submission.
+     * Optional. The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision might occur that causes
+     * an incorrect job submission.
      */
     args?:
       string[];
@@ -855,7 +852,7 @@
     mainJarFileUri?:
       string;
     /**
-     * Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties
+     * Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
      * set in /etc/hadoop/conf/*-site and classes in user code.
      */
     properties?:
@@ -869,8 +866,8 @@
     jarFileUris?:
       string[];
     /**
-     * Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set
-     * in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
+     * Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
+     * set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
      */
     properties?:
       { [P in string]: string };
@@ -1055,7 +1052,7 @@
     done?:
       boolean;
     /**
-     * Output only. If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same
+     * Output only. If present, the location of miscellaneous control files which can be used as part of job setup and handling. If not present, control files might be placed in the same
      * location as driver_output_uri.
      */
     driverControlFilesUri?:
@@ -1075,11 +1072,11 @@
     /** Optional. Job is a Hive job. */
     hiveJob?:
       HiveJob;
-    /** Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a user-settable reference.job_id that may be reused over time. */
+    /** Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a user-settable reference.job_id that might be reused over time. */
     jobUuid?:
       string;
     /**
-     * Optional. The labels to associate with this job. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be
+     * Optional. The labels to associate with this job. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values can be
      * empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
      */
     labels?:
@@ -1114,7 +1111,7 @@
     /** Optional. Job is a SparkSql job. */
     sparkSqlJob?:
       SparkSqlJob;
-    /** Output only. The job status. Additional application-specific status information may be contained in the type_job and yarn_applications fields. */
+    /** Output only. The job status. Additional application-specific status information might be contained in the type_job and yarn_applications fields. */
     status?:
       JobStatus;
     /** Output only. The previous job status. */
@@ -1123,7 +1120,7 @@
     /** Optional. Job is a Trino job. */
     trinoJob?:
       TrinoJob;
-    /** Output only. The collection of YARN applications spun up by this job.Beta Feature: This report is available for testing purposes only. It may be changed before final release. */
+    /** Output only. The collection of YARN applications spun up by this job.Beta Feature: This report is available for testing purposes only. It might be changed before final release. */
     yarnApplications?:
       YarnApplication[];
   }
@@ -1165,14 +1162,14 @@
   }
   interface JobScheduling {
     /**
-     * Optional. Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.A job may be reported as
+     * Optional. Maximum number of times per hour a driver can be restarted as a result of driver exiting with non-zero code before job is reported failed.A job might be reported as
      * thrashing if the driver exits with a non-zero code four times within a 10-minute window.Maximum value is 10.Note: This restartable job option is not supported in Dataproc workflow
      * templates (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template).
      */
     maxFailuresPerHour?:
       number;
     /**
-     * Optional. Maximum total number of times a driver may be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be
+     * Optional. Maximum total number of times a driver can be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be
      * reported as failed.Maximum value is 240.Note: Currently, this restartable job option is not supported in Dataproc workflow templates
      * (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template).
      */
@@ -1382,7 +1379,7 @@
       WorkflowTemplate[];
   }
   interface LoggingConfig {
-    /** The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' - 'root = INFO' - 'org.apache = DEBUG' */
+    /** The per-package log levels for the driver. This can include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' - 'root = INFO' - 'org.apache = DEBUG' */
     driverLogLevels?:
       { [P in string]: string };
   }
@@ -1553,6 +1550,9 @@
       { [P in string]: any };
   }
   interface OrderedJob {
+    /** Optional. Job is a Flink job. */
+    flinkJob?:
+      FlinkJob;
     /** Optional. Job is a Hadoop job. */
     hadoopJob?:
       HadoopJob;
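With flinkJob added to OrderedJob, workflow templates can now carry Flink steps with full typing. A minimal sketch of such a step, using only fields from the FlinkJob interface above; all values are hypothetical, and stepId is a pre-existing OrderedJob field not touched by this diff:

```ts
// Sketch only; set either mainClass (with jarFileUris) or mainJarFileUri.
const flinkStep: gapi.client.OrderedJob = {
  stepId: "flink-wordcount",
  flinkJob: {
    mainClass: "com.example.WordCount",            // hypothetical entry point
    jarFileUris: ["gs://my-bucket/wordcount.jar"],
    args: ["gs://my-bucket/input.txt"],
    properties: { "parallelism.default": "2" },
    savepointUri: "gs://my-bucket/savepoints/latest", // resume from last saved progress
  },
};
```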
@@ -1628,7 +1628,7 @@
     loggingConfig?:
       LoggingConfig;
     /**
-     * Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set
+     * Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties set
      * in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
      */
     properties?:
@@ -1749,7 +1749,7 @@
     mainPythonFileUri?:
       string;
     /**
-     * Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties
+     * Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
      * set in /etc/spark/conf/spark-defaults.conf and classes in user code.
      */
     properties?:
@@ -2003,6 +2003,9 @@
     /** Output only. The time the template was last updated. */
     updateTime?:
       string;
+    /** Output only. A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template. */
+    uuid?:
+      string;
   }
   interface SetIamPolicyRequest {
     /**
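Since the new uuid field is output only, clients should not send it; it only appears on responses. A minimal sketch of reading it back; the get method and the template name are assumptions based on the Dataproc v1 surface, not something this diff shows (the tests below only exercise delete and template payloads):

```ts
// Inside an async context, e.g. the gapi.load('client', ...) callback.
const response = await gapi.client.dataproc.projects.locations.sessionTemplates.get({
  name: "projects/my-project/locations/us-central1/sessionTemplates/my-template",
});
console.log(response.result.uuid); // server-generated at creation time
```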
@@ -2089,15 +2092,15 @@
     /** Optional. The runtime log config for job execution. */
     loggingConfig?:
       LoggingConfig;
-    /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. */
+    /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in SparkJob.jar_file_uris. */
     mainClass?:
       string;
     /** The HCFS URI of the jar file that contains the main class. */
     mainJarFileUri?:
       string;
     /**
-     * Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set
-     * in /etc/spark/conf/spark-defaults.conf and classes in user code.
+     * Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
+     * set in /etc/spark/conf/spark-defaults.conf and classes in user code.
      */
     properties?:
       { [P in string]: string };
@@ -2139,7 +2142,7 @@
     mainRFileUri?:
       string;
     /**
-     * Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties
+     * Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
      * set in /etc/spark/conf/spark-defaults.conf and classes in user code.
      */
     properties?:
@@ -2163,7 +2166,7 @@
     /** Optional. The runtime log config for job execution. */
     loggingConfig?:
       LoggingConfig;
-    /** Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. */
+    /** Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API might be overwritten. */
     properties?:
       { [P in string]: string };
     /** The HCFS URI of the script that contains SQL queries. */
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@maxim_mazurok/gapi.client.dataproc-v1",
-  "version": "0.0.20230830",
+  "version": "0.0.20230919",
   "description": "TypeScript typings for Cloud Dataproc API v1",
   "license": "MIT",
   "author": {
package/tests.ts CHANGED
@@ -3,7 +3,7 @@
 // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
 // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
 
-// Revision: 20230830
+// Revision: 20230919
 
 gapi.load('client', async () => {
   /** now we can use gapi.client */
@@ -482,6 +482,7 @@ gapi.load('client', async () => {
       version: "Test string",
     },
     updateTime: "Test string",
+    uuid: "Test string",
   });
   /** Deletes a session template. */
   await gapi.client.dataproc.projects.locations.sessionTemplates.delete({
@@ -546,6 +547,7 @@
       version: "Test string",
     },
     updateTime: "Test string",
+    uuid: "Test string",
   });
   /** Creates new workflow template. */
   await gapi.client.dataproc.projects.locations.workflowTemplates.create({
@@ -556,6 +558,25 @@
     id: "Test string",
     jobs: [
       {
+        flinkJob: {
+          args: [
+            "Test string"
+          ],
+          jarFileUris: [
+            "Test string"
+          ],
+          loggingConfig: {
+            driverLogLevels: {
+              A: "Test string"
+            },
+          },
+          mainClass: "Test string",
+          mainJarFileUri: "Test string",
+          properties: {
+            A: "Test string"
+          },
+          savepointUri: "Test string",
+        },
         hadoopJob: {
           archiveUris: [
             "Test string"
@@ -886,7 +907,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -1234,6 +1254,25 @@
     id: "Test string",
     jobs: [
       {
+        flinkJob: {
+          args: [
+            "Test string"
+          ],
+          jarFileUris: [
+            "Test string"
+          ],
+          loggingConfig: {
+            driverLogLevels: {
+              A: "Test string"
+            },
+          },
+          mainClass: "Test string",
+          mainJarFileUri: "Test string",
+          properties: {
+            A: "Test string"
+          },
+          savepointUri: "Test string",
+        },
         hadoopJob: {
           archiveUris: [
             "Test string"
@@ -1564,7 +1603,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -1912,6 +1950,25 @@
     id: "Test string",
     jobs: [
       {
+        flinkJob: {
+          args: [
+            "Test string"
+          ],
+          jarFileUris: [
+            "Test string"
+          ],
+          loggingConfig: {
+            driverLogLevels: {
+              A: "Test string"
+            },
+          },
+          mainClass: "Test string",
+          mainJarFileUri: "Test string",
+          properties: {
+            A: "Test string"
+          },
+          savepointUri: "Test string",
+        },
         hadoopJob: {
           archiveUris: [
             "Test string"
@@ -2242,7 +2299,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -2767,7 +2823,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -3311,7 +3366,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -4824,6 +4878,25 @@
     id: "Test string",
     jobs: [
       {
+        flinkJob: {
+          args: [
+            "Test string"
+          ],
+          jarFileUris: [
+            "Test string"
+          ],
+          loggingConfig: {
+            driverLogLevels: {
+              A: "Test string"
+            },
+          },
+          mainClass: "Test string",
+          mainJarFileUri: "Test string",
+          properties: {
+            A: "Test string"
+          },
+          savepointUri: "Test string",
+        },
         hadoopJob: {
           archiveUris: [
             "Test string"
@@ -5154,7 +5227,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -5502,6 +5574,25 @@
     id: "Test string",
     jobs: [
       {
+        flinkJob: {
+          args: [
+            "Test string"
+          ],
+          jarFileUris: [
+            "Test string"
+          ],
+          loggingConfig: {
+            driverLogLevels: {
+              A: "Test string"
+            },
+          },
+          mainClass: "Test string",
+          mainJarFileUri: "Test string",
+          properties: {
+            A: "Test string"
+          },
+          savepointUri: "Test string",
+        },
         hadoopJob: {
           archiveUris: [
             "Test string"
@@ -5832,7 +5923,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,
@@ -6180,6 +6270,25 @@
     id: "Test string",
     jobs: [
       {
+        flinkJob: {
+          args: [
+            "Test string"
+          ],
+          jarFileUris: [
+            "Test string"
+          ],
+          loggingConfig: {
+            driverLogLevels: {
+              A: "Test string"
+            },
+          },
+          mainClass: "Test string",
+          mainJarFileUri: "Test string",
+          properties: {
+            A: "Test string"
+          },
+          savepointUri: "Test string",
+        },
         hadoopJob: {
           archiveUris: [
             "Test string"
@@ -6510,7 +6619,6 @@
     },
     encryptionConfig: {
       gcePdKmsKeyName: "Test string",
-      kmsKey: "Test string",
     },
     endpointConfig: {
       enableHttpPortAccess: true,