@maxim_mazurok/gapi.client.dataproc-v1 0.0.20230908 → 0.0.20230926

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.d.ts +53 -35
  2. package/package.json +1 -1
  3. package/tests.ts +220 -9
package/index.d.ts CHANGED
@@ -9,7 +9,7 @@
9
9
  // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
10
10
  // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
11
11
  // Generated from: https://dataproc.googleapis.com/$discovery/rest?version=v1
12
- // Revision: 20230908
12
+ // Revision: 20230926
13
13
 
14
14
  /// <reference types="gapi.client" />
15
15
 
@@ -531,9 +531,6 @@ declare namespace gapi.client {
531
531
  /** Optional. The Cloud KMS key name to use for PD disk encryption for all instances in the cluster. */
532
532
  gcePdKmsKeyName?:
533
533
  string;
534
- /** Optional. The Cloud KMS key name to use for encrypting customer core content and cluster PD disk for all instances in the cluster. */
535
- kmsKey?:
536
- string;
537
534
  }
538
535
  interface EndpointConfig {
539
536
  /** Optional. If true, enable http access to specific ports on the cluster from external sources. Defaults to false. */
@@ -583,11 +580,11 @@ declare namespace gapi.client {
583
580
  subnetworkUri?:
584
581
  string;
585
582
  /**
586
- * Optional. The duration after which the workload will be terminated. When the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work
587
- * to finish. If ttl is not specified for a batch workload, the workload will be allowed to run until it exits naturally (or runs forever without exiting). If ttl is not specified for
588
- * an interactive session, it defaults to 24h. If ttl is not specified for a batch that uses 2.1+ runtime version, it defaults to 4h. Minimum value is 10 minutes; maximum value is 14
589
- * days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). If both ttl and idle_ttl are specified (for an interactive session),
590
- * the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
583
+ * Optional. The duration after which the workload will be terminated, specified as the JSON representation for Duration (https://protobuf.dev/programming-guides/proto3/#json). When
584
+ * the workload exceeds this duration, it will be unconditionally terminated without waiting for ongoing work to finish. If ttl is not specified for a batch workload, the workload will
585
+ * be allowed to run until it exits naturally (or run forever without exiting). If ttl is not specified for an interactive session, it defaults to 24 hours. If ttl is not specified for
586
+ * a batch that uses 2.1+ runtime version, it defaults to 4 hours. Minimum value is 10 minutes; maximum value is 14 days. If both ttl and idle_ttl are specified (for an interactive
587
+ * session), the conditions are treated as OR conditions: the workload will be terminated when it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
591
588
  */
592
589
  ttl?:
593
590
  string;
@@ -608,8 +605,8 @@ declare namespace gapi.client {
608
605
  }
609
606
  interface FlinkJob {
610
607
  /**
611
- * Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job
612
- * submission.
608
+ * Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision might occur that causes an incorrect
609
+ * job submission.
613
610
  */
614
611
  args?:
615
612
  string[];
@@ -619,19 +616,19 @@ declare namespace gapi.client {
619
616
  /** Optional. The runtime log config for job execution. */
620
617
  loggingConfig?:
621
618
  LoggingConfig;
622
- /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. */
619
+ /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jarFileUris. */
623
620
  mainClass?:
624
621
  string;
625
622
  /** The HCFS URI of the jar file that contains the main class. */
626
623
  mainJarFileUri?:
627
624
  string;
628
625
  /**
629
- * Optional. A mapping of property names to values, used to configure Flink. Properties that conflict with values set by the Dataproc API may beoverwritten. Can include properties set
630
- * in/etc/flink/conf/flink-defaults.conf and classes in user code.
626
+ * Optional. A mapping of property names to values, used to configure Flink. Properties that conflict with values set by the Dataproc API might beoverwritten. Can include properties
627
+ * set in/etc/flink/conf/flink-defaults.conf and classes in user code.
631
628
  */
632
629
  properties?:
633
630
  { [P in string]: string };
634
- /** Optional. HCFS URI of the savepoint which contains the last saved progress for this job */
631
+ /** Optional. HCFS URI of the savepoint, which contains the last saved progress for starting the current job. */
635
632
  savepointUri?:
636
633
  string;
637
634
  }
@@ -831,8 +828,8 @@ declare namespace gapi.client {
831
828
  archiveUris?:
832
829
  string[];
833
830
  /**
834
- * Optional. The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision may occur that causes an
835
- * incorrect job submission.
831
+ * Optional. The arguments to pass to the driver. Do not include arguments, such as -libjars or -Dfoo=bar, that can be set as job properties, since a collision might occur that causes
832
+ * an incorrect job submission.
836
833
  */
837
834
  args?:
838
835
  string[];
@@ -855,7 +852,7 @@ declare namespace gapi.client {
855
852
  mainJarFileUri?:
856
853
  string;
857
854
  /**
858
- * Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties
855
+ * Optional. A mapping of property names to values, used to configure Hadoop. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
859
856
  * set in /etc/hadoop/conf/*-site and classes in user code.
860
857
  */
861
858
  properties?:
@@ -869,8 +866,8 @@ declare namespace gapi.client {
869
866
  jarFileUris?:
870
867
  string[];
871
868
  /**
872
- * Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set
873
- * in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
869
+ * Optional. A mapping of property names and values, used to configure Hive. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
870
+ * set in /etc/hadoop/conf/*-site.xml, /etc/hive/conf/hive-site.xml, and classes in user code.
874
871
  */
875
872
  properties?:
876
873
  { [P in string]: string };
@@ -987,6 +984,9 @@ declare namespace gapi.client {
987
984
  */
988
985
  preemptibility?:
989
986
  string;
987
+ /** Optional. Configuration to handle the startup of instances during cluster create and update process. */
988
+ startupConfig?:
989
+ StartupConfig;
990
990
  }
991
991
  interface InstanceReference {
992
992
  /** The unique identifier of the Compute Engine instance. */
@@ -1055,7 +1055,7 @@ declare namespace gapi.client {
1055
1055
  done?:
1056
1056
  boolean;
1057
1057
  /**
1058
- * Output only. If present, the location of miscellaneous control files which may be used as part of job setup and handling. If not present, control files may be placed in the same
1058
+ * Output only. If present, the location of miscellaneous control files which can be used as part of job setup and handling. If not present, control files might be placed in the same
1059
1059
  * location as driver_output_uri.
1060
1060
  */
1061
1061
  driverControlFilesUri?:
@@ -1075,11 +1075,11 @@ declare namespace gapi.client {
1075
1075
  /** Optional. Job is a Hive job. */
1076
1076
  hiveJob?:
1077
1077
  HiveJob;
1078
- /** Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a user-settable reference.job_id that may be reused over time. */
1078
+ /** Output only. A UUID that uniquely identifies a job within the project over time. This is in contrast to a user-settable reference.job_id that might be reused over time. */
1079
1079
  jobUuid?:
1080
1080
  string;
1081
1081
  /**
1082
- * Optional. The labels to associate with this job. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values may be
1082
+ * Optional. The labels to associate with this job. Label keys must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). Label values can be
1083
1083
  * empty, but, if present, must contain 1 to 63 characters, and must conform to RFC 1035 (https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be associated with a job.
1084
1084
  */
1085
1085
  labels?:
@@ -1114,7 +1114,7 @@ declare namespace gapi.client {
1114
1114
  /** Optional. Job is a SparkSql job. */
1115
1115
  sparkSqlJob?:
1116
1116
  SparkSqlJob;
1117
- /** Output only. The job status. Additional application-specific status information may be contained in the type_job and yarn_applications fields. */
1117
+ /** Output only. The job status. Additional application-specific status information might be contained in the type_job and yarn_applications fields. */
1118
1118
  status?:
1119
1119
  JobStatus;
1120
1120
  /** Output only. The previous job status. */
@@ -1123,7 +1123,7 @@ declare namespace gapi.client {
1123
1123
  /** Optional. Job is a Trino job. */
1124
1124
  trinoJob?:
1125
1125
  TrinoJob;
1126
- /** Output only. The collection of YARN applications spun up by this job.Beta Feature: This report is available for testing purposes only. It may be changed before final release. */
1126
+ /** Output only. The collection of YARN applications spun up by this job.Beta Feature: This report is available for testing purposes only. It might be changed before final release. */
1127
1127
  yarnApplications?:
1128
1128
  YarnApplication[];
1129
1129
  }
@@ -1165,14 +1165,14 @@ declare namespace gapi.client {
1165
1165
  }
1166
1166
  interface JobScheduling {
1167
1167
  /**
1168
- * Optional. Maximum number of times per hour a driver may be restarted as a result of driver exiting with non-zero code before job is reported failed.A job may be reported as
1168
+ * Optional. Maximum number of times per hour a driver can be restarted as a result of driver exiting with non-zero code before job is reported failed.A job might be reported as
1169
1169
  * thrashing if the driver exits with a non-zero code four times within a 10-minute window.Maximum value is 10.Note: This restartable job option is not supported in Dataproc workflow
1170
1170
  * templates (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template).
1171
1171
  */
1172
1172
  maxFailuresPerHour?:
1173
1173
  number;
1174
1174
  /**
1175
- * Optional. Maximum total number of times a driver may be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be
1175
+ * Optional. Maximum total number of times a driver can be restarted as a result of the driver exiting with a non-zero code. After the maximum number is reached, the job will be
1176
1176
  * reported as failed.Maximum value is 240.Note: Currently, this restartable job option is not supported in Dataproc workflow templates
1177
1177
  * (https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template).
1178
1178
  */
@@ -1382,7 +1382,7 @@ declare namespace gapi.client {
1382
1382
  WorkflowTemplate[];
1383
1383
  }
1384
1384
  interface LoggingConfig {
1385
- /** The per-package log levels for the driver. This may include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' - 'root = INFO' - 'org.apache = DEBUG' */
1385
+ /** The per-package log levels for the driver. This can include "root" package name to configure rootLogger. Examples: - 'com.google = FATAL' - 'root = INFO' - 'org.apache = DEBUG' */
1386
1386
  driverLogLevels?:
1387
1387
  { [P in string]: string };
1388
1388
  }
@@ -1553,6 +1553,9 @@ declare namespace gapi.client {
1553
1553
  { [P in string]: any };
1554
1554
  }
1555
1555
  interface OrderedJob {
1556
+ /** Optional. Job is a Flink job. */
1557
+ flinkJob?:
1558
+ FlinkJob;
1556
1559
  /** Optional. Job is a Hadoop job. */
1557
1560
  hadoopJob?:
1558
1561
  HadoopJob;
@@ -1628,7 +1631,7 @@ declare namespace gapi.client {
1628
1631
  loggingConfig?:
1629
1632
  LoggingConfig;
1630
1633
  /**
1631
- * Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set
1634
+ * Optional. A mapping of property names to values, used to configure Pig. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties set
1632
1635
  * in /etc/hadoop/conf/*-site.xml, /etc/pig/conf/pig.properties, and classes in user code.
1633
1636
  */
1634
1637
  properties?:
@@ -1749,7 +1752,7 @@ declare namespace gapi.client {
1749
1752
  mainPythonFileUri?:
1750
1753
  string;
1751
1754
  /**
1752
- * Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties
1755
+ * Optional. A mapping of property names to values, used to configure PySpark. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
1753
1756
  * set in /etc/spark/conf/spark-defaults.conf and classes in user code.
1754
1757
  */
1755
1758
  properties?:
@@ -2003,6 +2006,9 @@ declare namespace gapi.client {
2003
2006
  /** Output only. The time the template was last updated. */
2004
2007
  updateTime?:
2005
2008
  string;
2009
+ /** Output only. A session template UUID (Unique Universal Identifier). The service generates this value when it creates the session template. */
2010
+ uuid?:
2011
+ string;
2006
2012
  }
2007
2013
  interface SetIamPolicyRequest {
2008
2014
  /**
@@ -2089,15 +2095,15 @@ declare namespace gapi.client {
2089
2095
  /** Optional. The runtime log config for job execution. */
2090
2096
  loggingConfig?:
2091
2097
  LoggingConfig;
2092
- /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. */
2098
+ /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in SparkJob.jar_file_uris. */
2093
2099
  mainClass?:
2094
2100
  string;
2095
2101
  /** The HCFS URI of the jar file that contains the main class. */
2096
2102
  mainJarFileUri?:
2097
2103
  string;
2098
2104
  /**
2099
- * Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties set
2100
- * in /etc/spark/conf/spark-defaults.conf and classes in user code.
2105
+ * Optional. A mapping of property names to values, used to configure Spark. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
2106
+ * set in /etc/spark/conf/spark-defaults.conf and classes in user code.
2101
2107
  */
2102
2108
  properties?:
2103
2109
  { [P in string]: string };
@@ -2139,7 +2145,7 @@ declare namespace gapi.client {
2139
2145
  mainRFileUri?:
2140
2146
  string;
2141
2147
  /**
2142
- * Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API may be overwritten. Can include properties
2148
+ * Optional. A mapping of property names to values, used to configure SparkR. Properties that conflict with values set by the Dataproc API might be overwritten. Can include properties
2143
2149
  * set in /etc/spark/conf/spark-defaults.conf and classes in user code.
2144
2150
  */
2145
2151
  properties?:
@@ -2163,7 +2169,7 @@ declare namespace gapi.client {
2163
2169
  /** Optional. The runtime log config for job execution. */
2164
2170
  loggingConfig?:
2165
2171
  LoggingConfig;
2166
- /** Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API may be overwritten. */
2172
+ /** Optional. A mapping of property names to values, used to configure Spark SQL's SparkConf. Properties that conflict with values set by the Dataproc API might be overwritten. */
2167
2173
  properties?:
2168
2174
  { [P in string]: string };
2169
2175
  /** The HCFS URI of the script that contains SQL queries. */
@@ -2183,6 +2189,9 @@ declare namespace gapi.client {
2183
2189
  */
2184
2190
  gracefulDecommissionTimeout?:
2185
2191
  string;
2192
+ /** Optional. Remove only idle workers when scaling down cluster */
2193
+ removeOnlyIdleWorkers?:
2194
+ boolean;
2186
2195
  /**
2187
2196
  * Required. Fraction of required executors to remove from Spark Serverless clusters. A scale-down factor of 1.0 will result in scaling down so that there are no more executors for the
2188
2197
 * Spark Job (more aggressive scaling). A scale-down factor closer to 0 will result in a smaller magnitude of scaling down (less aggressive scaling).Bounds: 0.0, 1.0.
@@ -2222,6 +2231,15 @@ declare namespace gapi.client {
2222
2231
  requestId?:
2223
2232
  string;
2224
2233
  }
2234
+ interface StartupConfig {
2235
+ /**
2236
+ * Optional. The config setting to enable cluster creation/ updation to be successful only after required_registration_fraction of instances are up and running. This configuration is
2237
+ * applicable to only secondary workers for now. The cluster will fail if required_registration_fraction of instances are not available. This will include instance creation, agent
2238
+ * registration, and service registration (if enabled).
2239
+ */
2240
+ requiredRegistrationFraction?:
2241
+ number;
2242
+ }
2225
2243
  interface StateHistory {
2226
2244
  /** Output only. The state of the batch at this point in history. */
2227
2245
  state?:
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@maxim_mazurok/gapi.client.dataproc-v1",
3
- "version": "0.0.20230908",
3
+ "version": "0.0.20230926",
4
4
  "description": "TypeScript typings for Cloud Dataproc API v1",
5
5
  "license": "MIT",
6
6
  "author": {
package/tests.ts CHANGED
@@ -3,7 +3,7 @@
3
3
  // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
4
4
  // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
5
5
 
6
- // Revision: 20230908
6
+ // Revision: 20230926
7
7
 
8
8
  gapi.load('client', async () => {
9
9
  /** now we can use gapi.client */
@@ -37,6 +37,7 @@ gapi.load('client', async () => {
37
37
  cooldownPeriod: "Test string",
38
38
  sparkStandaloneConfig: {
39
39
  gracefulDecommissionTimeout: "Test string",
40
+ removeOnlyIdleWorkers: true,
40
41
  scaleDownFactor: 42,
41
42
  scaleDownMinWorkerFraction: 42,
42
43
  scaleUpFactor: 42,
@@ -130,6 +131,7 @@ gapi.load('client', async () => {
130
131
  cooldownPeriod: "Test string",
131
132
  sparkStandaloneConfig: {
132
133
  gracefulDecommissionTimeout: "Test string",
134
+ removeOnlyIdleWorkers: true,
133
135
  scaleDownFactor: 42,
134
136
  scaleDownMinWorkerFraction: 42,
135
137
  scaleUpFactor: 42,
@@ -482,6 +484,7 @@ gapi.load('client', async () => {
482
484
  version: "Test string",
483
485
  },
484
486
  updateTime: "Test string",
487
+ uuid: "Test string",
485
488
  });
486
489
  /** Deletes a session template. */
487
490
  await gapi.client.dataproc.projects.locations.sessionTemplates.delete({
@@ -546,6 +549,7 @@ gapi.load('client', async () => {
546
549
  version: "Test string",
547
550
  },
548
551
  updateTime: "Test string",
552
+ uuid: "Test string",
549
553
  });
550
554
  /** Creates new workflow template. */
551
555
  await gapi.client.dataproc.projects.locations.workflowTemplates.create({
@@ -556,6 +560,25 @@ gapi.load('client', async () => {
556
560
  id: "Test string",
557
561
  jobs: [
558
562
  {
563
+ flinkJob: {
564
+ args: [
565
+ "Test string"
566
+ ],
567
+ jarFileUris: [
568
+ "Test string"
569
+ ],
570
+ loggingConfig: {
571
+ driverLogLevels: {
572
+ A: "Test string"
573
+ },
574
+ },
575
+ mainClass: "Test string",
576
+ mainJarFileUri: "Test string",
577
+ properties: {
578
+ A: "Test string"
579
+ },
580
+ savepointUri: "Test string",
581
+ },
559
582
  hadoopJob: {
560
583
  archiveUris: [
561
584
  "Test string"
@@ -865,6 +888,9 @@ gapi.load('client', async () => {
865
888
  minNumInstances: 42,
866
889
  numInstances: 42,
867
890
  preemptibility: "Test string",
891
+ startupConfig: {
892
+ requiredRegistrationFraction: 42,
893
+ },
868
894
  },
869
895
  roles: [
870
896
  "Test string"
@@ -886,7 +912,6 @@ gapi.load('client', async () => {
886
912
  },
887
913
  encryptionConfig: {
888
914
  gcePdKmsKeyName: "Test string",
889
- kmsKey: "Test string",
890
915
  },
891
916
  endpointConfig: {
892
917
  enableHttpPortAccess: true,
@@ -1032,6 +1057,9 @@ gapi.load('client', async () => {
1032
1057
  minNumInstances: 42,
1033
1058
  numInstances: 42,
1034
1059
  preemptibility: "Test string",
1060
+ startupConfig: {
1061
+ requiredRegistrationFraction: 42,
1062
+ },
1035
1063
  },
1036
1064
  metastoreConfig: {
1037
1065
  dataprocMetastoreService: "Test string",
@@ -1088,6 +1116,9 @@ gapi.load('client', async () => {
1088
1116
  minNumInstances: 42,
1089
1117
  numInstances: 42,
1090
1118
  preemptibility: "Test string",
1119
+ startupConfig: {
1120
+ requiredRegistrationFraction: 42,
1121
+ },
1091
1122
  },
1092
1123
  securityConfig: {
1093
1124
  identityConfig: {
@@ -1175,6 +1206,9 @@ gapi.load('client', async () => {
1175
1206
  minNumInstances: 42,
1176
1207
  numInstances: 42,
1177
1208
  preemptibility: "Test string",
1209
+ startupConfig: {
1210
+ requiredRegistrationFraction: 42,
1211
+ },
1178
1212
  },
1179
1213
  },
1180
1214
  labels: {
@@ -1234,6 +1268,25 @@ gapi.load('client', async () => {
1234
1268
  id: "Test string",
1235
1269
  jobs: [
1236
1270
  {
1271
+ flinkJob: {
1272
+ args: [
1273
+ "Test string"
1274
+ ],
1275
+ jarFileUris: [
1276
+ "Test string"
1277
+ ],
1278
+ loggingConfig: {
1279
+ driverLogLevels: {
1280
+ A: "Test string"
1281
+ },
1282
+ },
1283
+ mainClass: "Test string",
1284
+ mainJarFileUri: "Test string",
1285
+ properties: {
1286
+ A: "Test string"
1287
+ },
1288
+ savepointUri: "Test string",
1289
+ },
1237
1290
  hadoopJob: {
1238
1291
  archiveUris: [
1239
1292
  "Test string"
@@ -1543,6 +1596,9 @@ gapi.load('client', async () => {
1543
1596
  minNumInstances: 42,
1544
1597
  numInstances: 42,
1545
1598
  preemptibility: "Test string",
1599
+ startupConfig: {
1600
+ requiredRegistrationFraction: 42,
1601
+ },
1546
1602
  },
1547
1603
  roles: [
1548
1604
  "Test string"
@@ -1564,7 +1620,6 @@ gapi.load('client', async () => {
1564
1620
  },
1565
1621
  encryptionConfig: {
1566
1622
  gcePdKmsKeyName: "Test string",
1567
- kmsKey: "Test string",
1568
1623
  },
1569
1624
  endpointConfig: {
1570
1625
  enableHttpPortAccess: true,
@@ -1710,6 +1765,9 @@ gapi.load('client', async () => {
1710
1765
  minNumInstances: 42,
1711
1766
  numInstances: 42,
1712
1767
  preemptibility: "Test string",
1768
+ startupConfig: {
1769
+ requiredRegistrationFraction: 42,
1770
+ },
1713
1771
  },
1714
1772
  metastoreConfig: {
1715
1773
  dataprocMetastoreService: "Test string",
@@ -1766,6 +1824,9 @@ gapi.load('client', async () => {
1766
1824
  minNumInstances: 42,
1767
1825
  numInstances: 42,
1768
1826
  preemptibility: "Test string",
1827
+ startupConfig: {
1828
+ requiredRegistrationFraction: 42,
1829
+ },
1769
1830
  },
1770
1831
  securityConfig: {
1771
1832
  identityConfig: {
@@ -1853,6 +1914,9 @@ gapi.load('client', async () => {
1853
1914
  minNumInstances: 42,
1854
1915
  numInstances: 42,
1855
1916
  preemptibility: "Test string",
1917
+ startupConfig: {
1918
+ requiredRegistrationFraction: 42,
1919
+ },
1856
1920
  },
1857
1921
  },
1858
1922
  labels: {
@@ -1912,6 +1976,25 @@ gapi.load('client', async () => {
1912
1976
  id: "Test string",
1913
1977
  jobs: [
1914
1978
  {
1979
+ flinkJob: {
1980
+ args: [
1981
+ "Test string"
1982
+ ],
1983
+ jarFileUris: [
1984
+ "Test string"
1985
+ ],
1986
+ loggingConfig: {
1987
+ driverLogLevels: {
1988
+ A: "Test string"
1989
+ },
1990
+ },
1991
+ mainClass: "Test string",
1992
+ mainJarFileUri: "Test string",
1993
+ properties: {
1994
+ A: "Test string"
1995
+ },
1996
+ savepointUri: "Test string",
1997
+ },
1915
1998
  hadoopJob: {
1916
1999
  archiveUris: [
1917
2000
  "Test string"
@@ -2221,6 +2304,9 @@ gapi.load('client', async () => {
2221
2304
  minNumInstances: 42,
2222
2305
  numInstances: 42,
2223
2306
  preemptibility: "Test string",
2307
+ startupConfig: {
2308
+ requiredRegistrationFraction: 42,
2309
+ },
2224
2310
  },
2225
2311
  roles: [
2226
2312
  "Test string"
@@ -2242,7 +2328,6 @@ gapi.load('client', async () => {
2242
2328
  },
2243
2329
  encryptionConfig: {
2244
2330
  gcePdKmsKeyName: "Test string",
2245
- kmsKey: "Test string",
2246
2331
  },
2247
2332
  endpointConfig: {
2248
2333
  enableHttpPortAccess: true,
@@ -2388,6 +2473,9 @@ gapi.load('client', async () => {
2388
2473
  minNumInstances: 42,
2389
2474
  numInstances: 42,
2390
2475
  preemptibility: "Test string",
2476
+ startupConfig: {
2477
+ requiredRegistrationFraction: 42,
2478
+ },
2391
2479
  },
2392
2480
  metastoreConfig: {
2393
2481
  dataprocMetastoreService: "Test string",
@@ -2444,6 +2532,9 @@ gapi.load('client', async () => {
2444
2532
  minNumInstances: 42,
2445
2533
  numInstances: 42,
2446
2534
  preemptibility: "Test string",
2535
+ startupConfig: {
2536
+ requiredRegistrationFraction: 42,
2537
+ },
2447
2538
  },
2448
2539
  securityConfig: {
2449
2540
  identityConfig: {
@@ -2531,6 +2622,9 @@ gapi.load('client', async () => {
2531
2622
  minNumInstances: 42,
2532
2623
  numInstances: 42,
2533
2624
  preemptibility: "Test string",
2625
+ startupConfig: {
2626
+ requiredRegistrationFraction: 42,
2627
+ },
2534
2628
  },
2535
2629
  },
2536
2630
  labels: {
@@ -2549,6 +2643,7 @@ gapi.load('client', async () => {
2549
2643
  cooldownPeriod: "Test string",
2550
2644
  sparkStandaloneConfig: {
2551
2645
  gracefulDecommissionTimeout: "Test string",
2646
+ removeOnlyIdleWorkers: true,
2552
2647
  scaleDownFactor: 42,
2553
2648
  scaleDownMinWorkerFraction: 42,
2554
2649
  scaleUpFactor: 42,
@@ -2642,6 +2737,7 @@ gapi.load('client', async () => {
2642
2737
  cooldownPeriod: "Test string",
2643
2738
  sparkStandaloneConfig: {
2644
2739
  gracefulDecommissionTimeout: "Test string",
2740
+ removeOnlyIdleWorkers: true,
2645
2741
  scaleDownFactor: 42,
2646
2742
  scaleDownMinWorkerFraction: 42,
2647
2743
  scaleUpFactor: 42,
@@ -2746,6 +2842,9 @@ gapi.load('client', async () => {
2746
2842
  minNumInstances: 42,
2747
2843
  numInstances: 42,
2748
2844
  preemptibility: "Test string",
2845
+ startupConfig: {
2846
+ requiredRegistrationFraction: 42,
2847
+ },
2749
2848
  },
2750
2849
  roles: [
2751
2850
  "Test string"
@@ -2767,7 +2866,6 @@ gapi.load('client', async () => {
2767
2866
  },
2768
2867
  encryptionConfig: {
2769
2868
  gcePdKmsKeyName: "Test string",
2770
- kmsKey: "Test string",
2771
2869
  },
2772
2870
  endpointConfig: {
2773
2871
  enableHttpPortAccess: true,
@@ -2913,6 +3011,9 @@ gapi.load('client', async () => {
2913
3011
  minNumInstances: 42,
2914
3012
  numInstances: 42,
2915
3013
  preemptibility: "Test string",
3014
+ startupConfig: {
3015
+ requiredRegistrationFraction: 42,
3016
+ },
2916
3017
  },
2917
3018
  metastoreConfig: {
2918
3019
  dataprocMetastoreService: "Test string",
@@ -2969,6 +3070,9 @@ gapi.load('client', async () => {
2969
3070
  minNumInstances: 42,
2970
3071
  numInstances: 42,
2971
3072
  preemptibility: "Test string",
3073
+ startupConfig: {
3074
+ requiredRegistrationFraction: 42,
3075
+ },
2972
3076
  },
2973
3077
  securityConfig: {
2974
3078
  identityConfig: {
@@ -3056,6 +3160,9 @@ gapi.load('client', async () => {
3056
3160
  minNumInstances: 42,
3057
3161
  numInstances: 42,
3058
3162
  preemptibility: "Test string",
3163
+ startupConfig: {
3164
+ requiredRegistrationFraction: 42,
3165
+ },
3059
3166
  },
3060
3167
  },
3061
3168
  labels: {
@@ -3290,6 +3397,9 @@ gapi.load('client', async () => {
3290
3397
  minNumInstances: 42,
3291
3398
  numInstances: 42,
3292
3399
  preemptibility: "Test string",
3400
+ startupConfig: {
3401
+ requiredRegistrationFraction: 42,
3402
+ },
3293
3403
  },
3294
3404
  roles: [
3295
3405
  "Test string"
@@ -3311,7 +3421,6 @@ gapi.load('client', async () => {
3311
3421
  },
3312
3422
  encryptionConfig: {
3313
3423
  gcePdKmsKeyName: "Test string",
3314
- kmsKey: "Test string",
3315
3424
  },
3316
3425
  endpointConfig: {
3317
3426
  enableHttpPortAccess: true,
@@ -3457,6 +3566,9 @@ gapi.load('client', async () => {
3457
3566
  minNumInstances: 42,
3458
3567
  numInstances: 42,
3459
3568
  preemptibility: "Test string",
3569
+ startupConfig: {
3570
+ requiredRegistrationFraction: 42,
3571
+ },
3460
3572
  },
3461
3573
  metastoreConfig: {
3462
3574
  dataprocMetastoreService: "Test string",
@@ -3513,6 +3625,9 @@ gapi.load('client', async () => {
3513
3625
  minNumInstances: 42,
3514
3626
  numInstances: 42,
3515
3627
  preemptibility: "Test string",
3628
+ startupConfig: {
3629
+ requiredRegistrationFraction: 42,
3630
+ },
3516
3631
  },
3517
3632
  securityConfig: {
3518
3633
  identityConfig: {
@@ -3600,6 +3715,9 @@ gapi.load('client', async () => {
3600
3715
  minNumInstances: 42,
3601
3716
  numInstances: 42,
3602
3717
  preemptibility: "Test string",
3718
+ startupConfig: {
3719
+ requiredRegistrationFraction: 42,
3720
+ },
3603
3721
  },
3604
3722
  },
3605
3723
  labels: {
@@ -3827,6 +3945,9 @@ gapi.load('client', async () => {
3827
3945
  minNumInstances: 42,
3828
3946
  numInstances: 42,
3829
3947
  preemptibility: "Test string",
3948
+ startupConfig: {
3949
+ requiredRegistrationFraction: 42,
3950
+ },
3830
3951
  },
3831
3952
  roles: [
3832
3953
  "Test string"
@@ -4824,6 +4945,25 @@ gapi.load('client', async () => {
4824
4945
  id: "Test string",
4825
4946
  jobs: [
4826
4947
  {
4948
+ flinkJob: {
4949
+ args: [
4950
+ "Test string"
4951
+ ],
4952
+ jarFileUris: [
4953
+ "Test string"
4954
+ ],
4955
+ loggingConfig: {
4956
+ driverLogLevels: {
4957
+ A: "Test string"
4958
+ },
4959
+ },
4960
+ mainClass: "Test string",
4961
+ mainJarFileUri: "Test string",
4962
+ properties: {
4963
+ A: "Test string"
4964
+ },
4965
+ savepointUri: "Test string",
4966
+ },
4827
4967
  hadoopJob: {
4828
4968
  archiveUris: [
4829
4969
  "Test string"
@@ -5133,6 +5273,9 @@ gapi.load('client', async () => {
5133
5273
  minNumInstances: 42,
5134
5274
  numInstances: 42,
5135
5275
  preemptibility: "Test string",
5276
+ startupConfig: {
5277
+ requiredRegistrationFraction: 42,
5278
+ },
5136
5279
  },
5137
5280
  roles: [
5138
5281
  "Test string"
@@ -5154,7 +5297,6 @@ gapi.load('client', async () => {
5154
5297
  },
5155
5298
  encryptionConfig: {
5156
5299
  gcePdKmsKeyName: "Test string",
5157
- kmsKey: "Test string",
5158
5300
  },
5159
5301
  endpointConfig: {
5160
5302
  enableHttpPortAccess: true,
@@ -5300,6 +5442,9 @@ gapi.load('client', async () => {
5300
5442
  minNumInstances: 42,
5301
5443
  numInstances: 42,
5302
5444
  preemptibility: "Test string",
5445
+ startupConfig: {
5446
+ requiredRegistrationFraction: 42,
5447
+ },
5303
5448
  },
5304
5449
  metastoreConfig: {
5305
5450
  dataprocMetastoreService: "Test string",
@@ -5356,6 +5501,9 @@ gapi.load('client', async () => {
5356
5501
  minNumInstances: 42,
5357
5502
  numInstances: 42,
5358
5503
  preemptibility: "Test string",
5504
+ startupConfig: {
5505
+ requiredRegistrationFraction: 42,
5506
+ },
5359
5507
  },
5360
5508
  securityConfig: {
5361
5509
  identityConfig: {
@@ -5443,6 +5591,9 @@ gapi.load('client', async () => {
5443
5591
  minNumInstances: 42,
5444
5592
  numInstances: 42,
5445
5593
  preemptibility: "Test string",
5594
+ startupConfig: {
5595
+ requiredRegistrationFraction: 42,
5596
+ },
5446
5597
  },
5447
5598
  },
5448
5599
  labels: {
@@ -5502,6 +5653,25 @@ gapi.load('client', async () => {
5502
5653
  id: "Test string",
5503
5654
  jobs: [
5504
5655
  {
5656
+ flinkJob: {
5657
+ args: [
5658
+ "Test string"
5659
+ ],
5660
+ jarFileUris: [
5661
+ "Test string"
5662
+ ],
5663
+ loggingConfig: {
5664
+ driverLogLevels: {
5665
+ A: "Test string"
5666
+ },
5667
+ },
5668
+ mainClass: "Test string",
5669
+ mainJarFileUri: "Test string",
5670
+ properties: {
5671
+ A: "Test string"
5672
+ },
5673
+ savepointUri: "Test string",
5674
+ },
5505
5675
  hadoopJob: {
5506
5676
  archiveUris: [
5507
5677
  "Test string"
@@ -5811,6 +5981,9 @@ gapi.load('client', async () => {
5811
5981
  minNumInstances: 42,
5812
5982
  numInstances: 42,
5813
5983
  preemptibility: "Test string",
5984
+ startupConfig: {
5985
+ requiredRegistrationFraction: 42,
5986
+ },
5814
5987
  },
5815
5988
  roles: [
5816
5989
  "Test string"
@@ -5832,7 +6005,6 @@ gapi.load('client', async () => {
5832
6005
  },
5833
6006
  encryptionConfig: {
5834
6007
  gcePdKmsKeyName: "Test string",
5835
- kmsKey: "Test string",
5836
6008
  },
5837
6009
  endpointConfig: {
5838
6010
  enableHttpPortAccess: true,
@@ -5978,6 +6150,9 @@ gapi.load('client', async () => {
5978
6150
  minNumInstances: 42,
5979
6151
  numInstances: 42,
5980
6152
  preemptibility: "Test string",
6153
+ startupConfig: {
6154
+ requiredRegistrationFraction: 42,
6155
+ },
5981
6156
  },
5982
6157
  metastoreConfig: {
5983
6158
  dataprocMetastoreService: "Test string",
@@ -6034,6 +6209,9 @@ gapi.load('client', async () => {
6034
6209
  minNumInstances: 42,
6035
6210
  numInstances: 42,
6036
6211
  preemptibility: "Test string",
6212
+ startupConfig: {
6213
+ requiredRegistrationFraction: 42,
6214
+ },
6037
6215
  },
6038
6216
  securityConfig: {
6039
6217
  identityConfig: {
@@ -6121,6 +6299,9 @@ gapi.load('client', async () => {
6121
6299
  minNumInstances: 42,
6122
6300
  numInstances: 42,
6123
6301
  preemptibility: "Test string",
6302
+ startupConfig: {
6303
+ requiredRegistrationFraction: 42,
6304
+ },
6124
6305
  },
6125
6306
  },
6126
6307
  labels: {
@@ -6180,6 +6361,25 @@ gapi.load('client', async () => {
6180
6361
  id: "Test string",
6181
6362
  jobs: [
6182
6363
  {
6364
+ flinkJob: {
6365
+ args: [
6366
+ "Test string"
6367
+ ],
6368
+ jarFileUris: [
6369
+ "Test string"
6370
+ ],
6371
+ loggingConfig: {
6372
+ driverLogLevels: {
6373
+ A: "Test string"
6374
+ },
6375
+ },
6376
+ mainClass: "Test string",
6377
+ mainJarFileUri: "Test string",
6378
+ properties: {
6379
+ A: "Test string"
6380
+ },
6381
+ savepointUri: "Test string",
6382
+ },
6183
6383
  hadoopJob: {
6184
6384
  archiveUris: [
6185
6385
  "Test string"
@@ -6489,6 +6689,9 @@ gapi.load('client', async () => {
6489
6689
  minNumInstances: 42,
6490
6690
  numInstances: 42,
6491
6691
  preemptibility: "Test string",
6692
+ startupConfig: {
6693
+ requiredRegistrationFraction: 42,
6694
+ },
6492
6695
  },
6493
6696
  roles: [
6494
6697
  "Test string"
@@ -6510,7 +6713,6 @@ gapi.load('client', async () => {
6510
6713
  },
6511
6714
  encryptionConfig: {
6512
6715
  gcePdKmsKeyName: "Test string",
6513
- kmsKey: "Test string",
6514
6716
  },
6515
6717
  endpointConfig: {
6516
6718
  enableHttpPortAccess: true,
@@ -6656,6 +6858,9 @@ gapi.load('client', async () => {
6656
6858
  minNumInstances: 42,
6657
6859
  numInstances: 42,
6658
6860
  preemptibility: "Test string",
6861
+ startupConfig: {
6862
+ requiredRegistrationFraction: 42,
6863
+ },
6659
6864
  },
6660
6865
  metastoreConfig: {
6661
6866
  dataprocMetastoreService: "Test string",
@@ -6712,6 +6917,9 @@ gapi.load('client', async () => {
6712
6917
  minNumInstances: 42,
6713
6918
  numInstances: 42,
6714
6919
  preemptibility: "Test string",
6920
+ startupConfig: {
6921
+ requiredRegistrationFraction: 42,
6922
+ },
6715
6923
  },
6716
6924
  securityConfig: {
6717
6925
  identityConfig: {
@@ -6799,6 +7007,9 @@ gapi.load('client', async () => {
6799
7007
  minNumInstances: 42,
6800
7008
  numInstances: 42,
6801
7009
  preemptibility: "Test string",
7010
+ startupConfig: {
7011
+ requiredRegistrationFraction: 42,
7012
+ },
6802
7013
  },
6803
7014
  },
6804
7015
  labels: {