@maxim_mazurok/gapi.client.dataproc-v1 0.0.20230630 → 0.0.20230719

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.d.ts +51 -8
  2. package/package.json +1 -1
  3. package/tests.ts +61 -2
package/index.d.ts CHANGED
@@ -9,7 +9,7 @@
9
9
  // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
10
10
  // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
11
11
  // Generated from: https://dataproc.googleapis.com/$discovery/rest?version=v1
12
- // Revision: 20230630
12
+ // Revision: 20230719
13
13
 
14
14
  /// <reference types="gapi.client" />
15
15
 
@@ -550,10 +550,10 @@ declare namespace gapi.client {
550
550
  }
551
551
  interface ExecutionConfig {
552
552
  /**
553
- * Optional. The duration to keep the session alive while it's idling. Exceeding this threshold causes the session to terminate. This field cannot be set on a batch workload. Minimum
554
- * value is 10 minutes; maximum value is 14 days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). Defaults to 4 hours if not
555
- * set. If both ttl and idle_ttl are specified for an interactive session, the conditions are treated as OR conditions: the workload will be terminated when it has been idle for
556
- * idle_ttl or when ttl has been exceeded, whichever occurs first.
553
+ * Optional. Applies to sessions only. The duration to keep the session alive while it's idling. Exceeding this threshold causes the session to terminate. This field cannot be set on a
554
+ * batch workload. Minimum value is 10 minutes; maximum value is 14 days (see JSON representation of Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)).
555
+ * Defaults to 4 hours if not set. If both ttl and idle_ttl are specified for an interactive session, the conditions are treated as OR conditions: the workload will be terminated when
556
+ * it has been idle for idle_ttl or when ttl has been exceeded, whichever occurs first.
557
557
  */
558
558
  idleTtl?:
559
559
  string;
@@ -603,6 +603,35 @@ declare namespace gapi.client {
603
603
  title?:
604
604
  string;
605
605
  }
606
+ interface FlinkJob {
607
+ /**
608
+ * Optional. The arguments to pass to the driver. Do not include arguments, such as --conf, that can be set as job properties, since a collision may occur that causes an incorrect job
609
+ * submission.
610
+ */
611
+ args?:
612
+ string[];
613
+ /** Optional. HCFS URIs of jar files to add to the CLASSPATHs of the Flink driver and tasks. */
614
+ jarFileUris?:
615
+ string[];
616
+ /** Optional. The runtime log config for job execution. */
617
+ loggingConfig?:
618
+ LoggingConfig;
619
+ /** The name of the driver's main class. The jar file that contains the class must be in the default CLASSPATH or specified in jar_file_uris. */
620
+ mainClass?:
621
+ string;
622
+ /** The HCFS URI of the jar file that contains the main class. */
623
+ mainJarFileUri?:
624
+ string;
625
+ /**
626
+ * Optional. A mapping of property names to values, used to configure Flink. Properties that conflict with values set by the Dataproc API may beoverwritten. Can include properties set
627
+ * in/etc/flink/conf/flink-defaults.conf and classes in user code.
628
+ */
629
+ properties?:
630
+ { [P in string]: string };
631
+ /** Optional. HCFS URI of the savepoint which contains the last saved progress for this job */
632
+ savepointUri?:
633
+ string;
634
+ }
606
635
  interface GceClusterConfig {
607
636
  /** Optional. Confidential Instance Config for clusters using Confidential VMs (https://cloud.google.com/compute/confidential-vm/docs). */
608
637
  confidentialInstanceConfig?:
@@ -615,7 +644,7 @@ declare namespace gapi.client {
615
644
  internalIpOnly?:
616
645
  boolean;
617
646
  /**
618
- * The Compute Engine metadata entries to add to all instances (see Project and instance metadata
647
+ * Optional. The Compute Engine metadata entries to add to all instances (see Project and instance metadata
619
648
  * (https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
620
649
  */
621
650
  metadata?:
@@ -996,6 +1025,9 @@ declare namespace gapi.client {
996
1025
  /** Optional. Driver scheduling configuration. */
997
1026
  driverSchedulingConfig?:
998
1027
  DriverSchedulingConfig;
1028
+ /** Optional. Job is a Flink job. */
1029
+ flinkJob?:
1030
+ FlinkJob;
999
1031
  /** Optional. Job is a Hadoop job. */
1000
1032
  hadoopJob?:
1001
1033
  HadoopJob;
@@ -1737,7 +1769,12 @@ declare namespace gapi.client {
1737
1769
  string;
1738
1770
  }
1739
1771
  interface RuntimeInfo {
1740
- /** Output only. Approximate workload resource usage calculated after workload finishes (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)). */
1772
+ /**
1773
+ * Output only. Approximate workload resource usage, calculated when the workload completes (see Dataproc Serverless pricing
1774
+ * (https://cloud.google.com/dataproc-serverless/pricing)).Note: This metric calculation may change in the future, for example, to capture cumulative workload resource consumption
1775
+ * during workload execution (see the Dataproc Serverless release notes (https://cloud.google.com/dataproc-serverless/docs/release-notes) for announcements, changes, fixes and other
1776
+ * Dataproc developments).
1777
+ */
1741
1778
  approximateUsage?:
1742
1779
  UsageMetrics;
1743
1780
  /** Output only. Snapshot of current workload resource usage. */
@@ -2137,9 +2174,15 @@ declare namespace gapi.client {
2137
2174
  /** Optional. Milli (one-thousandth) Dataproc Compute Units (DCUs) (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)). */
2138
2175
  milliDcu?:
2139
2176
  string;
2177
+ /** Optional. Milli (one-thousandth) Dataproc Compute Units (DCUs) charged at premium tier (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)). */
2178
+ milliDcuPremium?:
2179
+ string;
2140
2180
  /** Optional. Shuffle Storage in gigabytes (GB). (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)) */
2141
2181
  shuffleStorageGb?:
2142
2182
  string;
2183
+ /** Optional. Shuffle Storage in gigabytes (GB) charged at premium tier. (see Dataproc Serverless pricing (https://cloud.google.com/dataproc-serverless/pricing)) */
2184
+ shuffleStorageGbPremium?:
2185
+ string;
2143
2186
  /** Optional. The timestamp of the usage snapshot. */
2144
2187
  snapshotTime?:
2145
2188
  string;
@@ -2861,7 +2904,7 @@ declare namespace gapi.client {
2861
2904
  string;
2862
2905
  },
2863
2906
  body: Batch): Request<Operation>;
2864
- /** Deletes the batch workload resource. If the batch is not in terminal state, the delete fails and the response returns FAILED_PRECONDITION. */
2907
+ /** Deletes the batch workload resource. If the batch is not in a CANCELLED, SUCCEEDED or FAILED State, the delete operation fails and the response returns FAILED_PRECONDITION. */
2865
2908
  delete(request?: {
2866
2909
  /** V1 error format. */
2867
2910
  "$.xgafv"?:
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@maxim_mazurok/gapi.client.dataproc-v1",
3
- "version": "0.0.20230630",
3
+ "version": "0.0.20230719",
4
4
  "description": "TypeScript typings for Cloud Dataproc API v1",
5
5
  "license": "MIT",
6
6
  "author": {
package/tests.ts CHANGED
@@ -3,7 +3,7 @@
3
3
  // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
4
4
  // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
5
5
 
6
- // Revision: 20230630
6
+ // Revision: 20230719
7
7
 
8
8
  gapi.load('client', async () => {
9
9
  /** now we can use gapi.client */
@@ -224,7 +224,9 @@ gapi.load('client', async () => {
224
224
  },
225
225
  currentUsage: {
226
226
  milliDcu: "Test string",
227
+ milliDcuPremium: "Test string",
227
228
  shuffleStorageGb: "Test string",
229
+ shuffleStorageGbPremium: "Test string",
228
230
  snapshotTime: "Test string",
229
231
  },
230
232
  diagnosticOutputUri: "Test string",
@@ -282,7 +284,7 @@ gapi.load('client', async () => {
282
284
  stateTime: "Test string",
283
285
  uuid: "Test string",
284
286
  });
285
- /** Deletes the batch workload resource. If the batch is not in terminal state, the delete fails and the response returns FAILED_PRECONDITION. */
287
+ /** Deletes the batch workload resource. If the batch is not in a CANCELLED, SUCCEEDED or FAILED State, the delete operation fails and the response returns FAILED_PRECONDITION. */
286
288
  await gapi.client.dataproc.projects.locations.batches.delete({
287
289
  name: "Test string",
288
290
  });
@@ -3300,6 +3302,25 @@ gapi.load('client', async () => {
3300
3302
  memoryMb: 42,
3301
3303
  vcores: 42,
3302
3304
  },
3305
+ flinkJob: {
3306
+ args: [
3307
+ "Test string"
3308
+ ],
3309
+ jarFileUris: [
3310
+ "Test string"
3311
+ ],
3312
+ loggingConfig: {
3313
+ driverLogLevels: {
3314
+ A: "Test string"
3315
+ },
3316
+ },
3317
+ mainClass: "Test string",
3318
+ mainJarFileUri: "Test string",
3319
+ properties: {
3320
+ A: "Test string"
3321
+ },
3322
+ savepointUri: "Test string",
3323
+ },
3303
3324
  hadoopJob: {
3304
3325
  archiveUris: [
3305
3326
  "Test string"
@@ -3577,6 +3598,25 @@ gapi.load('client', async () => {
3577
3598
  memoryMb: 42,
3578
3599
  vcores: 42,
3579
3600
  },
3601
+ flinkJob: {
3602
+ args: [
3603
+ "Test string"
3604
+ ],
3605
+ jarFileUris: [
3606
+ "Test string"
3607
+ ],
3608
+ loggingConfig: {
3609
+ driverLogLevels: {
3610
+ A: "Test string"
3611
+ },
3612
+ },
3613
+ mainClass: "Test string",
3614
+ mainJarFileUri: "Test string",
3615
+ properties: {
3616
+ A: "Test string"
3617
+ },
3618
+ savepointUri: "Test string",
3619
+ },
3580
3620
  hadoopJob: {
3581
3621
  archiveUris: [
3582
3622
  "Test string"
@@ -3833,6 +3873,25 @@ gapi.load('client', async () => {
3833
3873
  memoryMb: 42,
3834
3874
  vcores: 42,
3835
3875
  },
3876
+ flinkJob: {
3877
+ args: [
3878
+ "Test string"
3879
+ ],
3880
+ jarFileUris: [
3881
+ "Test string"
3882
+ ],
3883
+ loggingConfig: {
3884
+ driverLogLevels: {
3885
+ A: "Test string"
3886
+ },
3887
+ },
3888
+ mainClass: "Test string",
3889
+ mainJarFileUri: "Test string",
3890
+ properties: {
3891
+ A: "Test string"
3892
+ },
3893
+ savepointUri: "Test string",
3894
+ },
3836
3895
  hadoopJob: {
3837
3896
  archiveUris: [
3838
3897
  "Test string"