@maxim_mazurok/gapi.client.bigquery-v2 0.0.20230715 → 0.0.20230805

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/index.d.ts +64 -14
  2. package/package.json +2 -2
  3. package/tests.ts +50 -12
package/index.d.ts CHANGED
@@ -9,7 +9,7 @@
 // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
 // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
 // Generated from: https://bigquery.googleapis.com/$discovery/rest?version=v2
-// Revision: 20230715
+// Revision: 20230805
 
 /// <reference types="gapi.client" />
 
@@ -245,6 +245,23 @@ declare namespace gapi.client {
     biEngineReasons?:
       BiEngineReason[];
   }
+  interface BigLakeConfiguration {
+    /** [Required] Required and immutable. Credential reference for accessing external storage system. Normalized as project_id.location_id.connection_id. */
+    connectionId?:
+      string;
+    /** [Required] Required and immutable. Open source file format that the table data is stored in. Currently only PARQUET is supported. */
+    fileFormat?:
+      string;
+    /**
+     * [Required] Required and immutable. Fully qualified location prefix of the external folder where data is stored. Normalized to standard format: "gs:////". Starts with "gs://" rather
+     * than "/bigstore/". Ends with "/". Does not contain "*". See also BigLakeStorageMetadata on how it is used.
+     */
+    storageUri?:
+      string;
+    /** [Required] Required and immutable. Open source file format that the table data is stored in. Currently only PARQUET is supported. */
+    tableFormat?:
+      string;
+  }
   interface BigQueryModelTraining {
     /** [Output-only, Beta] Index of current ML training iteration. Updated during create model query job to show job progress. */
     currentIteration?:
@@ -1393,13 +1410,13 @@ declare namespace gapi.client {
   }
   interface IndexUnusedReason {
     /** [Output-only] Specifies the base table involved in the reason that no search index was used. */
-    base_table?:
+    baseTable?:
       TableReference;
     /** [Output-only] Specifies the high-level reason for the scenario when no search index was used. */
     code?:
       string;
     /** [Output-only] Specifies the name of the unused search index, if available. */
-    index_name?:
+    indexName?:
       string;
     /** [Output-only] Free form human-readable reason for the scenario when no search index was used. */
     message?:
@@ -2181,10 +2198,10 @@ declare namespace gapi.client {
   }
   interface JobStatistics5 {
     /** [Output-only] Number of logical bytes copied to the destination table. */
-    copied_logical_bytes?:
+    copiedLogicalBytes?:
       string;
     /** [Output-only] Number of rows copied to the destination table. */
-    copied_rows?:
+    copiedRows?:
       string;
   }
   interface JobStatus {
@@ -2247,7 +2264,7 @@ declare namespace gapi.client {
   }
   interface MaterializedViewDefinition {
     /** [Optional] Allow non incremental materialized view definition. The default value is "false". */
-    allow_non_incremental_definition?:
+    allowNonIncrementalDefinition?:
       boolean;
     /** [Optional] [TrustedTester] Enable automatic refresh of the materialized view when the base table is updated. The default value is "true". */
     enableRefresh?:
@@ -2787,6 +2804,9 @@ declare namespace gapi.client {
     /** Output only. Max number of rows in each batch sent to the remote service. If unset, the number of rows in each batch is set dynamically. */
     maxBatchingRows?:
       string;
+    /** Output only. The model version for LLM. */
+    remoteModelVersion?:
+      string;
     /** Output only. The remote service type for remote model. */
     remoteServiceType?:
       string;
@@ -2798,6 +2818,9 @@ declare namespace gapi.client {
     /** Output only. The time when this routine was created, in milliseconds since the epoch. */
     creationTime?:
       string;
+    /** Optional. Data governance specific option, if the value is DATA_MASKING, the function will be validated as masking functions. */
+    dataGovernanceType?:
+      string;
     /**
      * Required. The body of the routine. For functions, this is the expression in the AS clause. If language=SQL, it is the substring inside (but excluding) the parentheses. For example,
      * for the function created with the following statement: `CREATE FUNCTION JoinLines(x string, y string) as (concat(x, "\n", y))` The definition_body is `concat(x, "\n", y)` (\n is not
@@ -2954,7 +2977,7 @@ declare namespace gapi.client {
      * When index_usage_mode is UNUSED or PARTIALLY_USED, this field explains why index was not used in all or part of the search query. If index_usage_mode is FULLLY_USED, this field is
      * not populated.
      */
-    indexUnusedReason?:
+    indexUnusedReasons?:
       IndexUnusedReason[];
     /** Specifies index usage mode for the query. */
     indexUsageMode?:
@@ -3045,19 +3068,22 @@ declare namespace gapi.client {
     endpoints?:
       { [P in string]: string };
     /** [Output-only] Logging info is used to generate a link to Cloud Logging. */
-    logging_info?:
+    loggingInfo?:
       SparkLoggingInfo;
     /** [Output-only] Spark job id if a Spark job is created successfully. */
-    spark_job_id?:
+    sparkJobId?:
       string;
     /** [Output-only] Location where the Spark job is executed. */
-    spark_job_location?:
+    sparkJobLocation?:
       string;
   }
   interface StandardSqlDataType {
     /** The type of the array's elements, if type_kind = "ARRAY". */
     arrayElementType?:
       StandardSqlDataType;
+    /** The type of the range's elements, if type_kind = "RANGE". */
+    rangeElementType?:
+      StandardSqlDataType;
     /** The fields of this struct, in order, if type_kind = "STRUCT". */
     structType?:
       StandardSqlStructType;
@@ -3102,6 +3128,9 @@ declare namespace gapi.client {
       string[];
   }
   interface Table {
+    /** [Optional] Specifies the configuration of a BigLake managed table. */
+    biglakeConfiguration?:
+      BigLakeConfiguration;
     /** [Output-only] Clone definition. */
     cloneDefinition?:
       CloneDefinition;
@@ -3571,6 +3600,9 @@ declare namespace gapi.client {
     /** Whether or not p-value test should be computed for this model. Only available for linear and logistic regression models. */
     calculatePValues?:
       boolean;
+    /** Categorical feature encoding method. */
+    categoryEncodingMethod?:
+      string;
     /** If true, clean spikes and dips in the input time series. */
     cleanSpikesAndDips?:
       boolean;
@@ -3636,6 +3668,9 @@ declare namespace gapi.client {
     /** The geographical region based on which the holidays are considered in time series modeling. If a valid value is specified, then holiday effects modeling is enabled. */
     holidayRegion?:
       string;
+    /** A list of geographical regions that are used for time series modeling. */
+    holidayRegions?:
+      string[];
     /** The number of periods ahead that need to be forecasted. */
     horizon?:
       string;
@@ -3693,7 +3728,10 @@ declare namespace gapi.client {
     /** Maximum number of trials to run in parallel. */
     maxParallelTrials?:
       string;
-    /** Get truncated length by last n points in time series. Use separately from time_series_length_fraction and min_time_series_length. */
+    /**
+     * The maximum number of time points in a time series that can be used in modeling the trend component of the time series. Don't use this option with the `timeSeriesLengthFraction` or
+     * `minTimeSeriesLength` options.
+     */
     maxTimeSeriesLength?:
       string;
     /** Maximum depth of a tree for boosted tree models. */
@@ -3705,7 +3743,12 @@ declare namespace gapi.client {
     /** Minimum split loss for boosted tree models. */
     minSplitLoss?:
       number;
-    /** Set fast trend ARIMA_PLUS model minimum training length. Use in pair with time_series_length_fraction. */
+    /**
+     * The minimum number of time points in a time series that are used in modeling the trend component of the time series. If you use this option you must also set the
+     * `timeSeriesLengthFraction` option. This training option ensures that enough time points are available when you use `timeSeriesLengthFraction` in trend modeling. This is particularly
+     * important when forecasting multiple time series in a single query using `timeSeriesIdColumn`. If the total number of time points is less than the `minTimeSeriesLength` value, then
+     * the query uses all available time points.
+     */
     minTimeSeriesLength?:
       string;
     /** Minimum sum of instance weight needed in a child for boosted tree models. */
@@ -3771,7 +3814,11 @@ declare namespace gapi.client {
     /** The time series id columns that were used during ARIMA model training. */
     timeSeriesIdColumns?:
       string[];
-    /** Get truncated length by fraction in time series. */
+    /**
+     * The fraction of the interpolated length of the time series that's used to model the time series trend component. All of the time points of the time series are used to model the
+     * non-trend component. This training option accelerates modeling training without sacrificing much forecasting accuracy. You can use this option with `minTimeSeriesLength` but not
+     * with `maxTimeSeriesLength`.
+     */
     timeSeriesLengthFraction?:
       number;
     /** Column to be designated as time series timestamp for ARIMA model. */
@@ -3780,7 +3827,10 @@ declare namespace gapi.client {
     /** Tree construction algorithm for boosted tree models. */
     treeMethod?:
       string;
-    /** The smoothing window size for the trend component of the time series. */
+    /**
+     * Smoothing window size for the trend component. When a positive value is specified, a center moving average smoothing is applied on the history trend. When the smoothing window is
+     * out of the boundary at the beginning or the end of the trend, the first element or the last element is padded to fill the smoothing window before the average is applied.
+     */
     trendSmoothingWindowSize?:
       string;
     /** User column specified for matrix factorization models. */
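The new BigLakeConfiguration interface is referenced from Table.biglakeConfiguration, so it can be supplied when creating a table through the typed client. A minimal sketch of what that might look like (the project, dataset, bucket, and connection names are placeholders; the two-argument parameters-plus-body call style follows tests.ts below, and the ICEBERG value for tableFormat is an assumption not stated in the generated comments):

  gapi.load('client', async () => {
    // Assumes the BigQuery v2 client library has already been loaded via gapi.client.load(...).
    await gapi.client.bigquery.tables.insert({
      projectId: "my-project", // placeholder
      datasetId: "my_dataset", // placeholder
    }, {
      tableReference: {
        projectId: "my-project",
        datasetId: "my_dataset",
        tableId: "my_biglake_table",
      },
      // New in revision 20230805: configuration for a BigLake managed table.
      biglakeConfiguration: {
        connectionId: "my-project.us.my-connection",    // placeholder connection reference
        fileFormat: "PARQUET",
        tableFormat: "ICEBERG",                         // assumed value
        storageUri: "gs://my-bucket/my_biglake_table/", // placeholder bucket path
      },
    });
  });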
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@maxim_mazurok/gapi.client.bigquery-v2",
-  "version": "0.0.20230715",
+  "version": "0.0.20230805",
   "description": "TypeScript typings for BigQuery API v2",
   "license": "MIT",
   "author": {
@@ -15,6 +15,6 @@
   "types": "index.d.ts",
   "dependencies": {
     "@types/gapi.client": "*",
-    "@types/gapi.client.discovery": "*"
+    "@types/gapi.client.discovery-v1": "*"
   }
 }
package/tests.ts CHANGED
@@ -3,7 +3,7 @@
 // This file was generated by https://github.com/Maxim-Mazurok/google-api-typings-generator. Please do not edit it manually.
 // In case of any problems please post issue to https://github.com/Maxim-Mazurok/google-api-typings-generator
 
-// Revision: 20230715
+// Revision: 20230805
 
 gapi.load('client', async () => {
   /** now we can use gapi.client */
@@ -676,8 +676,8 @@ gapi.load('client', async () => {
     statistics: {
       completionRatio: 42,
       copy: {
-        copied_logical_bytes: "Test string",
-        copied_rows: "Test string",
+        copiedLogicalBytes: "Test string",
+        copiedRows: "Test string",
       },
       creationTime: "Test string",
       dataMaskingStatistics: {
@@ -855,15 +855,15 @@ gapi.load('client', async () => {
       ],
     },
     searchStatistics: {
-      indexUnusedReason: [
+      indexUnusedReasons: [
         {
-          base_table: {
+          baseTable: {
             datasetId: "Test string",
             projectId: "Test string",
             tableId: "Test string",
           },
           code: "Test string",
-          index_name: "Test string",
+          indexName: "Test string",
           message: "Test string",
         }
       ],
@@ -873,12 +873,12 @@ gapi.load('client', async () => {
       endpoints: {
         A: "Test string"
       },
-      logging_info: {
+      loggingInfo: {
        project_id: "Test string",
        resource_type: "Test string",
      },
-      spark_job_id: "Test string",
-      spark_job_location: "Test string",
+      sparkJobId: "Test string",
+      sparkJobLocation: "Test string",
     },
     statementType: "Test string",
     timeline: [
@@ -1084,6 +1084,7 @@ gapi.load('client', async () => {
       name: "Test string",
       type: {
         arrayElementType: undefined,
+        rangeElementType: undefined,
         structType: {
           fields: undefined,
         },
@@ -1466,6 +1467,7 @@ gapi.load('client', async () => {
     boosterType: "Test string",
     budgetHours: 42,
     calculatePValues: true,
+    categoryEncodingMethod: "Test string",
     cleanSpikesAndDips: true,
     colorSpace: "Test string",
     colsampleBylevel: 42,
@@ -1487,6 +1489,9 @@ gapi.load('client', async () => {
       "Test string"
     ],
     holidayRegion: "Test string",
+    holidayRegions: [
+      "Test string"
+    ],
     horizon: "Test string",
     hparamTuningObjectives: [
       "Test string"
@@ -1712,6 +1717,7 @@ gapi.load('client', async () => {
       name: "Test string",
       type: {
         arrayElementType: undefined,
+        rangeElementType: undefined,
         structType: {
           fields: undefined,
         },
@@ -1737,6 +1743,7 @@ gapi.load('client', async () => {
     connection: "Test string",
     endpoint: "Test string",
     maxBatchingRows: "Test string",
+    remoteModelVersion: "Test string",
     remoteServiceType: "Test string",
   },
   trainingRuns: [
@@ -1945,6 +1952,7 @@ gapi.load('client', async () => {
     boosterType: "Test string",
     budgetHours: 42,
     calculatePValues: true,
+    categoryEncodingMethod: "Test string",
     cleanSpikesAndDips: true,
     colorSpace: "Test string",
     colsampleBylevel: 42,
@@ -1966,6 +1974,9 @@ gapi.load('client', async () => {
       "Test string"
     ],
     holidayRegion: "Test string",
+    holidayRegions: [
+      "Test string"
+    ],
     horizon: "Test string",
     hparamTuningObjectives: [
       "Test string"
@@ -2046,6 +2057,7 @@ gapi.load('client', async () => {
     transformSql: "Test string",
     type: {
       arrayElementType: undefined,
+      rangeElementType: undefined,
       structType: {
         fields: [
           {
@@ -2091,6 +2103,7 @@ gapi.load('client', async () => {
       argumentKind: "Test string",
       dataType: {
         arrayElementType: undefined,
+        rangeElementType: undefined,
         structType: {
           fields: [
             {
@@ -2106,6 +2119,7 @@ gapi.load('client', async () => {
       }
     ],
     creationTime: "Test string",
+    dataGovernanceType: "Test string",
     definitionBody: "Test string",
     description: "Test string",
     determinismLevel: "Test string",
@@ -2129,6 +2143,7 @@ gapi.load('client', async () => {
       name: "Test string",
       type: {
         arrayElementType: undefined,
+        rangeElementType: undefined,
         structType: {
           fields: undefined,
         },
@@ -2139,6 +2154,7 @@ gapi.load('client', async () => {
     },
     returnType: {
       arrayElementType: undefined,
+      rangeElementType: undefined,
       structType: {
         fields: [
           {
@@ -2199,6 +2215,7 @@ gapi.load('client', async () => {
       argumentKind: "Test string",
       dataType: {
         arrayElementType: undefined,
+        rangeElementType: undefined,
         structType: {
           fields: [
             {
@@ -2214,6 +2231,7 @@ gapi.load('client', async () => {
       }
     ],
     creationTime: "Test string",
+    dataGovernanceType: "Test string",
     definitionBody: "Test string",
     description: "Test string",
     determinismLevel: "Test string",
@@ -2237,6 +2255,7 @@ gapi.load('client', async () => {
       name: "Test string",
       type: {
         arrayElementType: undefined,
+        rangeElementType: undefined,
         structType: {
           fields: undefined,
         },
@@ -2247,6 +2266,7 @@ gapi.load('client', async () => {
     },
     returnType: {
       arrayElementType: undefined,
+      rangeElementType: undefined,
       structType: {
         fields: [
           {
@@ -2370,6 +2390,12 @@ gapi.load('client', async () => {
     datasetId: "Test string",
     projectId: "Test string",
   }, {
+    biglakeConfiguration: {
+      connectionId: "Test string",
+      fileFormat: "Test string",
+      storageUri: "Test string",
+      tableFormat: "Test string",
+    },
     cloneDefinition: {
       baseTableReference: {
         datasetId: "Test string",
@@ -2500,7 +2526,7 @@ gapi.load('client', async () => {
     lastModifiedTime: "Test string",
     location: "Test string",
     materializedView: {
-      allow_non_incremental_definition: true,
+      allowNonIncrementalDefinition: true,
       enableRefresh: true,
       lastRefreshTime: "Test string",
       maxStaleness: "Test string",
@@ -2669,6 +2695,12 @@ gapi.load('client', async () => {
     projectId: "Test string",
     tableId: "Test string",
   }, {
+    biglakeConfiguration: {
+      connectionId: "Test string",
+      fileFormat: "Test string",
+      storageUri: "Test string",
+      tableFormat: "Test string",
+    },
     cloneDefinition: {
       baseTableReference: {
         datasetId: "Test string",
@@ -2799,7 +2831,7 @@ gapi.load('client', async () => {
     lastModifiedTime: "Test string",
     location: "Test string",
     materializedView: {
-      allow_non_incremental_definition: true,
+      allowNonIncrementalDefinition: true,
       enableRefresh: true,
       lastRefreshTime: "Test string",
       maxStaleness: "Test string",
@@ -3009,6 +3041,12 @@ gapi.load('client', async () => {
     projectId: "Test string",
     tableId: "Test string",
   }, {
+    biglakeConfiguration: {
+      connectionId: "Test string",
+      fileFormat: "Test string",
+      storageUri: "Test string",
+      tableFormat: "Test string",
+    },
     cloneDefinition: {
       baseTableReference: {
         datasetId: "Test string",
@@ -3139,7 +3177,7 @@ gapi.load('client', async () => {
     lastModifiedTime: "Test string",
     location: "Test string",
     materializedView: {
-      allow_non_incremental_definition: true,
+      allowNonIncrementalDefinition: true,
       enableRefresh: true,
       lastRefreshTime: "Test string",
       maxStaleness: "Test string",
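Several of the index.d.ts changes rename snake_case properties to camelCase (base_table → baseTable, index_name → indexName, copied_logical_bytes → copiedLogicalBytes, copied_rows → copiedRows, allow_non_incremental_definition → allowNonIncrementalDefinition, logging_info → loggingInfo, spark_job_id → sparkJobId, spark_job_location → sparkJobLocation), and indexUnusedReason becomes indexUnusedReasons, so code compiled against 0.0.20230715 may stop type-checking after the update. A minimal before/after sketch, assuming job and table values fetched elsewhere via jobs.get and tables.get:

  declare const job: gapi.client.bigquery.Job;     // placeholder value
  declare const table: gapi.client.bigquery.Table; // placeholder value

  // 0.0.20230715 property names (type errors after this update):
  // job.statistics?.copy?.copied_logical_bytes;
  // table.materializedView?.allow_non_incremental_definition;

  // 0.0.20230805 property names, matching this diff:
  const copiedBytes = job.statistics?.copy?.copiedLogicalBytes;                 // string | undefined
  const nonIncremental = table.materializedView?.allowNonIncrementalDefinition; // boolean | undefined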