@google-cloud/dlp 5.12.0 → 6.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -4,6 +4,29 @@
4
4
 
5
5
  [1]: https://www.npmjs.com/package/@google-cloud/dlp?activeTab=versions
6
6
 
7
+ ## [6.0.0](https://github.com/googleapis/google-cloud-node/compare/dlp-v5.13.0...dlp-v6.0.0) (2025-03-18)
8
+
9
+
10
+ ### ⚠ BREAKING CHANGES
11
+
12
+ * upgrade to Node 18 ([#6096](https://github.com/googleapis/google-cloud-node/issues/6096))
13
+
14
+ ### Features
15
+
16
+ * [Many APIs] add request/response debug logging to gapics ([388b4e2](https://github.com/googleapis/google-cloud-node/commit/388b4e20329b7f6fc0dd061dddff573c45104213))
17
+
18
+
19
+ ### Miscellaneous Chores
20
+
21
+ * Upgrade to Node 18 ([#6096](https://github.com/googleapis/google-cloud-node/issues/6096)) ([eadae64](https://github.com/googleapis/google-cloud-node/commit/eadae64d54e07aa2c65097ea52e65008d4e87436))
22
+
23
+ ## [5.13.0](https://github.com/googleapis/google-cloud-node/compare/dlp-v5.12.0...dlp-v5.13.0) (2025-02-28)
24
+
25
+
26
+ ### Features
27
+
28
+ * [dlp] discovery of Vertex AI datasets ([#6041](https://github.com/googleapis/google-cloud-node/issues/6041)) ([65beb7c](https://github.com/googleapis/google-cloud-node/commit/65beb7cee55e4f37a7449175905f896609ad001a))
29
+
7
30
  ## [5.12.0](https://github.com/googleapis/google-cloud-node/compare/dlp-v5.11.0...dlp-v5.12.0) (2024-10-30)
8
31
 
9
32
 
package/README.md CHANGED
@@ -44,7 +44,7 @@ Google APIs Client Libraries, in [Client Libraries Explained][explained].
44
44
  1. [Select or create a Cloud Platform project][projects].
45
45
  1. [Enable billing for your project][billing].
46
46
  1. [Enable the Cloud Data Loss Prevention API][enable_api].
47
- 1. [Set up authentication][auth] so you can access the
47
+ 1. [Set up authentication with a service account][auth] so you can access the
48
48
  API from your local workstation.
49
49
 
50
50
  ### Installing the client library
@@ -250,4 +250,4 @@ See [LICENSE](https://github.com/googleapis/google-cloud-node/blob/main/LICENSE)
250
250
  [projects]: https://console.cloud.google.com/project
251
251
  [billing]: https://support.google.com/cloud/answer/6293499#enable-billing
252
252
  [enable_api]: https://console.cloud.google.com/flows/enableapi?apiid=dlp.googleapis.com
253
- [auth]: https://cloud.google.com/docs/authentication/external/set-up-adc-local
253
+ [auth]: https://cloud.google.com/docs/authentication/getting-started
@@ -1,4 +1,4 @@
1
- // Copyright 2024 Google LLC
1
+ // Copyright 2025 Google LLC
2
2
  //
3
3
  // Licensed under the Apache License, Version 2.0 (the "License");
4
4
  // you may not use this file except in compliance with the License.
@@ -136,7 +136,7 @@ service DlpService {
136
136
  };
137
137
  }
138
138
 
139
- // Returns a list of the sensitive information types that DLP API
139
+ // Returns a list of the sensitive information types that the DLP API
140
140
  // supports. See
141
141
  // https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
142
142
  // to learn more.
@@ -1234,6 +1234,9 @@ message ByteContentItem {
1234
1234
 
1235
1235
  // Executable file types. Only used for profiling.
1236
1236
  EXECUTABLE = 17;
1237
+
1238
+ // AI model file types. Only used for profiling.
1239
+ AI_MODEL = 18;
1237
1240
  }
1238
1241
 
1239
1242
  // The type of data stored in the bytes string. Default will be TEXT_UTF8.
@@ -2024,6 +2027,9 @@ message InfoTypeDescription {
2024
2027
  // request.
2025
2028
  string description = 4;
2026
2029
 
2030
+ // A sample that is a true positive for this infoType.
2031
+ string example = 8;
2032
+
2027
2033
  // A list of available versions for the infotype.
2028
2034
  repeated VersionDescription versions = 9;
2029
2035
 
@@ -2241,6 +2247,9 @@ message InfoTypeCategory {
2241
2247
  // Information that is not sensitive on its own, but provides details about
2242
2248
  // the circumstances surrounding an entity or an event.
2243
2249
  CONTEXTUAL_INFORMATION = 7;
2250
+
2251
+ // Category for `CustomInfoType` types.
2252
+ CUSTOM = 8;
2244
2253
  }
2245
2254
 
2246
2255
  // Categories of infotypes.
@@ -2989,7 +2998,8 @@ message PrimitiveTransformation {
2989
2998
  // Mask
2990
2999
  CharacterMaskConfig character_mask_config = 3;
2991
3000
 
2992
- // Ffx-Fpe
3001
+ // Ffx-Fpe. Strongly discouraged, consider using CryptoDeterministicConfig
3002
+ // instead. Fpe is computationally expensive, incurring latency costs.
2993
3003
  CryptoReplaceFfxFpeConfig crypto_replace_ffx_fpe_config = 4;
2994
3004
 
2995
3005
  // Fixed size bucketing
@@ -3311,7 +3321,7 @@ message BucketingConfig {
3311
3321
  //
3312
3322
  // Note: We recommend using CryptoDeterministicConfig for all use cases which
3313
3323
  // do not require preserving the input alphabet space and size, plus warrant
3314
- // referential integrity.
3324
+ // referential integrity. FPE incurs significant latency costs.
3315
3325
  message CryptoReplaceFfxFpeConfig {
3316
3326
  // These are commonly used subsets of the alphabet that the FFX mode
3317
3327
  // natively supports. In the algorithm, the alphabet is selected using
@@ -4824,15 +4834,32 @@ message DataProfileAction {
4824
4834
  // If set, the detailed data profiles will be persisted to the location
4825
4835
  // of your choice whenever updated.
4826
4836
  message Export {
4827
- // Store all table and column profiles in an existing table or a new table
4828
- // in an existing dataset. Each re-generation will result in new rows in
4829
- // BigQuery. Data is inserted using [streaming
4830
- // insert](https://cloud.google.com/blog/products/bigquery/life-of-a-bigquery-streaming-insert)
4831
- // and so data may be in the buffer for a period of time after the profile
4832
- // has finished. The Pub/Sub notification is sent before the streaming
4833
- // buffer is guaranteed to be written, so data may not be instantly
4834
- // visible to queries by the time your topic receives the Pub/Sub
4835
- // notification.
4837
+ // Store all profiles to BigQuery.
4838
+ //
4839
+ // * The system will create a new dataset and table for you if none are
4840
+ // provided. The dataset will be named
4841
+ // `sensitive_data_protection_discovery` and table will be named
4842
+ // `discovery_profiles`. This table will be placed in the same project as
4843
+ // the container project running the scan. After the first profile is
4844
+ // generated and the dataset and table are created, the discovery scan
4845
+ // configuration will be updated with the dataset and table names.
4846
+ // * See [Analyze data profiles stored in
4847
+ // BigQuery](https://cloud.google.com/sensitive-data-protection/docs/analyze-data-profiles).
4848
+ // * See [Sample queries for your BigQuery
4849
+ // table](https://cloud.google.com/sensitive-data-protection/docs/analyze-data-profiles#sample_sql_queries).
4850
+ // * Data is inserted using [streaming
4851
+ // insert](https://cloud.google.com/blog/products/bigquery/life-of-a-bigquery-streaming-insert)
4852
+ // and so data may be in the buffer for a period of time after the
4853
+ // profile has finished.
4854
+ // * The Pub/Sub notification is sent before the streaming buffer is
4855
+ // guaranteed to be written, so data may not be instantly
4856
+ // visible to queries by the time your topic receives the Pub/Sub
4857
+ // notification.
4858
+ // * The best practice is to use the same table for an entire organization
4859
+ // so that you can take advantage of the [provided Looker
4860
+ // reports](https://cloud.google.com/sensitive-data-protection/docs/analyze-data-profiles#use_a_premade_report).
4861
+ // If you use VPC Service Controls to define security perimeters, then
4862
+ // you must use a separate table for each boundary.
4836
4863
  BigQueryTable profile_table = 1;
4837
4864
  }
4838
4865
 
@@ -4896,7 +4923,8 @@ message DataProfileAction {
4896
4923
  // Message expressing intention to publish to Google Security Operations.
4897
4924
  message PublishToChronicle {}
4898
4925
 
4899
- // If set, a summary finding will be created/updated in SCC for each profile.
4926
+ // If set, a summary finding will be created or updated in Security Command
4927
+ // Center for each profile.
4900
4928
  message PublishToSecurityCommandCenter {}
4901
4929
 
4902
4930
  // If set, attaches the [tags]
@@ -4967,7 +4995,7 @@ message DataProfileAction {
4967
4995
  // analytics](https://cloud.google.com/chronicle/docs/detection/usecase-dlp-high-risk-user-download).
4968
4996
  PublishToChronicle publish_to_chronicle = 3;
4969
4997
 
4970
- // Publishes findings to SCC for each data profile.
4998
+ // Publishes findings to Security Command Center for each data profile.
4971
4999
  PublishToSecurityCommandCenter publish_to_scc = 4;
4972
5000
 
4973
5001
  // Tags the profiled resources with the specified tag values.
@@ -4988,7 +5016,7 @@ message DataProfileJobConfig {
4988
5016
 
4989
5017
  // The project that will run the scan. The DLP service
4990
5018
  // account that exists within this project must have access to all resources
4991
- // that are profiled, and the Cloud DLP API must be enabled.
5019
+ // that are profiled, and the DLP API must be enabled.
4992
5020
  string project_id = 5;
4993
5021
 
4994
5022
  // Must be set only when scanning other clouds.
@@ -5133,7 +5161,7 @@ message DiscoveryConfig {
5133
5161
 
5134
5162
  // The project that will run the scan. The DLP service
5135
5163
  // account that exists within this project must have access to all resources
5136
- // that are profiled, and the Cloud DLP API must be enabled.
5164
+ // that are profiled, and the DLP API must be enabled.
5137
5165
  string project_id = 2;
5138
5166
  }
5139
5167
 
@@ -5208,6 +5236,12 @@ message DiscoveryConfig {
5208
5236
 
5209
5237
  // Required. A status for this configuration.
5210
5238
  Status status = 10 [(google.api.field_behavior) = REQUIRED];
5239
+
5240
+ // Optional. Processing location configuration. Vertex AI dataset scanning
5241
+ // will set processing_location.image_fallback_location to MultiRegionProcessing
5242
+ // by default.
5243
+ ProcessingLocation processing_location = 13
5244
+ [(google.api.field_behavior) = OPTIONAL];
5211
5245
  }
5212
5246
 
5213
5247
  // Target used to match against for Discovery.
@@ -5234,6 +5268,16 @@ message DiscoveryTarget {
5234
5268
  // Other clouds target for discovery. The first target to match a resource
5235
5269
  // will be the one applied.
5236
5270
  OtherCloudDiscoveryTarget other_cloud_target = 5;
5271
+
5272
+ // Vertex AI dataset target for Discovery. The first target to match a
5273
+ // dataset will be the one applied. Note that discovery for Vertex AI can
5274
+ // incur Cloud Storage Class B operation charges for storage.objects.get
5275
+ // operations and retrieval fees. For more information, see [Cloud Storage
5276
+ // pricing](https://cloud.google.com/storage/pricing#price-tables).
5277
+ // Note that discovery for Vertex AI datasets will not be able to scan images
5278
+ // unless DiscoveryConfig.processing_location.image_fallback_location has
5279
+ // multi_region_processing or global_processing configured.
5280
+ VertexDatasetDiscoveryTarget vertex_dataset_target = 7;
5237
5281
  }
5238
5282
  }
5239
5283
 
@@ -5798,13 +5842,13 @@ message DiscoveryCloudStorageConditions {
5798
5842
  // Scan buckets regardless of the attribute.
5799
5843
  ALL_SUPPORTED_BUCKETS = 1;
5800
5844
 
5801
- // Buckets with autoclass disabled
5802
- // (https://cloud.google.com/storage/docs/autoclass). Only one of
5845
+ // Buckets with [Autoclass](https://cloud.google.com/storage/docs/autoclass)
5846
+ // disabled. Only one of
5803
5847
  // AUTOCLASS_DISABLED or AUTOCLASS_ENABLED should be set.
5804
5848
  AUTOCLASS_DISABLED = 2;
5805
5849
 
5806
- // Buckets with autoclass enabled
5807
- // (https://cloud.google.com/storage/docs/autoclass). Only one of
5850
+ // Buckets with [Autoclass](https://cloud.google.com/storage/docs/autoclass)
5851
+ // enabled. Only one of
5808
5852
  // AUTOCLASS_DISABLED or AUTOCLASS_ENABLED should be set. Scanning
5809
5853
  // Autoclass-enabled buckets can affect object storage classes.
5810
5854
  AUTOCLASS_ENABLED = 3;
@@ -6088,6 +6132,114 @@ message OtherCloudDiscoveryStartingLocation {
6088
6132
  // Match discovery resources not covered by any other filter.
6089
6133
  message AllOtherResources {}
6090
6134
 
6135
+ // Target used to match against for discovery with Vertex AI datasets.
6136
+ message VertexDatasetDiscoveryTarget {
6137
+ // Required. The datasets the discovery cadence applies to. The first target
6138
+ // with a matching filter will be the one to apply to a dataset.
6139
+ DiscoveryVertexDatasetFilter filter = 1
6140
+ [(google.api.field_behavior) = REQUIRED];
6141
+
6142
+ // In addition to matching the filter, these conditions must be true
6143
+ // before a profile is generated.
6144
+ DiscoveryVertexDatasetConditions conditions = 2;
6145
+
6146
+ // Type of schedule.
6147
+ oneof cadence {
6148
+ // How often and when to update profiles. New datasets that match both the
6149
+ // filter and conditions are scanned as quickly as possible depending on
6150
+ // system capacity.
6151
+ DiscoveryVertexDatasetGenerationCadence generation_cadence = 3;
6152
+
6153
+ // Disable profiling for datasets that match this filter.
6154
+ Disabled disabled = 4;
6155
+ }
6156
+ }
6157
+
6158
+ // Determines what datasets will have profiles generated within an organization
6159
+ // or project. Includes the ability to filter by regular expression patterns
6160
+ // on project ID or dataset regex.
6161
+ message DiscoveryVertexDatasetFilter {
6162
+ // Whether the filter applies to a specific set of datasets or all
6163
+ // other datasets within the location being profiled. The first
6164
+ // filter to match will be applied, regardless of the condition. If none is
6165
+ // set, this field defaults to `others`.
6166
+ oneof filter {
6167
+ // A specific set of Vertex AI datasets for this filter to apply to.
6168
+ VertexDatasetCollection collection = 1;
6169
+
6170
+ // The dataset resource to scan. Targets including this can only include
6171
+ // one target (the target with this dataset resource reference).
6172
+ VertexDatasetResourceReference vertex_dataset_resource_reference = 2;
6173
+
6174
+ // Catch-all. This should always be the last target in the list because
6175
+ // anything above it will apply first. Should only appear once in a
6176
+ // configuration. If none is specified, a default one will be added
6177
+ // automatically.
6178
+ AllOtherResources others = 100;
6179
+ }
6180
+ }
6181
+
6182
+ // Match dataset resources using regex filters.
6183
+ message VertexDatasetCollection {
6184
+ // The pattern used to filter dataset resources.
6185
+ oneof pattern {
6186
+ // The regex used to filter dataset resources.
6187
+ VertexDatasetRegexes vertex_dataset_regexes = 1;
6188
+ }
6189
+ }
6190
+
6191
+ // A collection of regular expressions to determine what datasets to match
6192
+ // against.
6193
+ message VertexDatasetRegexes {
6194
+ // Required. The group of regular expression patterns to match against one or
6195
+ // more datasets. Maximum of 100 entries. The sum of the lengths of all
6196
+ // regular expressions can't exceed 10 KiB.
6197
+ repeated VertexDatasetRegex patterns = 1
6198
+ [(google.api.field_behavior) = REQUIRED];
6199
+ }
6200
+
6201
+ // A pattern to match against one or more dataset resources.
6202
+ message VertexDatasetRegex {
6203
+ // For organizations, if unset, will match all projects. Has no effect
6204
+ // for configurations created within a project.
6205
+ string project_id_regex = 1;
6206
+ }
6207
+
6208
+ // Identifies a single Vertex AI dataset.
6209
+ message VertexDatasetResourceReference {
6210
+ // Required. The name of the dataset resource. If set within a project-level
6211
+ // configuration, the specified resource must be within the project.
6212
+ string dataset_resource_name = 1 [(google.api.field_behavior) = REQUIRED];
6213
+ }
6214
+
6215
+ // Requirements that must be true before a dataset is profiled for the
6216
+ // first time.
6217
+ message DiscoveryVertexDatasetConditions {
6218
+ // Vertex AI dataset must have been created after this date. Used to avoid
6219
+ // backfilling.
6220
+ google.protobuf.Timestamp created_after = 1;
6221
+
6222
+ // Minimum age a Vertex AI dataset must have. If set, the value must be 1 hour
6223
+ // or greater.
6224
+ google.protobuf.Duration min_age = 2;
6225
+ }
6226
+
6227
+ // How often existing datasets should have their profiles refreshed.
6228
+ // New datasets are scanned as quickly as possible depending on system
6229
+ // capacity.
6230
+ message DiscoveryVertexDatasetGenerationCadence {
6231
+ // If you set this field, profiles are refreshed at this
6232
+ // frequency regardless of whether the underlying datasets have changed.
6233
+ // Defaults to never.
6234
+ DataProfileUpdateFrequency refresh_frequency = 1;
6235
+
6236
+ // Governs when to update data profiles when the inspection rules
6237
+ // defined by the `InspectTemplate` change.
6238
+ // If not set, changing the template will not cause a data profile to be
6239
+ // updated.
6240
+ DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2;
6241
+ }
6242
+
6091
6243
  // Combines all of the information about a DLP job.
6092
6244
  message DlpJob {
6093
6245
  option (google.api.resource) = {
@@ -6165,7 +6317,8 @@ message DlpJob {
6165
6317
  repeated ActionDetails action_details = 12;
6166
6318
  }
6167
6319
 
6168
- // The request message for [DlpJobs.GetDlpJob][].
6320
+ // The request message for
6321
+ // [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob].
6169
6322
  message GetDlpJobRequest {
6170
6323
  // Required. The name of the DlpJob resource.
6171
6324
  string name = 1 [
@@ -7366,6 +7519,9 @@ message TableDataProfile {
7366
7519
 
7367
7520
  // The time at which the table was created.
7368
7521
  google.protobuf.Timestamp create_time = 23;
7522
+
7523
+ // Resources related to this profile.
7524
+ repeated RelatedResource related_resources = 41;
7369
7525
  }
7370
7526
 
7371
7527
  // Success or errors for the profile generation.
@@ -7668,14 +7824,16 @@ message FileStoreDataProfile {
7668
7824
  // profile.
7669
7825
  repeated string data_storage_locations = 19;
7670
7826
 
7671
- // The location type of the bucket (region, dual-region, multi-region, etc).
7672
- // If dual-region, expect data_storage_locations to be populated.
7827
+ // The location type of the file store (region, dual-region, multi-region,
7828
+ // etc). If dual-region, expect data_storage_locations to be populated.
7673
7829
  string location_type = 20;
7674
7830
 
7675
7831
  // The file store path.
7676
7832
  //
7677
7833
  // * Cloud Storage: `gs://{bucket}`
7678
7834
  // * Amazon S3: `s3://{bucket}`
7835
+ // * Vertex AI dataset:
7836
+ // `projects/{project_number}/locations/{location}/datasets/{dataset_id}`
7679
7837
  string file_store_path = 6;
7680
7838
 
7681
7839
  // The resource name of the resource profiled.
@@ -7732,6 +7890,19 @@ message FileStoreDataProfile {
7732
7890
 
7733
7891
  // The file store does not have any files.
7734
7892
  bool file_store_is_empty = 23;
7893
+
7894
+ // Resources related to this profile.
7895
+ repeated RelatedResource related_resources = 26;
7896
+ }
7897
+
7898
+ // A related resource.
7899
+ // Examples:
7900
+ //
7901
+ // * The source BigQuery table for a Vertex AI dataset.
7902
+ // * The source Cloud Storage bucket for a Vertex AI dataset.
7903
+ message RelatedResource {
7904
+ // The full resource name of the related resource.
7905
+ string full_resource = 1;
7735
7906
  }
7736
7907
 
7737
7908
  // Information regarding the discovered InfoType.
@@ -7775,8 +7946,8 @@ message FileClusterSummary {
7775
7946
  // File extensions can be derived from the file name or the file content.
7776
7947
  repeated FileExtensionInfo file_extensions_seen = 8;
7777
7948
 
7778
- // True if no files exist in this cluster. If the bucket had more files than
7779
- // could be listed, this will be false even if no files for this cluster
7949
+ // True if no files exist in this cluster. If the file store had more files
7950
+ // than could be listed, this will be false even if no files for this cluster
7780
7951
  // were seen and file_extensions_seen is empty.
7781
7952
  bool no_files_exist = 9;
7782
7953
  }
@@ -8124,8 +8295,8 @@ message DeleteConnectionRequest {
8124
8295
  ];
8125
8296
  }
8126
8297
 
8127
- // A data connection to allow DLP to profile data in locations that require
8128
- // additional configuration.
8298
+ // A data connection to allow the DLP API to profile data in locations that
8299
+ // require additional configuration.
8129
8300
  message Connection {
8130
8301
  option (google.api.resource) = {
8131
8302
  type: "dlp.googleapis.com/Connection"
@@ -8157,8 +8328,8 @@ enum ConnectionState {
8157
8328
  // Unused
8158
8329
  CONNECTION_STATE_UNSPECIFIED = 0;
8159
8330
 
8160
- // DLP automatically created this connection during an initial scan, and it is
8161
- // awaiting full configuration by a user.
8331
+ // The DLP API automatically created this connection during an initial scan,
8332
+ // and it is awaiting full configuration by a user.
8162
8333
  MISSING_CREDENTIALS = 1;
8163
8334
 
8164
8335
  // A configured connection that has not encountered any errors.
@@ -8233,7 +8404,7 @@ message CloudSqlProperties {
8233
8404
  CloudSqlIamCredential cloud_sql_iam = 3;
8234
8405
  }
8235
8406
 
8236
- // Required. DLP will limit its connections to max_connections.
8407
+ // Required. The DLP API will limit its connections to max_connections.
8237
8408
  // Must be 2 or greater.
8238
8409
  int32 max_connections = 4 [(google.api.field_behavior) = REQUIRED];
8239
8410
 
@@ -8299,6 +8470,9 @@ message FileClusterType {
8299
8470
 
8300
8471
  // Executable files like .exe, .class, .apk etc.
8301
8472
  CLUSTER_EXECUTABLE = 9;
8473
+
8474
+ // AI models like .tflite etc.
8475
+ CLUSTER_AI_MODEL = 10;
8302
8476
  }
8303
8477
 
8304
8478
  // File cluster type.
@@ -8307,3 +8481,29 @@ message FileClusterType {
8307
8481
  Cluster cluster = 1;
8308
8482
  }
8309
8483
  }
8484
+
8485
+ // Configure processing location for discovery and inspection. For example,
8486
+ // image OCR is only provided in limited regions but configuring
8487
+ // ProcessingLocation will redirect OCR to a location where OCR is provided.
8488
+ message ProcessingLocation {
8489
+ // Processing will happen in a multi-region that contains the current region
8490
+ // if available.
8491
+ message MultiRegionProcessing {}
8492
+
8493
+ // Processing will happen in the global region.
8494
+ message GlobalProcessing {}
8495
+
8496
+ // Configure image processing to fall back to the configured processing option
8497
+ // below if unavailable in the request location.
8498
+ message ImageFallbackLocation {
8499
+ // Processing will happen in a multi-region that contains the current region
8500
+ // if available.
8501
+ MultiRegionProcessing multi_region_processing = 100;
8502
+
8503
+ // Processing will happen in the global region.
8504
+ GlobalProcessing global_processing = 200;
8505
+ }
8506
+
8507
+ // Image processing will fall back using this configuration.
8508
+ ImageFallbackLocation image_fallback_location = 1;
8509
+ }
@@ -1,4 +1,4 @@
1
- // Copyright 2024 Google LLC
1
+ // Copyright 2025 Google LLC
2
2
  //
3
3
  // Licensed under the Apache License, Version 2.0 (the "License");
4
4
  // you may not use this file except in compliance with the License.
@@ -850,7 +850,7 @@ message RecordKey {
850
850
  // `<project_id>:<dataset_id>.<table_id>` or
851
851
  // `<project_id>.<dataset_id>.<table_id>`.
852
852
  message BigQueryTable {
853
- // The Google Cloud Platform project ID of the project containing the table.
853
+ // The Google Cloud project ID of the project containing the table.
854
854
  // If omitted, project ID is inferred from the API call.
855
855
  string project_id = 1;
856
856