@google-cloud/dlp 5.11.0 → 5.13.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -4,6 +4,20 @@
4
4
 
5
5
  [1]: https://www.npmjs.com/package/@google-cloud/dlp?activeTab=versions
6
6
 
7
+ ## [5.13.0](https://github.com/googleapis/google-cloud-node/compare/dlp-v5.12.0...dlp-v5.13.0) (2025-02-28)
8
+
9
+
10
+ ### Features
11
+
12
+ * [dlp] discovery of Vertex AI datasets ([#6041](https://github.com/googleapis/google-cloud-node/issues/6041)) ([65beb7c](https://github.com/googleapis/google-cloud-node/commit/65beb7cee55e4f37a7449175905f896609ad001a))
13
+
14
+ ## [5.12.0](https://github.com/googleapis/google-cloud-node/compare/dlp-v5.11.0...dlp-v5.12.0) (2024-10-30)
15
+
16
+
17
+ ### Features
18
+
19
+ * [dlp] discovery of BigQuery snapshots ([#5757](https://github.com/googleapis/google-cloud-node/issues/5757)) ([3f262fc](https://github.com/googleapis/google-cloud-node/commit/3f262fc59aa6054bd22f6868e76e7c686a9444a3))
20
+
7
21
  ## [5.11.0](https://github.com/googleapis/google-cloud-node/compare/dlp-v5.10.0...dlp-v5.11.0) (2024-09-24)
8
22
 
9
23
 
@@ -47,13 +47,9 @@ option (google.api.resource_definition) = {
47
47
  pattern: "organizations/{organization}/locations/{location}"
48
48
  };
49
49
 
50
- // The Cloud Data Loss Prevention (DLP) API is a service that allows clients
51
- // to detect the presence of Personally Identifiable Information (PII) and other
52
- // privacy-sensitive data in user-supplied, unstructured data streams, like text
53
- // blocks or images.
54
- // The service also includes methods for sensitive data redaction and
55
- // scheduling of data scans on Google Cloud Platform based data sets.
56
- //
50
+ // Sensitive Data Protection provides access to a powerful sensitive data
51
+ // inspection, classification, and de-identification platform that works
52
+ // on text, images, and Google Cloud storage repositories.
57
53
  // To learn more about concepts and find how-to guides see
58
54
  // https://cloud.google.com/sensitive-data-protection/docs/.
59
55
  service DlpService {
@@ -140,7 +136,7 @@ service DlpService {
140
136
  };
141
137
  }
142
138
 
143
- // Returns a list of the sensitive information types that DLP API
139
+ // Returns a list of the sensitive information types that the DLP API
144
140
  // supports. See
145
141
  // https://cloud.google.com/sensitive-data-protection/docs/infotypes-reference
146
142
  // to learn more.
@@ -1238,6 +1234,9 @@ message ByteContentItem {
1238
1234
 
1239
1235
  // Executable file types. Only used for profiling.
1240
1236
  EXECUTABLE = 17;
1237
+
1238
+ // AI model file types. Only used for profiling.
1239
+ AI_MODEL = 18;
1241
1240
  }
1242
1241
 
1243
1242
  // The type of data stored in the bytes string. Default will be TEXT_UTF8.
@@ -2028,6 +2027,9 @@ message InfoTypeDescription {
2028
2027
  // request.
2029
2028
  string description = 4;
2030
2029
 
2030
+ // A sample that is a true positive for this infoType.
2031
+ string example = 8;
2032
+
2031
2033
  // A list of available versions for the infotype.
2032
2034
  repeated VersionDescription versions = 9;
2033
2035
 
@@ -2245,6 +2247,9 @@ message InfoTypeCategory {
2245
2247
  // Information that is not sensitive on its own, but provides details about
2246
2248
  // the circumstances surrounding an entity or an event.
2247
2249
  CONTEXTUAL_INFORMATION = 7;
2250
+
2251
+ // Category for `CustomInfoType` types.
2252
+ CUSTOM = 8;
2248
2253
  }
2249
2254
 
2250
2255
  // Categories of infotypes.
@@ -2993,7 +2998,8 @@ message PrimitiveTransformation {
2993
2998
  // Mask
2994
2999
  CharacterMaskConfig character_mask_config = 3;
2995
3000
 
2996
- // Ffx-Fpe
3001
+ // Ffx-Fpe. Strongly discouraged, consider using CryptoDeterministicConfig
3002
+ // instead. Fpe is computationally expensive, incurring latency costs.
2997
3003
  CryptoReplaceFfxFpeConfig crypto_replace_ffx_fpe_config = 4;
2998
3004
 
2999
3005
  // Fixed size bucketing
@@ -3315,7 +3321,7 @@ message BucketingConfig {
3315
3321
  //
3316
3322
  // Note: We recommend using CryptoDeterministicConfig for all use cases which
3317
3323
  // do not require preserving the input alphabet space and size, plus warrant
3318
- // referential integrity.
3324
+ // referential integrity. FPE incurs significant latency costs.
3319
3325
  message CryptoReplaceFfxFpeConfig {
3320
3326
  // These are commonly used subsets of the alphabet that the FFX mode
3321
3327
  // natively supports. In the algorithm, the alphabet is selected using
@@ -4828,15 +4834,32 @@ message DataProfileAction {
4828
4834
  // If set, the detailed data profiles will be persisted to the location
4829
4835
  // of your choice whenever updated.
4830
4836
  message Export {
4831
- // Store all table and column profiles in an existing table or a new table
4832
- // in an existing dataset. Each re-generation will result in new rows in
4833
- // BigQuery. Data is inserted using [streaming
4834
- // insert](https://cloud.google.com/blog/products/bigquery/life-of-a-bigquery-streaming-insert)
4835
- // and so data may be in the buffer for a period of time after the profile
4836
- // has finished. The Pub/Sub notification is sent before the streaming
4837
- // buffer is guaranteed to be written, so data may not be instantly
4838
- // visible to queries by the time your topic receives the Pub/Sub
4839
- // notification.
4837
+ // Store all profiles to BigQuery.
4838
+ //
4839
+ // * The system will create a new dataset and table for you if none
4840
+ // are provided. The dataset will be named
4841
+ // `sensitive_data_protection_discovery` and table will be named
4842
+ // `discovery_profiles`. This table will be placed in the same project as
4843
+ // the container project running the scan. After the first profile is
4844
+ // generated and the dataset and table are created, the discovery scan
4845
+ // configuration will be updated with the dataset and table names.
4846
+ // * See [Analyze data profiles stored in
4847
+ // BigQuery](https://cloud.google.com/sensitive-data-protection/docs/analyze-data-profiles).
4848
+ // * See [Sample queries for your BigQuery
4849
+ // table](https://cloud.google.com/sensitive-data-protection/docs/analyze-data-profiles#sample_sql_queries).
4850
+ // * Data is inserted using [streaming
4851
+ // insert](https://cloud.google.com/blog/products/bigquery/life-of-a-bigquery-streaming-insert)
4852
+ // and so data may be in the buffer for a period of time after the
4853
+ // profile has finished.
4854
+ // * The Pub/Sub notification is sent before the streaming buffer is
4855
+ // guaranteed to be written, so data may not be instantly
4856
+ // visible to queries by the time your topic receives the Pub/Sub
4857
+ // notification.
4858
+ // * The best practice is to use the same table for an entire organization
4859
+ // so that you can take advantage of the [provided Looker
4860
+ // reports](https://cloud.google.com/sensitive-data-protection/docs/analyze-data-profiles#use_a_premade_report).
4861
+ // If you use VPC Service Controls to define security perimeters, then
4862
+ // you must use a separate table for each boundary.
4840
4863
  BigQueryTable profile_table = 1;
4841
4864
  }
4842
4865
 
@@ -4900,7 +4923,8 @@ message DataProfileAction {
4900
4923
  // Message expressing intention to publish to Google Security Operations.
4901
4924
  message PublishToChronicle {}
4902
4925
 
4903
- // If set, a summary finding will be created/updated in SCC for each profile.
4926
+ // If set, a summary finding will be created or updated in Security Command
4927
+ // Center for each profile.
4904
4928
  message PublishToSecurityCommandCenter {}
4905
4929
 
4906
4930
  // If set, attaches the [tags]
@@ -4971,7 +4995,7 @@ message DataProfileAction {
4971
4995
  // analytics](https://cloud.google.com/chronicle/docs/detection/usecase-dlp-high-risk-user-download).
4972
4996
  PublishToChronicle publish_to_chronicle = 3;
4973
4997
 
4974
- // Publishes findings to SCC for each data profile.
4998
+ // Publishes findings to Security Command Center for each data profile.
4975
4999
  PublishToSecurityCommandCenter publish_to_scc = 4;
4976
5000
 
4977
5001
  // Tags the profiled resources with the specified tag values.
@@ -4992,7 +5016,7 @@ message DataProfileJobConfig {
4992
5016
 
4993
5017
  // The project that will run the scan. The DLP service
4994
5018
  // account that exists within this project must have access to all resources
4995
- // that are profiled, and the Cloud DLP API must be enabled.
5019
+ // that are profiled, and the DLP API must be enabled.
4996
5020
  string project_id = 5;
4997
5021
 
4998
5022
  // Must be set only when scanning other clouds.
@@ -5051,8 +5075,8 @@ message BigQueryTableTypes {
5051
5075
  repeated BigQueryTableType types = 1;
5052
5076
  }
5053
5077
 
5054
- // Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW,
5055
- // and SNAPSHOT are not supported.
5078
+ // Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW, and
5079
+ // non-BigLake external tables are not supported.
5056
5080
  enum BigQueryTableTypeCollection {
5057
5081
  // Unused.
5058
5082
  BIG_QUERY_COLLECTION_UNSPECIFIED = 0;
@@ -5070,8 +5094,8 @@ enum BigQueryTableTypeCollection {
5070
5094
  BIG_QUERY_COLLECTION_ONLY_SUPPORTED_TYPES = 2;
5071
5095
  }
5072
5096
 
5073
- // Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW,
5074
- // SNAPSHOT, and non-BigLake external tables are not supported.
5097
+ // Over time new types may be added. Currently VIEW, MATERIALIZED_VIEW, and
5098
+ // non-BigLake external tables are not supported.
5075
5099
  enum BigQueryTableType {
5076
5100
  // Unused.
5077
5101
  BIG_QUERY_TABLE_TYPE_UNSPECIFIED = 0;
@@ -5081,6 +5105,9 @@ enum BigQueryTableType {
5081
5105
 
5082
5106
  // A table that references data stored in Cloud Storage.
5083
5107
  BIG_QUERY_TABLE_TYPE_EXTERNAL_BIG_LAKE = 2;
5108
+
5109
+ // A snapshot of a BigQuery table.
5110
+ BIG_QUERY_TABLE_TYPE_SNAPSHOT = 3;
5084
5111
  }
5085
5112
 
5086
5113
  // How frequently data profiles can be updated. New options can be added at a
@@ -5134,7 +5161,7 @@ message DiscoveryConfig {
5134
5161
 
5135
5162
  // The project that will run the scan. The DLP service
5136
5163
  // account that exists within this project must have access to all resources
5137
- // that are profiled, and the Cloud DLP API must be enabled.
5164
+ // that are profiled, and the DLP API must be enabled.
5138
5165
  string project_id = 2;
5139
5166
  }
5140
5167
 
@@ -5209,6 +5236,12 @@ message DiscoveryConfig {
5209
5236
 
5210
5237
  // Required. A status for this configuration.
5211
5238
  Status status = 10 [(google.api.field_behavior) = REQUIRED];
5239
+
5240
+ // Optional. Processing location configuration. Vertex AI dataset scanning
5241
+ // will set processing_location.image_fallback_type to MultiRegionProcessing
5242
+ // by default.
5243
+ ProcessingLocation processing_location = 13
5244
+ [(google.api.field_behavior) = OPTIONAL];
5212
5245
  }
5213
5246
 
5214
5247
  // Target used to match against for Discovery.
@@ -5235,6 +5268,16 @@ message DiscoveryTarget {
5235
5268
  // Other clouds target for discovery. The first target to match a resource
5236
5269
  // will be the one applied.
5237
5270
  OtherCloudDiscoveryTarget other_cloud_target = 5;
5271
+
5272
+ // Vertex AI dataset target for Discovery. The first target to match a
5273
+ // dataset will be the one applied. Note that discovery for Vertex AI can
5274
+ // incur Cloud Storage Class B operation charges for storage.objects.get
5275
+ // operations and retrieval fees. For more information, see [Cloud Storage
5276
+ // pricing](https://cloud.google.com/storage/pricing#price-tables).
5277
+ // Note that discovery for Vertex AI dataset will not be able to scan images
5278
+ // unless DiscoveryConfig.processing_location.image_fallback_location has
5279
+ // multi_region_processing or global_processing configured.
5280
+ VertexDatasetDiscoveryTarget vertex_dataset_target = 7;
5238
5281
  }
5239
5282
  }
5240
5283
 
@@ -5799,13 +5842,13 @@ message DiscoveryCloudStorageConditions {
5799
5842
  // Scan buckets regardless of the attribute.
5800
5843
  ALL_SUPPORTED_BUCKETS = 1;
5801
5844
 
5802
- // Buckets with autoclass disabled
5803
- // (https://cloud.google.com/storage/docs/autoclass). Only one of
5845
+ // Buckets with [Autoclass](https://cloud.google.com/storage/docs/autoclass)
5846
+ // disabled. Only one of
5804
5847
  // AUTOCLASS_DISABLED or AUTOCLASS_ENABLED should be set.
5805
5848
  AUTOCLASS_DISABLED = 2;
5806
5849
 
5807
- // Buckets with autoclass enabled
5808
- // (https://cloud.google.com/storage/docs/autoclass). Only one of
5850
+ // Buckets with [Autoclass](https://cloud.google.com/storage/docs/autoclass)
5851
+ // enabled. Only one of
5809
5852
  // AUTOCLASS_DISABLED or AUTOCLASS_ENABLED should be set. Scanning
5810
5853
  // Autoclass-enabled buckets can affect object storage classes.
5811
5854
  AUTOCLASS_ENABLED = 3;
@@ -6089,6 +6132,114 @@ message OtherCloudDiscoveryStartingLocation {
6089
6132
  // Match discovery resources not covered by any other filter.
6090
6133
  message AllOtherResources {}
6091
6134
 
6135
+ // Target used to match against for discovery with Vertex AI datasets.
6136
+ message VertexDatasetDiscoveryTarget {
6137
+ // Required. The datasets the discovery cadence applies to. The first target
6138
+ // with a matching filter will be the one to apply to a dataset.
6139
+ DiscoveryVertexDatasetFilter filter = 1
6140
+ [(google.api.field_behavior) = REQUIRED];
6141
+
6142
+ // In addition to matching the filter, these conditions must be true
6143
+ // before a profile is generated.
6144
+ DiscoveryVertexDatasetConditions conditions = 2;
6145
+
6146
+ // Type of schedule.
6147
+ oneof cadence {
6148
+ // How often and when to update profiles. New datasets that match both the
6149
+ // filter and conditions are scanned as quickly as possible depending on
6150
+ // system capacity.
6151
+ DiscoveryVertexDatasetGenerationCadence generation_cadence = 3;
6152
+
6153
+ // Disable profiling for datasets that match this filter.
6154
+ Disabled disabled = 4;
6155
+ }
6156
+ }
6157
+
6158
+ // Determines what datasets will have profiles generated within an organization
6159
+ // or project. Includes the ability to filter by regular expression patterns
6160
+ // on project ID or dataset regex.
6161
+ message DiscoveryVertexDatasetFilter {
6162
+ // Whether the filter applies to a specific set of datasets or all
6163
+ // other datasets within the location being profiled. The first
6164
+ // filter to match will be applied, regardless of the condition. If none is
6165
+ // set, this field defaults to `others`.
6166
+ oneof filter {
6167
+ // A specific set of Vertex AI datasets for this filter to apply to.
6168
+ VertexDatasetCollection collection = 1;
6169
+
6170
+ // The dataset resource to scan. Targets including this can only include
6171
+ // one target (the target with this dataset resource reference).
6172
+ VertexDatasetResourceReference vertex_dataset_resource_reference = 2;
6173
+
6174
+ // Catch-all. This should always be the last target in the list because
6175
+ // anything above it will apply first. Should only appear once in a
6176
+ // configuration. If none is specified, a default one will be added
6177
+ // automatically.
6178
+ AllOtherResources others = 100;
6179
+ }
6180
+ }
6181
+
6182
+ // Match dataset resources using regex filters.
6183
+ message VertexDatasetCollection {
6184
+ // The pattern used to filter dataset resources.
6185
+ oneof pattern {
6186
+ // The regex used to filter dataset resources.
6187
+ VertexDatasetRegexes vertex_dataset_regexes = 1;
6188
+ }
6189
+ }
6190
+
6191
+ // A collection of regular expressions to determine what datasets to match
6192
+ // against.
6193
+ message VertexDatasetRegexes {
6194
+ // Required. The group of regular expression patterns to match against one or
6195
+ // more datasets. Maximum of 100 entries. The sum of the lengths of all
6196
+ // regular expressions can't exceed 10 KiB.
6197
+ repeated VertexDatasetRegex patterns = 1
6198
+ [(google.api.field_behavior) = REQUIRED];
6199
+ }
6200
+
6201
+ // A pattern to match against one or more dataset resources.
6202
+ message VertexDatasetRegex {
6203
+ // For organizations, if unset, will match all projects. Has no effect
6204
+ // for configurations created within a project.
6205
+ string project_id_regex = 1;
6206
+ }
6207
+
6208
+ // Identifies a single Vertex AI dataset.
6209
+ message VertexDatasetResourceReference {
6210
+ // Required. The name of the dataset resource. If set within a project-level
6211
+ // configuration, the specified resource must be within the project.
6212
+ string dataset_resource_name = 1 [(google.api.field_behavior) = REQUIRED];
6213
+ }
6214
+
6215
+ // Requirements that must be true before a dataset is profiled for the
6216
+ // first time.
6217
+ message DiscoveryVertexDatasetConditions {
6218
+ // Vertex AI dataset must have been created after this date. Used to avoid
6219
+ // backfilling.
6220
+ google.protobuf.Timestamp created_after = 1;
6221
+
6222
+ // Minimum age a Vertex AI dataset must have. If set, the value must be 1 hour
6223
+ // or greater.
6224
+ google.protobuf.Duration min_age = 2;
6225
+ }
6226
+
6227
+ // How often existing datasets should have their profiles refreshed.
6228
+ // New datasets are scanned as quickly as possible depending on system
6229
+ // capacity.
6230
+ message DiscoveryVertexDatasetGenerationCadence {
6231
+ // If you set this field, profiles are refreshed at this
6232
+ // frequency regardless of whether the underlying datasets have changed.
6233
+ // Defaults to never.
6234
+ DataProfileUpdateFrequency refresh_frequency = 1;
6235
+
6236
+ // Governs when to update data profiles when the inspection rules
6237
+ // defined by the `InspectTemplate` change.
6238
+ // If not set, changing the template will not cause a data profile to be
6239
+ // updated.
6240
+ DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2;
6241
+ }
6242
+
6092
6243
  // Combines all of the information about a DLP job.
6093
6244
  message DlpJob {
6094
6245
  option (google.api.resource) = {
@@ -6166,7 +6317,8 @@ message DlpJob {
6166
6317
  repeated ActionDetails action_details = 12;
6167
6318
  }
6168
6319
 
6169
- // The request message for [DlpJobs.GetDlpJob][].
6320
+ // The request message for
6321
+ // [GetDlpJob][google.privacy.dlp.v2.DlpService.GetDlpJob].
6170
6322
  message GetDlpJobRequest {
6171
6323
  // Required. The name of the DlpJob resource.
6172
6324
  string name = 1 [
@@ -7301,13 +7453,14 @@ message TableDataProfile {
7301
7453
  // locations.
7302
7454
  string dataset_location = 29;
7303
7455
 
7304
- // If the resource is BigQuery, the dataset ID.
7456
+ // If the resource is BigQuery, the dataset ID.
7305
7457
  string dataset_id = 25;
7306
7458
 
7307
- // If the resource is BigQuery, the BigQuery table ID.
7459
+ // The table ID.
7308
7460
  string table_id = 26;
7309
7461
 
7310
- // The resource name of the resource profiled.
7462
+ // The Cloud Asset Inventory resource that was profiled in order to generate
7463
+ // this TableDataProfile.
7311
7464
  // https://cloud.google.com/apis/design/resource_names#full_resource_name
7312
7465
  string full_resource = 3;
7313
7466
 
@@ -7366,6 +7519,9 @@ message TableDataProfile {
7366
7519
 
7367
7520
  // The time at which the table was created.
7368
7521
  google.protobuf.Timestamp create_time = 23;
7522
+
7523
+ // Resources related to this profile.
7524
+ repeated RelatedResource related_resources = 41;
7369
7525
  }
7370
7526
 
7371
7527
  // Success or errors for the profile generation.
@@ -7566,15 +7722,15 @@ message ColumnDataProfile {
7566
7722
  // The Google Cloud project ID that owns the profiled resource.
7567
7723
  string dataset_project_id = 19;
7568
7724
 
7569
- // The BigQuery location where the dataset's data is stored.
7725
+ // If supported, the location where the dataset's data is stored.
7570
7726
  // See https://cloud.google.com/bigquery/docs/locations for supported
7571
- // locations.
7727
+ // BigQuery locations.
7572
7728
  string dataset_location = 20;
7573
7729
 
7574
- // The BigQuery dataset ID.
7730
+ // The BigQuery dataset ID, if the resource profiled is a BigQuery table.
7575
7731
  string dataset_id = 21;
7576
7732
 
7577
- // The BigQuery table ID.
7733
+ // The table ID.
7578
7734
  string table_id = 22;
7579
7735
 
7580
7736
  // The name of the column.
@@ -7668,14 +7824,16 @@ message FileStoreDataProfile {
7668
7824
  // profile.
7669
7825
  repeated string data_storage_locations = 19;
7670
7826
 
7671
- // The location type of the bucket (region, dual-region, multi-region, etc).
7672
- // If dual-region, expect data_storage_locations to be populated.
7827
+ // The location type of the file store (region, dual-region, multi-region,
7828
+ // etc). If dual-region, expect data_storage_locations to be populated.
7673
7829
  string location_type = 20;
7674
7830
 
7675
7831
  // The file store path.
7676
7832
  //
7677
7833
  // * Cloud Storage: `gs://{bucket}`
7678
7834
  // * Amazon S3: `s3://{bucket}`
7835
+ // * Vertex AI dataset:
7836
+ // `projects/{project_number}/locations/{location}/datasets/{dataset_id}`
7679
7837
  string file_store_path = 6;
7680
7838
 
7681
7839
  // The resource name of the resource profiled.
@@ -7732,6 +7890,19 @@ message FileStoreDataProfile {
7732
7890
 
7733
7891
  // The file store does not have any files.
7734
7892
  bool file_store_is_empty = 23;
7893
+
7894
+ // Resources related to this profile.
7895
+ repeated RelatedResource related_resources = 26;
7896
+ }
7897
+
7898
+ // A related resource.
7899
+ // Examples:
7900
+ //
7901
+ // * The source BigQuery table for a Vertex AI dataset.
7902
+ // * The source Cloud Storage bucket for a Vertex AI dataset.
7903
+ message RelatedResource {
7904
+ // The full resource name of the related resource.
7905
+ string full_resource = 1;
7735
7906
  }
7736
7907
 
7737
7908
  // Information regarding the discovered InfoType.
@@ -7775,8 +7946,8 @@ message FileClusterSummary {
7775
7946
  // File extensions can be derived from the file name or the file content.
7776
7947
  repeated FileExtensionInfo file_extensions_seen = 8;
7777
7948
 
7778
- // True if no files exist in this cluster. If the bucket had more files than
7779
- // could be listed, this will be false even if no files for this cluster
7949
+ // True if no files exist in this cluster. If the file store had more files
7950
+ // than could be listed, this will be false even if no files for this cluster
7780
7951
  // were seen and file_extensions_seen is empty.
7781
7952
  bool no_files_exist = 9;
7782
7953
  }
@@ -8124,8 +8295,8 @@ message DeleteConnectionRequest {
8124
8295
  ];
8125
8296
  }
8126
8297
 
8127
- // A data connection to allow DLP to profile data in locations that require
8128
- // additional configuration.
8298
+ // A data connection to allow the DLP API to profile data in locations that
8299
+ // require additional configuration.
8129
8300
  message Connection {
8130
8301
  option (google.api.resource) = {
8131
8302
  type: "dlp.googleapis.com/Connection"
@@ -8157,8 +8328,8 @@ enum ConnectionState {
8157
8328
  // Unused
8158
8329
  CONNECTION_STATE_UNSPECIFIED = 0;
8159
8330
 
8160
- // DLP automatically created this connection during an initial scan, and it is
8161
- // awaiting full configuration by a user.
8331
+ // The DLP API automatically created this connection during an initial scan,
8332
+ // and it is awaiting full configuration by a user.
8162
8333
  MISSING_CREDENTIALS = 1;
8163
8334
 
8164
8335
  // A configured connection that has not encountered any errors.
@@ -8233,7 +8404,7 @@ message CloudSqlProperties {
8233
8404
  CloudSqlIamCredential cloud_sql_iam = 3;
8234
8405
  }
8235
8406
 
8236
- // Required. DLP will limit its connections to max_connections.
8407
+ // Required. The DLP API will limit its connections to max_connections.
8237
8408
  // Must be 2 or greater.
8238
8409
  int32 max_connections = 4 [(google.api.field_behavior) = REQUIRED];
8239
8410
 
@@ -8299,6 +8470,9 @@ message FileClusterType {
8299
8470
 
8300
8471
  // Executable files like .exe, .class, .apk etc.
8301
8472
  CLUSTER_EXECUTABLE = 9;
8473
+
8474
+ // AI models like .tflite etc.
8475
+ CLUSTER_AI_MODEL = 10;
8302
8476
  }
8303
8477
 
8304
8478
  // File cluster type.
@@ -8307,3 +8481,29 @@ message FileClusterType {
8307
8481
  Cluster cluster = 1;
8308
8482
  }
8309
8483
  }
8484
+
8485
+ // Configure processing location for discovery and inspection. For example,
8486
+ // image OCR is only provided in limited regions but configuring
8487
+ // ProcessingLocation will redirect OCR to a location where OCR is provided.
8488
+ message ProcessingLocation {
8489
+ // Processing will happen in a multi-region that contains the current region
8490
+ // if available.
8491
+ message MultiRegionProcessing {}
8492
+
8493
+ // Processing will happen in the global region.
8494
+ message GlobalProcessing {}
8495
+
8496
+ // Configure image processing to fall back to the configured processing option
8497
+ // below if unavailable in the request location.
8498
+ message ImageFallbackLocation {
8499
+ // Processing will happen in a multi-region that contains the current region
8500
+ // if available.
8501
+ MultiRegionProcessing multi_region_processing = 100;
8502
+
8503
+ // Processing will happen in the global region.
8504
+ GlobalProcessing global_processing = 200;
8505
+ }
8506
+
8507
+ // Image processing will fall back using this configuration.
8508
+ ImageFallbackLocation image_fallback_location = 1;
8509
+ }
@@ -850,7 +850,7 @@ message RecordKey {
850
850
  // `<project_id>:<dataset_id>.<table_id>` or
851
851
  // `<project_id>.<dataset_id>.<table_id>`.
852
852
  message BigQueryTable {
853
- // The Google Cloud Platform project ID of the project containing the table.
853
+ // The Google Cloud project ID of the project containing the table.
854
854
  // If omitted, project ID is inferred from the API call.
855
855
  string project_id = 1;
856
856