@google-cloud/discoveryengine 1.5.0 → 1.6.0
- package/CHANGELOG.md +7 -0
- package/build/protos/google/cloud/discoveryengine/v1alpha/chunk.proto +1 -35
- package/build/protos/google/cloud/discoveryengine/v1alpha/common.proto +0 -20
- package/build/protos/google/cloud/discoveryengine/v1alpha/document.proto +0 -19
- package/build/protos/google/cloud/discoveryengine/v1alpha/document_service.proto +0 -64
- package/build/protos/google/cloud/discoveryengine/v1alpha/import_config.proto +21 -312
- package/build/protos/google/cloud/discoveryengine/v1alpha/search_service.proto +23 -153
- package/build/protos/google/cloud/discoveryengine/v1alpha/search_tuning_service.proto +2 -5
- package/build/protos/google/cloud/discoveryengine/v1alpha/serving_config_service.proto +1 -2
- package/build/protos/protos.d.ts +4806 -6944
- package/build/protos/protos.js +30876 -36494
- package/build/protos/protos.json +103 -604
- package/build/src/v1alpha/completion_service_client.js +0 -3
- package/build/src/v1alpha/data_store_service_client.js +0 -3
- package/build/src/v1alpha/document_service_client.d.ts +21 -80
- package/build/src/v1alpha/document_service_client.js +0 -25
- package/build/src/v1alpha/document_service_client_config.json +0 -5
- package/build/src/v1alpha/engine_service_client.js +0 -3
- package/build/src/v1alpha/estimate_billing_service_client.js +0 -3
- package/build/src/v1alpha/index.d.ts +1 -1
- package/build/src/v1alpha/index.js +3 -3
- package/build/src/v1alpha/schema_service_client.js +0 -3
- package/build/src/v1alpha/search_service_client.d.ts +15 -30
- package/build/src/v1alpha/search_service_client.js +10 -20
- package/build/src/v1alpha/search_tuning_service_client.js +0 -3
- package/build/src/v1alpha/serving_config_service_client.d.ts +1 -2
- package/build/src/v1alpha/serving_config_service_client.js +1 -2
- package/build/src/v1alpha/site_search_engine_service_client.js +0 -3
- package/build/src/v1alpha/user_event_service_client.js +0 -3
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,12 @@
 # Changelog
 
+## [1.6.0](https://github.com/googleapis/google-cloud-node/compare/discoveryengine-v1.5.0...discoveryengine-v1.6.0) (2024-04-16)
+
+
+### Features
+
+* [Many APIs] add several fields to manage state of database encryption update ([#5188](https://github.com/googleapis/google-cloud-node/issues/5188)) ([c5ccdf9](https://github.com/googleapis/google-cloud-node/commit/c5ccdf93641e7bb6d0e5c636168fad0feafab6e3))
+
 ## [1.5.0](https://github.com/googleapis/google-cloud-node/compare/discoveryengine-v1.4.1...discoveryengine-v1.5.0) (2024-04-05)
 
 
package/build/protos/google/cloud/discoveryengine/v1alpha/chunk.proto
CHANGED
@@ -48,34 +48,6 @@ message Chunk {
     string title = 2;
   }
 
-  // Page span of the chunk.
-  message PageSpan {
-    // The start page of the chunk.
-    int32 page_start = 1;
-
-    // The end page of the chunk.
-    int32 page_end = 2;
-  }
-
-  // Metadata of the current chunk. This field is only populated on
-  // [SearchService.Search][google.cloud.discoveryengine.v1alpha.SearchService.Search]
-  // API.
-  message ChunkMetadata {
-    // The previous chunks of the current chunk. The number is controlled by
-    // [SearchRequest.ContentSearchSpec.ChunkSpec.num_previous_chunks][google.cloud.discoveryengine.v1alpha.SearchRequest.ContentSearchSpec.ChunkSpec.num_previous_chunks].
-    // This field is only populated on
-    // [SearchService.Search][google.cloud.discoveryengine.v1alpha.SearchService.Search]
-    // API.
-    repeated Chunk previous_chunks = 1;
-
-    // The next chunks of the current chunk. The number is controlled by
-    // [SearchRequest.ContentSearchSpec.ChunkSpec.num_next_chunks][google.cloud.discoveryengine.v1alpha.SearchRequest.ContentSearchSpec.ChunkSpec.num_next_chunks].
-    // This field is only populated on
-    // [SearchService.Search][google.cloud.discoveryengine.v1alpha.SearchService.Search]
-    // API.
-    repeated Chunk next_chunks = 2;
-  }
-
   // The full resource name of the chunk.
   // Format:
   // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document_id}/chunks/{chunk_id}`.
@@ -84,7 +56,7 @@ message Chunk {
   // characters.
   string name = 1;
 
-  // Unique chunk
+  // Unique chunk id of the current chunk.
   string id = 2;
 
   // Content is a string from a document (parsed content).
@@ -97,10 +69,4 @@ message Chunk {
   // It contains derived data that are not in the original input document.
   google.protobuf.Struct derived_struct_data = 4
       [(google.api.field_behavior) = OUTPUT_ONLY];
-
-  // Page span of the chunk.
-  PageSpan page_span = 6;
-
-  // Output only. Metadata of the current chunk.
-  ChunkMetadata chunk_metadata = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
 }
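
The three chunk.proto hunks above remove the nested `PageSpan` and `ChunkMetadata` messages along with the `page_span` and `chunk_metadata` fields, so a `Chunk` in 1.6.0 carries only `name`, `id`, `content`, and `derived_struct_data`. A minimal consumer-side sketch of the surviving surface, assuming the generated v1alpha types this package exports under `protos`:

```ts
import {protos} from '@google-cloud/discoveryengine';

type Chunk = protos.google.cloud.discoveryengine.v1alpha.IChunk;

// Fields kept in 1.6.0: name, id, content, derivedStructData. Reads of
// chunk.pageSpan or chunk.chunkMetadata no longer compile against 1.6.0,
// so callers must drop them (or stay pinned to 1.5.0).
function summarizeChunk(chunk: Chunk): string {
  const id = chunk.id ?? '(no id)';
  const preview = (chunk.content ?? '').slice(0, 80);
  return `${chunk.name ?? '?'} [${id}]: ${preview}`;
}
```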
package/build/protos/google/cloud/discoveryengine/v1alpha/common.proto
CHANGED
@@ -39,18 +39,6 @@ option (google.api.resource_definition) = {
   type: "discoveryengine.googleapis.com/Location"
   pattern: "projects/{project}/locations/{location}"
 };
-option (google.api.resource_definition) = {
-  type: "discoveryengine.googleapis.com/GroundingConfig"
-  pattern: "projects/{project}/locations/{location}/groundingConfigs/{grounding_config}"
-};
-option (google.api.resource_definition) = {
-  type: "discoveryengine.googleapis.com/RankingConfig"
-  pattern: "projects/{project}/locations/{location}/rankingConfigs/{ranking_config}"
-};
-option (google.api.resource_definition) = {
-  type: "healthcare.googleapis.com/FhirStore"
-  pattern: "projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}"
-};
 
 // The industry vertical associated with the
 // [DataStore][google.cloud.discoveryengine.v1alpha.DataStore].
@@ -64,9 +52,6 @@ enum IndustryVertical {
 
   // The media industry vertical.
   MEDIA = 2;
-
-  // The healthcare FHIR vertical.
-  HEALTHCARE_FHIR = 7;
 }
 
 // The type of solution.
@@ -82,11 +67,6 @@ enum SolutionType {
 
   // Used for use cases related to the Generative AI agent.
   SOLUTION_TYPE_CHAT = 3;
-
-  // Used for use cases related to the Generative Chat agent.
-  // It's used for Generative chat engine only, the associated data stores
-  // must enrolled with `SOLUTION_TYPE_CHAT` solution.
-  SOLUTION_TYPE_GENERATIVE_CHAT = 4;
 }
 
 // Tiers of search features. Different tiers might have different
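
Both enum hunks are breaking for callers that referenced the removed values, since `HEALTHCARE_FHIR` and `SOLUTION_TYPE_GENERATIVE_CHAT` disappear from the generated TypeScript enums. A hedged upgrade-check sketch, assuming the enums remain reachable through the exported `protos` namespace:

```ts
import {protos} from '@google-cloud/discoveryengine';

const {IndustryVertical, SolutionType} =
  protos.google.cloud.discoveryengine.v1alpha;

// Compiled against 1.5.0 these evaluated to 7 and 4; against 1.6.0 both
// member accesses are compile errors, which is the intended upgrade signal:
// IndustryVertical.HEALTHCARE_FHIR;
// SolutionType.SOLUTION_TYPE_GENERATIVE_CHAT;

// The surviving members are unchanged:
const vertical = IndustryVertical.MEDIA;          // 2
const solution = SolutionType.SOLUTION_TYPE_CHAT; // 3
console.log({vertical, solution});
```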
package/build/protos/google/cloud/discoveryengine/v1alpha/document.proto
CHANGED
@@ -202,22 +202,3 @@ message Document {
   google.protobuf.Timestamp index_time = 13
       [(google.api.field_behavior) = OUTPUT_ONLY];
 }
-
-// Document captures all raw metadata information of items to be recommended or
-// searched.
-message ProcessedDocument {
-  // Output format of the processed document.
-  oneof processed_data_format {
-    // The JSON string representation of the processed document.
-    string json_data = 2;
-  }
-
-  // Required. Full resource name of the referenced document, in the format
-  // `projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*`.
-  string document = 1 [
-    (google.api.field_behavior) = REQUIRED,
-    (google.api.resource_reference) = {
-      type: "discoveryengine.googleapis.com/Document"
-    }
-  ];
-}
package/build/protos/google/cloud/discoveryengine/v1alpha/document_service.proto
CHANGED
@@ -158,19 +158,6 @@ service DocumentService {
       metadata_type: "google.cloud.discoveryengine.v1alpha.PurgeDocumentsMetadata"
     };
   }
-
-  // Gets the parsed layout information for a
-  // [Document][google.cloud.discoveryengine.v1alpha.Document].
-  rpc GetProcessedDocument(GetProcessedDocumentRequest)
-      returns (ProcessedDocument) {
-    option (google.api.http) = {
-      get: "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument"
-      additional_bindings {
-        get: "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}:getProcessedDocument"
-      }
-    };
-    option (google.api.method_signature) = "name";
-  }
 }
 
 // Request message for
@@ -335,54 +322,3 @@ message DeleteDocumentRequest {
     }
   ];
 }
-
-// Request message for
-// [DocumentService.GetDocument][google.cloud.discoveryengine.v1alpha.DocumentService.GetDocument]
-// method.
-message GetProcessedDocumentRequest {
-  // The type of processing to return in the response.
-  enum ProcessedDocumentType {
-    // Default value.
-    PROCESSED_DOCUMENT_TYPE_UNSPECIFIED = 0;
-
-    // Available for all data store parsing configs.
-    PARSED_DOCUMENT = 1;
-
-    // Only available if ChunkingConfig is enabeld on the data store.
-    CHUNKED_DOCUMENT = 2;
-  }
-
-  // The format of the returned processed document. If unspecified, defaults to
-  // JSON.
-  enum ProcessedDocumentFormat {
-    // Default value.
-    PROCESSED_DOCUMENT_FORMAT_UNSPECIFIED = 0;
-
-    // output format will be a JSON string representation of processed document.
-    JSON = 1;
-  }
-
-  // Required. Full resource name of
-  // [Document][google.cloud.discoveryengine.v1alpha.Document], such as
-  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`.
-  //
-  // If the caller does not have permission to access the
-  // [Document][google.cloud.discoveryengine.v1alpha.Document], regardless of
-  // whether or not it exists, a `PERMISSION_DENIED` error is returned.
-  //
-  // If the requested [Document][google.cloud.discoveryengine.v1alpha.Document]
-  // does not exist, a `NOT_FOUND` error is returned.
-  string name = 1 [
-    (google.api.field_behavior) = REQUIRED,
-    (google.api.resource_reference) = {
-      type: "discoveryengine.googleapis.com/Document"
-    }
-  ];
-
-  // Required. What type of processing to return.
-  ProcessedDocumentType processed_document_type = 2
-      [(google.api.field_behavior) = REQUIRED];
-
-  // What format output should be. If unspecified, defaults to JSON.
-  ProcessedDocumentFormat processed_document_format = 3;
-}
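
Together with the `ProcessedDocument` removal in document.proto above, these two hunks delete the whole `GetProcessedDocument` surface, which is why `document_service_client.d.ts` and `document_service_client.js` shrink in the file list. A sketch of the 1.5.0-era call that has to be removed before upgrading, assuming the v1alpha `DocumentServiceClient`:

```ts
import {v1alpha} from '@google-cloud/discoveryengine';

const client = new v1alpha.DocumentServiceClient();

// Compiles against 1.5.0 only: the RPC, its request message, and the
// ProcessedDocument response type are all gone in 1.6.0.
async function fetchParsedLayout(name: string): Promise<string | undefined> {
  const [processed] = await client.getProcessedDocument({
    name, // projects/.../dataStores/.../branches/.../documents/...
    processedDocumentType: 'PARSED_DOCUMENT',
  });
  return processed.jsonData ?? undefined;
}
```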
package/build/protos/google/cloud/discoveryengine/v1alpha/import_config.proto
CHANGED
@@ -21,7 +21,6 @@ import "google/api/resource.proto";
 import "google/cloud/discoveryengine/v1alpha/completion.proto";
 import "google/cloud/discoveryengine/v1alpha/document.proto";
 import "google/cloud/discoveryengine/v1alpha/user_event.proto";
-import "google/protobuf/field_mask.proto";
 import "google/protobuf/timestamp.proto";
 import "google/rpc/status.proto";
 import "google/type/date.proto";
@@ -124,233 +123,6 @@ message BigQuerySource {
   string data_schema = 6;
 }
 
-// The Spanner source for importing data
-message SpannerSource {
-  // The project ID that the Spanner source is in with a length limit of 128
-  // characters. If not specified, inherits the project ID from the parent
-  // request.
-  string project_id = 1;
-
-  // Required. The instance ID of the source Spanner table.
-  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The database ID of the source Spanner table.
-  string database_id = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The table name of the Spanner database that needs to be imported.
-  string table_id = 4 [(google.api.field_behavior) = REQUIRED];
-
-  // Whether to apply data boost on Spanner export. Enabling this option will
-  // incur additional cost. More info can be found
-  // [here](https://cloud.google.com/spanner/docs/databoost/databoost-overview#billing_and_quotas).
-  bool enable_data_boost = 5;
-}
-
-// The Bigtable Options object that contains information to support
-// the import.
-message BigtableOptions {
-  // The column family of the Bigtable.
-  message BigtableColumnFamily {
-    // The field name to use for this column family in the document. The
-    // name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`. If not set,
-    // it is parsed from the family name with best effort. However, due to
-    // different naming patterns, field name collisions could happen, where
-    // parsing behavior is undefined.
-    string field_name = 1;
-
-    // The encoding mode of the values when the type is not STRING.
-    // Acceptable encoding values are:
-    //
-    // * `TEXT`: indicates values are alphanumeric text strings.
-    // * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
-    // family of functions. This can be overridden for a specific column
-    // by listing that column in `columns` and specifying an encoding for it.
-    Encoding encoding = 2;
-
-    // The type of values in this column family.
-    // The values are expected to be encoded using `HBase Bytes.toBytes`
-    // function when the encoding value is set to `BINARY`.
-    Type type = 3;
-
-    // The list of objects that contains column level information for each
-    // column. If a column is not present in this list it will be ignored.
-    repeated BigtableColumn columns = 4;
-  }
-
-  // The column of the Bigtable.
-  message BigtableColumn {
-    // Required. Qualifier of the column. If it cannot be decoded with utf-8,
-    // use a base-64 encoded string instead.
-    bytes qualifier = 1 [(google.api.field_behavior) = REQUIRED];
-
-    // The field name to use for this column in the document. The name has to
-    // match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
-    // If not set, it is parsed from the qualifier bytes with best effort.
-    // However, due to different naming patterns, field name collisions could
-    // happen, where parsing behavior is undefined.
-    string field_name = 2;
-
-    // The encoding mode of the values when the type is not `STRING`.
-    // Acceptable encoding values are:
-    //
-    // * `TEXT`: indicates values are alphanumeric text strings.
-    // * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
-    // family of functions. This can be overridden for a specific column
-    // by listing that column in `columns` and specifying an encoding for it.
-    Encoding encoding = 3;
-
-    // The type of values in this column family.
-    // The values are expected to be encoded using `HBase Bytes.toBytes`
-    // function when the encoding value is set to `BINARY`.
-    Type type = 4;
-  }
-
-  // The type of values in a Bigtable column or column family.
-  // The values are expected to be encoded using
-  // [HBase
-  // Bytes.toBytes](https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/util/Bytes.html)
-  // function when the encoding value is set to `BINARY`.
-  enum Type {
-    // The type is unspecified.
-    TYPE_UNSPECIFIED = 0;
-
-    // String type.
-    STRING = 1;
-
-    // Numerical type.
-    NUMBER = 2;
-
-    // Integer type.
-    INTEGER = 3;
-
-    // Variable length integer type.
-    VAR_INTEGER = 4;
-
-    // BigDecimal type.
-    BIG_NUMERIC = 5;
-
-    // Boolean type.
-    BOOLEAN = 6;
-
-    // JSON type.
-    JSON = 7;
-  }
-
-  // The encoding mode of a Bigtable column or column family.
-  enum Encoding {
-    // The encoding is unspecified.
-    ENCODING_UNSPECIFIED = 0;
-
-    // Text encoding.
-    TEXT = 1;
-
-    // Binary encoding.
-    BINARY = 2;
-  }
-
-  // The field name used for saving row key value in the document. The name has
-  // to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
-  string key_field_name = 1;
-
-  // The mapping from family names to an object that contains column families
-  // level information for the given column family. If a family is not present
-  // in this map it will be ignored.
-  map<string, BigtableColumnFamily> families = 2;
-}
-
-// The Cloud Bigtable source for importing data.
-message BigtableSource {
-  // The project ID that the Bigtable source is in with a length limit of 128
-  // characters. If not specified, inherits the project ID from the parent
-  // request.
-  string project_id = 1;
-
-  // Required. The instance ID of the Cloud Bigtable that needs to be imported.
-  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The table ID of the Cloud Bigtable that needs to be imported.
-  string table_id = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. Bigtable options that contains information needed when parsing
-  // data into typed structures. For example, column type annotations.
-  BigtableOptions bigtable_options = 4 [(google.api.field_behavior) = REQUIRED];
-}
-
-// Cloud FhirStore source import data from.
-message FhirStoreSource {
-  // Required. The full resource name of the FHIR store to import data from, in
-  // the format of
-  // `projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}`.
-  string fhir_store = 1 [
-    (google.api.field_behavior) = REQUIRED,
-    (google.api.resource_reference) = {
-      type: "healthcare.googleapis.com/FhirStore"
-    }
-  ];
-
-  // Intermediate Cloud Storage directory used for the import with a length
-  // limit of 2,000 characters. Can be specified if one wants to have the
-  // FhirStore export to a specific Cloud Storage directory.
-  string gcs_staging_dir = 2;
-}
-
-// Cloud SQL source import data from.
-message CloudSqlSource {
-  // The project ID that the Cloud SQL source is in with a length limit of 128
-  // characters. If not specified, inherits the project ID from the parent
-  // request.
-  string project_id = 1;
-
-  // Required. The Cloud SQL instance to copy the data from with a length limit
-  // of 256 characters.
-  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud SQL database to copy the data from with a length limit
-  // of 256 characters.
-  string database_id = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Cloud SQL table to copy the data from with a length limit of
-  // 256 characters.
-  string table_id = 4 [(google.api.field_behavior) = REQUIRED];
-
-  // Intermediate Cloud Storage directory used for the import with a length
-  // limit of 2,000 characters. Can be specified if one wants to have the
-  // Cloud SQL export to a specific Cloud Storage directory.
-  //
-  // Please ensure that the Cloud SQL service account has the necessary Cloud
-  // Storage Admin permissions to access the specified Cloud Storage directory.
-  string gcs_staging_dir = 5;
-
-  // Option for serverless export. Enabling this option will incur
-  // additional cost. More info can be found
-  // [here](https://cloud.google.com/sql/pricing#serverless).
-  bool offload = 6;
-}
-
-// Firestore source import data from.
-message FirestoreSource {
-  // The project ID that the Cloud SQL source is in with a length limit of 128
-  // characters. If not specified, inherits the project ID from the parent
-  // request.
-  string project_id = 1;
-
-  // Required. The Firestore database to copy the data from with a length limit
-  // of 256 characters.
-  string database_id = 2 [(google.api.field_behavior) = REQUIRED];
-
-  // Required. The Firestore collection to copy the data from with a length
-  // limit of 1,500 characters.
-  string collection_id = 3 [(google.api.field_behavior) = REQUIRED];
-
-  // Intermediate Cloud Storage directory used for the import with a length
-  // limit of 2,000 characters. Can be specified if one wants to have the
-  // Firestore export to a specific Cloud Storage directory.
-  //
-  // Please ensure that the Firestore service account has the necessary Cloud
-  // Storage Admin permissions to access the specified Cloud Storage directory.
-  string gcs_staging_dir = 4;
-}
-
 // Configuration of destination for Import related errors.
 message ImportErrorConfig {
   // Required. Errors destination.
@@ -448,9 +220,6 @@ message ImportDocumentsMetadata {
 
   // Count of entries that encountered errors while processing.
   int64 failure_count = 4;
-
-  // Total count of entries that were processed.
-  int64 total_count = 5;
 }
 
 // Request message for Import methods.
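
With `total_count` gone from `ImportDocumentsMetadata`, progress totals have to be derived from the two remaining counters. A sketch of waiting on an import operation under that assumption, using the long-running-operation handle the client returns:

```ts
import {v1alpha} from '@google-cloud/discoveryengine';

const client = new v1alpha.DocumentServiceClient();

async function runImport(parent: string, inputUris: string[]) {
  const [operation] = await client.importDocuments({
    parent,
    gcsSource: {inputUris, dataSchema: 'document'},
  });
  const [response, metadata] = await operation.promise();
  // 1.6.0 metadata exposes successCount and failureCount only; the
  // processed total is now their sum, not a dedicated totalCount field.
  const total =
    Number(metadata.successCount ?? 0) + Number(metadata.failureCount ?? 0);
  console.log(`import finished: ${total} entries processed`);
  return response;
}
```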
@@ -487,21 +256,6 @@ message ImportDocumentsRequest {
 
     // BigQuery input source.
     BigQuerySource bigquery_source = 4;
-
-    // FhirStore input source.
-    FhirStoreSource fhir_store_source = 10;
-
-    // Spanner input source.
-    SpannerSource spanner_source = 11;
-
-    // Cloud SQL input source.
-    CloudSqlSource cloud_sql_source = 12;
-
-    // Firestore input source.
-    FirestoreSource firestore_source = 13;
-
-    // Cloud Bigtable input source.
-    BigtableSource bigtable_source = 15;
   }
 
   // Required. The parent branch resource name, such as
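
After this hunk the `source` oneof accepts only the inline, Cloud Storage, and BigQuery inputs; requests setting `fhirStoreSource`, `spannerSource`, `cloudSqlSource`, `firestoreSource`, or `bigtableSource` no longer type-check and have no 1.6.0 replacement. A sketch of the two external source shapes that remain (resource names are hypothetical):

```ts
const parent =
  'projects/my-project/locations/global/collections/default_collection/' +
  'dataStores/my-data-store/branches/default_branch';

// Cloud Storage stays supported...
const gcsRequest = {
  parent,
  gcsSource: {
    inputUris: ['gs://my-bucket/docs/*.json'],
    dataSchema: 'document',
  },
};

// ...and so does BigQuery; the five removed sources require staying on 1.5.0.
const bigqueryRequest = {
  parent,
  bigquerySource: {
    projectId: 'my-project',
    datasetId: 'my_dataset',
    tableId: 'my_table',
    dataSchema: 'document',
  },
};
```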
@@ -522,10 +276,6 @@ message ImportDocumentsRequest {
   // [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL].
   ReconciliationMode reconciliation_mode = 6;
 
-  // Indicates which fields in the provided imported documents to update. If
-  // not set, the default is to update all fields.
-  google.protobuf.FieldMask update_mask = 7;
-
   // Whether to automatically generate IDs for the documents if absent.
   //
   // If set to `true`,
@@ -539,54 +289,45 @@ message ImportDocumentsRequest {
   // [id_field][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.id_field],
   // otherwise, documents without IDs fail to be imported.
   //
-  //
-  //
-  //
+  // Only set this field when using
+  // [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource] or
+  // [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource], and
+  // when
   // [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
-  //
-  // * [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource].
+  // or
   // [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
-  //
-  // * [SpannerSource][google.cloud.discoveryengine.v1alpha.SpannerSource].
-  // * [CloudSqlSource][google.cloud.discoveryengine.v1alpha.CloudSqlSource].
-  // * [FirestoreSource][google.cloud.discoveryengine.v1alpha.FirestoreSource].
-  // * [BigtableSource][google.cloud.discoveryengine.v1alpha.BigtableSource].
+  // is `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
   bool auto_generate_ids = 8;
 
-  // The field
-  // the documents.
+  // The field in the Cloud Storage and BigQuery sources that indicates the
+  // unique IDs of the documents.
   //
   // For [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource] it is the
   // key of the JSON field. For instance, `my_id` for JSON `{"my_id":
-  // "some_uuid"}`. For
-  //
+  // "some_uuid"}`. For
+  // [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource] it is
+  // the column name of the BigQuery table where the unique ids are stored.
   //
-  // The values of the JSON field or the
+  // The values of the JSON field or the BigQuery column are used as the
   // [Document.id][google.cloud.discoveryengine.v1alpha.Document.id]s. The JSON
-  // field or the
+  // field or the BigQuery column must be of string type, and the values must be
   // set as valid strings conform to
   // [RFC-1034](https://tools.ietf.org/html/rfc1034) with 1-63 characters.
   // Otherwise, documents without valid IDs fail to be imported.
   //
-  // Only set this field when
+  // Only set this field when using
+  // [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource] or
+  // [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource], and
+  // when
+  // [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
+  // or
+  // [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
+  // is `custom`. And only set this field when
   // [auto_generate_ids][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.auto_generate_ids]
   // is unset or set as `false`. Otherwise, an INVALID_ARGUMENT error is thrown.
   //
   // If it is unset, a default value `_id` is used when importing from the
   // allowed data sources.
-  //
-  // Supported data sources:
-  //
-  // * [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource].
-  // [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
-  // must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
-  // * [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource].
-  // [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
-  // must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
-  // * [SpannerSource][google.cloud.discoveryengine.v1alpha.SpannerSource].
-  // * [CloudSqlSource][google.cloud.discoveryengine.v1alpha.CloudSqlSource].
-  // * [FirestoreSource][google.cloud.discoveryengine.v1alpha.FirestoreSource].
-  // * [BigtableSource][google.cloud.discoveryengine.v1alpha.BigtableSource].
   string id_field = 9;
 }
 
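
The rewritten comments narrow `auto_generate_ids` and `id_field` to Cloud Storage and BigQuery imports with `custom` (or, for `auto_generate_ids`, `csv`) schemas. A sketch of a custom-schema import that follows the 1.6.0 rules, with hypothetical bucket and field names; exactly one ID strategy is set, since combining them is documented to raise INVALID_ARGUMENT:

```ts
import {v1alpha} from '@google-cloud/discoveryengine';

const client = new v1alpha.DocumentServiceClient();

async function importCustomRecords() {
  // Records look like {"my_id": "doc-1", ...}; idField points at the key
  // whose string values become Document.id (RFC-1034, 1-63 characters).
  const [operation] = await client.importDocuments({
    parent:
      'projects/my-project/locations/global/collections/default_collection/' +
      'dataStores/my-data-store/branches/default_branch',
    gcsSource: {
      inputUris: ['gs://my-bucket/records/*.json'],
      dataSchema: 'custom',
    },
    idField: 'my_id',
    autoGenerateIds: false, // must stay unset/false when idField is set
  });
  await operation.promise();
}
```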
@@ -664,35 +405,3 @@ message ImportSuggestionDenyListEntriesMetadata {
   // finish time.
   google.protobuf.Timestamp update_time = 2;
 }
-
-// Response of the
-// [CompletionService.ImportCompletionSuggestions][google.cloud.discoveryengine.v1alpha.CompletionService.ImportCompletionSuggestions]
-// method. If the long running operation is done, this message is returned by
-// the google.longrunning.Operations.response field if the operation is
-// successful.
-message ImportCompletionSuggestionsResponse {
-  // A sample of errors encountered while processing the request.
-  repeated google.rpc.Status error_samples = 1;
-
-  // Count of
-  // [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s
-  // successfully imported.
-  int64 success_count = 2;
-
-  // Count of
-  // [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s
-  // that failed to be imported.
-  int64 failure_count = 3;
-}
-
-// Metadata related to the progress of the ImportCompletionSuggestions
-// operation. This will be returned by the google.longrunning.Operation.metadata
-// field.
-message ImportCompletionSuggestionsMetadata {
-  // Operation create time.
-  google.protobuf.Timestamp create_time = 1;
-
-  // Operation last update time. If the operation is done, this is also the
-  // finish time.
-  google.protobuf.Timestamp update_time = 2;
-}