@google-cloud/discoveryengine 1.4.1 → 1.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/README.md +2 -0
- package/build/protos/google/cloud/discoveryengine/v1alpha/chunk.proto +35 -1
- package/build/protos/google/cloud/discoveryengine/v1alpha/common.proto +20 -0
- package/build/protos/google/cloud/discoveryengine/v1alpha/document.proto +19 -0
- package/build/protos/google/cloud/discoveryengine/v1alpha/document_service.proto +64 -0
- package/build/protos/google/cloud/discoveryengine/v1alpha/import_config.proto +312 -21
- package/build/protos/google/cloud/discoveryengine/v1alpha/rank_service.proto +115 -0
- package/build/protos/google/cloud/discoveryengine/v1alpha/search_service.proto +153 -23
- package/build/protos/google/cloud/discoveryengine/v1alpha/search_tuning_service.proto +5 -2
- package/build/protos/google/cloud/discoveryengine/v1alpha/serving_config_service.proto +2 -1
- package/build/protos/protos.d.ts +2743 -221
- package/build/protos/protos.js +10286 -3762
- package/build/protos/protos.json +595 -5
- package/build/src/v1/completion_service_client.js +10 -7
- package/build/src/v1/conversational_search_service_client.js +10 -7
- package/build/src/v1/data_store_service_client.js +10 -7
- package/build/src/v1/document_service_client.js +10 -7
- package/build/src/v1/engine_service_client.js +10 -7
- package/build/src/v1/schema_service_client.js +10 -7
- package/build/src/v1/search_service_client.js +10 -7
- package/build/src/v1/site_search_engine_service_client.js +10 -7
- package/build/src/v1/user_event_service_client.js +10 -7
- package/build/src/v1alpha/acl_config_service_client.js +10 -7
- package/build/src/v1alpha/chunk_service_client.js +10 -7
- package/build/src/v1alpha/completion_service_client.js +13 -7
- package/build/src/v1alpha/conversational_search_service_client.js +10 -7
- package/build/src/v1alpha/data_store_service_client.js +13 -7
- package/build/src/v1alpha/document_service_client.d.ts +80 -21
- package/build/src/v1alpha/document_service_client.js +35 -7
- package/build/src/v1alpha/document_service_client_config.json +5 -0
- package/build/src/v1alpha/engine_service_client.js +13 -7
- package/build/src/v1alpha/estimate_billing_service_client.js +13 -7
- package/build/src/v1alpha/index.d.ts +1 -0
- package/build/src/v1alpha/index.js +3 -1
- package/build/src/v1alpha/rank_service_client.d.ts +1254 -0
- package/build/src/v1alpha/rank_service_client.js +1796 -0
- package/build/src/v1alpha/rank_service_client_config.json +30 -0
- package/build/src/v1alpha/recommendation_service_client.js +10 -7
- package/build/src/v1alpha/schema_service_client.js +13 -7
- package/build/src/v1alpha/search_service_client.d.ts +30 -15
- package/build/src/v1alpha/search_service_client.js +30 -17
- package/build/src/v1alpha/search_tuning_service_client.js +13 -7
- package/build/src/v1alpha/serving_config_service_client.d.ts +2 -1
- package/build/src/v1alpha/serving_config_service_client.js +12 -8
- package/build/src/v1alpha/site_search_engine_service_client.js +13 -7
- package/build/src/v1alpha/user_event_service_client.js +13 -7
- package/build/src/v1beta/completion_service_client.js +10 -7
- package/build/src/v1beta/conversational_search_service_client.js +10 -7
- package/build/src/v1beta/data_store_service_client.js +10 -7
- package/build/src/v1beta/document_service_client.js +10 -7
- package/build/src/v1beta/engine_service_client.js +10 -7
- package/build/src/v1beta/recommendation_service_client.js +10 -7
- package/build/src/v1beta/schema_service_client.js +10 -7
- package/build/src/v1beta/search_service_client.js +10 -7
- package/build/src/v1beta/search_tuning_service_client.js +10 -7
- package/build/src/v1beta/serving_config_service_client.js +10 -7
- package/build/src/v1beta/site_search_engine_service_client.js +10 -7
- package/build/src/v1beta/user_event_service_client.js +10 -7
- package/package.json +1 -1
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,12 @@
 # Changelog

+## [1.5.0](https://github.com/googleapis/google-cloud-node/compare/discoveryengine-v1.4.1...discoveryengine-v1.5.0) (2024-04-05)
+
+
+### Features
+
+* [discoveryengine] support import data from Cloud Spanner, BigTable, SQL and Firestore ([#5218](https://github.com/googleapis/google-cloud-node/issues/5218)) ([cc25e93](https://github.com/googleapis/google-cloud-node/commit/cc25e935f087526738303cd94c406bd352a7b595))
+
 ## [1.4.1](https://github.com/googleapis/google-cloud-node/compare/discoveryengine-v1.4.0...discoveryengine-v1.4.1) (2024-03-12)

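The feature above surfaces the new v1alpha import sources through the generated Node.js client. Below is a minimal sketch of an import from Cloud Spanner, assuming the v1alpha `DocumentServiceClient.importDocuments` long-running call and the usual camelCase mapping of the proto fields shown later in this diff; the project, instance, database, table, and branch names are placeholders.

```ts
import {v1alpha} from '@google-cloud/discoveryengine';

// Hypothetical sketch: import documents from a Cloud Spanner table into a
// Discovery Engine branch using the spannerSource added in 1.5.0.
async function importFromSpanner() {
  const client = new v1alpha.DocumentServiceClient();

  const [operation] = await client.importDocuments({
    // Placeholder branch resource name.
    parent:
      'projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store/branches/default_branch',
    spannerSource: {
      projectId: 'my-project', // optional; inherits from the parent if unset
      instanceId: 'my-instance',
      databaseId: 'my-database',
      tableId: 'my-table',
      enableDataBoost: false, // Data Boost export incurs additional cost
    },
    reconciliationMode: 'INCREMENTAL',
  });

  // importDocuments is a long-running operation; wait for it to complete.
  const [response] = await operation.promise();
  console.log(response);
}

importFromSpanner();
```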
package/README.md
CHANGED
@@ -195,6 +195,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/google-cloud-node/
 | Document_service.create_document | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.create_document.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.create_document.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Document_service.delete_document | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.delete_document.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.delete_document.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Document_service.get_document | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.get_document.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.get_document.js,packages/google-cloud-discoveryengine/samples/README.md) |
+| Document_service.get_processed_document | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.get_processed_document.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.get_processed_document.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Document_service.import_documents | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.import_documents.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.import_documents.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Document_service.list_documents | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.list_documents.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.list_documents.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Document_service.purge_documents | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.purge_documents.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/document_service.purge_documents.js,packages/google-cloud-discoveryengine/samples/README.md) |
@@ -208,6 +209,7 @@ Samples are in the [`samples/`](https://github.com/googleapis/google-cloud-node/
 | Engine_service.tune_engine | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/engine_service.tune_engine.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/engine_service.tune_engine.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Engine_service.update_engine | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/engine_service.update_engine.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/engine_service.update_engine.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Estimate_billing_service.estimate_data_size | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/estimate_billing_service.estimate_data_size.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/estimate_billing_service.estimate_data_size.js,packages/google-cloud-discoveryengine/samples/README.md) |
+| Rank_service.rank | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/rank_service.rank.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/rank_service.rank.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Recommendation_service.recommend | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/recommendation_service.recommend.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/recommendation_service.recommend.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Schema_service.create_schema | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/schema_service.create_schema.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/schema_service.create_schema.js,packages/google-cloud-discoveryengine/samples/README.md) |
 | Schema_service.delete_schema | [source code](https://github.com/googleapis/google-cloud-node/blob/main/packages/google-cloud-discoveryengine/samples/generated/v1alpha/schema_service.delete_schema.js) | [![Open in Cloud Shell][shell_img]](https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/google-cloud-node&page=editor&open_in_editor=packages/google-cloud-discoveryengine/samples/generated/v1alpha/schema_service.delete_schema.js,packages/google-cloud-discoveryengine/samples/README.md) |
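The new `Rank_service.rank` sample corresponds to the `RankServiceClient` added under `v1alpha` in this release (see `rank_service_client.*` in the file list). Below is a rough, hedged sketch of how that client might be called, assuming it exposes a `rank()` method whose request takes the `rankingConfig` resource (pattern added to `common.proto` later in this diff) and a list of records; the record fields, `topN`, and all names shown are illustrative assumptions, since `rank_service.proto` itself is not quoted in this diff.

```ts
import {v1alpha} from '@google-cloud/discoveryengine';

// Hypothetical sketch of the new v1alpha RankServiceClient.
async function rankRecords() {
  const client = new v1alpha.RankServiceClient();

  const [response] = await client.rank({
    // Resource pattern added to common.proto in this release.
    rankingConfig:
      'projects/my-project/locations/global/rankingConfigs/default_ranking_config',
    query: 'intermittent fasting benefits',
    topN: 3, // assumed field: number of top-ranked records to return
    records: [
      {id: '1', title: 'Fasting overview', content: '...'},
      {id: '2', title: 'Unrelated article', content: '...'},
      {id: '3', title: 'Health effects of fasting', content: '...'},
    ],
  });

  // The ranked records are expected to come back with relevance scores.
  console.log(response.records);
}

rankRecords();
```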
package/build/protos/google/cloud/discoveryengine/v1alpha/chunk.proto
CHANGED
@@ -48,6 +48,34 @@ message Chunk {
     string title = 2;
   }

+  // Page span of the chunk.
+  message PageSpan {
+    // The start page of the chunk.
+    int32 page_start = 1;
+
+    // The end page of the chunk.
+    int32 page_end = 2;
+  }
+
+  // Metadata of the current chunk. This field is only populated on
+  // [SearchService.Search][google.cloud.discoveryengine.v1alpha.SearchService.Search]
+  // API.
+  message ChunkMetadata {
+    // The previous chunks of the current chunk. The number is controlled by
+    // [SearchRequest.ContentSearchSpec.ChunkSpec.num_previous_chunks][google.cloud.discoveryengine.v1alpha.SearchRequest.ContentSearchSpec.ChunkSpec.num_previous_chunks].
+    // This field is only populated on
+    // [SearchService.Search][google.cloud.discoveryengine.v1alpha.SearchService.Search]
+    // API.
+    repeated Chunk previous_chunks = 1;
+
+    // The next chunks of the current chunk. The number is controlled by
+    // [SearchRequest.ContentSearchSpec.ChunkSpec.num_next_chunks][google.cloud.discoveryengine.v1alpha.SearchRequest.ContentSearchSpec.ChunkSpec.num_next_chunks].
+    // This field is only populated on
+    // [SearchService.Search][google.cloud.discoveryengine.v1alpha.SearchService.Search]
+    // API.
+    repeated Chunk next_chunks = 2;
+  }
+
   // The full resource name of the chunk.
   // Format:
   // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document_id}/chunks/{chunk_id}`.
@@ -56,7 +84,7 @@ message Chunk {
   // characters.
   string name = 1;

-  // Unique chunk
+  // Unique chunk ID of the current chunk.
   string id = 2;

   // Content is a string from a document (parsed content).
@@ -69,4 +97,10 @@ message Chunk {
   // It contains derived data that are not in the original input document.
   google.protobuf.Struct derived_struct_data = 4
       [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Page span of the chunk.
+  PageSpan page_span = 6;
+
+  // Output only. Metadata of the current chunk.
+  ChunkMetadata chunk_metadata = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
 }
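In the generated Node.js types, the new `page_span` and `chunk_metadata` fields surface as optional camelCase properties on chunks returned by search. A small illustrative sketch of reading them, assuming chunks are obtained from a v1alpha search response with chunk-based content search enabled (the helper and its use are assumptions, not shown in this diff):

```ts
import type {protos} from '@google-cloud/discoveryengine';

type IChunk = protos.google.cloud.discoveryengine.v1alpha.IChunk;

// Illustrative helper: summarize the page range and neighboring chunks that
// the new output-only fields expose. Both fields may be absent.
function describeChunk(chunk: IChunk): string {
  const start = chunk.pageSpan?.pageStart ?? '?';
  const end = chunk.pageSpan?.pageEnd ?? '?';
  const neighbors =
    (chunk.chunkMetadata?.previousChunks?.length ?? 0) +
    (chunk.chunkMetadata?.nextChunks?.length ?? 0);
  return `${chunk.id}: pages ${start}-${end}, ${neighbors} neighboring chunk(s)`;
}
```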
package/build/protos/google/cloud/discoveryengine/v1alpha/common.proto
CHANGED
@@ -39,6 +39,18 @@ option (google.api.resource_definition) = {
   type: "discoveryengine.googleapis.com/Location"
   pattern: "projects/{project}/locations/{location}"
 };
+option (google.api.resource_definition) = {
+  type: "discoveryengine.googleapis.com/GroundingConfig"
+  pattern: "projects/{project}/locations/{location}/groundingConfigs/{grounding_config}"
+};
+option (google.api.resource_definition) = {
+  type: "discoveryengine.googleapis.com/RankingConfig"
+  pattern: "projects/{project}/locations/{location}/rankingConfigs/{ranking_config}"
+};
+option (google.api.resource_definition) = {
+  type: "healthcare.googleapis.com/FhirStore"
+  pattern: "projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}"
+};

 // The industry vertical associated with the
 // [DataStore][google.cloud.discoveryengine.v1alpha.DataStore].
@@ -52,6 +64,9 @@ enum IndustryVertical {

   // The media industry vertical.
   MEDIA = 2;
+
+  // The healthcare FHIR vertical.
+  HEALTHCARE_FHIR = 7;
 }

 // The type of solution.
@@ -67,6 +82,11 @@ enum SolutionType {

   // Used for use cases related to the Generative AI agent.
   SOLUTION_TYPE_CHAT = 3;
+
+  // Used for use cases related to the Generative Chat agent.
+  // It's used for Generative chat engine only, the associated data stores
+  // must enrolled with `SOLUTION_TYPE_CHAT` solution.
+  SOLUTION_TYPE_GENERATIVE_CHAT = 4;
 }

 // Tiers of search features. Different tiers might have different
package/build/protos/google/cloud/discoveryengine/v1alpha/document.proto
CHANGED
@@ -202,3 +202,22 @@ message Document {
   google.protobuf.Timestamp index_time = 13
       [(google.api.field_behavior) = OUTPUT_ONLY];
 }
+
+// Document captures all raw metadata information of items to be recommended or
+// searched.
+message ProcessedDocument {
+  // Output format of the processed document.
+  oneof processed_data_format {
+    // The JSON string representation of the processed document.
+    string json_data = 2;
+  }
+
+  // Required. Full resource name of the referenced document, in the format
+  // `projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*`.
+  string document = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      type: "discoveryengine.googleapis.com/Document"
+    }
+  ];
+}
package/build/protos/google/cloud/discoveryengine/v1alpha/document_service.proto
CHANGED
@@ -158,6 +158,19 @@ service DocumentService {
       metadata_type: "google.cloud.discoveryengine.v1alpha.PurgeDocumentsMetadata"
     };
   }
+
+  // Gets the parsed layout information for a
+  // [Document][google.cloud.discoveryengine.v1alpha.Document].
+  rpc GetProcessedDocument(GetProcessedDocumentRequest)
+      returns (ProcessedDocument) {
+    option (google.api.http) = {
+      get: "/v1alpha/{name=projects/*/locations/*/dataStores/*/branches/*/documents/*}:getProcessedDocument"
+      additional_bindings {
+        get: "/v1alpha/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/documents/*}:getProcessedDocument"
+      }
+    };
+    option (google.api.method_signature) = "name";
+  }
 }

 // Request message for
@@ -322,3 +335,54 @@ message DeleteDocumentRequest {
     }
   ];
 }
+
+// Request message for
+// [DocumentService.GetDocument][google.cloud.discoveryengine.v1alpha.DocumentService.GetDocument]
+// method.
+message GetProcessedDocumentRequest {
+  // The type of processing to return in the response.
+  enum ProcessedDocumentType {
+    // Default value.
+    PROCESSED_DOCUMENT_TYPE_UNSPECIFIED = 0;
+
+    // Available for all data store parsing configs.
+    PARSED_DOCUMENT = 1;
+
+    // Only available if ChunkingConfig is enabeld on the data store.
+    CHUNKED_DOCUMENT = 2;
+  }
+
+  // The format of the returned processed document. If unspecified, defaults to
+  // JSON.
+  enum ProcessedDocumentFormat {
+    // Default value.
+    PROCESSED_DOCUMENT_FORMAT_UNSPECIFIED = 0;
+
+    // output format will be a JSON string representation of processed document.
+    JSON = 1;
+  }
+
+  // Required. Full resource name of
+  // [Document][google.cloud.discoveryengine.v1alpha.Document], such as
+  // `projects/{project}/locations/{location}/collections/{collection}/dataStores/{data_store}/branches/{branch}/documents/{document}`.
+  //
+  // If the caller does not have permission to access the
+  // [Document][google.cloud.discoveryengine.v1alpha.Document], regardless of
+  // whether or not it exists, a `PERMISSION_DENIED` error is returned.
+  //
+  // If the requested [Document][google.cloud.discoveryengine.v1alpha.Document]
+  // does not exist, a `NOT_FOUND` error is returned.
+  string name = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      type: "discoveryengine.googleapis.com/Document"
+    }
+  ];
+
+  // Required. What type of processing to return.
+  ProcessedDocumentType processed_document_type = 2
+      [(google.api.field_behavior) = REQUIRED];
+
+  // What format output should be. If unspecified, defaults to JSON.
+  ProcessedDocumentFormat processed_document_format = 3;
+}
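In the generated Node.js surface (see the `document_service_client` changes in the file list), this RPC should appear as a `getProcessedDocument` method. A hedged sketch of retrieving the parsed layout for a document, assuming camelCase request fields and string enum values as accepted by gax-generated clients; the resource name is a placeholder:

```ts
import {v1alpha} from '@google-cloud/discoveryengine';

// Hypothetical sketch of the GetProcessedDocument call added in this release.
// It returns the processed (parsed layout) representation of a document.
async function getParsedLayout() {
  const client = new v1alpha.DocumentServiceClient();

  const [processed] = await client.getProcessedDocument({
    // Placeholder document resource name.
    name: 'projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store/branches/default_branch/documents/my-doc',
    processedDocumentType: 'PARSED_DOCUMENT',
    processedDocumentFormat: 'JSON',
  });

  // json_data is the only output format defined in this version of the proto.
  const layout = JSON.parse(processed.jsonData ?? '{}');
  console.log(layout);
}

getParsedLayout();
```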
package/build/protos/google/cloud/discoveryengine/v1alpha/import_config.proto
CHANGED
@@ -21,6 +21,7 @@ import "google/api/resource.proto";
 import "google/cloud/discoveryengine/v1alpha/completion.proto";
 import "google/cloud/discoveryengine/v1alpha/document.proto";
 import "google/cloud/discoveryengine/v1alpha/user_event.proto";
+import "google/protobuf/field_mask.proto";
 import "google/protobuf/timestamp.proto";
 import "google/rpc/status.proto";
 import "google/type/date.proto";
@@ -123,6 +124,233 @@ message BigQuerySource {
   string data_schema = 6;
 }

+// The Spanner source for importing data
+message SpannerSource {
+  // The project ID that the Spanner source is in with a length limit of 128
+  // characters. If not specified, inherits the project ID from the parent
+  // request.
+  string project_id = 1;
+
+  // Required. The instance ID of the source Spanner table.
+  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The database ID of the source Spanner table.
+  string database_id = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The table name of the Spanner database that needs to be imported.
+  string table_id = 4 [(google.api.field_behavior) = REQUIRED];
+
+  // Whether to apply data boost on Spanner export. Enabling this option will
+  // incur additional cost. More info can be found
+  // [here](https://cloud.google.com/spanner/docs/databoost/databoost-overview#billing_and_quotas).
+  bool enable_data_boost = 5;
+}
+
+// The Bigtable Options object that contains information to support
+// the import.
+message BigtableOptions {
+  // The column family of the Bigtable.
+  message BigtableColumnFamily {
+    // The field name to use for this column family in the document. The
+    // name has to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`. If not set,
+    // it is parsed from the family name with best effort. However, due to
+    // different naming patterns, field name collisions could happen, where
+    // parsing behavior is undefined.
+    string field_name = 1;
+
+    // The encoding mode of the values when the type is not STRING.
+    // Acceptable encoding values are:
+    //
+    // * `TEXT`: indicates values are alphanumeric text strings.
+    // * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
+    // family of functions. This can be overridden for a specific column
+    // by listing that column in `columns` and specifying an encoding for it.
+    Encoding encoding = 2;
+
+    // The type of values in this column family.
+    // The values are expected to be encoded using `HBase Bytes.toBytes`
+    // function when the encoding value is set to `BINARY`.
+    Type type = 3;
+
+    // The list of objects that contains column level information for each
+    // column. If a column is not present in this list it will be ignored.
+    repeated BigtableColumn columns = 4;
+  }
+
+  // The column of the Bigtable.
+  message BigtableColumn {
+    // Required. Qualifier of the column. If it cannot be decoded with utf-8,
+    // use a base-64 encoded string instead.
+    bytes qualifier = 1 [(google.api.field_behavior) = REQUIRED];
+
+    // The field name to use for this column in the document. The name has to
+    // match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
+    // If not set, it is parsed from the qualifier bytes with best effort.
+    // However, due to different naming patterns, field name collisions could
+    // happen, where parsing behavior is undefined.
+    string field_name = 2;
+
+    // The encoding mode of the values when the type is not `STRING`.
+    // Acceptable encoding values are:
+    //
+    // * `TEXT`: indicates values are alphanumeric text strings.
+    // * `BINARY`: indicates values are encoded using `HBase Bytes.toBytes`
+    // family of functions. This can be overridden for a specific column
+    // by listing that column in `columns` and specifying an encoding for it.
+    Encoding encoding = 3;
+
+    // The type of values in this column family.
+    // The values are expected to be encoded using `HBase Bytes.toBytes`
+    // function when the encoding value is set to `BINARY`.
+    Type type = 4;
+  }
+
+  // The type of values in a Bigtable column or column family.
+  // The values are expected to be encoded using
+  // [HBase
+  // Bytes.toBytes](https://hbase.apache.org/apidocs/org/apache/hadoop/hbase/util/Bytes.html)
+  // function when the encoding value is set to `BINARY`.
+  enum Type {
+    // The type is unspecified.
+    TYPE_UNSPECIFIED = 0;
+
+    // String type.
+    STRING = 1;
+
+    // Numerical type.
+    NUMBER = 2;
+
+    // Integer type.
+    INTEGER = 3;
+
+    // Variable length integer type.
+    VAR_INTEGER = 4;
+
+    // BigDecimal type.
+    BIG_NUMERIC = 5;
+
+    // Boolean type.
+    BOOLEAN = 6;
+
+    // JSON type.
+    JSON = 7;
+  }
+
+  // The encoding mode of a Bigtable column or column family.
+  enum Encoding {
+    // The encoding is unspecified.
+    ENCODING_UNSPECIFIED = 0;
+
+    // Text encoding.
+    TEXT = 1;
+
+    // Binary encoding.
+    BINARY = 2;
+  }
+
+  // The field name used for saving row key value in the document. The name has
+  // to match the pattern `[a-zA-Z0-9][a-zA-Z0-9-_]*`.
+  string key_field_name = 1;
+
+  // The mapping from family names to an object that contains column families
+  // level information for the given column family. If a family is not present
+  // in this map it will be ignored.
+  map<string, BigtableColumnFamily> families = 2;
+}
+
+// The Cloud Bigtable source for importing data.
+message BigtableSource {
+  // The project ID that the Bigtable source is in with a length limit of 128
+  // characters. If not specified, inherits the project ID from the parent
+  // request.
+  string project_id = 1;
+
+  // Required. The instance ID of the Cloud Bigtable that needs to be imported.
+  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The table ID of the Cloud Bigtable that needs to be imported.
+  string table_id = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. Bigtable options that contains information needed when parsing
+  // data into typed structures. For example, column type annotations.
+  BigtableOptions bigtable_options = 4 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Cloud FhirStore source import data from.
+message FhirStoreSource {
+  // Required. The full resource name of the FHIR store to import data from, in
+  // the format of
+  // `projects/{project}/locations/{location}/datasets/{dataset}/fhirStores/{fhir_store}`.
+  string fhir_store = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      type: "healthcare.googleapis.com/FhirStore"
+    }
+  ];
+
+  // Intermediate Cloud Storage directory used for the import with a length
+  // limit of 2,000 characters. Can be specified if one wants to have the
+  // FhirStore export to a specific Cloud Storage directory.
+  string gcs_staging_dir = 2;
+}
+
+// Cloud SQL source import data from.
+message CloudSqlSource {
+  // The project ID that the Cloud SQL source is in with a length limit of 128
+  // characters. If not specified, inherits the project ID from the parent
+  // request.
+  string project_id = 1;
+
+  // Required. The Cloud SQL instance to copy the data from with a length limit
+  // of 256 characters.
+  string instance_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The Cloud SQL database to copy the data from with a length limit
+  // of 256 characters.
+  string database_id = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The Cloud SQL table to copy the data from with a length limit of
+  // 256 characters.
+  string table_id = 4 [(google.api.field_behavior) = REQUIRED];
+
+  // Intermediate Cloud Storage directory used for the import with a length
+  // limit of 2,000 characters. Can be specified if one wants to have the
+  // Cloud SQL export to a specific Cloud Storage directory.
+  //
+  // Please ensure that the Cloud SQL service account has the necessary Cloud
+  // Storage Admin permissions to access the specified Cloud Storage directory.
+  string gcs_staging_dir = 5;
+
+  // Option for serverless export. Enabling this option will incur
+  // additional cost. More info can be found
+  // [here](https://cloud.google.com/sql/pricing#serverless).
+  bool offload = 6;
+}
+
+// Firestore source import data from.
+message FirestoreSource {
+  // The project ID that the Cloud SQL source is in with a length limit of 128
+  // characters. If not specified, inherits the project ID from the parent
+  // request.
+  string project_id = 1;
+
+  // Required. The Firestore database to copy the data from with a length limit
+  // of 256 characters.
+  string database_id = 2 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The Firestore collection to copy the data from with a length
+  // limit of 1,500 characters.
+  string collection_id = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Intermediate Cloud Storage directory used for the import with a length
+  // limit of 2,000 characters. Can be specified if one wants to have the
+  // Firestore export to a specific Cloud Storage directory.
+  //
+  // Please ensure that the Firestore service account has the necessary Cloud
+  // Storage Admin permissions to access the specified Cloud Storage directory.
+  string gcs_staging_dir = 4;
+}
+
 // Configuration of destination for Import related errors.
 message ImportErrorConfig {
   // Required. Errors destination.
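Of the new sources, Bigtable requires the most configuration because the row layout has to be mapped to document fields. Below is a hedged sketch of the request shape, reusing the `importDocuments` call pattern from the earlier Spanner example; the family, column, and resource names are placeholders, and the camelCase field names are assumptions derived from the proto messages above.

```ts
import type {protos} from '@google-cloud/discoveryengine';

// Hypothetical request shape for importing from Cloud Bigtable, mapping the
// row key and one column family into document fields (names are placeholders).
const bigtableImportRequest: protos.google.cloud.discoveryengine.v1alpha.IImportDocumentsRequest = {
  parent:
    'projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store/branches/default_branch',
  bigtableSource: {
    projectId: 'my-project',
    instanceId: 'my-instance',
    tableId: 'my-table',
    bigtableOptions: {
      keyFieldName: 'doc_id', // the row key becomes this document field
      families: {
        info: {
          fieldName: 'info',
          encoding: 'TEXT',
          type: 'STRING',
          columns: [
            // Column qualifiers are bytes in the proto; a Buffer works in Node.
            {qualifier: Buffer.from('title'), fieldName: 'title', type: 'STRING'},
          ],
        },
      },
    },
  },
  reconciliationMode: 'INCREMENTAL',
};
```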
@@ -220,6 +448,9 @@ message ImportDocumentsMetadata {

   // Count of entries that encountered errors while processing.
   int64 failure_count = 4;
+
+  // Total count of entries that were processed.
+  int64 total_count = 5;
 }

 // Request message for Import methods.
@@ -256,6 +487,21 @@ message ImportDocumentsRequest {

     // BigQuery input source.
     BigQuerySource bigquery_source = 4;
+
+    // FhirStore input source.
+    FhirStoreSource fhir_store_source = 10;
+
+    // Spanner input source.
+    SpannerSource spanner_source = 11;
+
+    // Cloud SQL input source.
+    CloudSqlSource cloud_sql_source = 12;
+
+    // Firestore input source.
+    FirestoreSource firestore_source = 13;
+
+    // Cloud Bigtable input source.
+    BigtableSource bigtable_source = 15;
   }

   // Required. The parent branch resource name, such as
@@ -276,6 +522,10 @@ message ImportDocumentsRequest {
   // [ReconciliationMode.INCREMENTAL][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.ReconciliationMode.INCREMENTAL].
   ReconciliationMode reconciliation_mode = 6;

+  // Indicates which fields in the provided imported documents to update. If
+  // not set, the default is to update all fields.
+  google.protobuf.FieldMask update_mask = 7;
+
   // Whether to automatically generate IDs for the documents if absent.
   //
   // If set to `true`,
@@ -289,45 +539,54 @@ message ImportDocumentsRequest {
   // [id_field][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.id_field],
   // otherwise, documents without IDs fail to be imported.
   //
-  //
-  //
-  // [
-  // when
+  // Supported data sources:
+  //
+  // * [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource].
   // [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
-  // or
+  // must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
+  // * [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource].
   // [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
-  //
+  // must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
+  // * [SpannerSource][google.cloud.discoveryengine.v1alpha.SpannerSource].
+  // * [CloudSqlSource][google.cloud.discoveryengine.v1alpha.CloudSqlSource].
+  // * [FirestoreSource][google.cloud.discoveryengine.v1alpha.FirestoreSource].
+  // * [BigtableSource][google.cloud.discoveryengine.v1alpha.BigtableSource].
   bool auto_generate_ids = 8;

-  // The field
-  //
+  // The field indicates the ID field or column to be used as unique IDs of
+  // the documents.
   //
   // For [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource] it is the
   // key of the JSON field. For instance, `my_id` for JSON `{"my_id":
-  // "some_uuid"}`. For
-  //
-  // the column name of the BigQuery table where the unique ids are stored.
+  // "some_uuid"}`. For others, it may be the column name of the table where the
+  // unique ids are stored.
   //
-  // The values of the JSON field or the
+  // The values of the JSON field or the table column are used as the
   // [Document.id][google.cloud.discoveryengine.v1alpha.Document.id]s. The JSON
-  // field or the
+  // field or the table column must be of string type, and the values must be
   // set as valid strings conform to
   // [RFC-1034](https://tools.ietf.org/html/rfc1034) with 1-63 characters.
   // Otherwise, documents without valid IDs fail to be imported.
   //
-  // Only set this field when
-  // [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource] or
-  // [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource], and
-  // when
-  // [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
-  // or
-  // [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
-  // is `custom`. And only set this field when
+  // Only set this field when
   // [auto_generate_ids][google.cloud.discoveryengine.v1alpha.ImportDocumentsRequest.auto_generate_ids]
   // is unset or set as `false`. Otherwise, an INVALID_ARGUMENT error is thrown.
   //
   // If it is unset, a default value `_id` is used when importing from the
   // allowed data sources.
+  //
+  // Supported data sources:
+  //
+  // * [GcsSource][google.cloud.discoveryengine.v1alpha.GcsSource].
+  // [GcsSource.data_schema][google.cloud.discoveryengine.v1alpha.GcsSource.data_schema]
+  // must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
+  // * [BigQuerySource][google.cloud.discoveryengine.v1alpha.BigQuerySource].
+  // [BigQuerySource.data_schema][google.cloud.discoveryengine.v1alpha.BigQuerySource.data_schema]
+  // must be `custom` or `csv`. Otherwise, an INVALID_ARGUMENT error is thrown.
+  // * [SpannerSource][google.cloud.discoveryengine.v1alpha.SpannerSource].
+  // * [CloudSqlSource][google.cloud.discoveryengine.v1alpha.CloudSqlSource].
+  // * [FirestoreSource][google.cloud.discoveryengine.v1alpha.FirestoreSource].
+  // * [BigtableSource][google.cloud.discoveryengine.v1alpha.BigtableSource].
   string id_field = 9;
 }

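The rewritten `id_field` documentation and the new `update_mask` field both apply to the table-backed sources. A brief, hedged sketch of how they might be combined for a Cloud SQL import, using the same assumed camelCase client surface as the earlier examples; the column names, field-mask paths, and resource names are illustrative placeholders.

```ts
import type {protos} from '@google-cloud/discoveryengine';

// Hypothetical request: import from Cloud SQL, using the `document_id` column
// as Document.id and restricting which document fields are updated on re-import.
const cloudSqlImportRequest: protos.google.cloud.discoveryengine.v1alpha.IImportDocumentsRequest = {
  parent:
    'projects/my-project/locations/global/collections/default_collection/dataStores/my-data-store/branches/default_branch',
  cloudSqlSource: {
    instanceId: 'my-instance',
    databaseId: 'my-database',
    tableId: 'documents',
    gcsStagingDir: 'gs://my-bucket/discoveryengine-staging',
  },
  idField: 'document_id', // column holding RFC-1034 compliant string IDs
  reconciliationMode: 'INCREMENTAL',
  // Assumption: paths name fields of the imported documents; leaving the mask
  // unset updates all fields (per the proto comment above).
  updateMask: {paths: ['structData', 'content']},
};
```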
@@ -405,3 +664,35 @@ message ImportSuggestionDenyListEntriesMetadata {
   // finish time.
   google.protobuf.Timestamp update_time = 2;
 }
+
+// Response of the
+// [CompletionService.ImportCompletionSuggestions][google.cloud.discoveryengine.v1alpha.CompletionService.ImportCompletionSuggestions]
+// method. If the long running operation is done, this message is returned by
+// the google.longrunning.Operations.response field if the operation is
+// successful.
+message ImportCompletionSuggestionsResponse {
+  // A sample of errors encountered while processing the request.
+  repeated google.rpc.Status error_samples = 1;
+
+  // Count of
+  // [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s
+  // successfully imported.
+  int64 success_count = 2;
+
+  // Count of
+  // [CompletionSuggestion][google.cloud.discoveryengine.v1alpha.CompletionSuggestion]s
+  // that failed to be imported.
+  int64 failure_count = 3;
+}
+
+// Metadata related to the progress of the ImportCompletionSuggestions
+// operation. This will be returned by the google.longrunning.Operation.metadata
+// field.
+message ImportCompletionSuggestionsMetadata {
+  // Operation create time.
+  google.protobuf.Timestamp create_time = 1;
+
+  // Operation last update time. If the operation is done, this is also the
+  // finish time.
+  google.protobuf.Timestamp update_time = 2;
+}