google-cloud-ai_platform-v1 0.35.0 → 0.37.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/AUTHENTICATION.md +72 -99
- data/README.md +1 -1
- data/lib/google/cloud/ai_platform/v1/dataset_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/dataset_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/deployment_resource_pool_service/client.rb +896 -0
- data/lib/google/cloud/ai_platform/v1/deployment_resource_pool_service/credentials.rb +47 -0
- data/lib/google/cloud/ai_platform/v1/deployment_resource_pool_service/operations.rb +809 -0
- data/lib/google/cloud/ai_platform/v1/deployment_resource_pool_service/paths.rb +83 -0
- data/lib/google/cloud/ai_platform/v1/deployment_resource_pool_service.rb +50 -0
- data/lib/google/cloud/ai_platform/v1/endpoint_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/endpoint_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/endpoint_service/paths.rb +19 -0
- data/lib/google/cloud/ai_platform/v1/feature_online_store_admin_service/client.rb +46 -18
- data/lib/google/cloud/ai_platform/v1/feature_online_store_admin_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/feature_online_store_service/client.rb +146 -17
- data/lib/google/cloud/ai_platform/v1/feature_online_store_service.rb +2 -0
- data/lib/google/cloud/ai_platform/v1/feature_registry_service/client.rb +44 -18
- data/lib/google/cloud/ai_platform/v1/feature_registry_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/featurestore_online_serving_service/client.rb +42 -17
- data/lib/google/cloud/ai_platform/v1/featurestore_service/client.rb +44 -18
- data/lib/google/cloud/ai_platform/v1/featurestore_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/index_endpoint_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/index_endpoint_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/index_service/client.rb +52 -18
- data/lib/google/cloud/ai_platform/v1/index_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/job_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/job_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/llm_utility_service/client.rb +49 -18
- data/lib/google/cloud/ai_platform/v1/match_service/client.rb +42 -17
- data/lib/google/cloud/ai_platform/v1/metadata_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/metadata_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/migration_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/migration_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/model_garden_service/client.rb +42 -17
- data/lib/google/cloud/ai_platform/v1/model_service/client.rb +47 -21
- data/lib/google/cloud/ai_platform/v1/model_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/pipeline_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/pipeline_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/prediction_service/client.rb +544 -20
- data/lib/google/cloud/ai_platform/v1/schedule_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/schedule_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/specialist_pool_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/specialist_pool_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/tensorboard_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/tensorboard_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1/version.rb +1 -1
- data/lib/google/cloud/ai_platform/v1/vizier_service/client.rb +43 -17
- data/lib/google/cloud/ai_platform/v1/vizier_service/operations.rb +28 -6
- data/lib/google/cloud/ai_platform/v1.rb +1 -0
- data/lib/google/cloud/aiplatform/v1/accelerator_type_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/annotation_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/annotation_spec_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/artifact_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/batch_prediction_job_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/completion_stats_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/content_pb.rb +64 -0
- data/lib/google/cloud/aiplatform/v1/context_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/custom_job_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/data_item_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/data_labeling_job_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/dataset_pb.rb +4 -2
- data/lib/google/cloud/aiplatform/v1/dataset_service_pb.rb +4 -2
- data/lib/google/cloud/aiplatform/v1/dataset_version_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/deployed_index_ref_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/deployed_model_ref_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/deployment_resource_pool_pb.rb +49 -0
- data/lib/google/cloud/aiplatform/v1/deployment_resource_pool_service_pb.rb +65 -0
- data/lib/google/cloud/aiplatform/v1/deployment_resource_pool_service_services_pb.rb +53 -0
- data/lib/google/cloud/aiplatform/v1/encryption_spec_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/endpoint_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/endpoint_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/entity_type_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/env_var_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/evaluated_annotation_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/event_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/execution_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/explanation_metadata_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/explanation_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/feature_group_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/feature_monitoring_stats_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/feature_online_store_admin_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/feature_online_store_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/feature_online_store_service_pb.rb +11 -2
- data/lib/google/cloud/aiplatform/v1/feature_online_store_service_services_pb.rb +5 -0
- data/lib/google/cloud/aiplatform/v1/feature_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/feature_registry_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/feature_selector_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/feature_view_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/feature_view_sync_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/featurestore_monitoring_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/featurestore_online_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/featurestore_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/featurestore_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/hyperparameter_tuning_job_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/index_endpoint_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/index_endpoint_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/index_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/index_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/io_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/job_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/job_state_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/lineage_subgraph_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/llm_utility_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/machine_resources_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/manual_batch_tuning_parameters_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/match_service_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/metadata_schema_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/metadata_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/metadata_store_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/migratable_resource_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/migration_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/model_deployment_monitoring_job_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/model_evaluation_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/model_evaluation_slice_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/model_garden_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/model_monitoring_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/model_pb.rb +3 -2
- data/lib/google/cloud/aiplatform/v1/model_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/nas_job_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/openapi_pb.rb +47 -0
- data/lib/google/cloud/aiplatform/v1/operation_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/pipeline_failure_policy_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/pipeline_job_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/pipeline_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/pipeline_state_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/prediction_service_pb.rb +16 -2
- data/lib/google/cloud/aiplatform/v1/prediction_service_services_pb.rb +16 -3
- data/lib/google/cloud/aiplatform/v1/publisher_model_pb.rb +6 -2
- data/lib/google/cloud/aiplatform/v1/saved_query_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/schedule_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/schedule_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/service_networking_pb.rb +4 -2
- data/lib/google/cloud/aiplatform/v1/specialist_pool_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/specialist_pool_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/study_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/tensorboard_data_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/tensorboard_experiment_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/tensorboard_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/tensorboard_run_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/tensorboard_service_pb.rb +2 -2
- data/lib/google/cloud/aiplatform/v1/tensorboard_time_series_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/tool_pb.rb +51 -0
- data/lib/google/cloud/aiplatform/v1/training_pipeline_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/types_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/unmanaged_container_model_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/user_action_reference_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/value_pb.rb +1 -1
- data/lib/google/cloud/aiplatform/v1/vizier_service_pb.rb +2 -2
- data/proto_docs/google/api/client.rb +1 -0
- data/proto_docs/google/cloud/aiplatform/v1/batch_prediction_job.rb +2 -2
- data/proto_docs/google/cloud/aiplatform/v1/content.rb +312 -0
- data/proto_docs/google/cloud/aiplatform/v1/custom_job.rb +15 -0
- data/proto_docs/google/cloud/aiplatform/v1/dataset.rb +102 -0
- data/proto_docs/google/cloud/aiplatform/v1/dataset_service.rb +12 -1
- data/proto_docs/google/cloud/aiplatform/v1/deployed_index_ref.rb +3 -0
- data/proto_docs/google/cloud/aiplatform/v1/deployment_resource_pool.rb +45 -0
- data/proto_docs/google/cloud/aiplatform/v1/deployment_resource_pool_service.rb +170 -0
- data/proto_docs/google/cloud/aiplatform/v1/endpoint.rb +6 -0
- data/proto_docs/google/cloud/aiplatform/v1/explanation.rb +8 -0
- data/proto_docs/google/cloud/aiplatform/v1/feature.rb +5 -1
- data/proto_docs/google/cloud/aiplatform/v1/feature_group.rb +2 -2
- data/proto_docs/google/cloud/aiplatform/v1/feature_online_store.rb +2 -2
- data/proto_docs/google/cloud/aiplatform/v1/feature_online_store_admin_service.rb +3 -1
- data/proto_docs/google/cloud/aiplatform/v1/feature_online_store_service.rb +150 -0
- data/proto_docs/google/cloud/aiplatform/v1/feature_view.rb +3 -2
- data/proto_docs/google/cloud/aiplatform/v1/feature_view_sync.rb +2 -2
- data/proto_docs/google/cloud/aiplatform/v1/featurestore_service.rb +1 -1
- data/proto_docs/google/cloud/aiplatform/v1/index_endpoint.rb +5 -0
- data/proto_docs/google/cloud/aiplatform/v1/index_service.rb +28 -0
- data/proto_docs/google/cloud/aiplatform/v1/metadata_service.rb +1 -0
- data/proto_docs/google/cloud/aiplatform/v1/model.rb +55 -4
- data/proto_docs/google/cloud/aiplatform/v1/model_deployment_monitoring_job.rb +4 -0
- data/proto_docs/google/cloud/aiplatform/v1/model_evaluation.rb +1 -1
- data/proto_docs/google/cloud/aiplatform/v1/model_monitoring.rb +1 -0
- data/proto_docs/google/cloud/aiplatform/v1/model_service.rb +4 -4
- data/proto_docs/google/cloud/aiplatform/v1/nas_job.rb +1 -0
- data/proto_docs/google/cloud/aiplatform/v1/openapi.rb +102 -0
- data/proto_docs/google/cloud/aiplatform/v1/pipeline_job.rb +1 -0
- data/proto_docs/google/cloud/aiplatform/v1/prediction_service.rb +195 -0
- data/proto_docs/google/cloud/aiplatform/v1/publisher_model.rb +73 -1
- data/proto_docs/google/cloud/aiplatform/v1/service_networking.rb +16 -0
- data/proto_docs/google/cloud/aiplatform/v1/tool.rb +112 -0
- data/proto_docs/google/protobuf/any.rb +2 -1
- data/proto_docs/google/type/date.rb +53 -0
- metadata +27 -122
@@ -32,6 +32,9 @@ module Google
|
|
32
32
|
# A service for online predictions and explanations.
|
33
33
|
#
|
34
34
|
class Client
|
35
|
+
# @private
|
36
|
+
DEFAULT_ENDPOINT_TEMPLATE = "aiplatform.$UNIVERSE_DOMAIN$"
|
37
|
+
|
35
38
|
include Paths
|
36
39
|
|
37
40
|
# @private
|
@@ -92,6 +95,15 @@ module Google
|
|
92
95
|
@config
|
93
96
|
end
|
94
97
|
|
98
|
+
##
|
99
|
+
# The effective universe domain
|
100
|
+
#
|
101
|
+
# @return [String]
|
102
|
+
#
|
103
|
+
def universe_domain
|
104
|
+
@prediction_service_stub.universe_domain
|
105
|
+
end
|
106
|
+
|
95
107
|
##
|
96
108
|
# Create a new PredictionService client object.
|
97
109
|
#
|
@@ -125,8 +137,9 @@ module Google
|
|
125
137
|
credentials = @config.credentials
|
126
138
|
# Use self-signed JWT if the endpoint is unchanged from default,
|
127
139
|
# but only if the default endpoint does not have a region prefix.
|
128
|
-
enable_self_signed_jwt = @config.endpoint
|
129
|
-
|
140
|
+
enable_self_signed_jwt = @config.endpoint.nil? ||
|
141
|
+
(@config.endpoint == Configuration::DEFAULT_ENDPOINT &&
|
142
|
+
!@config.endpoint.split(".").first.include?("-"))
|
130
143
|
credentials ||= Credentials.default scope: @config.scope,
|
131
144
|
enable_self_signed_jwt: enable_self_signed_jwt
|
132
145
|
if credentials.is_a?(::String) || credentials.is_a?(::Hash)
|
@@ -135,26 +148,30 @@ module Google
|
|
135
148
|
@quota_project_id = @config.quota_project
|
136
149
|
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
|
137
150
|
|
151
|
+
@prediction_service_stub = ::Gapic::ServiceStub.new(
|
152
|
+
::Google::Cloud::AIPlatform::V1::PredictionService::Stub,
|
153
|
+
credentials: credentials,
|
154
|
+
endpoint: @config.endpoint,
|
155
|
+
endpoint_template: DEFAULT_ENDPOINT_TEMPLATE,
|
156
|
+
universe_domain: @config.universe_domain,
|
157
|
+
channel_args: @config.channel_args,
|
158
|
+
interceptors: @config.interceptors,
|
159
|
+
channel_pool_config: @config.channel_pool
|
160
|
+
)
|
161
|
+
|
138
162
|
@location_client = Google::Cloud::Location::Locations::Client.new do |config|
|
139
163
|
config.credentials = credentials
|
140
164
|
config.quota_project = @quota_project_id
|
141
|
-
config.endpoint = @
|
165
|
+
config.endpoint = @prediction_service_stub.endpoint
|
166
|
+
config.universe_domain = @prediction_service_stub.universe_domain
|
142
167
|
end
|
143
168
|
|
144
169
|
@iam_policy_client = Google::Iam::V1::IAMPolicy::Client.new do |config|
|
145
170
|
config.credentials = credentials
|
146
171
|
config.quota_project = @quota_project_id
|
147
|
-
config.endpoint = @
|
172
|
+
config.endpoint = @prediction_service_stub.endpoint
|
173
|
+
config.universe_domain = @prediction_service_stub.universe_domain
|
148
174
|
end
|
149
|
-
|
150
|
-
@prediction_service_stub = ::Gapic::ServiceStub.new(
|
151
|
-
::Google::Cloud::AIPlatform::V1::PredictionService::Stub,
|
152
|
-
credentials: credentials,
|
153
|
-
endpoint: @config.endpoint,
|
154
|
-
channel_args: @config.channel_args,
|
155
|
-
interceptors: @config.interceptors,
|
156
|
-
channel_pool_config: @config.channel_pool
|
157
|
-
)
|
158
175
|
end
|
159
176
|
|
160
177
|
##
|
@@ -390,8 +407,100 @@ module Google
|
|
390
407
|
end
|
391
408
|
|
392
409
|
##
|
393
|
-
# Perform
|
394
|
-
#
|
410
|
+
# Perform a streaming online prediction with an arbitrary HTTP payload.
|
411
|
+
#
|
412
|
+
# @overload stream_raw_predict(request, options = nil)
|
413
|
+
# Pass arguments to `stream_raw_predict` via a request object, either of type
|
414
|
+
# {::Google::Cloud::AIPlatform::V1::StreamRawPredictRequest} or an equivalent Hash.
|
415
|
+
#
|
416
|
+
# @param request [::Google::Cloud::AIPlatform::V1::StreamRawPredictRequest, ::Hash]
|
417
|
+
# A request object representing the call parameters. Required. To specify no
|
418
|
+
# parameters, or to keep all the default parameter values, pass an empty Hash.
|
419
|
+
# @param options [::Gapic::CallOptions, ::Hash]
|
420
|
+
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
|
421
|
+
#
|
422
|
+
# @overload stream_raw_predict(endpoint: nil, http_body: nil)
|
423
|
+
# Pass arguments to `stream_raw_predict` via keyword arguments. Note that at
|
424
|
+
# least one keyword argument is required. To specify no parameters, or to keep all
|
425
|
+
# the default parameter values, pass an empty Hash as a request object (see above).
|
426
|
+
#
|
427
|
+
# @param endpoint [::String]
|
428
|
+
# Required. The name of the Endpoint requested to serve the prediction.
|
429
|
+
# Format:
|
430
|
+
# `projects/{project}/locations/{location}/endpoints/{endpoint}`
|
431
|
+
# @param http_body [::Google::Api::HttpBody, ::Hash]
|
432
|
+
# The prediction input. Supports HTTP headers and arbitrary data payload.
|
433
|
+
#
|
434
|
+
# @yield [response, operation] Access the result along with the RPC operation
|
435
|
+
# @yieldparam response [::Enumerable<::Google::Api::HttpBody>]
|
436
|
+
# @yieldparam operation [::GRPC::ActiveCall::Operation]
|
437
|
+
#
|
438
|
+
# @return [::Enumerable<::Google::Api::HttpBody>]
|
439
|
+
#
|
440
|
+
# @raise [::Google::Cloud::Error] if the RPC is aborted.
|
441
|
+
#
|
442
|
+
# @example Basic example
|
443
|
+
# require "google/cloud/ai_platform/v1"
|
444
|
+
#
|
445
|
+
# # Create a client object. The client can be reused for multiple calls.
|
446
|
+
# client = Google::Cloud::AIPlatform::V1::PredictionService::Client.new
|
447
|
+
#
|
448
|
+
# # Create a request. To set request fields, pass in keyword arguments.
|
449
|
+
# request = Google::Cloud::AIPlatform::V1::StreamRawPredictRequest.new
|
450
|
+
#
|
451
|
+
# # Call the stream_raw_predict method to start streaming.
|
452
|
+
# output = client.stream_raw_predict request
|
453
|
+
#
|
454
|
+
# # The returned object is a streamed enumerable yielding elements of type
|
455
|
+
# # ::Google::Api::HttpBody
|
456
|
+
# output.each do |current_response|
|
457
|
+
# p current_response
|
458
|
+
# end
|
459
|
+
#
|
460
|
+
def stream_raw_predict request, options = nil
|
461
|
+
raise ::ArgumentError, "request must be provided" if request.nil?
|
462
|
+
|
463
|
+
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::StreamRawPredictRequest
|
464
|
+
|
465
|
+
# Converts hash and nil to an options object
|
466
|
+
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
|
467
|
+
|
468
|
+
# Customize the options with defaults
|
469
|
+
metadata = @config.rpcs.stream_raw_predict.metadata.to_h
|
470
|
+
|
471
|
+
# Set x-goog-api-client and x-goog-user-project headers
|
472
|
+
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
|
473
|
+
lib_name: @config.lib_name, lib_version: @config.lib_version,
|
474
|
+
gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION
|
475
|
+
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
|
476
|
+
|
477
|
+
header_params = {}
|
478
|
+
if request.endpoint
|
479
|
+
header_params["endpoint"] = request.endpoint
|
480
|
+
end
|
481
|
+
|
482
|
+
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
|
483
|
+
metadata[:"x-goog-request-params"] ||= request_params_header
|
484
|
+
|
485
|
+
options.apply_defaults timeout: @config.rpcs.stream_raw_predict.timeout,
|
486
|
+
metadata: metadata,
|
487
|
+
retry_policy: @config.rpcs.stream_raw_predict.retry_policy
|
488
|
+
|
489
|
+
options.apply_defaults timeout: @config.timeout,
|
490
|
+
metadata: @config.metadata,
|
491
|
+
retry_policy: @config.retry_policy
|
492
|
+
|
493
|
+
@prediction_service_stub.call_rpc :stream_raw_predict, request, options: options do |response, operation|
|
494
|
+
yield response, operation if block_given?
|
495
|
+
return response
|
496
|
+
end
|
497
|
+
rescue ::GRPC::BadStatus => e
|
498
|
+
raise ::Google::Cloud::Error.from_error(e)
|
499
|
+
end
|
500
|
+
|
501
|
+
##
|
502
|
+
# Perform an unary online prediction request to a gRPC model server for
|
503
|
+
# Vertex first-party products and frameworks.
|
395
504
|
#
|
396
505
|
# @overload direct_predict(request, options = nil)
|
397
506
|
# Pass arguments to `direct_predict` via a request object, either of type
|
@@ -482,7 +591,8 @@ module Google
|
|
482
591
|
end
|
483
592
|
|
484
593
|
##
|
485
|
-
# Perform an online prediction request
|
594
|
+
# Perform an unary online prediction request to a gRPC model server for
|
595
|
+
# custom containers.
|
486
596
|
#
|
487
597
|
# @overload direct_raw_predict(request, options = nil)
|
488
598
|
# Pass arguments to `direct_raw_predict` via a request object, either of type
|
@@ -578,6 +688,164 @@ module Google
|
|
578
688
|
raise ::Google::Cloud::Error.from_error(e)
|
579
689
|
end
|
580
690
|
|
691
|
+
##
|
692
|
+
# Perform a streaming online prediction request to a gRPC model server for
|
693
|
+
# Vertex first-party products and frameworks.
|
694
|
+
#
|
695
|
+
# @param request [::Gapic::StreamInput, ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectPredictRequest, ::Hash>]
|
696
|
+
# An enumerable of {::Google::Cloud::AIPlatform::V1::StreamDirectPredictRequest} instances.
|
697
|
+
# @param options [::Gapic::CallOptions, ::Hash]
|
698
|
+
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
|
699
|
+
#
|
700
|
+
# @yield [response, operation] Access the result along with the RPC operation
|
701
|
+
# @yieldparam response [::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectPredictResponse>]
|
702
|
+
# @yieldparam operation [::GRPC::ActiveCall::Operation]
|
703
|
+
#
|
704
|
+
# @return [::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectPredictResponse>]
|
705
|
+
#
|
706
|
+
# @raise [::Google::Cloud::Error] if the RPC is aborted.
|
707
|
+
#
|
708
|
+
# @example Basic example
|
709
|
+
# require "google/cloud/ai_platform/v1"
|
710
|
+
#
|
711
|
+
# # Create a client object. The client can be reused for multiple calls.
|
712
|
+
# client = Google::Cloud::AIPlatform::V1::PredictionService::Client.new
|
713
|
+
#
|
714
|
+
# # Create an input stream.
|
715
|
+
# input = Gapic::StreamInput.new
|
716
|
+
#
|
717
|
+
# # Call the stream_direct_predict method to start streaming.
|
718
|
+
# output = client.stream_direct_predict input
|
719
|
+
#
|
720
|
+
# # Send requests on the stream. For each request object, set fields by
|
721
|
+
# # passing keyword arguments. Be sure to close the stream when done.
|
722
|
+
# input << Google::Cloud::AIPlatform::V1::StreamDirectPredictRequest.new
|
723
|
+
# input << Google::Cloud::AIPlatform::V1::StreamDirectPredictRequest.new
|
724
|
+
# input.close
|
725
|
+
#
|
726
|
+
# # The returned object is a streamed enumerable yielding elements of type
|
727
|
+
# # ::Google::Cloud::AIPlatform::V1::StreamDirectPredictResponse
|
728
|
+
# output.each do |current_response|
|
729
|
+
# p current_response
|
730
|
+
# end
|
731
|
+
#
|
732
|
+
def stream_direct_predict request, options = nil
|
733
|
+
unless request.is_a? ::Enumerable
|
734
|
+
raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum
|
735
|
+
request = request.to_enum
|
736
|
+
end
|
737
|
+
|
738
|
+
request = request.lazy.map do |req|
|
739
|
+
::Gapic::Protobuf.coerce req, to: ::Google::Cloud::AIPlatform::V1::StreamDirectPredictRequest
|
740
|
+
end
|
741
|
+
|
742
|
+
# Converts hash and nil to an options object
|
743
|
+
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
|
744
|
+
|
745
|
+
# Customize the options with defaults
|
746
|
+
metadata = @config.rpcs.stream_direct_predict.metadata.to_h
|
747
|
+
|
748
|
+
# Set x-goog-api-client and x-goog-user-project headers
|
749
|
+
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
|
750
|
+
lib_name: @config.lib_name, lib_version: @config.lib_version,
|
751
|
+
gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION
|
752
|
+
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
|
753
|
+
|
754
|
+
options.apply_defaults timeout: @config.rpcs.stream_direct_predict.timeout,
|
755
|
+
metadata: metadata,
|
756
|
+
retry_policy: @config.rpcs.stream_direct_predict.retry_policy
|
757
|
+
|
758
|
+
options.apply_defaults timeout: @config.timeout,
|
759
|
+
metadata: @config.metadata,
|
760
|
+
retry_policy: @config.retry_policy
|
761
|
+
|
762
|
+
@prediction_service_stub.call_rpc :stream_direct_predict, request, options: options do |response, operation|
|
763
|
+
yield response, operation if block_given?
|
764
|
+
return response
|
765
|
+
end
|
766
|
+
rescue ::GRPC::BadStatus => e
|
767
|
+
raise ::Google::Cloud::Error.from_error(e)
|
768
|
+
end
|
769
|
+
|
770
|
+
##
|
771
|
+
# Perform a streaming online prediction request to a gRPC model server for
|
772
|
+
# custom containers.
|
773
|
+
#
|
774
|
+
# @param request [::Gapic::StreamInput, ::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictRequest, ::Hash>]
|
775
|
+
# An enumerable of {::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictRequest} instances.
|
776
|
+
# @param options [::Gapic::CallOptions, ::Hash]
|
777
|
+
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
|
778
|
+
#
|
779
|
+
# @yield [response, operation] Access the result along with the RPC operation
|
780
|
+
# @yieldparam response [::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictResponse>]
|
781
|
+
# @yieldparam operation [::GRPC::ActiveCall::Operation]
|
782
|
+
#
|
783
|
+
# @return [::Enumerable<::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictResponse>]
|
784
|
+
#
|
785
|
+
# @raise [::Google::Cloud::Error] if the RPC is aborted.
|
786
|
+
#
|
787
|
+
# @example Basic example
|
788
|
+
# require "google/cloud/ai_platform/v1"
|
789
|
+
#
|
790
|
+
# # Create a client object. The client can be reused for multiple calls.
|
791
|
+
# client = Google::Cloud::AIPlatform::V1::PredictionService::Client.new
|
792
|
+
#
|
793
|
+
# # Create an input stream.
|
794
|
+
# input = Gapic::StreamInput.new
|
795
|
+
#
|
796
|
+
# # Call the stream_direct_raw_predict method to start streaming.
|
797
|
+
# output = client.stream_direct_raw_predict input
|
798
|
+
#
|
799
|
+
# # Send requests on the stream. For each request object, set fields by
|
800
|
+
# # passing keyword arguments. Be sure to close the stream when done.
|
801
|
+
# input << Google::Cloud::AIPlatform::V1::StreamDirectRawPredictRequest.new
|
802
|
+
# input << Google::Cloud::AIPlatform::V1::StreamDirectRawPredictRequest.new
|
803
|
+
# input.close
|
804
|
+
#
|
805
|
+
# # The returned object is a streamed enumerable yielding elements of type
|
806
|
+
# # ::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictResponse
|
807
|
+
# output.each do |current_response|
|
808
|
+
# p current_response
|
809
|
+
# end
|
810
|
+
#
|
811
|
+
def stream_direct_raw_predict request, options = nil
|
812
|
+
unless request.is_a? ::Enumerable
|
813
|
+
raise ::ArgumentError, "request must be an Enumerable" unless request.respond_to? :to_enum
|
814
|
+
request = request.to_enum
|
815
|
+
end
|
816
|
+
|
817
|
+
request = request.lazy.map do |req|
|
818
|
+
::Gapic::Protobuf.coerce req, to: ::Google::Cloud::AIPlatform::V1::StreamDirectRawPredictRequest
|
819
|
+
end
|
820
|
+
|
821
|
+
# Converts hash and nil to an options object
|
822
|
+
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
|
823
|
+
|
824
|
+
# Customize the options with defaults
|
825
|
+
metadata = @config.rpcs.stream_direct_raw_predict.metadata.to_h
|
826
|
+
|
827
|
+
# Set x-goog-api-client and x-goog-user-project headers
|
828
|
+
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
|
829
|
+
lib_name: @config.lib_name, lib_version: @config.lib_version,
|
830
|
+
gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION
|
831
|
+
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
|
832
|
+
|
833
|
+
options.apply_defaults timeout: @config.rpcs.stream_direct_raw_predict.timeout,
|
834
|
+
metadata: metadata,
|
835
|
+
retry_policy: @config.rpcs.stream_direct_raw_predict.retry_policy
|
836
|
+
|
837
|
+
options.apply_defaults timeout: @config.timeout,
|
838
|
+
metadata: @config.metadata,
|
839
|
+
retry_policy: @config.retry_policy
|
840
|
+
|
841
|
+
@prediction_service_stub.call_rpc :stream_direct_raw_predict, request, options: options do |response, operation|
|
842
|
+
yield response, operation if block_given?
|
843
|
+
return response
|
844
|
+
end
|
845
|
+
rescue ::GRPC::BadStatus => e
|
846
|
+
raise ::Google::Cloud::Error.from_error(e)
|
847
|
+
end
|
848
|
+
|
581
849
|
##
|
582
850
|
# Perform a streaming online prediction request for Vertex first-party
|
583
851
|
# products and frameworks.
|
@@ -956,6 +1224,219 @@ module Google
|
|
956
1224
|
raise ::Google::Cloud::Error.from_error(e)
|
957
1225
|
end
|
958
1226
|
|
1227
|
+
##
|
1228
|
+
# Generate content with multimodal inputs.
|
1229
|
+
#
|
1230
|
+
# @overload generate_content(request, options = nil)
|
1231
|
+
# Pass arguments to `generate_content` via a request object, either of type
|
1232
|
+
# {::Google::Cloud::AIPlatform::V1::GenerateContentRequest} or an equivalent Hash.
|
1233
|
+
#
|
1234
|
+
# @param request [::Google::Cloud::AIPlatform::V1::GenerateContentRequest, ::Hash]
|
1235
|
+
# A request object representing the call parameters. Required. To specify no
|
1236
|
+
# parameters, or to keep all the default parameter values, pass an empty Hash.
|
1237
|
+
# @param options [::Gapic::CallOptions, ::Hash]
|
1238
|
+
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
|
1239
|
+
#
|
1240
|
+
# @overload generate_content(model: nil, contents: nil, tools: nil, safety_settings: nil, generation_config: nil)
|
1241
|
+
# Pass arguments to `generate_content` via keyword arguments. Note that at
|
1242
|
+
# least one keyword argument is required. To specify no parameters, or to keep all
|
1243
|
+
# the default parameter values, pass an empty Hash as a request object (see above).
|
1244
|
+
#
|
1245
|
+
# @param model [::String]
|
1246
|
+
# Required. The name of the publisher model requested to serve the
|
1247
|
+
# prediction. Format:
|
1248
|
+
# `projects/{project}/locations/{location}/publishers/*/models/*`
|
1249
|
+
# @param contents [::Array<::Google::Cloud::AIPlatform::V1::Content, ::Hash>]
|
1250
|
+
# Required. The content of the current conversation with the model.
|
1251
|
+
#
|
1252
|
+
# For single-turn queries, this is a single instance. For multi-turn queries,
|
1253
|
+
# this is a repeated field that contains conversation history + latest
|
1254
|
+
# request.
|
1255
|
+
# @param tools [::Array<::Google::Cloud::AIPlatform::V1::Tool, ::Hash>]
|
1256
|
+
# Optional. A list of `Tools` the model may use to generate the next
|
1257
|
+
# response.
|
1258
|
+
#
|
1259
|
+
# A `Tool` is a piece of code that enables the system to interact with
|
1260
|
+
# external systems to perform an action, or set of actions, outside of
|
1261
|
+
# knowledge and scope of the model.
|
1262
|
+
# @param safety_settings [::Array<::Google::Cloud::AIPlatform::V1::SafetySetting, ::Hash>]
|
1263
|
+
# Optional. Per request settings for blocking unsafe content.
|
1264
|
+
# Enforced on GenerateContentResponse.candidates.
|
1265
|
+
# @param generation_config [::Google::Cloud::AIPlatform::V1::GenerationConfig, ::Hash]
|
1266
|
+
# Optional. Generation config.
|
1267
|
+
#
|
1268
|
+
# @yield [response, operation] Access the result along with the RPC operation
|
1269
|
+
# @yieldparam response [::Google::Cloud::AIPlatform::V1::GenerateContentResponse]
|
1270
|
+
# @yieldparam operation [::GRPC::ActiveCall::Operation]
|
1271
|
+
#
|
1272
|
+
# @return [::Google::Cloud::AIPlatform::V1::GenerateContentResponse]
|
1273
|
+
#
|
1274
|
+
# @raise [::Google::Cloud::Error] if the RPC is aborted.
|
1275
|
+
#
|
1276
|
+
# @example Basic example
|
1277
|
+
# require "google/cloud/ai_platform/v1"
|
1278
|
+
#
|
1279
|
+
# # Create a client object. The client can be reused for multiple calls.
|
1280
|
+
# client = Google::Cloud::AIPlatform::V1::PredictionService::Client.new
|
1281
|
+
#
|
1282
|
+
# # Create a request. To set request fields, pass in keyword arguments.
|
1283
|
+
# request = Google::Cloud::AIPlatform::V1::GenerateContentRequest.new
|
1284
|
+
#
|
1285
|
+
# # Call the generate_content method.
|
1286
|
+
# result = client.generate_content request
|
1287
|
+
#
|
1288
|
+
# # The returned object is of type Google::Cloud::AIPlatform::V1::GenerateContentResponse.
|
1289
|
+
# p result
|
1290
|
+
#
|
1291
|
+
##
# Generates content from the model for the given request (unary call).
#
# @param request [::Google::Cloud::AIPlatform::V1::GenerateContentRequest, ::Hash]
#   The call parameters; a Hash is coerced into the request type. Required.
# @param options [::Gapic::CallOptions, ::Hash]
#   Per-call overrides (timeout, retries, metadata). Optional.
# @yield [response, operation] Access the result along with the RPC operation.
# @return [::Google::Cloud::AIPlatform::V1::GenerateContentResponse]
# @raise [::Google::Cloud::Error] if the RPC is aborted.
def generate_content request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize a Hash (or compatible object) into the proto request type.
  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::GenerateContentRequest

  # Normalize hash/nil options into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the RPC-specific configured metadata.
  call_metadata = @config.rpcs.generate_content.metadata.to_h

  # Attach the x-goog-api-client and x-goog-user-project headers.
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's model field.
  routing_params = {}
  routing_params["model"] = request.model if request.model
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # Apply RPC-level defaults first, then client-level defaults.
  options.apply_defaults timeout: @config.rpcs.generate_content.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.generate_content.retry_policy

  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @prediction_service_stub.call_rpc :generate_content, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
|
1331
|
+
|
1332
|
+
##
# Generate content with multimodal inputs with streaming support.
#
# @overload stream_generate_content(request, options = nil)
#   Pass arguments to `stream_generate_content` via a request object, either of type
#   {::Google::Cloud::AIPlatform::V1::GenerateContentRequest} or an equivalent Hash.
#
#   @param request [::Google::Cloud::AIPlatform::V1::GenerateContentRequest, ::Hash]
#     A request object representing the call parameters. Required. To specify no
#     parameters, or to keep all the default parameter values, pass an empty Hash.
#   @param options [::Gapic::CallOptions, ::Hash]
#     Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload stream_generate_content(model: nil, contents: nil, tools: nil, safety_settings: nil, generation_config: nil)
#   Pass arguments to `stream_generate_content` via keyword arguments. Note that at
#   least one keyword argument is required. To specify no parameters, or to keep all
#   the default parameter values, pass an empty Hash as a request object (see above).
#
#   @param model [::String]
#     Required. The name of the publisher model requested to serve the
#     prediction. Format:
#     `projects/{project}/locations/{location}/publishers/*/models/*`
#   @param contents [::Array<::Google::Cloud::AIPlatform::V1::Content, ::Hash>]
#     Required. The content of the current conversation with the model.
#
#     For single-turn queries, this is a single instance. For multi-turn queries,
#     this is a repeated field that contains conversation history + latest
#     request.
#   @param tools [::Array<::Google::Cloud::AIPlatform::V1::Tool, ::Hash>]
#     Optional. A list of `Tools` the model may use to generate the next
#     response.
#
#     A `Tool` is a piece of code that enables the system to interact with
#     external systems to perform an action, or set of actions, outside of
#     knowledge and scope of the model.
#   @param safety_settings [::Array<::Google::Cloud::AIPlatform::V1::SafetySetting, ::Hash>]
#     Optional. Per request settings for blocking unsafe content.
#     Enforced on GenerateContentResponse.candidates.
#   @param generation_config [::Google::Cloud::AIPlatform::V1::GenerationConfig, ::Hash]
#     Optional. Generation config.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Enumerable<::Google::Cloud::AIPlatform::V1::GenerateContentResponse>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Enumerable<::Google::Cloud::AIPlatform::V1::GenerateContentResponse>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
#   require "google/cloud/ai_platform/v1"
#
#   # Create a client object. The client can be reused for multiple calls.
#   client = Google::Cloud::AIPlatform::V1::PredictionService::Client.new
#
#   # Create a request. To set request fields, pass in keyword arguments.
#   request = Google::Cloud::AIPlatform::V1::GenerateContentRequest.new
#
#   # Call the stream_generate_content method to start streaming.
#   output = client.stream_generate_content request
#
#   # The returned object is a streamed enumerable yielding elements of type
#   # ::Google::Cloud::AIPlatform::V1::GenerateContentResponse
#   output.each do |current_response|
#     p current_response
#   end
#
def stream_generate_content request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Normalize a Hash (or compatible object) into the proto request type.
  request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::AIPlatform::V1::GenerateContentRequest

  # Normalize hash/nil options into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the RPC-specific configured metadata.
  call_metadata = @config.rpcs.stream_generate_content.metadata.to_h

  # Attach the x-goog-api-client and x-goog-user-project headers.
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::AIPlatform::V1::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's model field.
  routing_params = {}
  routing_params["model"] = request.model if request.model
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # Apply RPC-level defaults first, then client-level defaults.
  options.apply_defaults timeout: @config.rpcs.stream_generate_content.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.stream_generate_content.retry_policy

  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @prediction_service_stub.call_rpc :stream_generate_content, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
|
1439
|
+
|
959
1440
|
##
|
960
1441
|
# Configuration class for the PredictionService API.
|
961
1442
|
#
|
@@ -986,9 +1467,9 @@ module Google
|
|
986
1467
|
# end
|
987
1468
|
#
|
988
1469
|
# @!attribute [rw] endpoint
|
989
|
-
#
|
990
|
-
#
|
991
|
-
# @return [::String]
|
1470
|
+
# A custom service endpoint, as a hostname or hostname:port. The default is
|
1471
|
+
# nil, indicating to use the default endpoint in the current universe domain.
|
1472
|
+
# @return [::String,nil]
|
992
1473
|
# @!attribute [rw] credentials
|
993
1474
|
# Credentials to send with calls. You may provide any of the following types:
|
994
1475
|
# * (`String`) The path to a service account key file in JSON format
|
@@ -1034,13 +1515,20 @@ module Google
|
|
1034
1515
|
# @!attribute [rw] quota_project
|
1035
1516
|
# A separate project against which to charge quota.
|
1036
1517
|
# @return [::String]
|
1518
|
+
# @!attribute [rw] universe_domain
|
1519
|
+
# The universe domain within which to make requests. This determines the
|
1520
|
+
# default endpoint URL. The default value of nil uses the environment
|
1521
|
+
# universe (usually the default "googleapis.com" universe).
|
1522
|
+
# @return [::String,nil]
|
1037
1523
|
#
|
1038
1524
|
class Configuration
|
1039
1525
|
extend ::Gapic::Config
|
1040
1526
|
|
1527
|
+
# @private
|
1528
|
+
# The endpoint specific to the default "googleapis.com" universe. Deprecated.
|
1041
1529
|
DEFAULT_ENDPOINT = "aiplatform.googleapis.com"
|
1042
1530
|
|
1043
|
-
config_attr :endpoint,
|
1531
|
+
config_attr :endpoint, nil, ::String, nil
|
1044
1532
|
config_attr :credentials, nil do |value|
|
1045
1533
|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
|
1046
1534
|
allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
|
@@ -1055,6 +1543,7 @@ module Google
|
|
1055
1543
|
config_attr :metadata, nil, ::Hash, nil
|
1056
1544
|
config_attr :retry_policy, nil, ::Hash, ::Proc, nil
|
1057
1545
|
config_attr :quota_project, nil, ::String, nil
|
1546
|
+
config_attr :universe_domain, nil, ::String, nil
|
1058
1547
|
|
1059
1548
|
# @private
|
1060
1549
|
def initialize parent_config = nil
|
@@ -1112,6 +1601,11 @@ module Google
|
|
1112
1601
|
#
|
1113
1602
|
attr_reader :raw_predict
|
1114
1603
|
##
|
1604
|
+
# RPC-specific configuration for `stream_raw_predict`
|
1605
|
+
# @return [::Gapic::Config::Method]
|
1606
|
+
#
|
1607
|
+
attr_reader :stream_raw_predict
|
1608
|
+
##
|
1115
1609
|
# RPC-specific configuration for `direct_predict`
|
1116
1610
|
# @return [::Gapic::Config::Method]
|
1117
1611
|
#
|
@@ -1122,6 +1616,16 @@ module Google
|
|
1122
1616
|
#
|
1123
1617
|
attr_reader :direct_raw_predict
|
1124
1618
|
##
|
1619
|
+
# RPC-specific configuration for `stream_direct_predict`
|
1620
|
+
# @return [::Gapic::Config::Method]
|
1621
|
+
#
|
1622
|
+
attr_reader :stream_direct_predict
|
1623
|
+
##
|
1624
|
+
# RPC-specific configuration for `stream_direct_raw_predict`
|
1625
|
+
# @return [::Gapic::Config::Method]
|
1626
|
+
#
|
1627
|
+
attr_reader :stream_direct_raw_predict
|
1628
|
+
##
|
1125
1629
|
# RPC-specific configuration for `streaming_predict`
|
1126
1630
|
# @return [::Gapic::Config::Method]
|
1127
1631
|
#
|
@@ -1141,6 +1645,16 @@ module Google
|
|
1141
1645
|
# @return [::Gapic::Config::Method]
|
1142
1646
|
#
|
1143
1647
|
attr_reader :explain
|
1648
|
+
##
|
1649
|
+
# RPC-specific configuration for `generate_content`
|
1650
|
+
# @return [::Gapic::Config::Method]
|
1651
|
+
#
|
1652
|
+
attr_reader :generate_content
|
1653
|
+
##
|
1654
|
+
# RPC-specific configuration for `stream_generate_content`
|
1655
|
+
# @return [::Gapic::Config::Method]
|
1656
|
+
#
|
1657
|
+
attr_reader :stream_generate_content
|
1144
1658
|
|
1145
1659
|
# @private
|
1146
1660
|
def initialize parent_rpcs = nil
|
@@ -1148,10 +1662,16 @@ module Google
|
|
1148
1662
|
@predict = ::Gapic::Config::Method.new predict_config
|
1149
1663
|
raw_predict_config = parent_rpcs.raw_predict if parent_rpcs.respond_to? :raw_predict
|
1150
1664
|
@raw_predict = ::Gapic::Config::Method.new raw_predict_config
|
1665
|
+
stream_raw_predict_config = parent_rpcs.stream_raw_predict if parent_rpcs.respond_to? :stream_raw_predict
|
1666
|
+
@stream_raw_predict = ::Gapic::Config::Method.new stream_raw_predict_config
|
1151
1667
|
direct_predict_config = parent_rpcs.direct_predict if parent_rpcs.respond_to? :direct_predict
|
1152
1668
|
@direct_predict = ::Gapic::Config::Method.new direct_predict_config
|
1153
1669
|
direct_raw_predict_config = parent_rpcs.direct_raw_predict if parent_rpcs.respond_to? :direct_raw_predict
|
1154
1670
|
@direct_raw_predict = ::Gapic::Config::Method.new direct_raw_predict_config
|
1671
|
+
stream_direct_predict_config = parent_rpcs.stream_direct_predict if parent_rpcs.respond_to? :stream_direct_predict
|
1672
|
+
@stream_direct_predict = ::Gapic::Config::Method.new stream_direct_predict_config
|
1673
|
+
stream_direct_raw_predict_config = parent_rpcs.stream_direct_raw_predict if parent_rpcs.respond_to? :stream_direct_raw_predict
|
1674
|
+
@stream_direct_raw_predict = ::Gapic::Config::Method.new stream_direct_raw_predict_config
|
1155
1675
|
streaming_predict_config = parent_rpcs.streaming_predict if parent_rpcs.respond_to? :streaming_predict
|
1156
1676
|
@streaming_predict = ::Gapic::Config::Method.new streaming_predict_config
|
1157
1677
|
server_streaming_predict_config = parent_rpcs.server_streaming_predict if parent_rpcs.respond_to? :server_streaming_predict
|
@@ -1160,6 +1680,10 @@ module Google
|
|
1160
1680
|
@streaming_raw_predict = ::Gapic::Config::Method.new streaming_raw_predict_config
|
1161
1681
|
explain_config = parent_rpcs.explain if parent_rpcs.respond_to? :explain
|
1162
1682
|
@explain = ::Gapic::Config::Method.new explain_config
|
1683
|
+
generate_content_config = parent_rpcs.generate_content if parent_rpcs.respond_to? :generate_content
|
1684
|
+
@generate_content = ::Gapic::Config::Method.new generate_content_config
|
1685
|
+
stream_generate_content_config = parent_rpcs.stream_generate_content if parent_rpcs.respond_to? :stream_generate_content
|
1686
|
+
@stream_generate_content = ::Gapic::Config::Method.new stream_generate_content_config
|
1163
1687
|
|
1164
1688
|
yield self if block_given?
|
1165
1689
|
end
|