google-cloud-bigquery-storage-v1 0.6.2 → 0.8.1

This diff shows the changes between the publicly released contents of versions 0.6.2 and 0.8.1 of the google-cloud-bigquery-storage-v1 gem, as published to its public registry, and is provided for informational purposes only.
@@ -0,0 +1,55 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+ require "googleauth"
+
+ module Google
+   module Cloud
+     module Bigquery
+       module Storage
+         module V1
+           module BigQueryWrite
+             # Credentials for the BigQueryWrite API.
+             class Credentials < ::Google::Auth::Credentials
+               self.scope = [
+                 "https://www.googleapis.com/auth/bigquery",
+                 "https://www.googleapis.com/auth/bigquery.insertdata",
+                 "https://www.googleapis.com/auth/cloud-platform"
+               ]
+               self.env_vars = [
+                 "BIGQUERY_STORAGE_CREDENTIALS",
+                 "BIGQUERY_STORAGE_KEYFILE",
+                 "GOOGLE_CLOUD_CREDENTIALS",
+                 "GOOGLE_CLOUD_KEYFILE",
+                 "GCLOUD_KEYFILE",
+                 "BIGQUERY_STORAGE_CREDENTIALS_JSON",
+                 "BIGQUERY_STORAGE_KEYFILE_JSON",
+                 "GOOGLE_CLOUD_CREDENTIALS_JSON",
+                 "GOOGLE_CLOUD_KEYFILE_JSON",
+                 "GCLOUD_KEYFILE_JSON"
+               ]
+               self.paths = [
+                 "~/.config/google_cloud/application_default_credentials.json"
+               ]
+             end
+           end
+         end
+       end
+     end
+   end
+ end
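
Given the env_vars list above, a write client can pick up a service-account key from the environment or take credentials explicitly. A minimal sketch, assuming the usual gapic client conventions; the key path is a placeholder:

    require "google/cloud/bigquery/storage/v1/big_query_write"

    # Option 1: point one of the variables above at a key file before
    # the client is constructed.
    ENV["BIGQUERY_STORAGE_CREDENTIALS"] = "/path/to/service-account.json"
    client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

    # Option 2: pass credentials through the client configuration; a path,
    # a keyfile hash, or a Credentials object is accepted.
    client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new do |config|
      config.credentials = "/path/to/service-account.json"
    end
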
@@ -0,0 +1,75 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+
+ module Google
+   module Cloud
+     module Bigquery
+       module Storage
+         module V1
+           module BigQueryWrite
+             # Path helper methods for the BigQueryWrite API.
+             module Paths
+               ##
+               # Create a fully-qualified Table resource string.
+               #
+               # The resource will be in the following format:
+               #
+               # `projects/{project}/datasets/{dataset}/tables/{table}`
+               #
+               # @param project [String]
+               # @param dataset [String]
+               # @param table [String]
+               #
+               # @return [::String]
+               def table_path project:, dataset:, table:
+                 raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+                 raise ::ArgumentError, "dataset cannot contain /" if dataset.to_s.include? "/"
+
+                 "projects/#{project}/datasets/#{dataset}/tables/#{table}"
+               end
+
+               ##
+               # Create a fully-qualified WriteStream resource string.
+               #
+               # The resource will be in the following format:
+               #
+               # `projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}`
+               #
+               # @param project [String]
+               # @param dataset [String]
+               # @param table [String]
+               # @param stream [String]
+               #
+               # @return [::String]
+               def write_stream_path project:, dataset:, table:, stream:
+                 raise ::ArgumentError, "project cannot contain /" if project.to_s.include? "/"
+                 raise ::ArgumentError, "dataset cannot contain /" if dataset.to_s.include? "/"
+                 raise ::ArgumentError, "table cannot contain /" if table.to_s.include? "/"
+
+                 "projects/#{project}/datasets/#{dataset}/tables/#{table}/streams/#{stream}"
+               end
+
+               extend self
+             end
+           end
+         end
+       end
+     end
+   end
+ end
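
Because the module ends with `extend self`, these helpers work both as module functions and as methods mixed into the generated client. A quick sketch with placeholder IDs:

    paths = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Paths

    paths.table_path project: "my-project", dataset: "my_dataset", table: "my_table"
    # => "projects/my-project/datasets/my_dataset/tables/my_table"

    paths.write_stream_path project: "my-project", dataset: "my_dataset",
                            table: "my_table", stream: "_default"
    # => "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"
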
@@ -0,0 +1,56 @@
+ # frozen_string_literal: true
+
+ # Copyright 2021 Google LLC
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     https://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
+
+ require "gapic/common"
+ require "gapic/config"
+ require "gapic/config/method"
+
+ require "google/cloud/bigquery/storage/v1/version"
+
+ require "google/cloud/bigquery/storage/v1/big_query_write/credentials"
+ require "google/cloud/bigquery/storage/v1/big_query_write/paths"
+ require "google/cloud/bigquery/storage/v1/big_query_write/client"
+
+ module Google
+   module Cloud
+     module Bigquery
+       module Storage
+         module V1
+           ##
+           # BigQuery Write API.
+           #
+           # The Write API can be used to write data to BigQuery.
+           #
+           # For supplementary information about the Write API, see:
+           # https://cloud.google.com/bigquery/docs/write-api
+           #
+           # To load this service and instantiate a client:
+           #
+           #     require "google/cloud/bigquery/storage/v1/big_query_write"
+           #     client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
+           #
+           module BigQueryWrite
+           end
+         end
+       end
+     end
+   end
+ end
+
+ helper_path = ::File.join __dir__, "big_query_write", "helpers.rb"
+ require "google/cloud/bigquery/storage/v1/big_query_write/helpers" if ::File.file? helper_path
@@ -0,0 +1,29 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: google/cloud/bigquery/storage/v1/protobuf.proto
+
+ require 'google/protobuf/descriptor_pb'
+ require 'google/protobuf'
+
+ Google::Protobuf::DescriptorPool.generated_pool.build do
+   add_file("google/cloud/bigquery/storage/v1/protobuf.proto", :syntax => :proto3) do
+     add_message "google.cloud.bigquery.storage.v1.ProtoSchema" do
+       optional :proto_descriptor, :message, 1, "google.protobuf.DescriptorProto"
+     end
+     add_message "google.cloud.bigquery.storage.v1.ProtoRows" do
+       repeated :serialized_rows, :bytes, 1
+     end
+   end
+ end
+
+ module Google
+   module Cloud
+     module Bigquery
+       module Storage
+         module V1
+           ProtoSchema = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ProtoSchema").msgclass
+           ProtoRows = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ProtoRows").msgclass
+         end
+       end
+     end
+   end
+ end
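
Because `proto_descriptor` is a plain `google.protobuf.DescriptorProto`, a writer schema can be assembled directly from descriptor messages. A sketch with a hypothetical two-field row type (`SampleRow` is not part of this package):

    require "google/protobuf/descriptor_pb"
    require "google/cloud/bigquery/storage/v1/protobuf_pb"

    # Hypothetical row layout: int64 id = 1; string name = 2.
    descriptor = ::Google::Protobuf::DescriptorProto.new(
      name: "SampleRow",
      field: [
        ::Google::Protobuf::FieldDescriptorProto.new(
          name: "id", number: 1, type: :TYPE_INT64, label: :LABEL_OPTIONAL
        ),
        ::Google::Protobuf::FieldDescriptorProto.new(
          name: "name", number: 2, type: :TYPE_STRING, label: :LABEL_OPTIONAL
        )
      ]
    )

    schema = ::Google::Cloud::Bigquery::Storage::V1::ProtoSchema.new proto_descriptor: descriptor
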
@@ -1,15 +1,20 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/storage.proto

- require 'google/protobuf'
-
  require 'google/api/annotations_pb'
  require 'google/api/client_pb'
  require 'google/api/field_behavior_pb'
  require 'google/api/resource_pb'
  require 'google/cloud/bigquery/storage/v1/arrow_pb'
  require 'google/cloud/bigquery/storage/v1/avro_pb'
+ require 'google/cloud/bigquery/storage/v1/protobuf_pb'
  require 'google/cloud/bigquery/storage/v1/stream_pb'
+ require 'google/cloud/bigquery/storage/v1/table_pb'
+ require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf/wrappers_pb'
+ require 'google/rpc/status_pb'
+ require 'google/protobuf'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
    add_file("google/cloud/bigquery/storage/v1/storage.proto", :syntax => :proto3) do
      add_message "google.cloud.bigquery.storage.v1.CreateReadSessionRequest" do
@@ -52,6 +57,71 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
        optional :primary_stream, :message, 1, "google.cloud.bigquery.storage.v1.ReadStream"
        optional :remainder_stream, :message, 2, "google.cloud.bigquery.storage.v1.ReadStream"
      end
+     add_message "google.cloud.bigquery.storage.v1.CreateWriteStreamRequest" do
+       optional :parent, :string, 1
+       optional :write_stream, :message, 2, "google.cloud.bigquery.storage.v1.WriteStream"
+     end
+     add_message "google.cloud.bigquery.storage.v1.AppendRowsRequest" do
+       optional :write_stream, :string, 1
+       optional :offset, :message, 2, "google.protobuf.Int64Value"
+       optional :trace_id, :string, 6
+       oneof :rows do
+         optional :proto_rows, :message, 4, "google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"
+       end
+     end
+     add_message "google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" do
+       optional :writer_schema, :message, 1, "google.cloud.bigquery.storage.v1.ProtoSchema"
+       optional :rows, :message, 2, "google.cloud.bigquery.storage.v1.ProtoRows"
+     end
+     add_message "google.cloud.bigquery.storage.v1.AppendRowsResponse" do
+       optional :updated_schema, :message, 3, "google.cloud.bigquery.storage.v1.TableSchema"
+       oneof :response do
+         optional :append_result, :message, 1, "google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult"
+         optional :error, :message, 2, "google.rpc.Status"
+       end
+     end
+     add_message "google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult" do
+       optional :offset, :message, 1, "google.protobuf.Int64Value"
+     end
+     add_message "google.cloud.bigquery.storage.v1.GetWriteStreamRequest" do
+       optional :name, :string, 1
+     end
+     add_message "google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest" do
+       optional :parent, :string, 1
+       repeated :write_streams, :string, 2
+     end
+     add_message "google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse" do
+       optional :commit_time, :message, 1, "google.protobuf.Timestamp"
+       repeated :stream_errors, :message, 2, "google.cloud.bigquery.storage.v1.StorageError"
+     end
+     add_message "google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest" do
+       optional :name, :string, 1
+     end
+     add_message "google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse" do
+       optional :row_count, :int64, 1
+     end
+     add_message "google.cloud.bigquery.storage.v1.FlushRowsRequest" do
+       optional :write_stream, :string, 1
+       optional :offset, :message, 2, "google.protobuf.Int64Value"
+     end
+     add_message "google.cloud.bigquery.storage.v1.FlushRowsResponse" do
+       optional :offset, :int64, 1
+     end
+     add_message "google.cloud.bigquery.storage.v1.StorageError" do
+       optional :code, :enum, 1, "google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode"
+       optional :entity, :string, 2
+       optional :error_message, :string, 3
+     end
+     add_enum "google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode" do
+       value :STORAGE_ERROR_CODE_UNSPECIFIED, 0
+       value :TABLE_NOT_FOUND, 1
+       value :STREAM_ALREADY_COMMITTED, 2
+       value :STREAM_NOT_FOUND, 3
+       value :INVALID_STREAM_TYPE, 4
+       value :INVALID_STREAM_STATE, 5
+       value :STREAM_FINALIZED, 6
+       value :SCHEMA_MISMATCH_EXTRA_FIELDS, 7
+     end
    end
  end

@@ -68,6 +138,20 @@ module Google
            ReadRowsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadRowsResponse").msgclass
            SplitReadStreamRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.SplitReadStreamRequest").msgclass
            SplitReadStreamResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.SplitReadStreamResponse").msgclass
+           CreateWriteStreamRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.CreateWriteStreamRequest").msgclass
+           AppendRowsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AppendRowsRequest").msgclass
+           AppendRowsRequest::ProtoData = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData").msgclass
+           AppendRowsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AppendRowsResponse").msgclass
+           AppendRowsResponse::AppendResult = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult").msgclass
+           GetWriteStreamRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.GetWriteStreamRequest").msgclass
+           BatchCommitWriteStreamsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest").msgclass
+           BatchCommitWriteStreamsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse").msgclass
+           FinalizeWriteStreamRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest").msgclass
+           FinalizeWriteStreamResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse").msgclass
+           FlushRowsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.FlushRowsRequest").msgclass
+           FlushRowsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.FlushRowsResponse").msgclass
+           StorageError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.StorageError").msgclass
+           StorageError::StorageErrorCode = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode").enummodule
          end
        end
      end
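
With these classes registered, an append request for a table's `_default` stream is a plain message. A sketch, where `schema` is a `ProtoSchema` (see the earlier descriptor sketch) and `encoded_rows` is an array of binary-serialized row messages:

    stream_name = "projects/my-project/datasets/my_dataset/tables/my_table/streams/_default"

    request = ::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new(
      write_stream: stream_name,
      proto_rows: ::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest::ProtoData.new(
        writer_schema: schema,
        rows: ::Google::Cloud::Bigquery::Storage::V1::ProtoRows.new(serialized_rows: encoded_rows)
      )
    )
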
@@ -30,7 +30,7 @@ module Google
            # The Read API can be used to read data from BigQuery.
            class Service

-               include GRPC::GenericService
+               include ::GRPC::GenericService

              self.marshal_class_method = :encode
              self.unmarshal_class_method = :decode
@@ -53,7 +53,7 @@ module Google
              # limits are enforced based on the number of pre-filtered rows, so some
              # filters can lead to lopsided assignments.
              #
-             # Read sessions automatically expire 24 hours after they are created and do
+             # Read sessions automatically expire 6 hours after they are created and do
              # not require manual clean-up by the caller.
              rpc :CreateReadSession, ::Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest, ::Google::Cloud::Bigquery::Storage::V1::ReadSession
              # Reads rows from the stream in the format prescribed by the ReadSession.
@@ -79,6 +79,85 @@ module Google
              rpc :SplitReadStream, ::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamRequest, ::Google::Cloud::Bigquery::Storage::V1::SplitReadStreamResponse
            end

+             Stub = Service.rpc_stub_class
+           end
+           module BigQueryWrite
+             # BigQuery Write API.
+             #
+             # The Write API can be used to write data to BigQuery.
+             #
+             # For supplementary information about the Write API, see:
+             # https://cloud.google.com/bigquery/docs/write-api
+             class Service
+
+               include ::GRPC::GenericService
+
+               self.marshal_class_method = :encode
+               self.unmarshal_class_method = :decode
+               self.service_name = 'google.cloud.bigquery.storage.v1.BigQueryWrite'
+
+               # Creates a write stream to the given table.
+               # Additionally, every table has a special stream named '_default'
+               # to which data can be written. This stream doesn't need to be created using
+               # CreateWriteStream. It is a stream that can be used simultaneously by any
+               # number of clients. Data written to this stream is considered committed as
+               # soon as an acknowledgement is received.
+               rpc :CreateWriteStream, ::Google::Cloud::Bigquery::Storage::V1::CreateWriteStreamRequest, ::Google::Cloud::Bigquery::Storage::V1::WriteStream
+               # Appends data to the given stream.
+               #
+               # If `offset` is specified, the `offset` is checked against the end of
+               # stream. The server returns `OUT_OF_RANGE` in `AppendRowsResponse` if an
+               # attempt is made to append to an offset beyond the current end of the stream
+               # or `ALREADY_EXISTS` if user provides an `offset` that has already been
+               # written to. User can retry with adjusted offset within the same RPC
+               # connection. If `offset` is not specified, append happens at the end of the
+               # stream.
+               #
+               # The response contains an optional offset at which the append
+               # happened. No offset information will be returned for appends to a
+               # default stream.
+               #
+               # Responses are received in the same order in which requests are sent.
+               # There will be one response for each successful inserted request. Responses
+               # may optionally embed error information if the originating AppendRequest was
+               # not successfully processed.
+               #
+               # The specifics of when successfully appended data is made visible to the
+               # table are governed by the type of stream:
+               #
+               # * For COMMITTED streams (which includes the default stream), data is
+               # visible immediately upon successful append.
+               #
+               # * For BUFFERED streams, data is made visible via a subsequent `FlushRows`
+               # rpc which advances a cursor to a newer offset in the stream.
+               #
+               # * For PENDING streams, data is not made visible until the stream itself is
+               # finalized (via the `FinalizeWriteStream` rpc), and the stream is explicitly
+               # committed via the `BatchCommitWriteStreams` rpc.
+               rpc :AppendRows, stream(::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest), stream(::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse)
+               # Gets information about a write stream.
+               rpc :GetWriteStream, ::Google::Cloud::Bigquery::Storage::V1::GetWriteStreamRequest, ::Google::Cloud::Bigquery::Storage::V1::WriteStream
+               # Finalize a write stream so that no new data can be appended to the
+               # stream. Finalize is not supported on the '_default' stream.
+               rpc :FinalizeWriteStream, ::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamRequest, ::Google::Cloud::Bigquery::Storage::V1::FinalizeWriteStreamResponse
+               # Atomically commits a group of `PENDING` streams that belong to the same
+               # `parent` table.
+               #
+               # Streams must be finalized before commit and cannot be committed multiple
+               # times. Once a stream is committed, data in the stream becomes available
+               # for read operations.
+               rpc :BatchCommitWriteStreams, ::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsRequest, ::Google::Cloud::Bigquery::Storage::V1::BatchCommitWriteStreamsResponse
+               # Flushes rows to a BUFFERED stream.
+               #
+               # If users are appending rows to BUFFERED stream, flush operation is
+               # required in order for the rows to become available for reading. A
+               # Flush operation flushes up to any previously flushed offset in a BUFFERED
+               # stream, to the offset specified in the request.
+               #
+               # Flush is not supported on the _default stream, since it is not BUFFERED.
+               rpc :FlushRows, ::Google::Cloud::Bigquery::Storage::V1::FlushRowsRequest, ::Google::Cloud::Bigquery::Storage::V1::FlushRowsResponse
+             end
+
            Stub = Service.rpc_stub_class
          end
        end
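
Taken together, these comments describe the full PENDING-stream write flow: create a stream, append rows, finalize, then commit. A hedged end-to-end sketch against the generated client, assuming the usual gapic conventions; the resource IDs are placeholders, and `requests` is an array of `AppendRowsRequest` messages like the earlier sketch but with `write_stream` set to the new stream's name:

    require "google/cloud/bigquery/storage/v1"

    write_client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
    parent = write_client.table_path project: "my-project", dataset: "my_dataset", table: "my_table"

    # Rows appended to a PENDING stream stay invisible until the stream is committed.
    stream = write_client.create_write_stream parent: parent, write_stream: { type: :PENDING }

    # AppendRows is bidirectional streaming: send an Enumerable of requests and
    # read back one response per successfully processed request.
    write_client.append_rows(requests).each do |response|
      warn response.error.message if response.error
    end

    # Finalize the stream, then commit it atomically into the table.
    write_client.finalize_write_stream name: stream.name
    commit = write_client.batch_commit_write_streams parent: parent, write_streams: [stream.name]
    puts commit.commit_time
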
@@ -1,13 +1,14 @@
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/stream.proto

- require 'google/protobuf'
-
  require 'google/api/field_behavior_pb'
  require 'google/api/resource_pb'
  require 'google/cloud/bigquery/storage/v1/arrow_pb'
  require 'google/cloud/bigquery/storage/v1/avro_pb'
+ require 'google/cloud/bigquery/storage/v1/table_pb'
  require 'google/protobuf/timestamp_pb'
+ require 'google/protobuf'
+
  Google::Protobuf::DescriptorPool.generated_pool.build do
    add_file("google/cloud/bigquery/storage/v1/stream.proto", :syntax => :proto3) do
      add_message "google.cloud.bigquery.storage.v1.ReadSession" do
@@ -18,6 +19,7 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
        optional :table_modifiers, :message, 7, "google.cloud.bigquery.storage.v1.ReadSession.TableModifiers"
        optional :read_options, :message, 8, "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"
        repeated :streams, :message, 10, "google.cloud.bigquery.storage.v1.ReadStream"
+       optional :estimated_total_bytes_scanned, :int64, 12
        oneof :schema do
          optional :avro_schema, :message, 4, "google.cloud.bigquery.storage.v1.AvroSchema"
          optional :arrow_schema, :message, 5, "google.cloud.bigquery.storage.v1.ArrowSchema"
@@ -36,6 +38,19 @@ Google::Protobuf::DescriptorPool.generated_pool.build do
      add_message "google.cloud.bigquery.storage.v1.ReadStream" do
        optional :name, :string, 1
      end
+     add_message "google.cloud.bigquery.storage.v1.WriteStream" do
+       optional :name, :string, 1
+       optional :type, :enum, 2, "google.cloud.bigquery.storage.v1.WriteStream.Type"
+       optional :create_time, :message, 3, "google.protobuf.Timestamp"
+       optional :commit_time, :message, 4, "google.protobuf.Timestamp"
+       optional :table_schema, :message, 5, "google.cloud.bigquery.storage.v1.TableSchema"
+     end
+     add_enum "google.cloud.bigquery.storage.v1.WriteStream.Type" do
+       value :TYPE_UNSPECIFIED, 0
+       value :COMMITTED, 1
+       value :PENDING, 2
+       value :BUFFERED, 3
+     end
      add_enum "google.cloud.bigquery.storage.v1.DataFormat" do
        value :DATA_FORMAT_UNSPECIFIED, 0
        value :AVRO, 1
@@ -53,6 +68,8 @@ module Google
            ReadSession::TableModifiers = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadSession.TableModifiers").msgclass
            ReadSession::TableReadOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions").msgclass
            ReadStream = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadStream").msgclass
+           WriteStream = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.WriteStream").msgclass
+           WriteStream::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.WriteStream.Type").enummodule
            DataFormat = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.DataFormat").enummodule
          end
        end
@@ -0,0 +1,62 @@
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
+ # source: google/cloud/bigquery/storage/v1/table.proto
+
+ require 'google/api/field_behavior_pb'
+ require 'google/protobuf'
+
+ Google::Protobuf::DescriptorPool.generated_pool.build do
+   add_file("google/cloud/bigquery/storage/v1/table.proto", :syntax => :proto3) do
+     add_message "google.cloud.bigquery.storage.v1.TableSchema" do
+       repeated :fields, :message, 1, "google.cloud.bigquery.storage.v1.TableFieldSchema"
+     end
+     add_message "google.cloud.bigquery.storage.v1.TableFieldSchema" do
+       optional :name, :string, 1
+       optional :type, :enum, 2, "google.cloud.bigquery.storage.v1.TableFieldSchema.Type"
+       optional :mode, :enum, 3, "google.cloud.bigquery.storage.v1.TableFieldSchema.Mode"
+       repeated :fields, :message, 4, "google.cloud.bigquery.storage.v1.TableFieldSchema"
+       optional :description, :string, 6
+       optional :max_length, :int64, 7
+       optional :precision, :int64, 8
+       optional :scale, :int64, 9
+     end
+     add_enum "google.cloud.bigquery.storage.v1.TableFieldSchema.Type" do
+       value :TYPE_UNSPECIFIED, 0
+       value :STRING, 1
+       value :INT64, 2
+       value :DOUBLE, 3
+       value :STRUCT, 4
+       value :BYTES, 5
+       value :BOOL, 6
+       value :TIMESTAMP, 7
+       value :DATE, 8
+       value :TIME, 9
+       value :DATETIME, 10
+       value :GEOGRAPHY, 11
+       value :NUMERIC, 12
+       value :BIGNUMERIC, 13
+       value :INTERVAL, 14
+       value :JSON, 15
+     end
+     add_enum "google.cloud.bigquery.storage.v1.TableFieldSchema.Mode" do
+       value :MODE_UNSPECIFIED, 0
+       value :NULLABLE, 1
+       value :REQUIRED, 2
+       value :REPEATED, 3
+     end
+   end
+ end
+
+ module Google
+   module Cloud
+     module Bigquery
+       module Storage
+         module V1
+           TableSchema = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.TableSchema").msgclass
+           TableFieldSchema = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.TableFieldSchema").msgclass
+           TableFieldSchema::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.TableFieldSchema.Type").enummodule
+           TableFieldSchema::Mode = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.TableFieldSchema.Mode").enummodule
+         end
+       end
+     end
+   end
+ end
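
These types mirror a BigQuery table schema in proto form; the API returns them on `WriteStream#table_schema` and `AppendRowsResponse#updated_schema`, and the nested `fields` plus `mode` cover STRUCT and REPEATED columns. A constructed example for illustration (protobuf Ruby coerces nested hashes to messages):

    schema = ::Google::Cloud::Bigquery::Storage::V1::TableSchema.new(
      fields: [
        { name: "id",      type: :INT64,  mode: :REQUIRED },
        { name: "tags",    type: :STRING, mode: :REPEATED },
        { name: "address", type: :STRUCT, mode: :NULLABLE,
          fields: [{ name: "city", type: :STRING, mode: :NULLABLE }] }
      ]
    )
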
@@ -22,7 +22,7 @@ module Google
    module Bigquery
      module Storage
        module V1
-         VERSION = "0.6.2"
+         VERSION = "0.8.1"
        end
      end
    end
@@ -17,6 +17,7 @@
  # Auto-generated by gapic-generator-ruby. DO NOT EDIT!

  require "google/cloud/bigquery/storage/v1/big_query_read"
+ require "google/cloud/bigquery/storage/v1/big_query_write"
  require "google/cloud/bigquery/storage/v1/version"

  module Google
@@ -0,0 +1,56 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2021 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+
20
+ module Google
21
+ module Cloud
22
+ module Bigquery
23
+ module Storage
24
+ module V1
25
+ # ProtoSchema describes the schema of the serialized protocol buffer data rows.
26
+ # @!attribute [rw] proto_descriptor
27
+ # @return [::Google::Protobuf::DescriptorProto]
28
+ # Descriptor for input message. The provided descriptor must be self
29
+ # contained, such that data rows sent can be fully decoded using only the
30
+ # single descriptor. For data rows that are compositions of multiple
31
+ # independent messages, this means the descriptor may need to be transformed
32
+ # to only use nested types:
33
+ # https://developers.google.com/protocol-buffers/docs/proto#nested
34
+ #
35
+ # For additional information for how proto types and values map onto BigQuery
36
+ # see: https://cloud.google.com/bigquery/docs/write-api#data_type_conversions
37
+ class ProtoSchema
38
+ include ::Google::Protobuf::MessageExts
39
+ extend ::Google::Protobuf::MessageExts::ClassMethods
40
+ end
41
+
42
+ # @!attribute [rw] serialized_rows
43
+ # @return [::Array<::String>]
44
+ # A sequence of rows serialized as a Protocol Buffer.
45
+ #
46
+ # See https://developers.google.com/protocol-buffers/docs/overview for more
47
+ # information on deserializing this field.
48
+ class ProtoRows
49
+ include ::Google::Protobuf::MessageExts
50
+ extend ::Google::Protobuf::MessageExts::ClassMethods
51
+ end
52
+ end
53
+ end
54
+ end
55
+ end
56
+ end
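
As the `serialized_rows` documentation says, each entry is one row serialized in the standard protobuf wire format; with a compiled message class matching the `ProtoSchema` sent on the request, serialization is just `encode`. A sketch (`SampleRow` is a hypothetical row class, not part of this package):

    # SampleRow stands in for any compiled protobuf message class whose
    # descriptor matches the ProtoSchema sent with the request.
    rows = ::Google::Cloud::Bigquery::Storage::V1::ProtoRows.new(
      serialized_rows: [
        SampleRow.encode(SampleRow.new(id: 1, name: "alpha")),
        SampleRow.encode(SampleRow.new(id: 2, name: "beta"))
      ]
    )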