google-cloud-bigquery-storage-v1 0.18.0 → 0.19.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c754ac4db39e1112d5d4bc913247f32e7485c1a61fcaa8c648854d901e7dbebc
- data.tar.gz: 17313c366341a1f7268c44e0600a62a99c1d462664a3a3b4b53986854e42c88c
+ metadata.gz: c4c9bcc1e1fe81fdef41b45abc97c9053fa02aea537ba4c9ba0b0d61f723b247
+ data.tar.gz: 72289a2d70d6fc3cfa72c81608bca2a9ac980fd1dcd8d3dc925b507c15ac6149
  SHA512:
- metadata.gz: 6d840cc71fb207f1baab314707ee42a60183501b335695b1517f0da99f7a9164ff5f6f3b014bff79f113c95aae422b5537f5bc9aab22ad7d6a3e9c704a163646
- data.tar.gz: 3d2b217346af0d50d23a79839f9eff981d5cac8c29ef16a5210f38f33ddee70819017a3a1c8a0b053cdb33806ae153d2df5f3ad89a7f0e77a97a2f2b55e69f95
+ metadata.gz: e36bc4746d852ba254c2f2060b5961252014ecb7d4def6d9270508b7a21ac0a7752416f39bef354f63ba92ca029e39b1f83d2628e480d4afc77f69088efdef11
+ data.tar.gz: e067cd9a227b539803d7d40b803c65f793812b049d0f7397314967c54bcdec53a03c0eadbd5b88f797346d4ef4b30c2afe2ea6178ee355b18318aa2c5d3921a0
data/README.md CHANGED
@@ -47,7 +47,7 @@ for general usage information.
 
  To enable logging for this library, set the logger for the underlying [gRPC](https://github.com/grpc/grpc/tree/master/src/ruby) library.
  The logger that you set may be a Ruby stdlib [`Logger`](https://ruby-doc.org/current/stdlibs/logger/Logger.html) as shown below,
- or a [`Google::Cloud::Logging::Logger`](https://googleapis.dev/ruby/google-cloud-logging/latest)
+ or a [`Google::Cloud::Logging::Logger`](https://cloud.google.com/ruby/docs/reference/google-cloud-logging/latest)
  that will write logs to [Cloud Logging](https://cloud.google.com/logging/). See [grpc/logconfig.rb](https://github.com/grpc/grpc/blob/master/src/ruby/lib/grpc/logconfig.rb)
  and the gRPC [spec_helper.rb](https://github.com/grpc/grpc/blob/master/src/ruby/spec/spec_helper.rb) for additional information.
 
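The stdlib-`Logger` example the README points at falls outside this hunk; the pattern these googleapis READMEs document is a module that exposes a `logger` method, mixed into `GRPC` before grpc/logconfig.rb defines its default. A sketch along those lines (`MyLogger` is an illustrative name, not part of the gem):

    require "logger"

    # Expose a logger method for grpc/logconfig.rb to pick up.
    module MyLogger
      LOGGER = Logger.new $stderr, level: Logger::WARN
      def logger
        LOGGER
      end
    end

    # Define a gRPC module-level logger method before grpc/logconfig.rb loads.
    module GRPC
      extend MyLogger
    end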
data/lib/google/cloud/bigquery/storage/v1/annotations_pb.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/annotations.proto
 
@@ -5,9 +6,31 @@ require 'google/protobuf'
 
  require 'google/protobuf/descriptor_pb'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/annotations.proto", :syntax => :proto3) do
+
+ descriptor_data = "\n2google/cloud/bigquery/storage/v1/annotations.proto\x12 google.cloud.bigquery.storage.v1\x1a google/protobuf/descriptor.proto:9\n\x0b\x63olumn_name\x12\x1d.google.protobuf.FieldOptions\x18\xb5\xc3\xf7\xd8\x01 \x01(\t\x88\x01\x01\x42\xc0\x01\n$com.google.cloud.bigquery.storage.v1B\x10\x41nnotationsProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
+     end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
data/lib/google/cloud/bigquery/storage/v1/arrow_pb.rb CHANGED
@@ -1,26 +1,34 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/arrow.proto
 
  require 'google/protobuf'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/arrow.proto", :syntax => :proto3) do
-     add_message "google.cloud.bigquery.storage.v1.ArrowSchema" do
-       optional :serialized_schema, :bytes, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.ArrowRecordBatch" do
-       optional :serialized_record_batch, :bytes, 1
-       optional :row_count, :int64, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.ArrowSerializationOptions" do
-       optional :buffer_compression, :enum, 2, "google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec"
-     end
-     add_enum "google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec" do
-       value :COMPRESSION_UNSPECIFIED, 0
-       value :LZ4_FRAME, 1
-       value :ZSTD, 2
+
+ descriptor_data = "\n,google/cloud/bigquery/storage/v1/arrow.proto\x12 google.cloud.bigquery.storage.v1\"(\n\x0b\x41rrowSchema\x12\x19\n\x11serialized_schema\x18\x01 \x01(\x0c\"J\n\x10\x41rrowRecordBatch\x12\x1f\n\x17serialized_record_batch\x18\x01 \x01(\x0c\x12\x15\n\trow_count\x18\x02 \x01(\x03\x42\x02\x18\x01\"\xcf\x01\n\x19\x41rrowSerializationOptions\x12h\n\x12\x62uffer_compression\x18\x02 \x01(\x0e\x32L.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec\"H\n\x10\x43ompressionCodec\x12\x1b\n\x17\x43OMPRESSION_UNSPECIFIED\x10\x00\x12\r\n\tLZ4_FRAME\x10\x01\x12\x08\n\x04ZSTD\x10\x02\x42\xba\x01\n$com.google.cloud.bigquery.storage.v1B\nArrowProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
      end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
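Every regenerated *_pb.rb file in this release follows the same pattern: the descriptor is registered from a serialized FileDescriptorProto via `add_serialized_file`, and the `rescue TypeError` branch retries without dependencies while warning about import-path mismatches. Consumer code should be unaffected, since message classes still resolve from the generated pool; a minimal sketch using a type from the arrow.proto hunk above:

    require "google/cloud/bigquery/storage/v1/arrow_pb"

    pool = Google::Protobuf::DescriptorPool.generated_pool
    # Lookup works the same whether the descriptor was registered through the
    # old builder DSL or through add_serialized_file.
    schema_class = pool.lookup("google.cloud.bigquery.storage.v1.ArrowSchema").msgclass
    schema = schema_class.new serialized_schema: "".b  # empty placeholder bytes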
data/lib/google/cloud/bigquery/storage/v1/avro_pb.rb CHANGED
@@ -1,21 +1,34 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/avro.proto
 
  require 'google/protobuf'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/avro.proto", :syntax => :proto3) do
-     add_message "google.cloud.bigquery.storage.v1.AvroSchema" do
-       optional :schema, :string, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.AvroRows" do
-       optional :serialized_binary_rows, :bytes, 1
-       optional :row_count, :int64, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.AvroSerializationOptions" do
-       optional :enable_display_name_attribute, :bool, 1
+
+ descriptor_data = "\n+google/cloud/bigquery/storage/v1/avro.proto\x12 google.cloud.bigquery.storage.v1\"\x1c\n\nAvroSchema\x12\x0e\n\x06schema\x18\x01 \x01(\t\"A\n\x08\x41vroRows\x12\x1e\n\x16serialized_binary_rows\x18\x01 \x01(\x0c\x12\x15\n\trow_count\x18\x02 \x01(\x03\x42\x02\x18\x01\"A\n\x18\x41vroSerializationOptions\x12%\n\x1d\x65nable_display_name_attribute\x18\x01 \x01(\x08\x42\xb9\x01\n$com.google.cloud.bigquery.storage.v1B\tAvroProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
      end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
data/lib/google/cloud/bigquery/storage/v1/big_query_read/client.rb CHANGED
@@ -336,13 +336,13 @@ module Google
  #   # Create a request. To set request fields, pass in keyword arguments.
  #   request = Google::Cloud::Bigquery::Storage::V1::ReadRowsRequest.new
  #
- #   # Call the read_rows method.
- #   result = client.read_rows request
+ #   # Call the read_rows method to start streaming.
+ #   output = client.read_rows request
  #
- #   # The returned object is a streamed enumerable yielding elements of
- #   # type ::Google::Cloud::Bigquery::Storage::V1::ReadRowsResponse.
- #   result.each do |response|
- #     p response
+ #   # The returned object is a streamed enumerable yielding elements of type
+ #   # ::Google::Cloud::Bigquery::Storage::V1::ReadRowsResponse
+ #   output.each do |current_response|
+ #     p current_response
  #   end
  #
  def read_rows request, options = nil
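An end-to-end sketch of the renamed streaming example, for context (project, dataset, and table names are placeholders; application default credentials are assumed):

    require "google/cloud/bigquery/storage/v1"

    client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

    session = client.create_read_session(
      parent: "projects/my-project",
      read_session: {
        table: "projects/my-project/datasets/my_dataset/tables/my_table",
        data_format: :AVRO
      },
      max_stream_count: 1
    )

    # Stream rows from the first allocated stream; each element is a
    # ReadRowsResponse carrying a block of serialized rows.
    output = client.read_rows read_stream: session.streams.first.name
    output.each do |current_response|
      p current_response.row_count
    end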
@@ -528,9 +528,9 @@ module Google
  #   * (`String`) The path to a service account key file in JSON format
  #   * (`Hash`) A service account key as a Hash
  #   * (`Google::Auth::Credentials`) A googleauth credentials object
- #     (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
+ #     (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
  #   * (`Signet::OAuth2::Client`) A signet oauth2 client object
- #     (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
+ #     (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
  #   * (`GRPC::Core::Channel`) a gRPC channel with included credentials
  #   * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
  #   * (`nil`) indicating no credentials
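Whichever credential type is used, it is supplied through the client configuration; a minimal sketch with a placeholder keyfile path:

    Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
      config.credentials = "/path/to/keyfile.json"
    end

    # Or scoped to a single client instance:
    client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
      config.credentials = "/path/to/keyfile.json"
    end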
data/lib/google/cloud/bigquery/storage/v1/big_query_write/client.rb CHANGED
@@ -325,22 +325,22 @@ module Google
  #   # Create a client object. The client can be reused for multiple calls.
  #   client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
  #
- #   # Create an input stream
+ #   # Create an input stream.
  #   input = Gapic::StreamInput.new
  #
  #   # Call the append_rows method to start streaming.
  #   output = client.append_rows input
  #
- #   # Send requests on the stream. For each request, pass in keyword
- #   # arguments to set fields. Be sure to close the stream when done.
+ #   # Send requests on the stream. For each request object, set fields by
+ #   # passing keyword arguments. Be sure to close the stream when done.
  #   input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new
  #   input << Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new
  #   input.close
  #
- #   # Handle streamed responses. These may be interleaved with inputs.
- #   # Each response is of type ::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse.
- #   output.each do |response|
- #     p response
+ #   # The returned object is a streamed enumerable yielding elements of type
+ #   # ::Google::Cloud::Bigquery::Storage::V1::AppendRowsResponse
+ #   output.each do |current_response|
+ #     p current_response
  #   end
  #
  def append_rows request, options = nil
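A sketch of consuming that output stream while distinguishing the two arms of the AppendRowsResponse `response` oneof (`output` as in the example above; field access is illustrative):

    output.each do |current_response|
      if current_response.error
        # The error arm carries a google.rpc.Status.
        warn "append failed: #{current_response.error.message}"
      elsif current_response.append_result&.offset
        puts "rows committed at offset #{current_response.append_result.offset.value}"
      end
    end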
@@ -783,9 +783,9 @@ module Google
  #   * (`String`) The path to a service account key file in JSON format
  #   * (`Hash`) A service account key as a Hash
  #   * (`Google::Auth::Credentials`) A googleauth credentials object
- #     (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
+ #     (see the [googleauth docs](https://rubydoc.info/gems/googleauth/Google/Auth/Credentials))
  #   * (`Signet::OAuth2::Client`) A signet oauth2 client object
- #     (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
+ #     (see the [signet docs](https://rubydoc.info/gems/signet/Signet/OAuth2/Client))
  #   * (`GRPC::Core::Channel`) a gRPC channel with included credentials
  #   * (`GRPC::Core::ChannelCredentials`) a gRPC credentails object
  #   * (`nil`) indicating no credentials
data/lib/google/cloud/bigquery/storage/v1/protobuf_pb.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/protobuf.proto
 
@@ -5,15 +6,32 @@ require 'google/protobuf'
 
  require 'google/protobuf/descriptor_pb'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/protobuf.proto", :syntax => :proto3) do
-     add_message "google.cloud.bigquery.storage.v1.ProtoSchema" do
-       optional :proto_descriptor, :message, 1, "google.protobuf.DescriptorProto"
-     end
-     add_message "google.cloud.bigquery.storage.v1.ProtoRows" do
-       repeated :serialized_rows, :bytes, 1
+
+ descriptor_data = "\n/google/cloud/bigquery/storage/v1/protobuf.proto\x12 google.cloud.bigquery.storage.v1\x1a google/protobuf/descriptor.proto\"I\n\x0bProtoSchema\x12:\n\x10proto_descriptor\x18\x01 \x01(\x0b\x32 .google.protobuf.DescriptorProto\"$\n\tProtoRows\x12\x17\n\x0fserialized_rows\x18\x01 \x03(\x0c\x42\xbd\x01\n$com.google.cloud.bigquery.storage.v1B\rProtoBufProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+     ["google.protobuf.DescriptorProto", "google/protobuf/descriptor.proto"],
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
      end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
data/lib/google/cloud/bigquery/storage/v1/storage_pb.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/storage.proto
 
@@ -16,135 +17,39 @@ require 'google/protobuf/timestamp_pb'
  require 'google/protobuf/wrappers_pb'
  require 'google/rpc/status_pb'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/storage.proto", :syntax => :proto3) do
-     add_message "google.cloud.bigquery.storage.v1.CreateReadSessionRequest" do
-       optional :parent, :string, 1
-       optional :read_session, :message, 2, "google.cloud.bigquery.storage.v1.ReadSession"
-       optional :max_stream_count, :int32, 3
-       optional :preferred_min_stream_count, :int32, 4
-     end
-     add_message "google.cloud.bigquery.storage.v1.ReadRowsRequest" do
-       optional :read_stream, :string, 1
-       optional :offset, :int64, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.ThrottleState" do
-       optional :throttle_percent, :int32, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.StreamStats" do
-       optional :progress, :message, 2, "google.cloud.bigquery.storage.v1.StreamStats.Progress"
-     end
-     add_message "google.cloud.bigquery.storage.v1.StreamStats.Progress" do
-       optional :at_response_start, :double, 1
-       optional :at_response_end, :double, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.ReadRowsResponse" do
-       optional :row_count, :int64, 6
-       optional :stats, :message, 2, "google.cloud.bigquery.storage.v1.StreamStats"
-       optional :throttle_state, :message, 5, "google.cloud.bigquery.storage.v1.ThrottleState"
-       oneof :rows do
-         optional :avro_rows, :message, 3, "google.cloud.bigquery.storage.v1.AvroRows"
-         optional :arrow_record_batch, :message, 4, "google.cloud.bigquery.storage.v1.ArrowRecordBatch"
-       end
-       oneof :schema do
-         optional :avro_schema, :message, 7, "google.cloud.bigquery.storage.v1.AvroSchema"
-         optional :arrow_schema, :message, 8, "google.cloud.bigquery.storage.v1.ArrowSchema"
-       end
-     end
-     add_message "google.cloud.bigquery.storage.v1.SplitReadStreamRequest" do
-       optional :name, :string, 1
-       optional :fraction, :double, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.SplitReadStreamResponse" do
-       optional :primary_stream, :message, 1, "google.cloud.bigquery.storage.v1.ReadStream"
-       optional :remainder_stream, :message, 2, "google.cloud.bigquery.storage.v1.ReadStream"
-     end
-     add_message "google.cloud.bigquery.storage.v1.CreateWriteStreamRequest" do
-       optional :parent, :string, 1
-       optional :write_stream, :message, 2, "google.cloud.bigquery.storage.v1.WriteStream"
-     end
-     add_message "google.cloud.bigquery.storage.v1.AppendRowsRequest" do
-       optional :write_stream, :string, 1
-       optional :offset, :message, 2, "google.protobuf.Int64Value"
-       optional :trace_id, :string, 6
-       map :missing_value_interpretations, :string, :enum, 7, "google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation"
-       oneof :rows do
-         optional :proto_rows, :message, 4, "google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData"
-       end
-     end
-     add_message "google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoData" do
-       optional :writer_schema, :message, 1, "google.cloud.bigquery.storage.v1.ProtoSchema"
-       optional :rows, :message, 2, "google.cloud.bigquery.storage.v1.ProtoRows"
-     end
-     add_enum "google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation" do
-       value :MISSING_VALUE_INTERPRETATION_UNSPECIFIED, 0
-       value :NULL_VALUE, 1
-       value :DEFAULT_VALUE, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.AppendRowsResponse" do
-       optional :updated_schema, :message, 3, "google.cloud.bigquery.storage.v1.TableSchema"
-       repeated :row_errors, :message, 4, "google.cloud.bigquery.storage.v1.RowError"
-       optional :write_stream, :string, 5
-       oneof :response do
-         optional :append_result, :message, 1, "google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult"
-         optional :error, :message, 2, "google.rpc.Status"
-       end
-     end
-     add_message "google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResult" do
-       optional :offset, :message, 1, "google.protobuf.Int64Value"
-     end
-     add_message "google.cloud.bigquery.storage.v1.GetWriteStreamRequest" do
-       optional :name, :string, 1
-       optional :view, :enum, 3, "google.cloud.bigquery.storage.v1.WriteStreamView"
-     end
-     add_message "google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest" do
-       optional :parent, :string, 1
-       repeated :write_streams, :string, 2
-     end
-     add_message "google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse" do
-       optional :commit_time, :message, 1, "google.protobuf.Timestamp"
-       repeated :stream_errors, :message, 2, "google.cloud.bigquery.storage.v1.StorageError"
-     end
-     add_message "google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest" do
-       optional :name, :string, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse" do
-       optional :row_count, :int64, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.FlushRowsRequest" do
-       optional :write_stream, :string, 1
-       optional :offset, :message, 2, "google.protobuf.Int64Value"
-     end
-     add_message "google.cloud.bigquery.storage.v1.FlushRowsResponse" do
-       optional :offset, :int64, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.StorageError" do
-       optional :code, :enum, 1, "google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode"
-       optional :entity, :string, 2
-       optional :error_message, :string, 3
-     end
-     add_enum "google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode" do
-       value :STORAGE_ERROR_CODE_UNSPECIFIED, 0
-       value :TABLE_NOT_FOUND, 1
-       value :STREAM_ALREADY_COMMITTED, 2
-       value :STREAM_NOT_FOUND, 3
-       value :INVALID_STREAM_TYPE, 4
-       value :INVALID_STREAM_STATE, 5
-       value :STREAM_FINALIZED, 6
-       value :SCHEMA_MISMATCH_EXTRA_FIELDS, 7
-       value :OFFSET_ALREADY_EXISTS, 8
-       value :OFFSET_OUT_OF_RANGE, 9
-     end
-     add_message "google.cloud.bigquery.storage.v1.RowError" do
-       optional :index, :int64, 1
-       optional :code, :enum, 2, "google.cloud.bigquery.storage.v1.RowError.RowErrorCode"
-       optional :message, :string, 3
-     end
-     add_enum "google.cloud.bigquery.storage.v1.RowError.RowErrorCode" do
-       value :ROW_ERROR_CODE_UNSPECIFIED, 0
-       value :FIELDS_ERROR, 1
+
+ descriptor_data = "\n.google/cloud/bigquery/storage/v1/storage.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/bigquery/storage/v1/arrow.proto\x1a+google/cloud/bigquery/storage/v1/avro.proto\x1a/google/cloud/bigquery/storage/v1/protobuf.proto\x1a-google/cloud/bigquery/storage/v1/stream.proto\x1a,google/cloud/bigquery/storage/v1/table.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"\xe7\x01\n\x18\x43reateReadSessionRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12H\n\x0cread_session\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ReadSessionB\x03\xe0\x41\x02\x12\x18\n\x10max_stream_count\x18\x03 \x01(\x05\x12\"\n\x1apreferred_min_stream_count\x18\x04 \x01(\x05\"i\n\x0fReadRowsRequest\x12\x46\n\x0bread_stream\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)bigquerystorage.googleapis.com/ReadStream\x12\x0e\n\x06offset\x18\x02 \x01(\x03\")\n\rThrottleState\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05\"\x97\x01\n\x0bStreamStats\x12H\n\x08progress\x18\x02 \x01(\x0b\x32\x36.google.cloud.bigquery.storage.v1.StreamStats.Progress\x1a>\n\x08Progress\x12\x19\n\x11\x61t_response_start\x18\x01 \x01(\x01\x12\x17\n\x0f\x61t_response_end\x18\x02 \x01(\x01\"\xe7\x03\n\x10ReadRowsResponse\x12?\n\tavro_rows\x18\x03 \x01(\x0b\x32*.google.cloud.bigquery.storage.v1.AvroRowsH\x00\x12P\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12<\n\x05stats\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.StreamStats\x12G\n\x0ethrottle_state\x18\x05 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1.ThrottleState\x12H\n\x0b\x61vro_schema\x18\x07 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.AvroSchemaB\x03\xe0\x41\x03H\x01\x12J\n\x0c\x61rrow_schema\x18\x08 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ArrowSchemaB\x03\xe0\x41\x03H\x01\x42\x06\n\x04rowsB\x08\n\x06schema\"k\n\x16SplitReadStreamRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)bigquerystorage.googleapis.com/ReadStream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x01\"\xa7\x01\n\x17SplitReadStreamResponse\x12\x44\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStream\x12\x46\n\x10remainder_stream\x18\x02 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStream\"\x9b\x01\n\x18\x43reateWriteStreamRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12H\n\x0cwrite_stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.WriteStreamB\x03\xe0\x41\x02\"\x89\x06\n\x11\x41ppendRowsRequest\x12H\n\x0cwrite_stream\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\x12+\n\x06offset\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12S\n\nproto_rows\x18\x04 \x01(\x0b\x32=.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoDataH\x00\x12\x10\n\x08trace_id\x18\x06 \x01(\t\x12{\n\x1dmissing_value_interpretations\x18\x07 \x03(\x0b\x32T.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretationsEntry\x1a\x8c\x01\n\tProtoData\x12\x44\n\rwriter_schema\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ProtoSchema\x12\x39\n\x04rows\x18\x02 \x01(\x0b\x32+.google.cloud.bigquery.storage.v1.ProtoRows\x1a\x92\x01\n MissingValueInterpretationsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12]\n\x05value\x18\x02 \x01(\x0e\x32N.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation:\x02\x38\x01\"m\n\x1aMissingValueInterpretation\x12,\n(MISSING_VALUE_INTERPRETATION_UNSPECIFIED\x10\x00\x12\x0e\n\nNULL_VALUE\x10\x01\x12\x11\n\rDEFAULT_VALUE\x10\x02\x42\x06\n\x04rows\"\xfb\x02\n\x12\x41ppendRowsResponse\x12Z\n\rappend_result\x18\x01 \x01(\x0b\x32\x41.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResultH\x00\x12#\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12\x45\n\x0eupdated_schema\x18\x03 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.TableSchema\x12>\n\nrow_errors\x18\x04 \x03(\x0b\x32*.google.cloud.bigquery.storage.v1.RowError\x12\x14\n\x0cwrite_stream\x18\x05 \x01(\t\x1a;\n\x0c\x41ppendResult\x12+\n\x06offset\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\n\x08response\"\x9a\x01\n\x15GetWriteStreamRequest\x12@\n\x04name\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\x12?\n\x04view\x18\x03 \x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1.WriteStreamView\"s\n\x1e\x42\x61tchCommitWriteStreamsRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12\x1a\n\rwrite_streams\x18\x02 \x03(\tB\x03\xe0\x41\x02\"\x99\x01\n\x1f\x42\x61tchCommitWriteStreamsResponse\x12/\n\x0b\x63ommit_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\rstream_errors\x18\x02 \x03(\x0b\x32..google.cloud.bigquery.storage.v1.StorageError\"^\n\x1a\x46inalizeWriteStreamRequest\x12@\n\x04name\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\"0\n\x1b\x46inalizeWriteStreamResponse\x12\x11\n\trow_count\x18\x01 \x01(\x03\"\x89\x01\n\x10\x46lushRowsRequest\x12H\n\x0cwrite_stream\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\x12+\n\x06offset\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\"#\n\x11\x46lushRowsResponse\x12\x0e\n\x06offset\x18\x01 \x01(\x03\"\xa4\x04\n\x0cStorageError\x12M\n\x04\x63ode\x18\x01 \x01(\x0e\x32?.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode\x12\x0e\n\x06\x65ntity\x18\x02 \x01(\t\x12\x15\n\rerror_message\x18\x03 \x01(\t\"\x9d\x03\n\x10StorageErrorCode\x12\"\n\x1eSTORAGE_ERROR_CODE_UNSPECIFIED\x10\x00\x12\x13\n\x0fTABLE_NOT_FOUND\x10\x01\x12\x1c\n\x18STREAM_ALREADY_COMMITTED\x10\x02\x12\x14\n\x10STREAM_NOT_FOUND\x10\x03\x12\x17\n\x13INVALID_STREAM_TYPE\x10\x04\x12\x18\n\x14INVALID_STREAM_STATE\x10\x05\x12\x14\n\x10STREAM_FINALIZED\x10\x06\x12 \n\x1cSCHEMA_MISMATCH_EXTRA_FIELDS\x10\x07\x12\x19\n\x15OFFSET_ALREADY_EXISTS\x10\x08\x12\x17\n\x13OFFSET_OUT_OF_RANGE\x10\t\x12\x15\n\x11\x43MEK_NOT_PROVIDED\x10\n\x12\x19\n\x15INVALID_CMEK_PROVIDED\x10\x0b\x12\x19\n\x15\x43MEK_ENCRYPTION_ERROR\x10\x0c\x12\x15\n\x11KMS_SERVICE_ERROR\x10\r\x12\x19\n\x15KMS_PERMISSION_DENIED\x10\x0e\"\xb3\x01\n\x08RowError\x12\r\n\x05index\x18\x01 \x01(\x03\x12\x45\n\x04\x63ode\x18\x02 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.RowError.RowErrorCode\x12\x0f\n\x07message\x18\x03 \x01(\t\"@\n\x0cRowErrorCode\x12\x1e\n\x1aROW_ERROR_CODE_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x46IELDS_ERROR\x10\x01\x32\x92\x06\n\x0c\x42igQueryRead\x12\xe9\x01\n\x11\x43reateReadSession\x12:.google.cloud.bigquery.storage.v1.CreateReadSessionRequest\x1a-.google.cloud.bigquery.storage.v1.ReadSession\"i\x82\xd3\xe4\x93\x02<\"7/v1/{read_session.table=projects/*/datasets/*/tables/*}:\x01*\xda\x41$parent,read_session,max_stream_count\x12\xcf\x01\n\x08ReadRows\x12\x31.google.cloud.bigquery.storage.v1.ReadRowsRequest\x1a\x32.google.cloud.bigquery.storage.v1.ReadRowsResponse\"Z\x82\xd3\xe4\x93\x02?\x12=/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}\xda\x41\x12read_stream,offset0\x01\x12\xc6\x01\n\x0fSplitReadStream\x12\x38.google.cloud.bigquery.storage.v1.SplitReadStreamRequest\x1a\x39.google.cloud.bigquery.storage.v1.SplitReadStreamResponse\">\x82\xd3\xe4\x93\x02\x38\x12\x36/v1/{name=projects/*/locations/*/sessions/*/streams/*}\x1a{\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41Whttps://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform2\xbc\x0b\n\rBigQueryWrite\x12\xd7\x01\n\x11\x43reateWriteStream\x12:.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest\x1a-.google.cloud.bigquery.storage.v1.WriteStream\"W\x82\xd3\xe4\x93\x02;\"+/v1/{parent=projects/*/datasets/*/tables/*}:\x0cwrite_stream\xda\x41\x13parent,write_stream\x12\xd2\x01\n\nAppendRows\x12\x33.google.cloud.bigquery.storage.v1.AppendRowsRequest\x1a\x34.google.cloud.bigquery.storage.v1.AppendRowsResponse\"U\x82\xd3\xe4\x93\x02@\";/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}:\x01*\xda\x41\x0cwrite_stream(\x01\x30\x01\x12\xbf\x01\n\x0eGetWriteStream\x12\x37.google.cloud.bigquery.storage.v1.GetWriteStreamRequest\x1a-.google.cloud.bigquery.storage.v1.WriteStream\"E\x82\xd3\xe4\x93\x02\x38\"3/v1/{name=projects/*/datasets/*/tables/*/streams/*}:\x01*\xda\x41\x04name\x12\xd9\x01\n\x13\x46inalizeWriteStream\x12<.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest\x1a=.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse\"E\x82\xd3\xe4\x93\x02\x38\"3/v1/{name=projects/*/datasets/*/tables/*/streams/*}:\x01*\xda\x41\x04name\x12\xdc\x01\n\x17\x42\x61tchCommitWriteStreams\x12@.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest\x1a\x41.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse\"<\x82\xd3\xe4\x93\x02-\x12+/v1/{parent=projects/*/datasets/*/tables/*}\xda\x41\x06parent\x12\xcb\x01\n\tFlushRows\x12\x32.google.cloud.bigquery.storage.v1.FlushRowsRequest\x1a\x33.google.cloud.bigquery.storage.v1.FlushRowsResponse\"U\x82\xd3\xe4\x93\x02@\";/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}:\x01*\xda\x41\x0cwrite_stream\x1a\xb0\x01\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41\x8b\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.insertdata,https://www.googleapis.com/auth/cloud-platformB\x94\x02\n$com.google.cloud.bigquery.storage.v1B\x0cStorageProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1\xea\x41U\n\x1d\x62igquery.googleapis.com/Table\x12\x34projects/{project}/datasets/{dataset}/tables/{table}b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+     ["google.cloud.bigquery.storage.v1.ReadSession", "google/cloud/bigquery/storage/v1/stream.proto"],
+     ["google.cloud.bigquery.storage.v1.AvroRows", "google/cloud/bigquery/storage/v1/avro.proto"],
+     ["google.cloud.bigquery.storage.v1.ArrowRecordBatch", "google/cloud/bigquery/storage/v1/arrow.proto"],
+     ["google.protobuf.Int64Value", "google/protobuf/wrappers.proto"],
+     ["google.cloud.bigquery.storage.v1.ProtoSchema", "google/cloud/bigquery/storage/v1/protobuf.proto"],
+     ["google.rpc.Status", "google/rpc/status.proto"],
+     ["google.cloud.bigquery.storage.v1.TableSchema", "google/cloud/bigquery/storage/v1/table.proto"],
+     ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
      end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
data/lib/google/cloud/bigquery/storage/v1/stream_pb.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/stream.proto
 
@@ -10,68 +11,35 @@ require 'google/cloud/bigquery/storage/v1/avro_pb'
  require 'google/cloud/bigquery/storage/v1/table_pb'
  require 'google/protobuf/timestamp_pb'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/stream.proto", :syntax => :proto3) do
-     add_message "google.cloud.bigquery.storage.v1.ReadSession" do
-       optional :name, :string, 1
-       optional :expire_time, :message, 2, "google.protobuf.Timestamp"
-       optional :data_format, :enum, 3, "google.cloud.bigquery.storage.v1.DataFormat"
-       optional :table, :string, 6
-       optional :table_modifiers, :message, 7, "google.cloud.bigquery.storage.v1.ReadSession.TableModifiers"
-       optional :read_options, :message, 8, "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"
-       repeated :streams, :message, 10, "google.cloud.bigquery.storage.v1.ReadStream"
-       optional :estimated_total_bytes_scanned, :int64, 12
-       optional :estimated_row_count, :int64, 14
-       optional :trace_id, :string, 13
-       oneof :schema do
-         optional :avro_schema, :message, 4, "google.cloud.bigquery.storage.v1.AvroSchema"
-         optional :arrow_schema, :message, 5, "google.cloud.bigquery.storage.v1.ArrowSchema"
-       end
-     end
-     add_message "google.cloud.bigquery.storage.v1.ReadSession.TableModifiers" do
-       optional :snapshot_time, :message, 1, "google.protobuf.Timestamp"
-     end
-     add_message "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions" do
-       repeated :selected_fields, :string, 1
-       optional :row_restriction, :string, 2
-       oneof :output_format_serialization_options do
-         optional :arrow_serialization_options, :message, 3, "google.cloud.bigquery.storage.v1.ArrowSerializationOptions"
-         optional :avro_serialization_options, :message, 4, "google.cloud.bigquery.storage.v1.AvroSerializationOptions"
-       end
-     end
-     add_message "google.cloud.bigquery.storage.v1.ReadStream" do
-       optional :name, :string, 1
-     end
-     add_message "google.cloud.bigquery.storage.v1.WriteStream" do
-       optional :name, :string, 1
-       optional :type, :enum, 2, "google.cloud.bigquery.storage.v1.WriteStream.Type"
-       optional :create_time, :message, 3, "google.protobuf.Timestamp"
-       optional :commit_time, :message, 4, "google.protobuf.Timestamp"
-       optional :table_schema, :message, 5, "google.cloud.bigquery.storage.v1.TableSchema"
-       optional :write_mode, :enum, 7, "google.cloud.bigquery.storage.v1.WriteStream.WriteMode"
-       optional :location, :string, 8
-     end
-     add_enum "google.cloud.bigquery.storage.v1.WriteStream.Type" do
-       value :TYPE_UNSPECIFIED, 0
-       value :COMMITTED, 1
-       value :PENDING, 2
-       value :BUFFERED, 3
-     end
-     add_enum "google.cloud.bigquery.storage.v1.WriteStream.WriteMode" do
-       value :WRITE_MODE_UNSPECIFIED, 0
-       value :INSERT, 1
-     end
-     add_enum "google.cloud.bigquery.storage.v1.DataFormat" do
-       value :DATA_FORMAT_UNSPECIFIED, 0
-       value :AVRO, 1
-       value :ARROW, 2
-     end
-     add_enum "google.cloud.bigquery.storage.v1.WriteStreamView" do
-       value :WRITE_STREAM_VIEW_UNSPECIFIED, 0
-       value :BASIC, 1
-       value :FULL, 2
+
+ descriptor_data = "\n-google/cloud/bigquery/storage/v1/stream.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/bigquery/storage/v1/arrow.proto\x1a+google/cloud/bigquery/storage/v1/avro.proto\x1a,google/cloud/bigquery/storage/v1/table.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xff\t\n\x0bReadSession\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x46\n\x0b\x64\x61ta_format\x18\x03 \x01(\x0e\x32,.google.cloud.bigquery.storage.v1.DataFormatB\x03\xe0\x41\x05\x12H\n\x0b\x61vro_schema\x18\x04 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.AvroSchemaB\x03\xe0\x41\x03H\x00\x12J\n\x0c\x61rrow_schema\x18\x05 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ArrowSchemaB\x03\xe0\x41\x03H\x00\x12\x34\n\x05table\x18\x06 \x01(\tB%\xe0\x41\x05\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12Z\n\x0ftable_modifiers\x18\x07 \x01(\x0b\x32<.google.cloud.bigquery.storage.v1.ReadSession.TableModifiersB\x03\xe0\x41\x01\x12Y\n\x0cread_options\x18\x08 \x01(\x0b\x32>.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptionsB\x03\xe0\x41\x01\x12\x42\n\x07streams\x18\n \x03(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStreamB\x03\xe0\x41\x03\x12*\n\x1d\x65stimated_total_bytes_scanned\x18\x0c \x01(\x03\x42\x03\xe0\x41\x03\x12 \n\x13\x65stimated_row_count\x18\x0e \x01(\x03\x42\x03\xe0\x41\x03\x12\x15\n\x08trace_id\x18\r \x01(\tB\x03\xe0\x41\x01\x1a\x43\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\xf6\x02\n\x10TableReadOptions\x12\x17\n\x0fselected_fields\x18\x01 \x03(\t\x12\x17\n\x0frow_restriction\x18\x02 \x01(\t\x12g\n\x1b\x61rrow_serialization_options\x18\x03 \x01(\x0b\x32;.google.cloud.bigquery.storage.v1.ArrowSerializationOptionsB\x03\xe0\x41\x01H\x00\x12\x65\n\x1a\x61vro_serialization_options\x18\x04 \x01(\x0b\x32:.google.cloud.bigquery.storage.v1.AvroSerializationOptionsB\x03\xe0\x41\x01H\x00\x12#\n\x11sample_percentage\x18\x05 \x01(\x01\x42\x03\xe0\x41\x01H\x01\x88\x01\x01\x42%\n#output_format_serialization_optionsB\x14\n\x12_sample_percentage:k\xea\x41h\n*bigquerystorage.googleapis.com/ReadSession\x12:projects/{project}/locations/{location}/sessions/{session}B\x08\n\x06schema\"\x9c\x01\n\nReadStream\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03:{\xea\x41x\n)bigquerystorage.googleapis.com/ReadStream\x12Kprojects/{project}/locations/{location}/sessions/{session}/streams/{stream}\"\xfb\x04\n\x0bWriteStream\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x45\n\x04type\x18\x02 \x01(\x0e\x32\x32.google.cloud.bigquery.storage.v1.WriteStream.TypeB\x03\xe0\x41\x05\x12\x34\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x0ctable_schema\x18\x05 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.TableSchemaB\x03\xe0\x41\x03\x12P\n\nwrite_mode\x18\x07 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.WriteStream.WriteModeB\x03\xe0\x41\x05\x12\x15\n\x08location\x18\x08 \x01(\tB\x03\xe0\x41\x05\"F\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\r\n\tCOMMITTED\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0c\n\x08\x42UFFERED\x10\x03\"3\n\tWriteMode\x12\x1a\n\x16WRITE_MODE_UNSPECIFIED\x10\x00\x12\n\n\x06INSERT\x10\x01:v\xea\x41s\n*bigquerystorage.googleapis.com/WriteStream\x12\x45projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x02*I\n\x0fWriteStreamView\x12!\n\x1dWRITE_STREAM_VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02\x42\xbb\x01\n$com.google.cloud.bigquery.storage.v1B\x0bStreamProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+     ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
+     ["google.cloud.bigquery.storage.v1.AvroSchema", "google/cloud/bigquery/storage/v1/avro.proto"],
+     ["google.cloud.bigquery.storage.v1.ArrowSchema", "google/cloud/bigquery/storage/v1/arrow.proto"],
+     ["google.cloud.bigquery.storage.v1.TableSchema", "google/cloud/bigquery/storage/v1/table.proto"],
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
      end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
data/lib/google/cloud/bigquery/storage/v1/table_pb.rb CHANGED
@@ -1,3 +1,4 @@
+ # frozen_string_literal: true
  # Generated by the protocol buffer compiler. DO NOT EDIT!
  # source: google/cloud/bigquery/storage/v1/table.proto
 
@@ -5,47 +6,31 @@ require 'google/protobuf'
 
  require 'google/api/field_behavior_pb'
 
- Google::Protobuf::DescriptorPool.generated_pool.build do
-   add_file("google/cloud/bigquery/storage/v1/table.proto", :syntax => :proto3) do
-     add_message "google.cloud.bigquery.storage.v1.TableSchema" do
-       repeated :fields, :message, 1, "google.cloud.bigquery.storage.v1.TableFieldSchema"
-     end
-     add_message "google.cloud.bigquery.storage.v1.TableFieldSchema" do
-       optional :name, :string, 1
-       optional :type, :enum, 2, "google.cloud.bigquery.storage.v1.TableFieldSchema.Type"
-       optional :mode, :enum, 3, "google.cloud.bigquery.storage.v1.TableFieldSchema.Mode"
-       repeated :fields, :message, 4, "google.cloud.bigquery.storage.v1.TableFieldSchema"
-       optional :description, :string, 6
-       optional :max_length, :int64, 7
-       optional :precision, :int64, 8
-       optional :scale, :int64, 9
-       optional :default_value_expression, :string, 10
-     end
-     add_enum "google.cloud.bigquery.storage.v1.TableFieldSchema.Type" do
-       value :TYPE_UNSPECIFIED, 0
-       value :STRING, 1
-       value :INT64, 2
-       value :DOUBLE, 3
-       value :STRUCT, 4
-       value :BYTES, 5
-       value :BOOL, 6
-       value :TIMESTAMP, 7
-       value :DATE, 8
-       value :TIME, 9
-       value :DATETIME, 10
-       value :GEOGRAPHY, 11
-       value :NUMERIC, 12
-       value :BIGNUMERIC, 13
-       value :INTERVAL, 14
-       value :JSON, 15
-     end
-     add_enum "google.cloud.bigquery.storage.v1.TableFieldSchema.Mode" do
-       value :MODE_UNSPECIFIED, 0
-       value :NULLABLE, 1
-       value :REQUIRED, 2
-       value :REPEATED, 3
+
+ descriptor_data = "\n,google/cloud/bigquery/storage/v1/table.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1fgoogle/api/field_behavior.proto\"Q\n\x0bTableSchema\x12\x42\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.storage.v1.TableFieldSchema\"\xac\x05\n\x10TableFieldSchema\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12J\n\x04type\x18\x02 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.TypeB\x03\xe0\x41\x02\x12J\n\x04mode\x18\x03 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.ModeB\x03\xe0\x41\x01\x12G\n\x06\x66ields\x18\x04 \x03(\x0b\x32\x32.google.cloud.bigquery.storage.v1.TableFieldSchemaB\x03\xe0\x41\x01\x12\x18\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nmax_length\x18\x07 \x01(\x03\x42\x03\xe0\x41\x01\x12\x16\n\tprecision\x18\x08 \x01(\x03\x42\x03\xe0\x41\x01\x12\x12\n\x05scale\x18\t \x01(\x03\x42\x03\xe0\x41\x01\x12%\n\x18\x64\x65\x66\x61ult_value_expression\x18\n \x01(\tB\x03\xe0\x41\x01\"\xd5\x01\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\t\n\x05INT64\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\n\n\x06STRUCT\x10\x04\x12\t\n\x05\x42YTES\x10\x05\x12\x08\n\x04\x42OOL\x10\x06\x12\r\n\tTIMESTAMP\x10\x07\x12\x08\n\x04\x44\x41TE\x10\x08\x12\x08\n\x04TIME\x10\t\x12\x0c\n\x08\x44\x41TETIME\x10\n\x12\r\n\tGEOGRAPHY\x10\x0b\x12\x0b\n\x07NUMERIC\x10\x0c\x12\x0e\n\nBIGNUMERIC\x10\r\x12\x0c\n\x08INTERVAL\x10\x0e\x12\x08\n\x04JSON\x10\x0f\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x0c\n\x08NULLABLE\x10\x01\x12\x0c\n\x08REQUIRED\x10\x02\x12\x0c\n\x08REPEATED\x10\x03\x42\xba\x01\n$com.google.cloud.bigquery.storage.v1B\nTableProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+
+ pool = Google::Protobuf::DescriptorPool.generated_pool
+
+ begin
+   pool.add_serialized_file(descriptor_data)
+ rescue TypeError => e
+   # Compatibility code: will be removed in the next major version.
+   require 'google/protobuf/descriptor_pb'
+   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
+   parsed.clear_dependency
+   serialized = parsed.class.encode(parsed)
+   file = pool.add_serialized_file(serialized)
+   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
+   imports = [
+   ]
+   imports.each do |type_name, expected_filename|
+     import_file = pool.lookup(type_name).file_descriptor
+     if import_file.name != expected_filename
+       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
      end
    end
+   warn "Each proto file must use a consistent fully-qualified name."
+   warn "This will become an error in the next major version."
  end
 
  module Google
data/lib/google/cloud/bigquery/storage/v1/version.rb CHANGED
@@ -22,7 +22,7 @@ module Google
      module Bigquery
        module Storage
          module V1
-           VERSION = "0.18.0"
+           VERSION = "0.19.0"
          end
        end
      end
data/proto_docs/google/api/client.rb CHANGED
@@ -35,7 +35,9 @@ module Google
  # Details about how and where to publish client libraries.
  # @!attribute [rw] version
  #   @return [::String]
- #     Version of the API to apply these settings to.
+ #     Version of the API to apply these settings to. This is the full protobuf
+ #     package for the API, ending in the version element.
+ #     Examples: "google.cloud.speech.v1" and "google.spanner.admin.database.v1".
  # @!attribute [rw] launch_stage
  #   @return [::Google::Api::LaunchStage]
  #     Launch stage of this version of the API.
@@ -81,7 +83,7 @@ module Google
  #     long-running operation pattern.
  # @!attribute [rw] new_issue_uri
  #   @return [::String]
- #     Link to a place that API users can report issues. Example:
+ #     Link to a *public* URI where users can report issues. Example:
  #     https://issuetracker.google.com/issues/new?component=190865&template=1161103
  # @!attribute [rw] documentation_uri
  #   @return [::String]
@@ -111,6 +113,10 @@ module Google
  #     Client library settings. If the same version string appears multiple
  #     times in this list, then the last one wins. Settings from earlier
  #     settings with the same version string are discarded.
+ # @!attribute [rw] proto_reference_documentation_uri
+ #   @return [::String]
+ #     Optional link to proto reference documentation. Example:
+ #     https://cloud.google.com/pubsub/lite/docs/reference/rpc
  class Publishing
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -203,9 +209,57 @@ module Google
  # @!attribute [rw] common
  #   @return [::Google::Api::CommonLanguageSettings]
  #     Some settings.
+ # @!attribute [rw] renamed_services
+ #   @return [::Google::Protobuf::Map{::String => ::String}]
+ #     Map from original service names to renamed versions.
+ #     This is used when the default generated types
+ #     would cause a naming conflict. (Neither name is
+ #     fully-qualified.)
+ #     Example: Subscriber to SubscriberServiceApi.
+ # @!attribute [rw] renamed_resources
+ #   @return [::Google::Protobuf::Map{::String => ::String}]
+ #     Map from full resource types to the effective short name
+ #     for the resource. This is used when otherwise resource
+ #     named from different services would cause naming collisions.
+ #     Example entry:
+ #     "datalabeling.googleapis.com/Dataset": "DataLabelingDataset"
+ # @!attribute [rw] ignored_resources
+ #   @return [::Array<::String>]
+ #     List of full resource types to ignore during generation.
+ #     This is typically used for API-specific Location resources,
+ #     which should be handled by the generator as if they were actually
+ #     the common Location resources.
+ #     Example entry: "documentai.googleapis.com/Location"
+ # @!attribute [rw] forced_namespace_aliases
+ #   @return [::Array<::String>]
+ #     Namespaces which must be aliased in snippets due to
+ #     a known (but non-generator-predictable) naming collision
+ # @!attribute [rw] handwritten_signatures
+ #   @return [::Array<::String>]
+ #     Method signatures (in the form "service.method(signature)")
+ #     which are provided separately, so shouldn't be generated.
+ #     Snippets *calling* these methods are still generated, however.
  class DotnetSettings
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
+
+   # @!attribute [rw] key
+   #   @return [::String]
+   # @!attribute [rw] value
+   #   @return [::String]
+   class RenamedServicesEntry
+     include ::Google::Protobuf::MessageExts
+     extend ::Google::Protobuf::MessageExts::ClassMethods
+   end
+
+   # @!attribute [rw] key
+   #   @return [::String]
+   # @!attribute [rw] value
+   #   @return [::String]
+   class RenamedResourcesEntry
+     include ::Google::Protobuf::MessageExts
+     extend ::Google::Protobuf::MessageExts::ClassMethods
+   end
  end
 
  # Settings for Ruby client libraries.
@@ -240,8 +294,8 @@ module Google
  #   Example of a YAML configuration::
  #
  #      publishing:
- #        method_behavior:
- #          - selector: CreateAdDomain
+ #        method_settings:
+ #          - selector: google.cloud.speech.v2.Speech.BatchRecognize
  #            long_running:
  #              initial_poll_delay:
  #                seconds: 60 # 1 minute
@@ -299,6 +353,15 @@ module Google
 
  # Street View Org.
  STREET_VIEW = 4
+
+ # Shopping Org.
+ SHOPPING = 5
+
+ # Geo Org.
+ GEO = 6
+
+ # Generative AI - https://developers.generativeai.google
+ GENERATIVE_AI = 7
  end
 
  # To where should client libraries be published?
data/proto_docs/google/cloud/bigquery/storage/v1/storage.rb CHANGED
@@ -485,6 +485,22 @@ module Google
 
  # Offset out of range.
  OFFSET_OUT_OF_RANGE = 9
+
+ # Customer-managed encryption key (CMEK) not provided for CMEK-enabled
+ # data.
+ CMEK_NOT_PROVIDED = 10
+
+ # Customer-managed encryption key (CMEK) was incorrectly provided.
+ INVALID_CMEK_PROVIDED = 11
+
+ # There is an encryption error while using customer-managed encryption key.
+ CMEK_ENCRYPTION_ERROR = 12
+
+ # Key Management Service (KMS) service returned an error.
+ KMS_SERVICE_ERROR = 13
+
+ # Permission denied while using customer-managed encryption key.
+ KMS_PERMISSION_DENIED = 14
  end
  end
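A sketch of reacting to the new codes, assuming `response` is a BatchCommitWriteStreamsResponse whose `stream_errors` carry StorageError messages (protobuf enum fields surface as Ruby symbols):

    response.stream_errors.each do |storage_error|
      case storage_error.code
      when :CMEK_NOT_PROVIDED, :INVALID_CMEK_PROVIDED, :CMEK_ENCRYPTION_ERROR
        warn "CMEK problem on #{storage_error.entity}: #{storage_error.error_message}"
      when :KMS_SERVICE_ERROR, :KMS_PERMISSION_DENIED
        warn "KMS problem: #{storage_error.error_message}"
      else
        warn "#{storage_error.code}: #{storage_error.error_message}"
      end
    end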
data/proto_docs/google/cloud/bigquery/storage/v1/stream.rb CHANGED
@@ -161,6 +161,14 @@ module Google
  # @!attribute [rw] avro_serialization_options
  #   @return [::Google::Cloud::Bigquery::Storage::V1::AvroSerializationOptions]
  #     Optional. Options specific to the Apache Avro output format
+ # @!attribute [rw] sample_percentage
+ #   @return [::Float]
+ #     Optional. Specifies a table sampling percentage. Specifically, the query
+ #     planner will use TABLESAMPLE SYSTEM (sample_percentage PERCENT). This
+ #     samples at the file-level. It will randomly choose for each file whether
+ #     to include that file in the sample returned. Note, that if the table only
+ #     has one file, then TABLESAMPLE SYSTEM will select that file and return
+ #     all returnable rows contained within.
  class TableReadOptions
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
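A sketch of opting into the new sampling field when building a read session (resource names are placeholders; protobuf message constructors accept a hash for nested messages):

    read_session = Google::Cloud::Bigquery::Storage::V1::ReadSession.new(
      table: "projects/my-project/datasets/my_dataset/tables/my_table",
      data_format: :ARROW,
      read_options: { sample_percentage: 10.0 }  # new optional field in this release
    )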
data/proto_docs/google/protobuf/any.rb CHANGED
@@ -43,8 +43,12 @@ module Google
  #       if (any.is(Foo.class)) {
  #         foo = any.unpack(Foo.class);
  #       }
+ #       // or ...
+ #       if (any.isSameTypeAs(Foo.getDefaultInstance())) {
+ #         foo = any.unpack(Foo.getDefaultInstance());
+ #       }
  #
- #     Example 3: Pack and unpack a message in Python.
+ #     Example 3: Pack and unpack a message in Python.
  #
  #         foo = Foo(...)
  #         any = Any()
@@ -54,7 +58,7 @@ module Google
  #         any.Unpack(foo)
  #         ...
  #
- #     Example 4: Pack and unpack a message in Go
+ #     Example 4: Pack and unpack a message in Go
  #
  #         foo := &pb.Foo{...}
  #         any, err := anypb.New(foo)
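For completeness, a Ruby counterpart to the Java/Python/Go snippets in this doc comment (`Foo` stands for any generated message class; helpers come from google-protobuf's well-known types):

    require "google/protobuf/well_known_types"

    any = Google::Protobuf::Any.pack(foo)  # foo is an instance of Foo
    if any.is(Foo)
      foo = any.unpack(Foo)
    end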
@@ -73,9 +77,8 @@ module Google
  # in the type URL, for example "foo.bar.com/x/y.z" will yield type
  # name "y.z".
  #
- #
  # JSON
- #
+ # ====
  # The JSON representation of an `Any` value uses the regular
  # representation of the deserialized, embedded message, with an
  # additional field `@type` which contains the type URL. Example:
data/proto_docs/google/protobuf/timestamp.rb CHANGED
@@ -69,7 +69,6 @@ module Google
  #     Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
  #         .setNanos((int) ((millis % 1000) * 1000000)).build();
  #
- #
  # Example 5: Compute Timestamp from Java `Instant.now()`.
  #
  #     Instant now = Instant.now();
@@ -78,7 +77,6 @@ module Google
  #     Timestamp.newBuilder().setSeconds(now.getEpochSecond())
  #         .setNanos(now.getNano()).build();
  #
- #
  # Example 6: Compute Timestamp from current time in Python.
  #
  #     timestamp = Timestamp()
@@ -108,7 +106,7 @@ module Google
  # [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
  # the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
  # the Joda Time's [`ISODateTimeFormat.dateTime()`](
- # http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
+ # http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
  # ) to obtain a formatter capable of generating timestamps in this format.
  # @!attribute [rw] seconds
  #   @return [::Integer]
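A Ruby counterpart to the Java/Python examples above; constructing the message directly avoids depending on helper availability across google-protobuf versions:

    t = Time.now
    # seconds and nanos match the field layout documented here.
    timestamp = Google::Protobuf::Timestamp.new seconds: t.to_i, nanos: t.nsec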
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-bigquery-storage-v1
  version: !ruby/object:Gem::Version
-   version: 0.18.0
+   version: 0.19.0
  platform: ruby
  authors:
  - Google LLC
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-02-13 00:00:00.000000000 Z
+ date: 2023-05-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: gapic-common
@@ -16,7 +16,7 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: 0.16.0
+         version: 0.19.0
      - - "<"
        - !ruby/object:Gem::Version
          version: 2.a
@@ -26,7 +26,7 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: 0.16.0
+         version: 0.19.0
      - - "<"
        - !ruby/object:Gem::Version
          version: 2.a
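With the gapic-common floor raised to 0.19.0, consumers still pin only this gem and let Bundler resolve the transitive dependency; a typical Gemfile line (the pessimistic constraint is an assumption, not something the gemspec mandates):

    gem "google-cloud-bigquery-storage-v1", "~> 0.19"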
@@ -50,14 +50,14 @@ dependencies:
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 1.26.1
+         version: 1.26.3
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 1.26.1
+         version: 1.26.3
  - !ruby/object:Gem::Dependency
    name: minitest
    requirement: !ruby/object:Gem::Requirement