google-cloud-bigquery-storage-v1 1.5.1 → 1.7.0

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 75297df60393538987d9663c74022cf135367b2c135d560a1e7ccc5edca93d3a
-   data.tar.gz: 28d1b2cb1dd450b0a461cbe1763b151f91c14ba1d6da27292d577d49022078fc
+   metadata.gz: 8b0dadc8bf969bfbc8e55aa4438911a54674704feb9531bfe6766ec1d4866492
+   data.tar.gz: 486c66ac7b2bda33eac4293f4e3911ce1c6e00bd427e1012b0f7d31c7265361c
  SHA512:
-   metadata.gz: 247e6bae649a9e3a9b89f746dbf72e403abb5c01ded83289c4733057b25ba9826ee031f9aed6f140b356f7a2020b1c962e1410d86e84d2ecef8feb31f76c1ffc
-   data.tar.gz: db2c55f71e22d9a47c74f54d90eb2b25ae1dff3a122b1e0f73125f4400e419c9b2a84a6b11895f19b4fa23caf0d4e5a56e10f4297131d13e193a8b5b080e09e0
+   metadata.gz: 8db14a897607c1379c3d2713c26edff49c5e8bf10e4f2bf4a364531e3d60d1d25d18dc100fdf8c9ea42adfaa506dbf165e8be8aba0d066c44dc0e2adfdc64264
+   data.tar.gz: 9e189057d56054f4a09f4453dffaf6e5b821b345160162fe0aff1f8b04f387761e56eaeed62bf5c423a5356db09a8dd46eaf990bc798be89010cde8b78128060
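
The SHA256/SHA512 values above digest the members of the .gem archive (metadata.gz and data.tar.gz), not the .gem file itself. A minimal verification sketch in Ruby, assuming the archive has been unpacked first (the gem filename is hypothetical; `tar -xf google-cloud-bigquery-storage-v1-1.7.0.gem` extracts the members):

    require "digest"

    # Compare the extracted data.tar.gz member against the published SHA256 above.
    expected = "486c66ac7b2bda33eac4293f4e3911ce1c6e00bd427e1012b0f7d31c7265361c"
    actual   = Digest::SHA256.hexdigest(File.binread("data.tar.gz"))
    puts(actual == expected ? "data.tar.gz checksum OK" : "checksum MISMATCH")
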
@@ -9,29 +9,8 @@ require 'google/protobuf/descriptor_pb'
  descriptor_data = "\n2google/cloud/bigquery/storage/v1/annotations.proto\x12 google.cloud.bigquery.storage.v1\x1a google/protobuf/descriptor.proto:9\n\x0b\x63olumn_name\x12\x1d.google.protobuf.FieldOptions\x18\xb5\xc3\xf7\xd8\x01 \x01(\t\x88\x01\x01\x42\xc0\x01\n$com.google.cloud.bigquery.storage.v1B\x10\x41nnotationsProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -5,31 +5,10 @@
  require 'google/protobuf'


- descriptor_data = "\n,google/cloud/bigquery/storage/v1/arrow.proto\x12 google.cloud.bigquery.storage.v1\"(\n\x0b\x41rrowSchema\x12\x19\n\x11serialized_schema\x18\x01 \x01(\x0c\"J\n\x10\x41rrowRecordBatch\x12\x1f\n\x17serialized_record_batch\x18\x01 \x01(\x0c\x12\x15\n\trow_count\x18\x02 \x01(\x03\x42\x02\x18\x01\"\xcf\x01\n\x19\x41rrowSerializationOptions\x12h\n\x12\x62uffer_compression\x18\x02 \x01(\x0e\x32L.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec\"H\n\x10\x43ompressionCodec\x12\x1b\n\x17\x43OMPRESSION_UNSPECIFIED\x10\x00\x12\r\n\tLZ4_FRAME\x10\x01\x12\x08\n\x04ZSTD\x10\x02\x42\xba\x01\n$com.google.cloud.bigquery.storage.v1B\nArrowProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+ descriptor_data = "\n,google/cloud/bigquery/storage/v1/arrow.proto\x12 google.cloud.bigquery.storage.v1\"(\n\x0b\x41rrowSchema\x12\x19\n\x11serialized_schema\x18\x01 \x01(\x0c\"J\n\x10\x41rrowRecordBatch\x12\x1f\n\x17serialized_record_batch\x18\x01 \x01(\x0c\x12\x15\n\trow_count\x18\x02 \x01(\x03\x42\x02\x18\x01\"\xec\x03\n\x19\x41rrowSerializationOptions\x12h\n\x12\x62uffer_compression\x18\x02 \x01(\x0e\x32L.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec\x12v\n\x19picos_timestamp_precision\x18\x03 \x01(\x0e\x32S.google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision\"H\n\x10\x43ompressionCodec\x12\x1b\n\x17\x43OMPRESSION_UNSPECIFIED\x10\x00\x12\r\n\tLZ4_FRAME\x10\x01\x12\x08\n\x04ZSTD\x10\x02\"\xa2\x01\n\x17PicosTimestampPrecision\x12)\n%PICOS_TIMESTAMP_PRECISION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aTIMESTAMP_PRECISION_MICROS\x10\x01\x12\x1d\n\x19TIMESTAMP_PRECISION_NANOS\x10\x02\x12\x1d\n\x19TIMESTAMP_PRECISION_PICOS\x10\x03\x42\xba\x01\n$com.google.cloud.bigquery.storage.v1B\nArrowProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -40,6 +19,7 @@ module Google
  ArrowRecordBatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ArrowRecordBatch").msgclass
  ArrowSerializationOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ArrowSerializationOptions").msgclass
  ArrowSerializationOptions::CompressionCodec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ArrowSerializationOptions.CompressionCodec").enummodule
+ ArrowSerializationOptions::PicosTimestampPrecision = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ArrowSerializationOptions.PicosTimestampPrecision").enummodule
  end
  end
  end
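
The newly registered enum is resolvable from the generated pool like the existing ones. A small sketch, assuming google-protobuf's standard generated enum-module API of resolve/lookup:

    require "google/cloud/bigquery/storage/v1/arrow_pb"

    precision = Google::Cloud::Bigquery::Storage::V1::ArrowSerializationOptions::PicosTimestampPrecision
    precision.resolve(:TIMESTAMP_PRECISION_NANOS) # => 2
    precision.lookup(3)                           # => :TIMESTAMP_PRECISION_PICOS
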
@@ -5,31 +5,10 @@
  require 'google/protobuf'


- descriptor_data = "\n+google/cloud/bigquery/storage/v1/avro.proto\x12 google.cloud.bigquery.storage.v1\"\x1c\n\nAvroSchema\x12\x0e\n\x06schema\x18\x01 \x01(\t\"A\n\x08\x41vroRows\x12\x1e\n\x16serialized_binary_rows\x18\x01 \x01(\x0c\x12\x15\n\trow_count\x18\x02 \x01(\x03\x42\x02\x18\x01\"A\n\x18\x41vroSerializationOptions\x12%\n\x1d\x65nable_display_name_attribute\x18\x01 \x01(\x08\x42\xb9\x01\n$com.google.cloud.bigquery.storage.v1B\tAvroProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+ descriptor_data = "\n+google/cloud/bigquery/storage/v1/avro.proto\x12 google.cloud.bigquery.storage.v1\"\x1c\n\nAvroSchema\x12\x0e\n\x06schema\x18\x01 \x01(\t\"A\n\x08\x41vroRows\x12\x1e\n\x16serialized_binary_rows\x18\x01 \x01(\x0c\x12\x15\n\trow_count\x18\x02 \x01(\x03\x42\x02\x18\x01\"\xdd\x02\n\x18\x41vroSerializationOptions\x12%\n\x1d\x65nable_display_name_attribute\x18\x01 \x01(\x08\x12u\n\x19picos_timestamp_precision\x18\x02 \x01(\x0e\x32R.google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision\"\xa2\x01\n\x17PicosTimestampPrecision\x12)\n%PICOS_TIMESTAMP_PRECISION_UNSPECIFIED\x10\x00\x12\x1e\n\x1aTIMESTAMP_PRECISION_MICROS\x10\x01\x12\x1d\n\x19TIMESTAMP_PRECISION_NANOS\x10\x02\x12\x1d\n\x19TIMESTAMP_PRECISION_PICOS\x10\x03\x42\xb9\x01\n$com.google.cloud.bigquery.storage.v1B\tAvroProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -39,6 +18,7 @@ module Google
  AvroSchema = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AvroSchema").msgclass
  AvroRows = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AvroRows").msgclass
  AvroSerializationOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AvroSerializationOptions").msgclass
+ AvroSerializationOptions::PicosTimestampPrecision = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.AvroSerializationOptions.PicosTimestampPrecision").enummodule
  end
  end
  end
@@ -329,9 +329,9 @@ module Google

  ##
  # Reads rows from the stream in the format prescribed by the ReadSession.
- # Each response contains one or more table rows, up to a maximum of 100 MiB
+ # Each response contains one or more table rows, up to a maximum of 128 MB
  # per response; read requests which attempt to read individual rows larger
- # than 100 MiB will fail.
+ # than 128 MB will fail.
  #
  # Each request also returns a set of stream statistics reflecting the current
  # state of the stream.
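
The cap applies per ReadRowsResponse rather than per stream, so callers simply iterate the server stream; only individual rows larger than the cap fail the request. A minimal read loop against this gem's generated client, as a sketch (the stream name is a placeholder):

    require "google/cloud/bigquery/storage/v1"

    client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

    stream_name = "projects/p/locations/us/sessions/s/streams/st" # placeholder
    client.read_rows(read_stream: stream_name).each do |response|
      # Each response carries at most the documented per-response maximum.
      puts "rows in this response: #{response.row_count}"
    end
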
@@ -9,30 +9,8 @@ require 'google/protobuf/descriptor_pb'
  descriptor_data = "\n/google/cloud/bigquery/storage/v1/protobuf.proto\x12 google.cloud.bigquery.storage.v1\x1a google/protobuf/descriptor.proto\"I\n\x0bProtoSchema\x12:\n\x10proto_descriptor\x18\x01 \x01(\x0b\x32 .google.protobuf.DescriptorProto\"$\n\tProtoRows\x12\x17\n\x0fserialized_rows\x18\x01 \x03(\x0c\x42\xbd\x01\n$com.google.cloud.bigquery.storage.v1B\rProtoBufProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-     ["google.protobuf.DescriptorProto", "google/protobuf/descriptor.proto"],
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -20,37 +20,8 @@ require 'google/rpc/status_pb'
  descriptor_data = "\n.google/cloud/bigquery/storage/v1/storage.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/bigquery/storage/v1/arrow.proto\x1a+google/cloud/bigquery/storage/v1/avro.proto\x1a/google/cloud/bigquery/storage/v1/protobuf.proto\x1a-google/cloud/bigquery/storage/v1/stream.proto\x1a,google/cloud/bigquery/storage/v1/table.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/wrappers.proto\x1a\x17google/rpc/status.proto\"\xe7\x01\n\x18\x43reateReadSessionRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12H\n\x0cread_session\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ReadSessionB\x03\xe0\x41\x02\x12\x18\n\x10max_stream_count\x18\x03 \x01(\x05\x12\"\n\x1apreferred_min_stream_count\x18\x04 \x01(\x05\"i\n\x0fReadRowsRequest\x12\x46\n\x0bread_stream\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)bigquerystorage.googleapis.com/ReadStream\x12\x0e\n\x06offset\x18\x02 \x01(\x03\")\n\rThrottleState\x12\x18\n\x10throttle_percent\x18\x01 \x01(\x05\"\x97\x01\n\x0bStreamStats\x12H\n\x08progress\x18\x02 \x01(\x0b\x32\x36.google.cloud.bigquery.storage.v1.StreamStats.Progress\x1a>\n\x08Progress\x12\x19\n\x11\x61t_response_start\x18\x01 \x01(\x01\x12\x17\n\x0f\x61t_response_end\x18\x02 \x01(\x01\"\xac\x04\n\x10ReadRowsResponse\x12?\n\tavro_rows\x18\x03 \x01(\x0b\x32*.google.cloud.bigquery.storage.v1.AvroRowsH\x00\x12P\n\x12\x61rrow_record_batch\x18\x04 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1.ArrowRecordBatchH\x00\x12\x11\n\trow_count\x18\x06 \x01(\x03\x12<\n\x05stats\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.StreamStats\x12G\n\x0ethrottle_state\x18\x05 \x01(\x0b\x32/.google.cloud.bigquery.storage.v1.ThrottleState\x12H\n\x0b\x61vro_schema\x18\x07 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.AvroSchemaB\x03\xe0\x41\x03H\x01\x12J\n\x0c\x61rrow_schema\x18\x08 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ArrowSchemaB\x03\xe0\x41\x03H\x01\x12(\n\x16uncompressed_byte_size\x18\t \x01(\x03\x42\x03\xe0\x41\x01H\x02\x88\x01\x01\x42\x06\n\x04rowsB\x08\n\x06schemaB\x19\n\x17_uncompressed_byte_size\"k\n\x16SplitReadStreamRequest\x12?\n\x04name\x18\x01 \x01(\tB1\xe0\x41\x02\xfa\x41+\n)bigquerystorage.googleapis.com/ReadStream\x12\x10\n\x08\x66raction\x18\x02 \x01(\x01\"\xa7\x01\n\x17SplitReadStreamResponse\x12\x44\n\x0eprimary_stream\x18\x01 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStream\x12\x46\n\x10remainder_stream\x18\x02 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStream\"\x9b\x01\n\x18\x43reateWriteStreamRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12H\n\x0cwrite_stream\x18\x02 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.WriteStreamB\x03\xe0\x41\x02\"\xf8\x08\n\x11\x41ppendRowsRequest\x12H\n\x0cwrite_stream\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\x12+\n\x06offset\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\x12S\n\nproto_rows\x18\x04 \x01(\x0b\x32=.google.cloud.bigquery.storage.v1.AppendRowsRequest.ProtoDataH\x00\x12S\n\narrow_rows\x18\x05 \x01(\x0b\x32=.google.cloud.bigquery.storage.v1.AppendRowsRequest.ArrowDataH\x00\x12\x10\n\x08trace_id\x18\x06 \x01(\t\x12{\n\x1dmissing_value_interpretations\x18\x07 
\x03(\x0b\x32T.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretationsEntry\x12\x81\x01\n$default_missing_value_interpretation\x18\x08 \x01(\x0e\x32N.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretationB\x03\xe0\x41\x01\x1a\x93\x01\n\tArrowData\x12\x44\n\rwriter_schema\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ArrowSchema\x12@\n\x04rows\x18\x02 \x01(\x0b\x32\x32.google.cloud.bigquery.storage.v1.ArrowRecordBatch\x1a\x8c\x01\n\tProtoData\x12\x44\n\rwriter_schema\x18\x01 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ProtoSchema\x12\x39\n\x04rows\x18\x02 \x01(\x0b\x32+.google.cloud.bigquery.storage.v1.ProtoRows\x1a\x92\x01\n MissingValueInterpretationsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12]\n\x05value\x18\x02 \x01(\x0e\x32N.google.cloud.bigquery.storage.v1.AppendRowsRequest.MissingValueInterpretation:\x02\x38\x01\"m\n\x1aMissingValueInterpretation\x12,\n(MISSING_VALUE_INTERPRETATION_UNSPECIFIED\x10\x00\x12\x0e\n\nNULL_VALUE\x10\x01\x12\x11\n\rDEFAULT_VALUE\x10\x02\x42\x06\n\x04rows\"\xfb\x02\n\x12\x41ppendRowsResponse\x12Z\n\rappend_result\x18\x01 \x01(\x0b\x32\x41.google.cloud.bigquery.storage.v1.AppendRowsResponse.AppendResultH\x00\x12#\n\x05\x65rror\x18\x02 \x01(\x0b\x32\x12.google.rpc.StatusH\x00\x12\x45\n\x0eupdated_schema\x18\x03 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.TableSchema\x12>\n\nrow_errors\x18\x04 \x03(\x0b\x32*.google.cloud.bigquery.storage.v1.RowError\x12\x14\n\x0cwrite_stream\x18\x05 \x01(\t\x1a;\n\x0c\x41ppendResult\x12+\n\x06offset\x18\x01 \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\n\n\x08response\"\x9a\x01\n\x15GetWriteStreamRequest\x12@\n\x04name\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\x12?\n\x04view\x18\x03 \x01(\x0e\x32\x31.google.cloud.bigquery.storage.v1.WriteStreamView\"s\n\x1e\x42\x61tchCommitWriteStreamsRequest\x12\x35\n\x06parent\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12\x1a\n\rwrite_streams\x18\x02 \x03(\tB\x03\xe0\x41\x02\"\x99\x01\n\x1f\x42\x61tchCommitWriteStreamsResponse\x12/\n\x0b\x63ommit_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x45\n\rstream_errors\x18\x02 \x03(\x0b\x32..google.cloud.bigquery.storage.v1.StorageError\"^\n\x1a\x46inalizeWriteStreamRequest\x12@\n\x04name\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\"0\n\x1b\x46inalizeWriteStreamResponse\x12\x11\n\trow_count\x18\x01 \x01(\x03\"\x89\x01\n\x10\x46lushRowsRequest\x12H\n\x0cwrite_stream\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*bigquerystorage.googleapis.com/WriteStream\x12+\n\x06offset\x18\x02 \x01(\x0b\x32\x1b.google.protobuf.Int64Value\"#\n\x11\x46lushRowsResponse\x12\x0e\n\x06offset\x18\x01 \x01(\x03\"\xa4\x04\n\x0cStorageError\x12M\n\x04\x63ode\x18\x01 \x01(\x0e\x32?.google.cloud.bigquery.storage.v1.StorageError.StorageErrorCode\x12\x0e\n\x06\x65ntity\x18\x02 \x01(\t\x12\x15\n\rerror_message\x18\x03 \x01(\t\"\x9d\x03\n\x10StorageErrorCode\x12\"\n\x1eSTORAGE_ERROR_CODE_UNSPECIFIED\x10\x00\x12\x13\n\x0fTABLE_NOT_FOUND\x10\x01\x12\x1c\n\x18STREAM_ALREADY_COMMITTED\x10\x02\x12\x14\n\x10STREAM_NOT_FOUND\x10\x03\x12\x17\n\x13INVALID_STREAM_TYPE\x10\x04\x12\x18\n\x14INVALID_STREAM_STATE\x10\x05\x12\x14\n\x10STREAM_FINALIZED\x10\x06\x12 
\n\x1cSCHEMA_MISMATCH_EXTRA_FIELDS\x10\x07\x12\x19\n\x15OFFSET_ALREADY_EXISTS\x10\x08\x12\x17\n\x13OFFSET_OUT_OF_RANGE\x10\t\x12\x15\n\x11\x43MEK_NOT_PROVIDED\x10\n\x12\x19\n\x15INVALID_CMEK_PROVIDED\x10\x0b\x12\x19\n\x15\x43MEK_ENCRYPTION_ERROR\x10\x0c\x12\x15\n\x11KMS_SERVICE_ERROR\x10\r\x12\x19\n\x15KMS_PERMISSION_DENIED\x10\x0e\"\xb3\x01\n\x08RowError\x12\r\n\x05index\x18\x01 \x01(\x03\x12\x45\n\x04\x63ode\x18\x02 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.RowError.RowErrorCode\x12\x0f\n\x07message\x18\x03 \x01(\t\"@\n\x0cRowErrorCode\x12\x1e\n\x1aROW_ERROR_CODE_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x46IELDS_ERROR\x10\x01\x32\x92\x06\n\x0c\x42igQueryRead\x12\xe9\x01\n\x11\x43reateReadSession\x12:.google.cloud.bigquery.storage.v1.CreateReadSessionRequest\x1a-.google.cloud.bigquery.storage.v1.ReadSession\"i\xda\x41$parent,read_session,max_stream_count\x82\xd3\xe4\x93\x02<\"7/v1/{read_session.table=projects/*/datasets/*/tables/*}:\x01*\x12\xcf\x01\n\x08ReadRows\x12\x31.google.cloud.bigquery.storage.v1.ReadRowsRequest\x1a\x32.google.cloud.bigquery.storage.v1.ReadRowsResponse\"Z\xda\x41\x12read_stream,offset\x82\xd3\xe4\x93\x02?\x12=/v1/{read_stream=projects/*/locations/*/sessions/*/streams/*}0\x01\x12\xc6\x01\n\x0fSplitReadStream\x12\x38.google.cloud.bigquery.storage.v1.SplitReadStreamRequest\x1a\x39.google.cloud.bigquery.storage.v1.SplitReadStreamResponse\">\x82\xd3\xe4\x93\x02\x38\x12\x36/v1/{name=projects/*/locations/*/sessions/*/streams/*}\x1a{\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41Whttps://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/cloud-platform2\xbc\x0b\n\rBigQueryWrite\x12\xd7\x01\n\x11\x43reateWriteStream\x12:.google.cloud.bigquery.storage.v1.CreateWriteStreamRequest\x1a-.google.cloud.bigquery.storage.v1.WriteStream\"W\xda\x41\x13parent,write_stream\x82\xd3\xe4\x93\x02;\"+/v1/{parent=projects/*/datasets/*/tables/*}:\x0cwrite_stream\x12\xd2\x01\n\nAppendRows\x12\x33.google.cloud.bigquery.storage.v1.AppendRowsRequest\x1a\x34.google.cloud.bigquery.storage.v1.AppendRowsResponse\"U\xda\x41\x0cwrite_stream\x82\xd3\xe4\x93\x02@\";/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}:\x01*(\x01\x30\x01\x12\xbf\x01\n\x0eGetWriteStream\x12\x37.google.cloud.bigquery.storage.v1.GetWriteStreamRequest\x1a-.google.cloud.bigquery.storage.v1.WriteStream\"E\xda\x41\x04name\x82\xd3\xe4\x93\x02\x38\"3/v1/{name=projects/*/datasets/*/tables/*/streams/*}:\x01*\x12\xd9\x01\n\x13\x46inalizeWriteStream\x12<.google.cloud.bigquery.storage.v1.FinalizeWriteStreamRequest\x1a=.google.cloud.bigquery.storage.v1.FinalizeWriteStreamResponse\"E\xda\x41\x04name\x82\xd3\xe4\x93\x02\x38\"3/v1/{name=projects/*/datasets/*/tables/*/streams/*}:\x01*\x12\xdc\x01\n\x17\x42\x61tchCommitWriteStreams\x12@.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsRequest\x1a\x41.google.cloud.bigquery.storage.v1.BatchCommitWriteStreamsResponse\"<\xda\x41\x06parent\x82\xd3\xe4\x93\x02-\x12+/v1/{parent=projects/*/datasets/*/tables/*}\x12\xcb\x01\n\tFlushRows\x12\x32.google.cloud.bigquery.storage.v1.FlushRowsRequest\x1a\x33.google.cloud.bigquery.storage.v1.FlushRowsResponse\"U\xda\x41\x0cwrite_stream\x82\xd3\xe4\x93\x02@\";/v1/{write_stream=projects/*/datasets/*/tables/*/streams/*}:\x01*\x1a\xb0\x01\xca\x41\x1e\x62igquerystorage.googleapis.com\xd2\x41\x8b\x01https://www.googleapis.com/auth/bigquery,https://www.googleapis.com/auth/bigquery.insertdata,https://www.googleapis.com/auth/cloud-platformB\x94\x02\n$com.google.cloud.bigquery.storage.v1B\x0cStorageProtoP\x01Z>cloud.google.com/go/bigque
ry/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1\xea\x41U\n\x1d\x62igquery.googleapis.com/Table\x12\x34projects/{project}/datasets/{dataset}/tables/{table}b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-     ["google.cloud.bigquery.storage.v1.ReadSession", "google/cloud/bigquery/storage/v1/stream.proto"],
-     ["google.cloud.bigquery.storage.v1.AvroRows", "google/cloud/bigquery/storage/v1/avro.proto"],
-     ["google.cloud.bigquery.storage.v1.ArrowRecordBatch", "google/cloud/bigquery/storage/v1/arrow.proto"],
-     ["google.protobuf.Int64Value", "google/protobuf/wrappers.proto"],
-     ["google.cloud.bigquery.storage.v1.ProtoSchema", "google/cloud/bigquery/storage/v1/protobuf.proto"],
-     ["google.rpc.Status", "google/rpc/status.proto"],
-     ["google.cloud.bigquery.storage.v1.TableSchema", "google/cloud/bigquery/storage/v1/table.proto"],
-     ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -57,9 +57,9 @@ module Google
  # not require manual clean-up by the caller.
  rpc :CreateReadSession, ::Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest, ::Google::Cloud::Bigquery::Storage::V1::ReadSession
  # Reads rows from the stream in the format prescribed by the ReadSession.
- # Each response contains one or more table rows, up to a maximum of 100 MiB
+ # Each response contains one or more table rows, up to a maximum of 128 MB
  # per response; read requests which attempt to read individual rows larger
- # than 100 MiB will fail.
+ # than 128 MB will fail.
  #
  # Each request also returns a set of stream statistics reflecting the current
  # state of the stream.
@@ -12,35 +12,10 @@ require 'google/cloud/bigquery/storage/v1/table_pb'
  require 'google/protobuf/timestamp_pb'


- descriptor_data = "\n-google/cloud/bigquery/storage/v1/stream.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/bigquery/storage/v1/arrow.proto\x1a+google/cloud/bigquery/storage/v1/avro.proto\x1a,google/cloud/bigquery/storage/v1/table.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc3\x0c\n\x0bReadSession\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x46\n\x0b\x64\x61ta_format\x18\x03 \x01(\x0e\x32,.google.cloud.bigquery.storage.v1.DataFormatB\x03\xe0\x41\x05\x12H\n\x0b\x61vro_schema\x18\x04 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.AvroSchemaB\x03\xe0\x41\x03H\x00\x12J\n\x0c\x61rrow_schema\x18\x05 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ArrowSchemaB\x03\xe0\x41\x03H\x00\x12\x34\n\x05table\x18\x06 \x01(\tB%\xe0\x41\x05\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12Z\n\x0ftable_modifiers\x18\x07 \x01(\x0b\x32<.google.cloud.bigquery.storage.v1.ReadSession.TableModifiersB\x03\xe0\x41\x01\x12Y\n\x0cread_options\x18\x08 \x01(\x0b\x32>.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptionsB\x03\xe0\x41\x01\x12\x42\n\x07streams\x18\n \x03(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStreamB\x03\xe0\x41\x03\x12*\n\x1d\x65stimated_total_bytes_scanned\x18\x0c \x01(\x03\x42\x03\xe0\x41\x03\x12/\n\"estimated_total_physical_file_size\x18\x0f \x01(\x03\x42\x03\xe0\x41\x03\x12 \n\x13\x65stimated_row_count\x18\x0e \x01(\x03\x42\x03\xe0\x41\x03\x12\x15\n\x08trace_id\x18\r \x01(\tB\x03\xe0\x41\x01\x1a\x43\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x89\x05\n\x10TableReadOptions\x12\x17\n\x0fselected_fields\x18\x01 \x03(\t\x12\x17\n\x0frow_restriction\x18\x02 \x01(\t\x12g\n\x1b\x61rrow_serialization_options\x18\x03 \x01(\x0b\x32;.google.cloud.bigquery.storage.v1.ArrowSerializationOptionsB\x03\xe0\x41\x01H\x00\x12\x65\n\x1a\x61vro_serialization_options\x18\x04 \x01(\x0b\x32:.google.cloud.bigquery.storage.v1.AvroSerializationOptionsB\x03\xe0\x41\x01H\x00\x12#\n\x11sample_percentage\x18\x05 \x01(\x01\x42\x03\xe0\x41\x01H\x01\x88\x01\x01\x12\x85\x01\n\x1aresponse_compression_codec\x18\x06 \x01(\x0e\x32W.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodecB\x03\xe0\x41\x01H\x02\x88\x01\x01\"j\n\x18ResponseCompressionCodec\x12*\n&RESPONSE_COMPRESSION_CODEC_UNSPECIFIED\x10\x00\x12\"\n\x1eRESPONSE_COMPRESSION_CODEC_LZ4\x10\x02\x42%\n#output_format_serialization_optionsB\x14\n\x12_sample_percentageB\x1d\n\x1b_response_compression_codec:k\xea\x41h\n*bigquerystorage.googleapis.com/ReadSession\x12:projects/{project}/locations/{location}/sessions/{session}B\x08\n\x06schema\"\x9c\x01\n\nReadStream\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03:{\xea\x41x\n)bigquerystorage.googleapis.com/ReadStream\x12Kprojects/{project}/locations/{location}/sessions/{session}/streams/{stream}\"\xfb\x04\n\x0bWriteStream\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x45\n\x04type\x18\x02 \x01(\x0e\x32\x32.google.cloud.bigquery.storage.v1.WriteStream.TypeB\x03\xe0\x41\x05\x12\x34\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x0ctable_schema\x18\x05 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.TableSchemaB\x03\xe0\x41\x03\x12P\n\nwrite_mode\x18\x07 
\x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.WriteStream.WriteModeB\x03\xe0\x41\x05\x12\x15\n\x08location\x18\x08 \x01(\tB\x03\xe0\x41\x05\"F\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\r\n\tCOMMITTED\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0c\n\x08\x42UFFERED\x10\x03\"3\n\tWriteMode\x12\x1a\n\x16WRITE_MODE_UNSPECIFIED\x10\x00\x12\n\n\x06INSERT\x10\x01:v\xea\x41s\n*bigquerystorage.googleapis.com/WriteStream\x12\x45projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x02*I\n\x0fWriteStreamView\x12!\n\x1dWRITE_STREAM_VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02\x42\xbb\x01\n$com.google.cloud.bigquery.storage.v1B\x0bStreamProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+ descriptor_data = "\n-google/cloud/bigquery/storage/v1/stream.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a,google/cloud/bigquery/storage/v1/arrow.proto\x1a+google/cloud/bigquery/storage/v1/avro.proto\x1a,google/cloud/bigquery/storage/v1/table.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xc3\x0c\n\x0bReadSession\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x34\n\x0b\x65xpire_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x46\n\x0b\x64\x61ta_format\x18\x03 \x01(\x0e\x32,.google.cloud.bigquery.storage.v1.DataFormatB\x03\xe0\x41\x05\x12H\n\x0b\x61vro_schema\x18\x04 \x01(\x0b\x32,.google.cloud.bigquery.storage.v1.AvroSchemaB\x03\xe0\x41\x03H\x00\x12J\n\x0c\x61rrow_schema\x18\x05 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.ArrowSchemaB\x03\xe0\x41\x03H\x00\x12\x34\n\x05table\x18\x06 \x01(\tB%\xe0\x41\x05\xfa\x41\x1f\n\x1d\x62igquery.googleapis.com/Table\x12Z\n\x0ftable_modifiers\x18\x07 \x01(\x0b\x32<.google.cloud.bigquery.storage.v1.ReadSession.TableModifiersB\x03\xe0\x41\x01\x12Y\n\x0cread_options\x18\x08 \x01(\x0b\x32>.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptionsB\x03\xe0\x41\x01\x12\x42\n\x07streams\x18\n \x03(\x0b\x32,.google.cloud.bigquery.storage.v1.ReadStreamB\x03\xe0\x41\x03\x12*\n\x1d\x65stimated_total_bytes_scanned\x18\x0c \x01(\x03\x42\x03\xe0\x41\x03\x12/\n\"estimated_total_physical_file_size\x18\x0f \x01(\x03\x42\x03\xe0\x41\x03\x12 \n\x13\x65stimated_row_count\x18\x0e \x01(\x03\x42\x03\xe0\x41\x03\x12\x15\n\x08trace_id\x18\r \x01(\tB\x03\xe0\x41\x01\x1a\x43\n\x0eTableModifiers\x12\x31\n\rsnapshot_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x1a\x89\x05\n\x10TableReadOptions\x12\x17\n\x0fselected_fields\x18\x01 \x03(\t\x12\x17\n\x0frow_restriction\x18\x02 \x01(\t\x12g\n\x1b\x61rrow_serialization_options\x18\x03 \x01(\x0b\x32;.google.cloud.bigquery.storage.v1.ArrowSerializationOptionsB\x03\xe0\x41\x01H\x00\x12\x65\n\x1a\x61vro_serialization_options\x18\x04 \x01(\x0b\x32:.google.cloud.bigquery.storage.v1.AvroSerializationOptionsB\x03\xe0\x41\x01H\x00\x12#\n\x11sample_percentage\x18\x05 \x01(\x01\x42\x03\xe0\x41\x01H\x01\x88\x01\x01\x12\x85\x01\n\x1aresponse_compression_codec\x18\x06 \x01(\x0e\x32W.google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions.ResponseCompressionCodecB\x03\xe0\x41\x01H\x02\x88\x01\x01\"j\n\x18ResponseCompressionCodec\x12*\n&RESPONSE_COMPRESSION_CODEC_UNSPECIFIED\x10\x00\x12\"\n\x1eRESPONSE_COMPRESSION_CODEC_LZ4\x10\x02\x42%\n#output_format_serialization_optionsB\x14\n\x12_sample_percentageB\x1d\n\x1b_response_compression_codec:k\xea\x41h\n*bigquerystorage.googleapis.com/ReadSession\x12:projects/{project}/locations/{location}/sessions/{session}B\x08\n\x06schema\"\x9c\x01\n\nReadStream\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03:{\xea\x41x\n)bigquerystorage.googleapis.com/ReadStream\x12Kprojects/{project}/locations/{location}/sessions/{session}/streams/{stream}\"\xfb\x04\n\x0bWriteStream\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x45\n\x04type\x18\x02 \x01(\x0e\x32\x32.google.cloud.bigquery.storage.v1.WriteStream.TypeB\x03\xe0\x41\x05\x12\x34\n\x0b\x63reate_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0b\x63ommit_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12H\n\x0ctable_schema\x18\x05 \x01(\x0b\x32-.google.cloud.bigquery.storage.v1.TableSchemaB\x03\xe0\x41\x03\x12P\n\nwrite_mode\x18\x07 
\x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.WriteStream.WriteModeB\x03\xe0\x41\x05\x12\x15\n\x08location\x18\x08 \x01(\tB\x03\xe0\x41\x03\"F\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\r\n\tCOMMITTED\x10\x01\x12\x0b\n\x07PENDING\x10\x02\x12\x0c\n\x08\x42UFFERED\x10\x03\"3\n\tWriteMode\x12\x1a\n\x16WRITE_MODE_UNSPECIFIED\x10\x00\x12\n\n\x06INSERT\x10\x01:v\xea\x41s\n*bigquerystorage.googleapis.com/WriteStream\x12\x45projects/{project}/datasets/{dataset}/tables/{table}/streams/{stream}*>\n\nDataFormat\x12\x1b\n\x17\x44\x41TA_FORMAT_UNSPECIFIED\x10\x00\x12\x08\n\x04\x41VRO\x10\x01\x12\t\n\x05\x41RROW\x10\x02*I\n\x0fWriteStreamView\x12!\n\x1dWRITE_STREAM_VIEW_UNSPECIFIED\x10\x00\x12\t\n\x05\x42\x41SIC\x10\x01\x12\x08\n\x04\x46ULL\x10\x02\x42\xbb\x01\n$com.google.cloud.bigquery.storage.v1B\x0bStreamProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-     ["google.protobuf.Timestamp", "google/protobuf/timestamp.proto"],
-     ["google.cloud.bigquery.storage.v1.AvroSchema", "google/cloud/bigquery/storage/v1/avro.proto"],
-     ["google.cloud.bigquery.storage.v1.ArrowSchema", "google/cloud/bigquery/storage/v1/arrow.proto"],
-     ["google.cloud.bigquery.storage.v1.TableSchema", "google/cloud/bigquery/storage/v1/table.proto"],
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -5,33 +5,13 @@
  require 'google/protobuf'

  require 'google/api/field_behavior_pb'
+ require 'google/protobuf/wrappers_pb'


- descriptor_data = "\n,google/cloud/bigquery/storage/v1/table.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1fgoogle/api/field_behavior.proto\"Q\n\x0bTableSchema\x12\x42\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.storage.v1.TableFieldSchema\"\xfd\x06\n\x10TableFieldSchema\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12J\n\x04type\x18\x02 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.TypeB\x03\xe0\x41\x02\x12J\n\x04mode\x18\x03 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.ModeB\x03\xe0\x41\x01\x12G\n\x06\x66ields\x18\x04 \x03(\x0b\x32\x32.google.cloud.bigquery.storage.v1.TableFieldSchemaB\x03\xe0\x41\x01\x12\x18\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nmax_length\x18\x07 \x01(\x03\x42\x03\xe0\x41\x01\x12\x16\n\tprecision\x18\x08 \x01(\x03\x42\x03\xe0\x41\x01\x12\x12\n\x05scale\x18\t \x01(\x03\x42\x03\xe0\x41\x01\x12%\n\x18\x64\x65\x66\x61ult_value_expression\x18\n \x01(\tB\x03\xe0\x41\x01\x12\x64\n\x12range_element_type\x18\x0b \x01(\x0b\x32\x43.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementTypeB\x03\xe0\x41\x01\x1a^\n\x10\x46ieldElementType\x12J\n\x04type\x18\x01 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.TypeB\x03\xe0\x41\x02\"\xe0\x01\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\t\n\x05INT64\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\n\n\x06STRUCT\x10\x04\x12\t\n\x05\x42YTES\x10\x05\x12\x08\n\x04\x42OOL\x10\x06\x12\r\n\tTIMESTAMP\x10\x07\x12\x08\n\x04\x44\x41TE\x10\x08\x12\x08\n\x04TIME\x10\t\x12\x0c\n\x08\x44\x41TETIME\x10\n\x12\r\n\tGEOGRAPHY\x10\x0b\x12\x0b\n\x07NUMERIC\x10\x0c\x12\x0e\n\nBIGNUMERIC\x10\r\x12\x0c\n\x08INTERVAL\x10\x0e\x12\x08\n\x04JSON\x10\x0f\x12\t\n\x05RANGE\x10\x10\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x0c\n\x08NULLABLE\x10\x01\x12\x0c\n\x08REQUIRED\x10\x02\x12\x0c\n\x08REPEATED\x10\x03\x42\xba\x01\n$com.google.cloud.bigquery.storage.v1B\nTableProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"
+ descriptor_data = "\n,google/cloud/bigquery/storage/v1/table.proto\x12 google.cloud.bigquery.storage.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1egoogle/protobuf/wrappers.proto\"Q\n\x0bTableSchema\x12\x42\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x32.google.cloud.bigquery.storage.v1.TableFieldSchema\"\xbc\x07\n\x10TableFieldSchema\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12J\n\x04type\x18\x02 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.TypeB\x03\xe0\x41\x02\x12J\n\x04mode\x18\x03 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.ModeB\x03\xe0\x41\x01\x12G\n\x06\x66ields\x18\x04 \x03(\x0b\x32\x32.google.cloud.bigquery.storage.v1.TableFieldSchemaB\x03\xe0\x41\x01\x12\x18\n\x0b\x64\x65scription\x18\x06 \x01(\tB\x03\xe0\x41\x01\x12\x17\n\nmax_length\x18\x07 \x01(\x03\x42\x03\xe0\x41\x01\x12\x16\n\tprecision\x18\x08 \x01(\x03\x42\x03\xe0\x41\x01\x12\x12\n\x05scale\x18\t \x01(\x03\x42\x03\xe0\x41\x01\x12%\n\x18\x64\x65\x66\x61ult_value_expression\x18\n \x01(\tB\x03\xe0\x41\x01\x12=\n\x13timestamp_precision\x18\x1b \x01(\x0b\x32\x1b.google.protobuf.Int64ValueB\x03\xe0\x41\x01\x12\x64\n\x12range_element_type\x18\x0b \x01(\x0b\x32\x43.google.cloud.bigquery.storage.v1.TableFieldSchema.FieldElementTypeB\x03\xe0\x41\x01\x1a^\n\x10\x46ieldElementType\x12J\n\x04type\x18\x01 \x01(\x0e\x32\x37.google.cloud.bigquery.storage.v1.TableFieldSchema.TypeB\x03\xe0\x41\x02\"\xe0\x01\n\x04Type\x12\x14\n\x10TYPE_UNSPECIFIED\x10\x00\x12\n\n\x06STRING\x10\x01\x12\t\n\x05INT64\x10\x02\x12\n\n\x06\x44OUBLE\x10\x03\x12\n\n\x06STRUCT\x10\x04\x12\t\n\x05\x42YTES\x10\x05\x12\x08\n\x04\x42OOL\x10\x06\x12\r\n\tTIMESTAMP\x10\x07\x12\x08\n\x04\x44\x41TE\x10\x08\x12\x08\n\x04TIME\x10\t\x12\x0c\n\x08\x44\x41TETIME\x10\n\x12\r\n\tGEOGRAPHY\x10\x0b\x12\x0b\n\x07NUMERIC\x10\x0c\x12\x0e\n\nBIGNUMERIC\x10\r\x12\x0c\n\x08INTERVAL\x10\x0e\x12\x08\n\x04JSON\x10\x0f\x12\t\n\x05RANGE\x10\x10\"F\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\x0c\n\x08NULLABLE\x10\x01\x12\x0c\n\x08REQUIRED\x10\x02\x12\x0c\n\x08REPEATED\x10\x03\x42\xba\x01\n$com.google.cloud.bigquery.storage.v1B\nTableProtoP\x01Z>cloud.google.com/go/bigquery/storage/apiv1/storagepb;storagepb\xaa\x02 Google.Cloud.BigQuery.Storage.V1\xca\x02 Google\\Cloud\\BigQuery\\Storage\\V1b\x06proto3"

- pool = Google::Protobuf::DescriptorPool.generated_pool
-
- begin
-   pool.add_serialized_file(descriptor_data)
- rescue TypeError
-   # Compatibility code: will be removed in the next major version.
-   require 'google/protobuf/descriptor_pb'
-   parsed = Google::Protobuf::FileDescriptorProto.decode(descriptor_data)
-   parsed.clear_dependency
-   serialized = parsed.class.encode(parsed)
-   file = pool.add_serialized_file(serialized)
-   warn "Warning: Protobuf detected an import path issue while loading generated file #{__FILE__}"
-   imports = [
-   ]
-   imports.each do |type_name, expected_filename|
-     import_file = pool.lookup(type_name).file_descriptor
-     if import_file.name != expected_filename
-       warn "- #{file.name} imports #{expected_filename}, but that import was loaded as #{import_file.name}"
-     end
-   end
-   warn "Each proto file must use a consistent fully-qualified name."
-   warn "This will become an error in the next major version."
- end
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
+ pool.add_serialized_file(descriptor_data)

  module Google
    module Cloud
@@ -22,7 +22,7 @@ module Google
  module Bigquery
    module Storage
      module V1
-       VERSION = "1.5.1"
+       VERSION = "1.7.0"
      end
    end
  end
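
Downstream applications can pick up the new minor version with a pessimistic constraint, for example in a Gemfile:

    # Gemfile - tracks 1.7.x and later compatible 1.x releases
    gem "google-cloud-bigquery-storage-v1", "~> 1.7"
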
@@ -55,6 +55,10 @@ module Google
  #   @return [::Google::Cloud::Bigquery::Storage::V1::ArrowSerializationOptions::CompressionCodec]
  #     The compression codec to use for Arrow buffers in serialized record
  #     batches.
+ # @!attribute [rw] picos_timestamp_precision
+ #   @return [::Google::Cloud::Bigquery::Storage::V1::ArrowSerializationOptions::PicosTimestampPrecision]
+ #     Optional. Set timestamp precision option. If not set, the default precision
+ #     is microseconds.
  class ArrowSerializationOptions
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
@@ -70,6 +74,27 @@ module Google
      # Zstandard compression.
      ZSTD = 2
    end
+
+   # The precision of the timestamp value in the Arrow message. This precision
+   # will **only** be applied to the column(s) with the `TIMESTAMP_PICOS` type.
+   module PicosTimestampPrecision
+     # Unspecified timestamp precision. The default precision is microseconds.
+     PICOS_TIMESTAMP_PRECISION_UNSPECIFIED = 0
+
+     # Timestamp values returned by Read API will be truncated to microsecond
+     # level precision. The value will be encoded as Arrow TIMESTAMP type in a
+     # 64 bit integer.
+     TIMESTAMP_PRECISION_MICROS = 1
+
+     # Timestamp values returned by Read API will be truncated to nanosecond
+     # level precision. The value will be encoded as Arrow TIMESTAMP type in a
+     # 64 bit integer.
+     TIMESTAMP_PRECISION_NANOS = 2
+
+     # Read API will return full precision picosecond value. The value will be
+     # encoded as a string which conforms to ISO 8601 format.
+     TIMESTAMP_PRECISION_PICOS = 3
+   end
  end
  end
  end
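
Putting the new Arrow option together: a hedged sketch of creating a read session that truncates TIMESTAMP_PICOS columns to nanoseconds. The project and table names are placeholders, and the generated client accepts plain hashes for nested messages:

    require "google/cloud/bigquery/storage/v1"

    client = Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

    session = client.create_read_session(
      parent: "projects/my-project", # placeholder
      read_session: {
        table:       "projects/my-project/datasets/d/tables/t", # placeholder
        data_format: :ARROW,
        read_options: {
          # Only affects TIMESTAMP_PICOS columns; unset means microseconds.
          arrow_serialization_options: {
            picos_timestamp_precision: :TIMESTAMP_PRECISION_NANOS
          }
        }
      },
      max_stream_count: 1
    )
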
@@ -59,9 +59,34 @@ module Google
  #     Setting this field to true, populates avro field names with a placeholder
  #     value and populates a "displayName" attribute for every avro field with the
  #     original column name.
+ # @!attribute [rw] picos_timestamp_precision
+ #   @return [::Google::Cloud::Bigquery::Storage::V1::AvroSerializationOptions::PicosTimestampPrecision]
+ #     Optional. Set timestamp precision option. If not set, the default precision
+ #     is microseconds.
  class AvroSerializationOptions
    include ::Google::Protobuf::MessageExts
    extend ::Google::Protobuf::MessageExts::ClassMethods
+
+   # The precision of the timestamp value in the Avro message. This precision
+   # will **only** be applied to the column(s) with the `TIMESTAMP_PICOS` type.
+   module PicosTimestampPrecision
+     # Unspecified timestamp precision. The default precision is microseconds.
+     PICOS_TIMESTAMP_PRECISION_UNSPECIFIED = 0
+
+     # Timestamp values returned by Read API will be truncated to microsecond
+     # level precision. The value will be encoded as Avro TIMESTAMP type in a
+     # 64 bit integer.
+     TIMESTAMP_PRECISION_MICROS = 1
+
+     # Timestamp values returned by Read API will be truncated to nanosecond
+     # level precision. The value will be encoded as Avro TIMESTAMP type in a
+     # 64 bit integer.
+     TIMESTAMP_PRECISION_NANOS = 2
+
+     # Read API will return full precision picosecond value. The value will be
+     # encoded as a string which conforms to ISO 8601 format.
+     TIMESTAMP_PRECISION_PICOS = 3
+   end
  end
  end
  end
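
The Avro side is symmetric; only the serialization-options field differs. A fragment of the corresponding read_options hash, with field names taken from the avro.proto change above:

    read_options = {
      avro_serialization_options: {
        enable_display_name_attribute: true,
        # Full-precision picosecond values come back as ISO 8601 strings.
        picos_timestamp_precision: :TIMESTAMP_PRECISION_PICOS
      }
    }
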
@@ -268,8 +268,7 @@ module Google
  #     Note: The following fields are mutually exclusive: `proto_rows`, `arrow_rows`. If a field in that set is populated, all other fields in the set will automatically be cleared.
  # @!attribute [rw] arrow_rows
  #   @return [::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest::ArrowData]
- #     Rows in arrow format. This is an experimental feature only selected for
- #     allowlisted customers.
+ #     Rows in arrow format.
  #
  #     Note: The following fields are mutually exclusive: `arrow_rows`, `proto_rows`. If a field in that set is populated, all other fields in the set will automatically be cleared.
  # @!attribute [rw] trace_id
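
With the allowlist caveat dropped, arrow_rows is documented as a general alternative to proto_rows. A sketch of the request shape, where the serialized schema and record-batch bytes are placeholders produced by an Arrow writer, and only one of the two rows fields may be set:

    request = Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new(
      write_stream: "projects/p/datasets/d/tables/t/streams/_default", # placeholder
      arrow_rows: {
        writer_schema: { serialized_schema: arrow_schema_bytes },      # placeholder bytes
        rows:          { serialized_record_batch: record_batch_bytes } # placeholder bytes
      }
    )
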
@@ -300,8 +299,8 @@ module Google
  #   @return [::Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest::MissingValueInterpretation]
  #     Optional. Default missing value interpretation for all columns in the
  #     table. When a value is specified on an `AppendRowsRequest`, it is applied
- #     to all requests on the connection from that point forward, until a
- #     subsequent `AppendRowsRequest` sets it to a different value.
+ #     to all requests from that point forward, until a subsequent
+ #     `AppendRowsRequest` sets it to a different value.
  #     `missing_value_interpretation` can override
  #     `default_missing_value_interpretation`. For example, if you want to write
  #     `NULL` instead of using default values for some columns, you can set
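
In other words, the default is sticky from the request that sets it onward, and per-column entries win over it. A sketch, with a hypothetical column name:

    request = Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new(
      write_stream: "projects/p/datasets/d/tables/t/streams/_default", # placeholder
      # Missing columns take the table's default values from this request onward...
      default_missing_value_interpretation: :DEFAULT_VALUE,
      # ...except `notes`, which is written as NULL when absent.
      missing_value_interpretations: { "notes" => :NULL_VALUE }
    )
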
@@ -312,8 +311,6 @@ module Google
    extend ::Google::Protobuf::MessageExts::ClassMethods

    # Arrow schema and data.
-   # Arrow format is an experimental feature only selected for allowlisted
-   # customers.
    # @!attribute [rw] writer_schema
    #   @return [::Google::Cloud::Bigquery::Storage::V1::ArrowSchema]
    #     Optional. Arrow Schema used to serialize the data.
@@ -329,8 +326,8 @@ module Google
  #     requests.
  # @!attribute [rw] writer_schema
  #   @return [::Google::Cloud::Bigquery::Storage::V1::ProtoSchema]
- #     The protocol buffer schema used to serialize the data. Provide this value
- #     whenever:
+ #     Optional. The protocol buffer schema used to serialize the data. Provide
+ #     this value whenever:
  #
  #     * You send the first request of an RPC connection.
  #
@@ -339,7 +336,7 @@ module Google
  #     * You specify a new destination table.
  # @!attribute [rw] rows
  #   @return [::Google::Cloud::Bigquery::Storage::V1::ProtoRows]
- #     Serialized row data in protobuf message format.
+ #     Required. Serialized row data in protobuf message format.
  #     Currently, the backend expects the serialized rows to adhere to
  #     proto2 semantics when appending rows, particularly with respect to
  #     how default values are encoded.
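
A sketch of the first request on an AppendRows connection, which per the notes above must carry the writer schema alongside proto2-encoded rows. `MyRow` and `row_descriptor_proto` are placeholders; obtaining the google.protobuf.DescriptorProto for the row message is elided:

    first_request = Google::Cloud::Bigquery::Storage::V1::AppendRowsRequest.new(
      write_stream: "projects/p/datasets/d/tables/t/streams/_default", # placeholder
      proto_rows: {
        writer_schema: { proto_descriptor: row_descriptor_proto }, # elided placeholder
        rows: { serialized_rows: [MyRow.encode(MyRow.new(id: 1))] }
      }
    )

    # append_rows is bidirectional streaming: pass an Enumerable of requests,
    # then iterate the responses.
    write_client = Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new
    write_client.append_rows([first_request]).each { |resp| p resp.append_result&.offset }
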
@@ -248,10 +248,10 @@ module Google
  # @!attribute [rw] write_mode
  #   @return [::Google::Cloud::Bigquery::Storage::V1::WriteStream::WriteMode]
  #     Immutable. Mode of the stream.
- # @!attribute [rw] location
+ # @!attribute [r] location
  #   @return [::String]
- #     Immutable. The geographic location where the stream's dataset resides. See
- #     https://cloud.google.com/bigquery/docs/locations for supported
+ #     Output only. The geographic location where the stream's dataset resides.
+ #     See https://cloud.google.com/bigquery/docs/locations for supported
  #     locations.
  class WriteStream
    include ::Google::Protobuf::MessageExts
@@ -107,6 +107,15 @@ module Google
  #   @return [::String]
  #     Optional. A SQL expression to specify the [default value]
  #     (https://cloud.google.com/bigquery/docs/default-values) for this field.
+ # @!attribute [rw] timestamp_precision
+ #   @return [::Google::Protobuf::Int64Value]
+ #     Optional. Precision (maximum number of total digits in base 10) for seconds
+ #     of TIMESTAMP type.
+ #
+ #     Possible values include:
+ #
+ #     * 6 (Default, for TIMESTAMP type with microsecond precision)
+ #     * 12 (For TIMESTAMP type with picosecond precision)
  # @!attribute [rw] range_element_type
  #   @return [::Google::Cloud::Bigquery::Storage::V1::TableFieldSchema::FieldElementType]
  #     Optional. The subtype of the RANGE, if the type of this field is RANGE. If
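
Because the field is an Int64Value wrapper rather than a bare int64, presence is explicit, so "unset" (the default of 6) is distinguishable from an explicitly written value. A sketch of declaring a picosecond-precision TIMESTAMP column, with a placeholder field name:

    field = Google::Cloud::Bigquery::Storage::V1::TableFieldSchema.new(
      name: "event_ts", # placeholder
      type: :TIMESTAMP,
      # Wrapper type: leaving this unset means the default precision of 6.
      timestamp_precision: Google::Protobuf::Int64Value.new(value: 12)
    )
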
@@ -22,6 +22,9 @@ module Google
  # Wrapper message for `double`.
  #
  # The JSON representation for `DoubleValue` is JSON number.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Float]
  #     The double value.
@@ -33,6 +36,9 @@ module Google
  # Wrapper message for `float`.
  #
  # The JSON representation for `FloatValue` is JSON number.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Float]
  #     The float value.
@@ -44,6 +50,9 @@ module Google
  # Wrapper message for `int64`.
  #
  # The JSON representation for `Int64Value` is JSON string.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Integer]
  #     The int64 value.
@@ -55,6 +64,9 @@ module Google
  # Wrapper message for `uint64`.
  #
  # The JSON representation for `UInt64Value` is JSON string.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Integer]
  #     The uint64 value.
@@ -66,6 +78,9 @@ module Google
  # Wrapper message for `int32`.
  #
  # The JSON representation for `Int32Value` is JSON number.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Integer]
  #     The int32 value.
@@ -77,6 +92,9 @@ module Google
  # Wrapper message for `uint32`.
  #
  # The JSON representation for `UInt32Value` is JSON number.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Integer]
  #     The uint32 value.
@@ -88,6 +106,9 @@ module Google
  # Wrapper message for `bool`.
  #
  # The JSON representation for `BoolValue` is JSON `true` and `false`.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::Boolean]
  #     The bool value.
@@ -99,6 +120,9 @@ module Google
  # Wrapper message for `string`.
  #
  # The JSON representation for `StringValue` is JSON string.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::String]
  #     The string value.
@@ -110,6 +134,9 @@ module Google
  # Wrapper message for `bytes`.
  #
  # The JSON representation for `BytesValue` is JSON string.
+ #
+ # Not recommended for use in new APIs, but still useful for legacy APIs and
+ # has no plan to be removed.
  # @!attribute [rw] value
  #   @return [::String]
  #     The bytes value.
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: google-cloud-bigquery-storage-v1
  version: !ruby/object:Gem::Version
-   version: 1.5.1
+   version: 1.7.0
  platform: ruby
  authors:
  - Google LLC
@@ -96,7 +96,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: '3.1'
+       version: '3.2'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="