google-cloud-bigquery-storage-v1 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,55 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2020 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+ require "googleauth"
20
+
21
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          module BigQueryRead
            # Default credential discovery for BigQueryRead API clients.
            #
            # Extends Google::Auth::Credentials with the OAuth scopes,
            # environment variables, and well-known file paths consulted
            # when locating credentials for this service.
            class Credentials < Google::Auth::Credentials
              # OAuth 2.0 scopes requested for BigQueryRead API calls.
              self.scope = %w[
                https://www.googleapis.com/auth/bigquery
                https://www.googleapis.com/auth/bigquery.readonly
                https://www.googleapis.com/auth/cloud-platform
              ]
              # Environment variables checked, in order, for a credentials
              # keyfile path (first group) or inline JSON (second group).
              self.env_vars = %w[
                BIGQUERY_STORAGE_CREDENTIALS
                BIGQUERY_STORAGE_KEYFILE
                GOOGLE_CLOUD_CREDENTIALS
                GOOGLE_CLOUD_KEYFILE
                GCLOUD_KEYFILE
                BIGQUERY_STORAGE_CREDENTIALS_JSON
                BIGQUERY_STORAGE_KEYFILE_JSON
                GOOGLE_CLOUD_CREDENTIALS_JSON
                GOOGLE_CLOUD_KEYFILE_JSON
                GCLOUD_KEYFILE_JSON
              ]
              # Filesystem locations searched for application default
              # credentials when no environment variable matches.
              self.paths = [
                "~/.config/google_cloud/application_default_credentials.json"
              ]
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,108 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2020 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+
20
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          module BigQueryRead
            # Path helper methods for the BigQueryRead API.
            #
            # Builds fully-qualified resource name strings from their
            # individual identifier segments.
            module Paths
              ##
              # Create a fully-qualified Project resource string.
              #
              # The resource will be in the following format:
              #
              # `projects/{project}`
              #
              # @param project [String]
              #
              # @return [String]
              def project_path project:
                "projects/#{project}"
              end

              ##
              # Create a fully-qualified ReadSession resource string.
              #
              # The resource will be in the following format:
              #
              # `projects/{project}/locations/{location}/sessions/{session}`
              #
              # @param project [String]
              # @param location [String]
              # @param session [String]
              #
              # @return [String]
              # @raise [ArgumentError] if a non-final segment contains "/"
              def read_session_path project:, location:, session:
                ensure_no_slash "project", project
                ensure_no_slash "location", location

                "projects/#{project}/locations/#{location}/sessions/#{session}"
              end

              ##
              # Create a fully-qualified ReadStream resource string.
              #
              # The resource will be in the following format:
              #
              # `projects/{project}/locations/{location}/sessions/{session}/streams/{stream}`
              #
              # @param project [String]
              # @param location [String]
              # @param session [String]
              # @param stream [String]
              #
              # @return [String]
              # @raise [ArgumentError] if a non-final segment contains "/"
              def read_stream_path project:, location:, session:, stream:
                ensure_no_slash "project", project
                ensure_no_slash "location", location
                ensure_no_slash "session", session

                "projects/#{project}/locations/#{location}/sessions/#{session}/streams/#{stream}"
              end

              ##
              # Create a fully-qualified Table resource string.
              #
              # The resource will be in the following format:
              #
              # `projects/{project}/datasets/{dataset}/tables/{table}`
              #
              # @param project [String]
              # @param dataset [String]
              # @param table [String]
              #
              # @return [String]
              # @raise [ArgumentError] if a non-final segment contains "/"
              def table_path project:, dataset:, table:
                ensure_no_slash "project", project
                ensure_no_slash "dataset", dataset

                "projects/#{project}/datasets/#{dataset}/tables/#{table}"
              end

              extend self

              private

              # Guard applied to every non-final path segment: a "/" inside a
              # segment would make the composed resource name ambiguous. The
              # final segment needs no check, so it is never passed here.
              def ensure_no_slash name, value
                raise ArgumentError, "#{name} cannot contain /" if value.to_s.include? "/"
              end
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,71 @@
1
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
2
+ # source: google/cloud/bigquery/storage/v1/storage.proto
3
+
4
+ require 'google/protobuf'
5
+
6
+ require 'google/api/annotations_pb'
7
+ require 'google/api/client_pb'
8
+ require 'google/api/field_behavior_pb'
9
+ require 'google/api/resource_pb'
10
+ require 'google/cloud/bigquery/storage/v1/arrow_pb'
11
+ require 'google/cloud/bigquery/storage/v1/avro_pb'
12
+ require 'google/cloud/bigquery/storage/v1/stream_pb'
13
# Registers the message descriptors from
# google/cloud/bigquery/storage/v1/storage.proto in the generated
# descriptor pool. Generated by protoc: field names, types, and numbers
# must stay in sync with the .proto source.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/storage.proto", :syntax => :proto3) do
    # Request for CreateReadSession: parent resource, session template,
    # and an upper bound on how many streams to allocate.
    add_message "google.cloud.bigquery.storage.v1.CreateReadSessionRequest" do
      optional :parent, :string, 1
      optional :read_session, :message, 2, "google.cloud.bigquery.storage.v1.ReadSession"
      optional :max_stream_count, :int32, 3
    end
    # Request for ReadRows: the stream to read and a row offset into it.
    add_message "google.cloud.bigquery.storage.v1.ReadRowsRequest" do
      optional :read_stream, :string, 1
      optional :offset, :int64, 2
    end
    add_message "google.cloud.bigquery.storage.v1.ThrottleState" do
      optional :throttle_percent, :int32, 1
    end
    add_message "google.cloud.bigquery.storage.v1.StreamStats" do
      optional :progress, :message, 2, "google.cloud.bigquery.storage.v1.StreamStats.Progress"
    end
    add_message "google.cloud.bigquery.storage.v1.StreamStats.Progress" do
      optional :at_response_start, :double, 1
      optional :at_response_end, :double, 2
    end
    add_message "google.cloud.bigquery.storage.v1.ReadRowsResponse" do
      optional :row_count, :int64, 6
      optional :stats, :message, 2, "google.cloud.bigquery.storage.v1.StreamStats"
      optional :throttle_state, :message, 5, "google.cloud.bigquery.storage.v1.ThrottleState"
      # proto3 oneof: at most one serialized rows payload is set.
      oneof :rows do
        optional :avro_rows, :message, 3, "google.cloud.bigquery.storage.v1.AvroRows"
        optional :arrow_record_batch, :message, 4, "google.cloud.bigquery.storage.v1.ArrowRecordBatch"
      end
    end
    add_message "google.cloud.bigquery.storage.v1.SplitReadStreamRequest" do
      optional :name, :string, 1
      optional :fraction, :double, 2
    end
    add_message "google.cloud.bigquery.storage.v1.SplitReadStreamResponse" do
      optional :primary_stream, :message, 1, "google.cloud.bigquery.storage.v1.ReadStream"
      optional :remainder_stream, :message, 2, "google.cloud.bigquery.storage.v1.ReadStream"
    end
  end
end
53
+
54
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          # Ruby message classes resolved from the storage.proto entries in
          # the generated descriptor pool.
          CreateReadSessionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.CreateReadSessionRequest").msgclass
          ReadRowsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadRowsRequest").msgclass
          ThrottleState = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ThrottleState").msgclass
          StreamStats = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.StreamStats").msgclass
          # Nested proto message, exposed as a constant under its parent class.
          StreamStats::Progress = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.StreamStats.Progress").msgclass
          ReadRowsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadRowsResponse").msgclass
          SplitReadStreamRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.SplitReadStreamRequest").msgclass
          SplitReadStreamResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.SplitReadStreamResponse").msgclass
        end
      end
    end
  end
end
@@ -0,0 +1,89 @@
1
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
2
+ # Source: google/cloud/bigquery/storage/v1/storage.proto for package 'google.cloud.bigquery.storage.v1'
3
+ # Original file comments:
4
+ # Copyright 2019 Google LLC.
5
+ #
6
+ # Licensed under the Apache License, Version 2.0 (the "License");
7
+ # you may not use this file except in compliance with the License.
8
+ # You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+ #
18
+ #
19
+
20
+ require 'grpc'
21
+ require 'google/cloud/bigquery/storage/v1/storage_pb'
22
+
23
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          module BigQueryRead
            # BigQuery Read API.
            #
            # The Read API can be used to read data from BigQuery.
            #
            # gRPC service definition generated from storage.proto; the rpc
            # declarations below must match the proto service exactly.
            class Service

              include GRPC::GenericService

              # Request/response messages are protobuf classes, serialized
              # via their #encode / #decode class methods.
              self.marshal_class_method = :encode
              self.unmarshal_class_method = :decode
              self.service_name = 'google.cloud.bigquery.storage.v1.BigQueryRead'

              # Creates a new read session. A read session divides the contents of a
              # BigQuery table into one or more streams, which can then be used to read
              # data from the table. The read session also specifies properties of the
              # data to be read, such as a list of columns or a push-down filter describing
              # the rows to be returned.
              #
              # A particular row can be read by at most one stream. When the caller has
              # reached the end of each stream in the session, then all the data in the
              # table has been read.
              #
              # Data is assigned to each stream such that roughly the same number of
              # rows can be read from each stream. Because the server-side unit for
              # assigning data is collections of rows, the API does not guarantee that
              # each stream will return the same number or rows. Additionally, the
              # limits are enforced based on the number of pre-filtered rows, so some
              # filters can lead to lopsided assignments.
              #
              # Read sessions automatically expire 24 hours after they are created and do
              # not require manual clean-up by the caller.
              rpc :CreateReadSession, CreateReadSessionRequest, ReadSession
              # Reads rows from the stream in the format prescribed by the ReadSession.
              # Each response contains one or more table rows, up to a maximum of 100 MiB
              # per response; read requests which attempt to read individual rows larger
              # than 100 MiB will fail.
              #
              # Each request also returns a set of stream statistics reflecting the current
              # state of the stream.
              #
              # Server-streaming rpc: the response type is a stream of
              # ReadRowsResponse messages.
              rpc :ReadRows, ReadRowsRequest, stream(ReadRowsResponse)
              # Splits a given `ReadStream` into two `ReadStream` objects. These
              # `ReadStream` objects are referred to as the primary and the residual
              # streams of the split. The original `ReadStream` can still be read from in
              # the same manner as before. Both of the returned `ReadStream` objects can
              # also be read from, and the rows returned by both child streams will be
              # the same as the rows read from the original stream.
              #
              # Moreover, the two child streams will be allocated back-to-back in the
              # original `ReadStream`. Concretely, it is guaranteed that for streams
              # original, primary, and residual, that original[0-j] = primary[0-j] and
              # original[j-n] = residual[0-m] once the streams have been read to
              # completion.
              rpc :SplitReadStream, SplitReadStreamRequest, SplitReadStreamResponse
            end

            # Client stub class for calling the BigQueryRead service over gRPC.
            Stub = Service.rpc_stub_class
          end
        end
      end
    end
  end
end
@@ -0,0 +1,58 @@
1
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
2
+ # source: google/cloud/bigquery/storage/v1/stream.proto
3
+
4
+ require 'google/protobuf'
5
+
6
+ require 'google/api/field_behavior_pb'
7
+ require 'google/api/resource_pb'
8
+ require 'google/cloud/bigquery/storage/v1/arrow_pb'
9
+ require 'google/cloud/bigquery/storage/v1/avro_pb'
10
+ require 'google/protobuf/timestamp_pb'
11
# Registers the message and enum descriptors from
# google/cloud/bigquery/storage/v1/stream.proto in the generated
# descriptor pool. Generated by protoc: field names, types, and numbers
# must stay in sync with the .proto source.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/bigquery/storage/v1/stream.proto", :syntax => :proto3) do
    add_message "google.cloud.bigquery.storage.v1.ReadSession" do
      optional :name, :string, 1
      optional :expire_time, :message, 2, "google.protobuf.Timestamp"
      optional :data_format, :enum, 3, "google.cloud.bigquery.storage.v1.DataFormat"
      optional :table, :string, 6
      optional :table_modifiers, :message, 7, "google.cloud.bigquery.storage.v1.ReadSession.TableModifiers"
      optional :read_options, :message, 8, "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions"
      repeated :streams, :message, 10, "google.cloud.bigquery.storage.v1.ReadStream"
      # proto3 oneof: at most one schema representation is set.
      oneof :schema do
        optional :avro_schema, :message, 4, "google.cloud.bigquery.storage.v1.AvroSchema"
        optional :arrow_schema, :message, 5, "google.cloud.bigquery.storage.v1.ArrowSchema"
      end
    end
    add_message "google.cloud.bigquery.storage.v1.ReadSession.TableModifiers" do
      optional :snapshot_time, :message, 1, "google.protobuf.Timestamp"
    end
    add_message "google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions" do
      repeated :selected_fields, :string, 1
      optional :row_restriction, :string, 2
    end
    add_message "google.cloud.bigquery.storage.v1.ReadStream" do
      optional :name, :string, 1
    end
    add_enum "google.cloud.bigquery.storage.v1.DataFormat" do
      value :DATA_FORMAT_UNSPECIFIED, 0
      value :AVRO, 1
      value :ARROW, 2
    end
  end
end
43
+
44
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          # Ruby message classes and enum module resolved from the
          # stream.proto entries in the generated descriptor pool.
          ReadSession = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadSession").msgclass
          # Nested proto messages, exposed as constants under their parent class.
          ReadSession::TableModifiers = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadSession.TableModifiers").msgclass
          ReadSession::TableReadOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadSession.TableReadOptions").msgclass
          ReadStream = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.ReadStream").msgclass
          # Enums resolve to a module via #enummodule rather than #msgclass.
          DataFormat = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.bigquery.storage.v1.DataFormat").enummodule
        end
      end
    end
  end
end
@@ -0,0 +1,30 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2020 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+
20
module Google
  module Cloud
    module Bigquery
      module Storage
        module V1
          # Released version of the google-cloud-bigquery-storage-v1 gem.
          # Frozen via the file's frozen_string_literal magic comment.
          VERSION = "0.1.0"
        end
      end
    end
  end
end
@@ -0,0 +1,15 @@
1
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
2
+ # source: google/cloud/common_resources.proto
3
+
4
+ require 'google/protobuf'
5
+
6
+ require 'google/api/resource_pb'
7
# Registers google/cloud/common_resources.proto in the generated
# descriptor pool. The file declares no messages of its own; the
# add_file block is intentionally empty.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/common_resources.proto", :syntax => :proto3) do
  end
end

# Namespace placeholders only; this proto defines no Ruby types.
module Google
  module Cloud
  end
end
@@ -0,0 +1,4 @@
1
+ # BigQuery Storage V1 Protocol Buffer Documentation
2
+
3
+ These files are for the YARD documentation of the generated protobuf files.
4
+ They are not intended to be required or loaded at runtime.
@@ -0,0 +1,59 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Copyright 2020 Google LLC
4
+ #
5
+ # Licensed under the Apache License, Version 2.0 (the "License");
6
+ # you may not use this file except in compliance with the License.
7
+ # You may obtain a copy of the License at
8
+ #
9
+ # https://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing, software
12
+ # distributed under the License is distributed on an "AS IS" BASIS,
13
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
+ # See the License for the specific language governing permissions and
15
+ # limitations under the License.
16
+
17
+ # Auto-generated by gapic-generator-ruby. DO NOT EDIT!
18
+
19
+
20
module Google
  module Api
    # Describes how API tooling should treat a given field — for example,
    # that it is required on input, or only ever populated on output.
    #
    # These values never alter protocol buffer wire behavior; they only
    # annotate intent for generators and other API tooling.
    #
    # Note: This enum **may** receive new values in the future.
    module FieldBehavior
      # Conventional default placeholder for the enum. Do not use this.
      FIELD_BEHAVIOR_UNSPECIFIED = 0

      # Explicitly marks a field as optional. Every protocol buffer field
      # is already optional, so this exists purely for emphasis where
      # appropriate.
      OPTIONAL = 1

      # The field **must** be supplied as part of the request; omitting it
      # causes an error (usually `INVALID_ARGUMENT`).
      REQUIRED = 2

      # The field appears only in responses. Supplying it in a request
      # does nothing: the server *must* ignore it and *must not* raise an
      # error because it is present.
      OUTPUT_ONLY = 3

      # The field is consumed from requests, but the corresponding field
      # is never included in output.
      INPUT_ONLY = 4

      # The field may be set once, when the resource is created, and is
      # read-only thereafter.
      IMMUTABLE = 5
    end
  end
end