google-cloud-bigquery-storage-v1 0.6.1 → 0.8.0
This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/AUTHENTICATION.md +8 -8
- data/README.md +1 -1
- data/lib/google/cloud/bigquery/storage/v1/big_query_read/client.rb +43 -53
- data/lib/google/cloud/bigquery/storage/v1/big_query_write/client.rb +827 -0
- data/lib/google/cloud/bigquery/storage/v1/big_query_write/credentials.rb +55 -0
- data/lib/google/cloud/bigquery/storage/v1/big_query_write/paths.rb +75 -0
- data/lib/google/cloud/bigquery/storage/v1/big_query_write.rb +56 -0
- data/lib/google/cloud/bigquery/storage/v1/protobuf_pb.rb +29 -0
- data/lib/google/cloud/bigquery/storage/v1/storage_pb.rb +86 -2
- data/lib/google/cloud/bigquery/storage/v1/storage_services_pb.rb +80 -1
- data/lib/google/cloud/bigquery/storage/v1/stream_pb.rb +19 -2
- data/lib/google/cloud/bigquery/storage/v1/table_pb.rb +62 -0
- data/lib/google/cloud/bigquery/storage/v1/version.rb +1 -1
- data/lib/google/cloud/bigquery/storage/v1.rb +1 -0
- data/proto_docs/google/api/field_behavior.rb +7 -1
- data/proto_docs/google/cloud/bigquery/storage/v1/protobuf.rb +56 -0
- data/proto_docs/google/cloud/bigquery/storage/v1/storage.rb +244 -1
- data/proto_docs/google/cloud/bigquery/storage/v1/stream.rb +50 -0
- data/proto_docs/google/cloud/bigquery/storage/v1/table.rb +172 -0
- data/proto_docs/google/protobuf/any.rb +141 -0
- data/proto_docs/google/protobuf/wrappers.rb +121 -0
- data/proto_docs/google/rpc/status.rb +46 -0
- metadata +15 -4
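The headline change in this release range is the new BigQueryWrite client (big_query_write/client.rb, credentials.rb, paths.rb) together with the protobuf, table, and stream message files that back it. For orientation only, here is a minimal sketch of how that client could be used, assuming it follows the same generated-client conventions as BigQueryRead; the project, dataset, and table names are placeholders, and `create_write_stream` is one of the RPCs declared in the updated storage_services_pb.rb:

```ruby
require "google/cloud/bigquery/storage/v1"

# Sketch only: the BigQueryWrite client added alongside BigQueryRead in this
# release. It is constructed the same way as the read client.
client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryWrite::Client.new

# Placeholder table path for illustration.
table = "projects/my-project/datasets/my_dataset/tables/my_table"

# Request fields are passed as keyword arguments, mirroring BigQueryRead.
stream = client.create_write_stream parent: table,
                                    write_stream: { type: :COMMITTED }
puts stream.name
```

Row appends themselves go through the bidirectional `append_rows` stream, which is out of scope for this sketch.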
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 6693e2ad11b7bf193b9fc692f91d7ffb59251df9fa28ba3ea24f6e77e385bba1
+  data.tar.gz: d52647196c983df0ae4cba7fe40267b3ef1d394693257a35a6eefa1b5f75502c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1a384535064401bb446fc3464358e076be4f6c99dd3d16e15e43e1d73c5d165dbc3cd2e3176dd9ccba0ca7eb728dca959bc26103e88ac433bd3fd3ce14edb4d1
+  data.tar.gz: 338a6b1f618ec95fcd662d0b033b0543324e0de62660e51972a52d3872f7c7071387e055474abd2877c2cd75768407f35d3535e6064cafcd284673d815d4f20d
data/AUTHENTICATION.md
CHANGED
@@ -66,11 +66,11 @@ The environment variables that google-cloud-bigquery-storage-v1
 checks for credentials are configured on the service Credentials class (such as
 {::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Credentials}):
 
-
-
-
-
-
+* `BIGQUERY_STORAGE_CREDENTIALS` - Path to JSON file, or JSON contents
+* `BIGQUERY_STORAGE_KEYFILE` - Path to JSON file, or JSON contents
+* `GOOGLE_CLOUD_CREDENTIALS` - Path to JSON file, or JSON contents
+* `GOOGLE_CLOUD_KEYFILE` - Path to JSON file, or JSON contents
+* `GOOGLE_APPLICATION_CREDENTIALS` - Path to JSON file
 
 ```ruby
 require "google/cloud/bigquery/storage/v1"
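For reference, a short sketch of relying on one of the variables listed above; the keyfile path is hypothetical, and any of the listed variables works the same way:

```ruby
require "google/cloud/bigquery/storage/v1"

# Hypothetical path; set before the client is created so the Credentials
# class can find it through the environment.
ENV["BIGQUERY_STORAGE_CREDENTIALS"] = "/path/to/keyfile.json"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
```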
@@ -82,8 +82,8 @@ client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 
 ### Configuration
 
-The **Credentials JSON** can be configured instead of
-environment
+The path to the **Credentials JSON** file can be configured instead of storing
+it in an environment variable. Either on an individual client initialization:
 
 ```ruby
 require "google/cloud/bigquery/storage/v1"
@@ -93,7 +93,7 @@ client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |co
 end
 ```
 
-Or
+Or globally for all clients:
 
 ```ruby
 require "google/cloud/bigquery/storage/v1"
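The hunks above only show the opening `require` line of each snippet; a sketch of how the per-client and global forms typically read, assuming the standard `config.credentials` attribute of these generated clients (the keyfile path is a placeholder):

```ruby
require "google/cloud/bigquery/storage/v1"

# Per client: supply the keyfile path when the client is built.
client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
  config.credentials = "path/to/keyfile.json" # placeholder path
end

# Globally: every client created afterwards inherits the credentials.
::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
  config.credentials = "path/to/keyfile.json"
end
```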
data/README.md
CHANGED
@@ -33,7 +33,7 @@ In order to use this library, you first need to go through the following steps:
 require "google/cloud/bigquery/storage/v1"
 
 client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
-request =
+request = ::Google::Cloud::Bigquery::Storage::V1::CreateReadSessionRequest.new # (request fields as keyword arguments...)
 response = client.create_read_session request
 ```
 
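The rewritten README line names the request class explicitly; a slightly fuller sketch of the same call, using placeholder resource names and the fields documented for CreateReadSessionRequest (parent, read_session, max_stream_count):

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

# Fields can be given as keyword arguments instead of building the request
# object by hand. All resource names below are placeholders.
response = client.create_read_session(
  parent: "projects/my-project",
  read_session: {
    table: "projects/my-project/datasets/my_dataset/tables/my_table",
    data_format: :AVRO
  },
  max_stream_count: 1
)

response.streams.each { |stream| puts stream.name }
```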
data/lib/google/cloud/bigquery/storage/v1/big_query_read/client.rb
CHANGED
@@ -44,13 +44,12 @@ module Google
 # See {::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client::Configuration}
 # for a description of the configuration fields.
 #
-#
+# @example
 #
-#
-#
-#
-#
-# end
+#   # Modify the configuration for all BigQueryRead clients
+#   ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
+#     config.timeout = 10.0
+#   end
 #
 # @yield [config] Configure the Client client.
 # @yieldparam config [Client::Configuration]
@@ -70,26 +69,17 @@ module Google
 
 default_config.rpcs.create_read_session.timeout = 600.0
 default_config.rpcs.create_read_session.retry_policy = {
-  initial_delay: 0.1,
-  max_delay: 60.0,
-  multiplier: 1.3,
-  retry_codes: [4, 14]
+  initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
 }
 
 default_config.rpcs.read_rows.timeout = 86_400.0
 default_config.rpcs.read_rows.retry_policy = {
-  initial_delay: 0.1,
-  max_delay: 60.0,
-  multiplier: 1.3,
-  retry_codes: [14]
+  initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [14]
 }
 
 default_config.rpcs.split_read_stream.timeout = 600.0
 default_config.rpcs.split_read_stream.retry_policy = {
-  initial_delay: 0.1,
-  max_delay: 60.0,
-  multiplier: 1.3,
-  retry_codes: [4, 14]
+  initial_delay: 0.1, max_delay: 60.0, multiplier: 1.3, retry_codes: [4, 14]
 }
 
 default_config
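These compacted retry hashes are only defaults; a sketch of overriding them for a single client, reusing the same keys (the values here are arbitrary):

```ruby
require "google/cloud/bigquery/storage/v1"

# Per-client override of the generated defaults shown above.
client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
  config.rpcs.read_rows.timeout = 3600.0 # arbitrary example value
  config.rpcs.read_rows.retry_policy = {
    initial_delay: 0.5, max_delay: 30.0, multiplier: 2.0, retry_codes: [14]
  }
end
```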
@@ -121,19 +111,15 @@ module Google
 ##
 # Create a new BigQueryRead client object.
 #
-#
-#
-# To create a new BigQueryRead client with the default
-# configuration:
-#
-# client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
+# @example
 #
-#
-#
+#   # Create a client using the default configuration
+#   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new
 #
-#
-#
-#
+#   # Create a client using a custom configuration
+#   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
+#     config.timeout = 10.0
+#   end
 #
 # @yield [config] Configure the BigQueryRead client.
 # @yieldparam config [Client::Configuration]
@@ -153,14 +139,13 @@ module Google
 
 # Create credentials
 credentials = @config.credentials
-# Use self-signed JWT if the
+# Use self-signed JWT if the endpoint is unchanged from default,
 # but only if the default endpoint does not have a region prefix.
-enable_self_signed_jwt = @config.
-  @config.endpoint == Client.configure.endpoint &&
+enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
                          !@config.endpoint.split(".").first.include?("-")
 credentials ||= Credentials.default scope: @config.scope,
                                     enable_self_signed_jwt: enable_self_signed_jwt
-if credentials.is_a?(String) || credentials.is_a?(Hash)
+if credentials.is_a?(::String) || credentials.is_a?(::Hash)
   credentials = Credentials.new credentials, scope: @config.scope
 end
 @quota_project_id = @config.quota_project
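The reworked check means self-signed JWT auth stays on only while the endpoint is the unmodified default; a sketch of the consequence (the custom endpoint value is purely illustrative):

```ruby
require "google/cloud/bigquery/storage/v1"

# Default endpoint: the condition above holds, so self-signed JWTs are used.
default_client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

# Custom endpoint: the equality check fails and the client falls back to
# standard OAuth token fetching. The endpoint value is illustrative only.
custom_client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
  config.endpoint = "example-custom-endpoint.googleapis.com"
end
```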
@@ -195,7 +180,7 @@ module Google
 # limits are enforced based on the number of pre-filtered rows, so some
 # filters can lead to lopsided assignments.
 #
-# Read sessions automatically expire
+# Read sessions automatically expire 6 hours after they are created and do
 # not require manual clean-up by the caller.
 #
 # @overload create_read_session(request, options = nil)
@@ -262,7 +247,9 @@ module Google
 options.apply_defaults timeout: @config.rpcs.create_read_session.timeout,
                        metadata: metadata,
                        retry_policy: @config.rpcs.create_read_session.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+                       metadata: @config.metadata,
                        retry_policy: @config.retry_policy
 
 @big_query_read_stub.call_rpc :create_read_session, request, options: options do |response, operation|
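The second apply_defaults call layers the client-wide timeout and metadata underneath the per-RPC values. Callers can still override both at call time through the options argument shown in the @overload; a sketch with arbitrary values and placeholder resource names:

```ruby
require "google/cloud/bigquery/storage/v1"

client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new

# Per-call options win over both the per-RPC and client-wide defaults.
call_options = ::Gapic::CallOptions.new timeout: 30.0 # arbitrary value

response = client.create_read_session(
  {
    parent: "projects/my-project", # placeholder
    read_session: {
      table: "projects/my-project/datasets/my_dataset/tables/my_table",
      data_format: :ARROW
    }
  },
  call_options
)
```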
@@ -338,7 +325,9 @@ module Google
 options.apply_defaults timeout: @config.rpcs.read_rows.timeout,
                        metadata: metadata,
                        retry_policy: @config.rpcs.read_rows.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+                       metadata: @config.metadata,
                        retry_policy: @config.retry_policy
 
 @big_query_read_stub.call_rpc :read_rows, request, options: options do |response, operation|
@@ -423,7 +412,9 @@ module Google
 options.apply_defaults timeout: @config.rpcs.split_read_stream.timeout,
                        metadata: metadata,
                        retry_policy: @config.rpcs.split_read_stream.retry_policy
-
+
+options.apply_defaults timeout: @config.timeout,
+                       metadata: @config.metadata,
                        retry_policy: @config.retry_policy
 
 @big_query_read_stub.call_rpc :split_read_stream, request, options: options do |response, operation|
@@ -447,22 +438,21 @@ module Google
 # Configuration can be applied globally to all clients, or to a single client
 # on construction.
 #
-#
-#
-#
-# to 20 seconds,
-#
-#
-#
-#
-#
-#
-#
-#
-#
-#
-# end
+# @example
+#
+#   # Modify the global config, setting the timeout for
+#   # create_read_session to 20 seconds,
+#   # and all remaining timeouts to 10 seconds.
+#   ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.configure do |config|
+#     config.timeout = 10.0
+#     config.rpcs.create_read_session.timeout = 20.0
+#   end
+#
+#   # Apply the above configuration only to a new client.
+#   client = ::Google::Cloud::Bigquery::Storage::V1::BigQueryRead::Client.new do |config|
+#     config.timeout = 10.0
+#     config.rpcs.create_read_session.timeout = 20.0
+#   end
 #
 # @!attribute [rw] endpoint
 #   The hostname or hostname:port of the service endpoint.