fluent-plugin-bigquery 3.2.0 → 3.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +2 -0
- data/README.md +0 -1
- data/lib/fluent/plugin/bigquery/errors.rb +9 -0
- data/lib/fluent/plugin/bigquery/version.rb +1 -1
- data/lib/fluent/plugin/bigquery/writer.rb +4 -3
- data/lib/fluent/plugin/out_bigquery_base.rb +0 -4
- data/test/plugin/test_out_bigquery_load.rb +22 -7
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2a8c0c40659fb474ce5c072f6aa8c9708c4f93755da5c2a763de19e9c3971b86
+  data.tar.gz: 2c9366bf88b9c7a6673c765bb6d57e06e24aae6dd0a731458470d122905086f7
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 558e461e2c19e70e1748185962f33e3b827c95c293afad90737a8cd9da57fea28bf2109a0660e0f112e7ae9ea60dd64f063960017123fcf84cce434bb7883c8f
+  data.tar.gz: aab29d009005ed142542397498da79a5b875dfae989c529aa102194fe142db7247cdf2034a774362cdb25603f0a000a23338306e859232e0981d33c8c928075c
data/.gitignore
CHANGED
data/README.md
CHANGED
@@ -45,7 +45,6 @@ Because embedded gem dependency sometimes restricts ruby environment.
 | private_key_path | string | yes (private_key) | no | nil | GCP Private Key file path |
 | private_key_passphrase | string | yes (private_key) | no | nil | GCP Private Key Passphrase |
 | json_key | string | yes (json_key) | no | nil | GCP JSON Key file path or JSON Key string |
-| location | string | no | no | nil | BigQuery Data Location. The geographic location of the job. Required except for US and EU. |
 | project | string | yes | yes | nil | |
 | dataset | string | yes | yes | nil | |
 | table | string | yes (either `tables`) | yes | nil | |
data/lib/fluent/plugin/bigquery/errors.rb
CHANGED
@@ -5,6 +5,7 @@ module Fluent
       RETRYABLE_ERROR_REASON = %w(backendError internalError rateLimitExceeded tableUnavailable).freeze
       RETRYABLE_INSERT_ERRORS_REASON = %w(timeout backendError internalError rateLimitExceeded).freeze
       RETRYABLE_STATUS_CODE = [500, 502, 503, 504]
+      REGION_NOT_WRITABLE_MESSAGE = -"is not writable in the region"

       class << self
         # @param e [Google::Apis::Error]
@@ -19,6 +20,10 @@ module Fluent

         # @param e [Google::Apis::Error]
         def retryable_error?(e)
+          retryable_server_error?(e) || retryable_region_not_writable?(e)
+        end
+
+        def retryable_server_error?(e)
           e.is_a?(Google::Apis::ServerError) && RETRYABLE_STATUS_CODE.include?(e.status_code)
         end

@@ -30,6 +35,10 @@ module Fluent
           RETRYABLE_INSERT_ERRORS_REASON.include?(reason)
         end

+        def retryable_region_not_writable?(e)
+          e.is_a?(Google::Apis::ClientError) && e.status_code == 400 && e.message.include?(REGION_NOT_WRITABLE_MESSAGE)
+        end
+
         # Guard for instantiation
         private :new
         def inherited(subclass)
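The effect of these hunks: `retryable_error?` is now the union of the existing server-error check and the new `retryable_region_not_writable?` check, so a 400 `Google::Apis::ClientError` whose message contains "is not writable in the region" is retried instead of being treated as fatal. A minimal sketch of the new classification in isolation, using a stand-in error object rather than a real `Google::Apis::ClientError`:

```ruby
# Sketch only: mirrors the classification added in errors.rb with a fake error class.
REGION_NOT_WRITABLE_MESSAGE = "is not writable in the region"

FakeClientError = Struct.new(:status_code, :message)

def retryable_region_not_writable?(e)
  e.status_code == 400 && e.message.include?(REGION_NOT_WRITABLE_MESSAGE)
end

e = FakeClientError.new(400, "Dataset yourproject:yourdataset is not writable in the region asia-northeast1")
puts retryable_region_not_writable?(e)  # => true
```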
data/lib/fluent/plugin/bigquery/writer.rb
CHANGED
@@ -101,6 +101,7 @@ module Fluent
          end
        end
      rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
+        log.debug "insert error: #{e.message}", status_code: e.respond_to?(:status_code) ? e.status_code : nil, reason: e.respond_to?(:reason) ? e.reason : nil
        error_data = { project_id: project, dataset: dataset, table: table_id, code: e.status_code, message: e.message }
        wrapped = Fluent::BigQuery::Error.wrap(e)
        if wrapped.retryable?
@@ -112,7 +113,7 @@ module Fluent
        raise wrapped
      end

-    JobReference = Struct.new(:chunk_id, :chunk_id_hex, :project_id, :dataset_id, :table_id, :job_id) do
+    JobReference = Struct.new(:chunk_id, :chunk_id_hex, :project_id, :dataset_id, :table_id, :job_id, :location) do
      def as_hash(*keys)
        if keys.empty?
          to_h
@@ -161,7 +162,7 @@ module Fluent
        upload_source: upload_source,
        content_type: "application/octet-stream",
      )
-      JobReference.new(chunk_id, chunk_id_hex, project, dataset, table_id, res.job_reference.job_id)
+      JobReference.new(chunk_id, chunk_id_hex, project, dataset, table_id, res.job_reference.job_id, res.job_reference.location)
    rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
      log.error "job.load API", project_id: project, dataset: dataset, table: table_id, code: e.status_code, message: e.message

@@ -175,7 +176,7 @@ module Fluent
    def fetch_load_job(job_reference)
      project = job_reference.project_id
      job_id = job_reference.job_id
-      location =
+      location = job_reference.location

      res = client.get_job(project, job_id, location: location)
      log.debug "load job fetched", id: job_id, state: res.status.state, **job_reference.as_hash(:project_id, :dataset_id, :table_id)
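Taken together, these hunks carry the load job's location end to end: `create_load_job` stores `res.job_reference.location` in the `JobReference` struct, and `fetch_load_job` hands it back to `client.get_job`, so jobs created outside the US/EU multi-regions can be polled without a separately configured `location`. A rough sketch of that round trip, with a stand-in for the API response object (names below are illustrative, not part of the plugin):

```ruby
# Sketch: the location reported by the load-job response is kept on the
# JobReference and reused when polling, instead of coming from configuration.
JobReference = Struct.new(:chunk_id, :chunk_id_hex, :project_id, :dataset_id,
                          :table_id, :job_id, :location)

# Stand-in for res.job_reference from the BigQuery API response.
FakeApiJobReference = Struct.new(:job_id, :location)
api_ref = FakeApiJobReference.new("fluentd_job_2024", "asia-northeast1")

ref = JobReference.new("chunk", "636875", "yourproject_id", "yourdataset_id",
                       "foo", api_ref.job_id, api_ref.location)

# fetch_load_job then polls in the job's own region:
#   client.get_job(ref.project_id, ref.job_id, location: ref.location)
puts ref.location  # => "asia-northeast1"
```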
data/lib/fluent/plugin/out_bigquery_base.rb
CHANGED
@@ -29,9 +29,6 @@ module Fluent
    config_param :private_key_path, :string, default: nil
    config_param :private_key_passphrase, :string, default: 'notasecret', secret: true
    config_param :json_key, default: nil, secret: true
-    # The geographic location of the job. Required except for US and EU.
-    # https://github.com/googleapis/google-api-ruby-client/blob/master/generated/google/apis/bigquery_v2/service.rb#L350
-    config_param :location, :string, default: nil

    # see as simple reference
    # https://github.com/abronte/BigQuery/blob/master/lib/bigquery.rb
@@ -135,7 +132,6 @@ module Fluent
      private_key_path: @private_key_path, private_key_passphrase: @private_key_passphrase,
      email: @email,
      json_key: @json_key,
-      location: @location,
      source_format: @source_format,
      skip_invalid_rows: @skip_invalid_rows,
      ignore_unknown_values: @ignore_unknown_values,
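With `config_param :location` removed and `location:` no longer forwarded into the writer options, the polling region comes solely from the job reference (see the writer.rb hunks above). A compressed sketch of the resulting options shape; the values below are placeholders, not plugin defaults:

```ruby
# Placeholder values; the point is only the shape: there is no :location key anymore.
writer_options = {
  private_key_path: nil,
  private_key_passphrase: "notasecret",
  email: nil,
  json_key: "/path/to/service_account.json",
  source_format: :json,
  skip_invalid_rows: false,
  ignore_unknown_values: false,
}
p writer_options.key?(:location)  # => false
```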
data/test/plugin/test_out_bigquery_load.rb
CHANGED
@@ -65,7 +65,10 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          }
        }
      }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
-        stub!.job_reference.stub
+        stub!.job_reference.stub! do |s|
+          s.job_id { "dummy_job_id" }
+          s.location { "us" }
+        end
      end
    end

@@ -118,7 +121,10 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
        },
        job_reference: {project_id: 'yourproject_id', job_id: satisfy { |x| x =~ /fluentd_job_.*/}} ,
      }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
-        stub!.job_reference.stub
+        stub!.job_reference.stub! do |s|
+          s.job_id { "dummy_job_id" }
+          s.location { "us" }
+        end
      end
    end

@@ -155,10 +161,13 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          }
        }
      }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
-        stub!.job_reference.stub
+        stub!.job_reference.stub! do |s|
+          s.job_id { "dummy_job_id" }
+          s.location { "us" }
+        end
      end

-      mock(writer.client).get_job('yourproject_id', 'dummy_job_id', :
+      mock(writer.client).get_job('yourproject_id', 'dummy_job_id', location: "us") do
        stub! do |s|
          s.id { 'dummy_job_id' }
          s.configuration.stub! do |_s|
@@ -238,10 +247,13 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          }
        }
      }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
-        stub!.job_reference.stub
+        stub!.job_reference.stub! do |s|
+          s.job_id { "dummy_job_id" }
+          s.location { "us" }
+        end
      end

-      mock(writer.client).get_job('yourproject_id', 'dummy_job_id', :
+      mock(writer.client).get_job('yourproject_id', 'dummy_job_id', location: "us") do
        stub! do |s|
          s.id { 'dummy_job_id' }
          s.configuration.stub! do |_s|
@@ -318,7 +330,10 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          }
        }
      }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
-        stub!.job_reference.stub
+        stub!.job_reference.stub! do |s|
+          s.job_id { "dummy_job_id" }
+          s.location { "us" }
+        end
      end
    end

metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-bigquery
 version: !ruby/object:Gem::Version
-  version: 3.2.0
+  version: 3.3.0
 platform: ruby
 authors:
 - Naoya Ito
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-
+date: 2024-11-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -191,7 +191,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.3.27
 signing_key:
 specification_version: 4
 summary: Fluentd plugin to store data on Google BigQuery