fluent-plugin-bigquery 2.3.0 → 3.0.0
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.github/workflows/linux.yml +0 -1
- data/.github/workflows/windows.yml +4 -1
- data/Gemfile +3 -0
- data/README.md +6 -11
- data/integration/README.md +14 -0
- data/integration/create_table.sh +4 -0
- data/integration/dummer_insert.rb +12 -0
- data/integration/dummer_load.rb +12 -0
- data/integration/fluent.conf +88 -0
- data/integration/schema.json +22 -0
- data/lib/fluent/plugin/bigquery/version.rb +1 -1
- data/lib/fluent/plugin/bigquery/writer.rb +6 -8
- data/lib/fluent/plugin/out_bigquery_base.rb +7 -2
- data/lib/fluent/plugin/out_bigquery_insert.rb +2 -0
- data/test/plugin/test_out_bigquery_insert.rb +45 -21
- data/test/plugin/test_out_bigquery_load.rb +5 -5
- metadata +12 -6
checksums.yaml
CHANGED
```diff
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bc6de961c8c42fddf3d9e297e93db560d16cfc098161232c90ee64f0a5679fee
+  data.tar.gz: 5ec1fee690f77d0fa25d8e427c6ad354cdfdbfafe30a4aee4fea9a5e73db5eb3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 20fc96d420611a0d12f7cb34656ae87872f24131c70039383a8f8b7d51048a7d4f277a80675f2bee834113fd13d2a9780b772b517f2140481f7fb86ce63f24e3
+  data.tar.gz: cecc8f8682761ddfb22d942b69103823cc728923f6d7043d967254ed02c754db4e792132769f7f3aa91986aa27895ac83bf16358be21e03d3c94e77c43975231
```
data/.github/workflows/linux.yml
CHANGED
data/Gemfile
CHANGED
data/README.md
CHANGED
````diff
@@ -1,13 +1,5 @@
 # fluent-plugin-bigquery
 
-## Notice
-
-We will transfer fluent-plugin-bigquery repository to [fluent-plugins-nursery](https://github.com/fluent-plugins-nursery) organization.
-It does not change maintenance plan.
-The main purpose is that it solves mismatch between maintainers and current organization.
-
----
-
 [Fluentd](http://fluentd.org) output plugin to load/insert data into Google BigQuery.
 
 - **Plugin type**: Output
@@ -31,6 +23,7 @@ OAuth flow for installed applications.
 | v0.4.x | 0.12.x          | 2.0 or later |
 | v1.x.x | 0.14.x or later | 2.2 or later |
 | v2.x.x | 0.14.x or later | 2.3 or later |
+| v3.x.x | 1.x or later    | 2.7 or later |
 
 ## With docker image
 If you use official alpine based fluentd docker image (https://github.com/fluent/fluentd-docker-image),
@@ -388,10 +381,10 @@ format to construct table ids.
 Table ids are formatted at runtime
 using the chunk key time.
 
-see.
+see. https://docs.fluentd.org/configuration/buffer-section
 
 For example, with the configuration below,
-data is inserted into tables `accesslog_2014_08`, `accesslog_2014_09` and so on.
+data is inserted into tables `accesslog_2014_08_02`, `accesslog_2014_08_03` and so on.
 
 ```apache
 <match dummy>
@@ -401,7 +394,7 @@ data is inserted into tables `accesslog_2014_08`, `accesslog_2014_09` and so on.
 
   project yourproject_id
   dataset yourdataset_id
-  table accesslog_%Y_%m
+  table accesslog_%Y_%m_%d
 
   <buffer time>
     timekey 1d
@@ -410,6 +403,8 @@ data is inserted into tables `accesslog_2014_08`, `accesslog_2014_09` and so on.
 </match>
 ```
 
+**NOTE: In current fluentd (v1.15.x), The maximum unit supported by strftime formatting is the granularity of days**
+
 #### record attribute formatting
 The format can be suffixed with attribute name.
 
````
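The new `accesslog_%Y_%m_%d` example is resolved against the buffer chunk's timekey, which is why the NOTE above caps strftime granularity at days. A minimal Ruby sketch of the idea (illustrative only, not the plugin's actual code):

```ruby
require "time"

# A buffer chunk whose timekey is the start of 2014-08-02 (UTC). Every
# record in the chunk shares this timekey, so a strftime unit finer than
# the chunk's timekey (here, timekey 1d) cannot vary within one chunk.
chunk_timekey = Time.utc(2014, 8, 2)
table_id = chunk_timekey.strftime("accesslog_%Y_%m_%d")
puts table_id # => accesslog_2014_08_02
```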
data/integration/README.md
ADDED

```diff
@@ -0,0 +1,14 @@
+# Requirements
+
+Set Environment Variable
+
+- GOOGLE_APPLICATION_CREDENTIALS (json key path)
+- PROJECT_NAME
+- DATASET_NAME
+- TABLE_NAME
+
+# How to use
+
+1. execute `create_table.sh`
+1. `bundle exec fluentd -c fluent.conf`
+1. `bundle exec dummer -c dummer_insert.rb` or `bundle exec dummer -c dummer_load.rb`
```
data/integration/dummer_insert.rb
ADDED

```diff
@@ -0,0 +1,12 @@
+require "time"
+
+configure "insert" do
+  host "localhost"
+  port 24224
+  rate 100
+  tag type: :string, any: %w(insert_data)
+  field :id, type: :integer, countup: true
+  field :string_field, type: :string, any: %w(str1 str2 str3 str4)
+  field :timestamp_field, type: :string, value: Time.now.iso8601
+  field :date, type: :string, value: Time.now.strftime("%Y-%m-%d")
+end
```
data/integration/dummer_load.rb
ADDED

```diff
@@ -0,0 +1,12 @@
+require "time"
+
+configure "load" do
+  host "localhost"
+  port 24224
+  rate 100
+  tag type: :string, any: %w(load_data)
+  field :id, type: :integer, countup: true
+  field :string_field, type: :string, any: %w(str1 str2 str3 str4)
+  field :timestamp_field, type: :string, value: Time.now.iso8601
+  field :date, type: :string, value: Time.now.strftime("%Y-%m-%d")
+end
```
data/integration/fluent.conf
ADDED

```diff
@@ -0,0 +1,88 @@
+<source>
+  @type forward
+  port 24224
+  bind 0.0.0.0
+</source>
+
+<match insert_data>
+  @id bigquery-insert-integration
+  @type bigquery_insert
+
+  allow_retry_insert_errors true
+
+  auth_method json_key
+  json_key "#{ENV["GOOGLE_APPLICATION_CREDENTIALS"]}"
+
+  <buffer>
+    @type file
+
+    chunk_limit_size 1m
+    chunk_limit_records 1500
+    total_limit_size 1g
+    path ./log/bigquery-insert-integration
+
+    flush_interval 30
+    flush_thread_count 4
+    flush_at_shutdown true
+
+    retry_max_times 14
+    retry_max_interval 30m
+  </buffer>
+
+  request_open_timeout_sec 2m
+
+  slow_flush_log_threshold 30.0
+
+  project "#{ENV["PROJECT_NAME"]}"
+  dataset "#{ENV["DATASET_NAME"]}"
+  table "#{ENV["TABLE_NAME"]}"
+  auto_create_table false
+  fetch_schema true
+  fetch_schema_table "#{ENV["TABLE_NAME"]}"
+
+  insert_id_field id
+
+  <secondary>
+    @type file
+    path ./log/bigquery-insert-integration.errors
+  </secondary>
+</match>
+
+<match load_data>
+  @id bigquery-load-integration
+  @type bigquery_load
+
+  auth_method json_key
+  json_key "#{ENV["GOOGLE_APPLICATION_CREDENTIALS"]}"
+
+  <buffer>
+    @type file
+
+    chunk_limit_size 1m
+    total_limit_size 1g
+    path ./log/bigquery-load-integration
+
+    flush_interval 120
+    flush_thread_count 4
+    flush_at_shutdown true
+
+    retry_max_times 14
+    retry_max_interval 30m
+  </buffer>
+
+  request_open_timeout_sec 2m
+
+  slow_flush_log_threshold 300.0
+
+  project "#{ENV["PROJECT_NAME"]}"
+  dataset "#{ENV["DATASET_NAME"]}"
+  table "#{ENV["TABLE_NAME"]}"
+  auto_create_table false
+  fetch_schema true
+  fetch_schema_table "#{ENV["TABLE_NAME"]}"
+
+  <secondary>
+    @type file
+    path ./log/bigquery-load-integration.errors
+  </secondary>
+</match>
```
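Both integration outputs fetch the live table schema (`fetch_schema true`) instead of auto-creating tables, and the insert path sets `insert_id_field id` so retried streaming inserts de-duplicate. A hedged sketch of what that setting amounts to (row shape illustrative only; field names taken from `schema.json`):

```ruby
# Illustrative only: conceptually, with insert_id_field id, each record's
# "id" value becomes the BigQuery streaming insertId, which BigQuery uses
# for best-effort de-duplication when an insert is retried.
record = { "id" => 42, "string_field" => "str1",
           "timestamp_field" => "2022-10-05T00:00:00+09:00",
           "date" => "2022-10-05" }
row = { insert_id: record["id"].to_s, json: record }
```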
data/integration/schema.json
ADDED

```diff
@@ -0,0 +1,22 @@
+[
+  {
+    "name": "id",
+    "type": "INTEGER",
+    "mode": "REQUIRED"
+  },
+  {
+    "name": "string_field",
+    "type": "STRING",
+    "mode": "NULLABLE"
+  },
+  {
+    "name": "timestamp_field",
+    "type": "TIMESTAMP",
+    "mode": "NULLABLE"
+  },
+  {
+    "name": "date",
+    "type": "DATE",
+    "mode": "REQUIRED"
+  }
+]
```
data/lib/fluent/plugin/bigquery/writer.rb
CHANGED

```diff
@@ -1,7 +1,7 @@
 module Fluent
   module BigQuery
     class Writer
-      def initialize(log, auth_method, options = {})
+      def initialize(log, auth_method, **options)
        @auth_method = auth_method
        @scope = "https://www.googleapis.com/auth/bigquery"
        @options = options
@@ -37,7 +37,7 @@ module Fluent
        definition.merge!(time_partitioning: time_partitioning) if time_partitioning
        definition.merge!(require_partition_filter: require_partition_filter) if require_partition_filter
        definition.merge!(clustering: clustering) if clustering
-        client.insert_table(project, dataset, definition, {})
+        client.insert_table(project, dataset, definition, **{})
        log.debug "create table", project_id: project, dataset: dataset, table: table_id
      rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
        message = e.message
@@ -83,7 +83,7 @@ module Fluent
        if @options[:auto_create_table]
          res = insert_all_table_data_with_create_table(project, dataset, table_id, body, schema)
        else
-          res = client.insert_all_table_data(project, dataset, table_id, body, {})
+          res = client.insert_all_table_data(project, dataset, table_id, body, **{})
        end
        log.debug "insert rows", project_id: project, dataset: dataset, table: table_id, count: rows.size
 
@@ -158,10 +158,8 @@ module Fluent
        res = client.insert_job(
          project,
          configuration,
-          {
-            upload_source: upload_source,
-            content_type: "application/octet-stream",
-          }
+          upload_source: upload_source,
+          content_type: "application/octet-stream",
        )
        JobReference.new(chunk_id, chunk_id_hex, project, dataset, table_id, res.job_reference.job_id)
      rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
@@ -343,7 +341,7 @@ module Fluent
 
      def insert_all_table_data_with_create_table(project, dataset, table_id, body, schema)
        try_count ||= 1
-        res = client.insert_all_table_data(project, dataset, table_id, body, {})
+        res = client.insert_all_table_data(project, dataset, table_id, body, **{})
      rescue Google::Apis::ClientError => e
        if e.status_code == 404 && /Not Found: Table/i =~ e.message
          if try_count == 1
```
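Most of these hunks are the Ruby 3 keyword-argument separation fix: google-api-client's methods take keyword parameters, and Ruby 3 no longer converts a trailing positional `Hash` into keywords. A standalone sketch of the behavior (the `request` method below is hypothetical, standing in for a google-api-client call):

```ruby
# Hypothetical stand-in for a google-api-client method whose trailing
# parameters are keywords.
def request(project, dataset, fields: nil, options: nil)
  [project, dataset, fields, options]
end

# Ruby 2 implicitly converted a trailing Hash into keyword arguments
# (with a deprecation warning on 2.7), so request("p", "d", {}) worked.
# On Ruby 3 that call raises ArgumentError ("wrong number of arguments")
# because the literal {} stays positional.
#
# A double splat expands a hash into keyword arguments instead, and **{}
# expands to no arguments at all on Ruby 2.7+ and 3.x, which is why the
# client calls above now pass **{}:
request("p", "d", **{}) # => ["p", "d", nil, nil]
```

The `def initialize(log, auth_method, **options)` change is the same fix seen from the callee's side: the options the plugin passes are received as real keywords on both Ruby lines, matching the "2.7 or later" requirement the README now states for v3.x.x.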
data/lib/fluent/plugin/out_bigquery_base.rb
CHANGED

```diff
@@ -131,7 +131,7 @@ module Fluent
    end
 
    def writer
-      @writer ||= Fluent::BigQuery::Writer.new(@log, @auth_method,
+      @writer ||= Fluent::BigQuery::Writer.new(@log, @auth_method,
        private_key_path: @private_key_path, private_key_passphrase: @private_key_passphrase,
        email: @email,
        json_key: @json_key,
@@ -150,10 +150,15 @@ module Fluent
        clustering_fields: @clustering_fields,
        timeout_sec: @request_timeout_sec,
        open_timeout_sec: @request_open_timeout_sec,
-      )
+      )
    end
 
    def format(tag, time, record)
+      if record.nil?
+        log.warn("nil record detected. corrupted chunks? tag=#{tag}, time=#{time}")
+        return
+      end
+
      record = inject_values_to_record(tag, time, record)
 
      meta = metadata(tag, time, record)
```
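The new guard in `format` trades a crash for a warning: `inject_values_to_record` operates on the record hash, so a nil record (for example, from a corrupted buffer chunk) would raise deep inside the flush path, while returning nil from `format` drops just that event. A trivial sketch of the failure mode being avoided (illustrative, not the plugin's code):

```ruby
record = nil
tag = "insert_data"

# Without the guard, the first hash operation on a nil record raises:
#   record.merge("tag" => tag)  # NoMethodError: undefined method `merge' for nil
# With it, the event is logged and skipped, and the rest of the chunk flushes:
if record.nil?
  warn "nil record detected. corrupted chunks? tag=#{tag}"
end
```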
data/lib/fluent/plugin/out_bigquery_insert.rb
CHANGED

```diff
@@ -96,6 +96,8 @@ module Fluent
      schema = get_schema(project, dataset, metadata)
 
      insert(project, dataset, table_id, rows, schema, template_suffix)
+    rescue MultiJson::ParseError => e
+      raise Fluent::UnrecoverableError.new(e)
    end
 
    def insert(project, dataset, table_id, rows, schema, template_suffix)
```
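Wrapping `MultiJson::ParseError` in `Fluent::UnrecoverableError` matters for retry behavior: a chunk whose bytes cannot be parsed will fail identically on every retry, and Fluentd hands unrecoverable chunks to backup or `<secondary>` handling instead of retrying them. A self-contained sketch of the same pattern (assuming the fluentd and multi_json gems are installed):

```ruby
require "fluent/error"
require "multi_json"

payload = '{"broken json'  # stands in for a corrupted chunk's serialized rows

begin
  MultiJson.load(payload)
rescue MultiJson::ParseError => e
  # Re-parsing the same bytes can never succeed, so fail fast; with a
  # <secondary> output configured (as in integration/fluent.conf), the
  # chunk is handed there rather than clogging the retry queue.
  raise Fluent::UnrecoverableError, e.message
end
```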
data/test/plugin/test_out_bigquery_insert.rb
CHANGED

```diff
@@ -5,6 +5,17 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
    Fluent::Test.setup
  end
 
+  def is_ruby2?
+    RUBY_VERSION.to_i < 3
+  end
+
+  def build_args(args)
+    if is_ruby2?
+      args << {}
+    end
+    args
+  end
+
  SCHEMA_PATH = File.join(File.dirname(__FILE__), "testdata", "apache.schema")
 
  CONFIG = %[
@@ -123,11 +134,12 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
    driver = create_driver
 
    stub_writer do |writer|
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', {
+      args = build_args(['yourproject_id', 'yourdataset_id', 'foo', {
        rows: [{json: hash_including(entry)}],
        skip_invalid_rows: false,
        ignore_unknown_values: false
-      }) do
+      }])
+      mock(writer.client).insert_all_table_data(*args) do
        s = stub!
        s.insert_errors { nil }
        s
@@ -188,11 +200,12 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
 
    entry = {a: "b"}
    stub_writer do |writer|
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', {
+      args = build_args(['yourproject_id', 'yourdataset_id', 'foo', {
        rows: [{json: hash_including(entry)}],
        skip_invalid_rows: false,
        ignore_unknown_values: false
-      }) do
+      }])
+      mock(writer.client).insert_all_table_data(*args) do
        ex = Google::Apis::ServerError.new("error", status_code: d["status_code"])
        raise ex
      end
@@ -247,11 +260,12 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
 
    entry = {a: "b"}
    stub_writer do |writer|
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', {
+      args = build_args(['yourproject_id', 'yourdataset_id', 'foo', {
        rows: [{json: hash_including(entry)}],
        skip_invalid_rows: false,
        ignore_unknown_values: false
-      }) do
+      }])
+      mock(writer.client).insert_all_table_data(*args) do
        ex = Google::Apis::ServerError.new("error", status_code: 501)
        def ex.reason
          "invalid"
@@ -269,7 +283,7 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
    assert_raise Fluent::BigQuery::UnRetryableError do
      driver.instance.write(chunk)
    end
-    assert_in_delta driver.instance.retry.secondary_transition_at , Time.now, 0.
+    assert_in_delta driver.instance.retry.secondary_transition_at , Time.now, 0.2
    driver.instance_shutdown
  end
 
@@ -292,11 +306,15 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
    CONFIG
 
    stub_writer do |writer|
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo_2014_08_20', {
-        rows: [entry[0]],
-        skip_invalid_rows: false,
-        ignore_unknown_values: false
-      }) { stub!.insert_errors { nil } }
+      args = ['yourproject_id', 'yourdataset_id', 'foo_2014_08_20', {
+        rows: [entry[0]],
+        skip_invalid_rows: false,
+        ignore_unknown_values: false
+      }]
+      if RUBY_VERSION.to_i < 3
+        args << {}
+      end
+      mock(writer.client).insert_all_table_data(*args) { stub!.insert_errors { nil } }
    end
 
    driver.run do
@@ -354,19 +372,21 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
        skip_invalid_rows: false,
        ignore_unknown_values: false,
      }
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', body) do
+      args = build_args(['yourproject_id', 'yourdataset_id', 'foo', body])
+      mock(writer.client).insert_all_table_data(*args) do
        raise Google::Apis::ClientError.new("notFound: Not found: Table yourproject_id:yourdataset_id.foo", status_code: 404)
      end.at_least(1)
      mock(writer).sleep(instance_of(Numeric)) { nil }.at_least(1)
 
-      mock(writer.client).insert_table('yourproject_id', 'yourdataset_id', {
+      args = build_args(['yourproject_id', 'yourdataset_id', {
        table_reference: {
          table_id: 'foo',
        },
        schema: {
          fields: schema_fields,
        },
-      })
+      }])
+      mock(writer.client).insert_table(*args)
    end
 
    assert_raise(RuntimeError) do
@@ -432,12 +452,13 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
        skip_invalid_rows: false,
        ignore_unknown_values: false,
      }
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', body) do
+      args = build_args(['yourproject_id', 'yourdataset_id', 'foo', body])
+      mock(writer.client).insert_all_table_data(*args) do
        raise Google::Apis::ClientError.new("notFound: Not found: Table yourproject_id:yourdataset_id.foo", status_code: 404)
      end.at_least(1)
      mock(writer).sleep(instance_of(Numeric)) { nil }.at_least(1)
 
-      mock(writer.client).insert_table('yourproject_id', 'yourdataset_id', {
+      args = build_args(['yourproject_id', 'yourdataset_id', {
        table_reference: {
          table_id: 'foo',
        },
@@ -450,7 +471,8 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
          expiration_ms: 3600000,
        },
        require_partition_filter: true,
-      })
+      }])
+      mock(writer.client).insert_table(*args)
    end
 
    assert_raise(RuntimeError) do
@@ -519,12 +541,13 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
        skip_invalid_rows: false,
        ignore_unknown_values: false,
      }
-      mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', body) do
+      args = build_args(['yourproject_id', 'yourdataset_id', 'foo', body])
+      mock(writer.client).insert_all_table_data(*args) do
        raise Google::Apis::ClientError.new("notFound: Not found: Table yourproject_id:yourdataset_id.foo", status_code: 404)
      end.at_least(1)
      mock(writer).sleep(instance_of(Numeric)) { nil }.at_least(1)
 
-      mock(writer.client).insert_table('yourproject_id', 'yourdataset_id', {
+      args = build_args(['yourproject_id', 'yourdataset_id', {
        table_reference: {
          table_id: 'foo',
        },
@@ -542,7 +565,8 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
          'vhost',
        ],
      },
-      })
+      }])
+      mock(writer.client).insert_table(*args)
    end
 
    assert_raise(RuntimeError) do
```
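The `build_args` helper (inlined in one hunk instead of called) exists because the RR mocks record what actually crosses the client boundary, and that differs by Ruby version: the expected argument list for the mocked google-api-client call grows a trailing empty `Hash` on Ruby 2 but not on Ruby 3, presumably a consequence of the positional-hash/keyword handling the writer changes address. In sketch form:

```ruby
# The same helper, standalone: it appends the trailing {} only on Ruby 2.
def build_args(args)
  args << {} if RUBY_VERSION.to_i < 3
  args
end

p build_args(["proj", "ds", "tbl"])
# Ruby 2.x => ["proj", "ds", "tbl", {}]
# Ruby 3.x => ["proj", "ds", "tbl"]
```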
data/test/plugin/test_out_bigquery_load.rb
CHANGED

```diff
@@ -64,7 +64,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          max_bad_records: 0,
        }
      }
-    }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+    }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
      stub!.job_reference.stub!.job_id { "dummy_job_id" }
    end
  end
@@ -117,7 +117,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
        },
      },
      job_reference: {project_id: 'yourproject_id', job_id: satisfy { |x| x =~ /fluentd_job_.*/}} ,
-    }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+    }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
      stub!.job_reference.stub!.job_id { "dummy_job_id" }
    end
  end
@@ -154,7 +154,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          max_bad_records: 0,
        }
      }
-    }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+    }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
      stub!.job_reference.stub!.job_id { "dummy_job_id" }
    end
 
@@ -237,7 +237,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
          max_bad_records: 0,
        }
      }
-    }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+    }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
      stub!.job_reference.stub!.job_id { "dummy_job_id" }
    end
 
@@ -317,7 +317,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
        },
      }
    }
-    }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+    }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
      stub!.job_reference.stub!.job_id { "dummy_job_id" }
    end
  end
```
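These hunks mirror the `insert_job` change in writer.rb: the options move from a hash literal to keyword arguments. `duck_type(:write, :sync, :rewind)` is RR's structural matcher: the expectation accepts any `upload_source` responding to those three methods, so the tests do not pin down the exact Tempfile the plugin streams to BigQuery. Conceptually:

```ruby
require "tempfile"

# What the matcher checks, spelled out: any IO-ish object qualifies.
io = Tempfile.new("chunk")
p %i[write sync rewind].all? { |m| io.respond_to?(m) }     # => true
p %i[write sync rewind].all? { |m| "str".respond_to?(m) }  # => false (no #sync or #rewind)
```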
metadata
CHANGED
```diff
@@ -1,15 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-bigquery
 version: !ruby/object:Gem::Version
-  version: 2.3.0
+  version: 3.0.0
 platform: ruby
 authors:
 - Naoya Ito
 - joker1007
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-10-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rake
@@ -148,6 +148,12 @@ files:
 - Rakefile
 - fluent-plugin-bigquery.gemspec
 - gemfiles/activesupport-4.gemfile
+- integration/README.md
+- integration/create_table.sh
+- integration/dummer_insert.rb
+- integration/dummer_load.rb
+- integration/fluent.conf
+- integration/schema.json
 - lib/fluent/plugin/bigquery/errors.rb
 - lib/fluent/plugin/bigquery/helper.rb
 - lib/fluent/plugin/bigquery/schema.rb
@@ -169,7 +175,7 @@ homepage: https://github.com/kaizenplatform/fluent-plugin-bigquery
 licenses:
 - Apache-2.0
 metadata: {}
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -184,8 +190,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
-signing_key:
+rubygems_version: 3.3.7
+signing_key:
 specification_version: 4
 summary: Fluentd plugin to store data on Google BigQuery
 test_files:
```