fluent-plugin-bigquery 2.3.0 → 3.0.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 4209a2b6eaaf0b6f8ba315b6f5de6690e28fb47890aeea777bdb31889e4785ab
- data.tar.gz: b0983fb4fa16d72059b0e679ea4ee627d19e805779fa010888fa1723354896a5
+ metadata.gz: bc6de961c8c42fddf3d9e297e93db560d16cfc098161232c90ee64f0a5679fee
+ data.tar.gz: 5ec1fee690f77d0fa25d8e427c6ad354cdfdbfafe30a4aee4fea9a5e73db5eb3
  SHA512:
- metadata.gz: a6fc6891eda12bbc1272af7af9c4e8d48e588bc7ef65153b3a7524e39468baebb8fdb925856d1850bbda12fed5d33865faa56542503f76fdf724a18937c7d56e
- data.tar.gz: fff0599b6a838cb4ff233ba9585b558ff733eed8063c1cf36ee08aaacb9b3c2ca1bce4d13db2a51ecc72c398ba751a18b2856a6348f43738ee8ca366becdea61
+ metadata.gz: 20fc96d420611a0d12f7cb34656ae87872f24131c70039383a8f8b7d51048a7d4f277a80675f2bee834113fd13d2a9780b772b517f2140481f7fb86ce63f24e3
+ data.tar.gz: cecc8f8682761ddfb22d942b69103823cc728923f6d7043d967254ed02c754db4e792132769f7f3aa91986aa27895ac83bf16358be21e03d3c94e77c43975231
@@ -9,7 +9,6 @@ jobs:
  fail-fast: false
  matrix:
  ruby:
- - 2.6
  - 2.7
  - 3.0
  - 3.1
@@ -8,7 +8,10 @@ jobs:
  strategy:
  fail-fast: false
  matrix:
- ruby: [ '2.6', '2.7', '3.0', '3.1' ]
+ ruby:
+ - 2.7
+ - 3.0
+ - 3.1
  os:
  - windows-latest
  name: Ruby ${{ matrix.ruby }} unit testing on ${{ matrix.os }}
data/Gemfile CHANGED
@@ -2,3 +2,6 @@ source 'https://rubygems.org'

  # Specify your gem's dependencies in fluent-plugin-bigquery.gemspec
  gemspec
+
+ gem "oj"
+ gem "dummer"
data/README.md CHANGED
@@ -1,13 +1,5 @@
  # fluent-plugin-bigquery

- ## Notice
-
- We will transfer fluent-plugin-bigquery repository to [fluent-plugins-nursery](https://github.com/fluent-plugins-nursery) organization.
- It does not change maintenance plan.
- The main purpose is that it solves mismatch between maintainers and current organization.
-
- ---
-
  [Fluentd](http://fluentd.org) output plugin to load/insert data into Google BigQuery.

  - **Plugin type**: Output
@@ -31,6 +23,7 @@ OAuth flow for installed applications.
  | v0.4.x | 0.12.x | 2.0 or later |
  | v1.x.x | 0.14.x or later | 2.2 or later |
  | v2.x.x | 0.14.x or later | 2.3 or later |
+ | v3.x.x | 1.x or later | 2.7 or later |

  ## With docker image
  If you use official alpine based fluentd docker image (https://github.com/fluent/fluentd-docker-image),
@@ -388,10 +381,10 @@ format to construct table ids.
  Table ids are formatted at runtime
  using the chunk key time.

- see. http://docs.fluentd.org/v0.14/articles/output-plugin-overview
+ see. https://docs.fluentd.org/configuration/buffer-section

  For example, with the configuration below,
- data is inserted into tables `accesslog_2014_08`, `accesslog_2014_09` and so on.
+ data is inserted into tables `accesslog_2014_08_02`, `accesslog_2014_08_03` and so on.

  ```apache
  <match dummy>
@@ -401,7 +394,7 @@ data is inserted into tables `accesslog_2014_08`, `accesslog_2014_09` and so on.

  project yourproject_id
  dataset yourdataset_id
- table accesslog_%Y_%m
+ table accesslog_%Y_%m_%d

  <buffer time>
  timekey 1d
@@ -410,6 +403,8 @@ data is inserted into tables `accesslog_2014_08`, `accesslog_2014_09` and so on.
  </match>
  ```

+ **NOTE: In current fluentd (v1.15.x), the maximum unit supported by strftime formatting is the granularity of days.**
+
  #### record attribute formatting
  The format can be suffixed with attribute name.

@@ -0,0 +1,14 @@
+ # Requirements
+
+ Set the following environment variables:
+
+ - GOOGLE_APPLICATION_CREDENTIALS (json key path)
+ - PROJECT_NAME
+ - DATASET_NAME
+ - TABLE_NAME
+
+ # How to use
+
+ 1. Execute `create_table.sh`
+ 1. `bundle exec fluentd -c fluent.conf`
+ 1. `bundle exec dummer -c dummer_insert.rb` or `bundle exec dummer -c dummer_load.rb`
@@ -0,0 +1,4 @@
+ #!/bin/sh
+
+ set -eux
+ bq mk -t --project_id=${PROJECT_NAME} --schema=$(dirname $0)/schema.json ${DATASET_NAME}.${TABLE_NAME}
@@ -0,0 +1,12 @@
+ require "time"
+
+ configure "insert" do
+ host "localhost"
+ port 24224
+ rate 100
+ tag type: :string, any: %w(insert_data)
+ field :id, type: :integer, countup: true
+ field :string_field, type: :string, any: %w(str1 str2 str3 str4)
+ field :timestamp_field, type: :string, value: Time.now.iso8601
+ field :date, type: :string, value: Time.now.strftime("%Y-%m-%d")
+ end
@@ -0,0 +1,12 @@
+ require "time"
+
+ configure "load" do
+ host "localhost"
+ port 24224
+ rate 100
+ tag type: :string, any: %w(load_data)
+ field :id, type: :integer, countup: true
+ field :string_field, type: :string, any: %w(str1 str2 str3 str4)
+ field :timestamp_field, type: :string, value: Time.now.iso8601
+ field :date, type: :string, value: Time.now.strftime("%Y-%m-%d")
+ end
@@ -0,0 +1,88 @@
+ <source>
+ @type forward
+ port 24224
+ bind 0.0.0.0
+ </source>
+
+ <match insert_data>
+ @id bigquery-insert-integration
+ @type bigquery_insert
+
+ allow_retry_insert_errors true
+
+ auth_method json_key
+ json_key "#{ENV["GOOGLE_APPLICATION_CREDENTIALS"]}"
+
+ <buffer>
+ @type file
+
+ chunk_limit_size 1m
+ chunk_limit_records 1500
+ total_limit_size 1g
+ path ./log/bigquery-insert-integration
+
+ flush_interval 30
+ flush_thread_count 4
+ flush_at_shutdown true
+
+ retry_max_times 14
+ retry_max_interval 30m
+ </buffer>
+
+ request_open_timeout_sec 2m
+
+ slow_flush_log_threshold 30.0
+
+ project "#{ENV["PROJECT_NAME"]}"
+ dataset "#{ENV["DATASET_NAME"]}"
+ table "#{ENV["TABLE_NAME"]}"
+ auto_create_table false
+ fetch_schema true
+ fetch_schema_table "#{ENV["TABLE_NAME"]}"
+
+ insert_id_field id
+
+ <secondary>
+ @type file
+ path ./log/bigquery-insert-integration.errors
+ </secondary>
+ </match>
+
+ <match load_data>
+ @id bigquery-load-integration
+ @type bigquery_load
+
+ auth_method json_key
+ json_key "#{ENV["GOOGLE_APPLICATION_CREDENTIALS"]}"
+
+ <buffer>
+ @type file
+
+ chunk_limit_size 1m
+ total_limit_size 1g
+ path ./log/bigquery-load-integration
+
+ flush_interval 120
+ flush_thread_count 4
+ flush_at_shutdown true
+
+ retry_max_times 14
+ retry_max_interval 30m
+ </buffer>
+
+ request_open_timeout_sec 2m
+
+ slow_flush_log_threshold 300.0
+
+ project "#{ENV["PROJECT_NAME"]}"
+ dataset "#{ENV["DATASET_NAME"]}"
+ table "#{ENV["TABLE_NAME"]}"
+ auto_create_table false
+ fetch_schema true
+ fetch_schema_table "#{ENV["TABLE_NAME"]}"
+
+ <secondary>
+ @type file
+ path ./log/bigquery-load-integration.errors
+ </secondary>
+ </match>
@@ -0,0 +1,22 @@
+ [
+ {
+ "name": "id",
+ "type": "INTEGER",
+ "mode": "REQUIRED"
+ },
+ {
+ "name": "string_field",
+ "type": "STRING",
+ "mode": "NULLABLE"
+ },
+ {
+ "name": "timestamp_field",
+ "type": "TIMESTAMP",
+ "mode": "NULLABLE"
+ },
+ {
+ "name": "date",
+ "type": "DATE",
+ "mode": "REQUIRED"
+ }
+ ]
@@ -1,5 +1,5 @@
  module Fluent
  module BigQueryPlugin
- VERSION = "2.3.0".freeze
+ VERSION = "3.0.0".freeze
  end
  end
@@ -1,7 +1,7 @@
  module Fluent
  module BigQuery
  class Writer
- def initialize(log, auth_method, options = {})
+ def initialize(log, auth_method, **options)
  @auth_method = auth_method
  @scope = "https://www.googleapis.com/auth/bigquery"
  @options = options
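The `Writer` constructor now collects its options as keyword arguments instead of a trailing options Hash, in line with Ruby 3's separation of positional and keyword arguments (and with Ruby 2.6 being dropped from CI above). A minimal sketch of the calling convention this enables, with illustrative names rather than the plugin's real code:

```ruby
require "logger"

# Sketch only: a stand-in Writer showing `**options` collection under Ruby 3.
class Writer
  def initialize(log, auth_method, **options)
    @log = log
    @auth_method = auth_method
    @options = options # keyword arguments arrive here as a plain Hash
  end
end

# Call sites pass keywords directly instead of building a literal Hash,
# mirroring the out_bigquery_base change further down in this diff.
Writer.new(Logger.new($stdout), :json_key,
           json_key: "/path/to/key.json", timeout_sec: 60)
```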
@@ -37,7 +37,7 @@ module Fluent
  definition.merge!(time_partitioning: time_partitioning) if time_partitioning
  definition.merge!(require_partition_filter: require_partition_filter) if require_partition_filter
  definition.merge!(clustering: clustering) if clustering
- client.insert_table(project, dataset, definition, {})
+ client.insert_table(project, dataset, definition, **{})
  log.debug "create table", project_id: project, dataset: dataset, table: table_id
  rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
  message = e.message
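`**{}` splats an empty Hash into zero keyword arguments, whereas a literal `{}` in the same position is read as an extra positional argument on Ruby 3 and rejected. A tiny illustration with a hypothetical method standing in for the google-api-client call:

```ruby
# Hypothetical stand-in for a generated API method that accepts keyword options.
def insert_table(project, dataset, definition, **opts)
  [project, dataset, definition, opts]
end

insert_table("p", "d", { table_reference: { table_id: "t" } }, **{})
# => ["p", "d", {:table_reference=>{:table_id=>"t"}}, {}]  (no keywords passed)

# insert_table("p", "d", { table_reference: { table_id: "t" } }, {})
# raises ArgumentError on Ruby 3: the trailing {} counts as a fourth positional argument.
```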
@@ -83,7 +83,7 @@ module Fluent
  if @options[:auto_create_table]
  res = insert_all_table_data_with_create_table(project, dataset, table_id, body, schema)
  else
- res = client.insert_all_table_data(project, dataset, table_id, body, {})
+ res = client.insert_all_table_data(project, dataset, table_id, body, **{})
  end
  log.debug "insert rows", project_id: project, dataset: dataset, table: table_id, count: rows.size

@@ -158,10 +158,8 @@ module Fluent
  res = client.insert_job(
  project,
  configuration,
- {
- upload_source: upload_source,
- content_type: "application/octet-stream",
- }
+ upload_source: upload_source,
+ content_type: "application/octet-stream",
  )
  JobReference.new(chunk_id, chunk_id_hex, project, dataset, table_id, res.job_reference.job_id)
  rescue Google::Apis::ServerError, Google::Apis::ClientError, Google::Apis::AuthorizationError => e
@@ -343,7 +341,7 @@ module Fluent

  def insert_all_table_data_with_create_table(project, dataset, table_id, body, schema)
  try_count ||= 1
- res = client.insert_all_table_data(project, dataset, table_id, body, {})
+ res = client.insert_all_table_data(project, dataset, table_id, body, **{})
  rescue Google::Apis::ClientError => e
  if e.status_code == 404 && /Not Found: Table/i =~ e.message
  if try_count == 1
@@ -131,7 +131,7 @@ module Fluent
  end

  def writer
- @writer ||= Fluent::BigQuery::Writer.new(@log, @auth_method, {
+ @writer ||= Fluent::BigQuery::Writer.new(@log, @auth_method,
  private_key_path: @private_key_path, private_key_passphrase: @private_key_passphrase,
  email: @email,
  json_key: @json_key,
@@ -150,10 +150,15 @@ module Fluent
  clustering_fields: @clustering_fields,
  timeout_sec: @request_timeout_sec,
  open_timeout_sec: @request_open_timeout_sec,
- })
+ )
  end

  def format(tag, time, record)
+ if record.nil?
+ log.warn("nil record detected. corrupted chunks? tag=#{tag}, time=#{time}")
+ return
+ end
+
  record = inject_values_to_record(tag, time, record)

  meta = metadata(tag, time, record)
@@ -96,6 +96,8 @@ module Fluent
  schema = get_schema(project, dataset, metadata)

  insert(project, dataset, table_id, rows, schema, template_suffix)
+ rescue MultiJson::ParseError => e
+ raise Fluent::UnrecoverableError.new(e)
  end

  def insert(project, dataset, table_id, rows, schema, template_suffix)
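Re-raising a JSON parse failure as `Fluent::UnrecoverableError` stops Fluentd from retrying a chunk that will never become parseable; as far as I understand, Fluentd then backs the chunk up or hands it to the configured `<secondary>` instead. A rough sketch of the pattern, using a stand-in error class so it runs without fluentd installed:

```ruby
require "multi_json"

# Stand-in for Fluent::UnrecoverableError, purely for this sketch.
class UnrecoverableError < StandardError; end

# Decode newline-delimited JSON rows from a buffer chunk; a corrupted chunk
# cannot be fixed by retrying, so surface it as an unrecoverable error.
def decode_rows(lines)
  lines.map { |line| MultiJson.load(line) }
rescue MultiJson::ParseError => e
  raise UnrecoverableError, e.message
end

p decode_rows(['{"id": 1}', '{"id": 2}'])  # => [{"id"=>1}, {"id"=>2}]
# decode_rows(["not json"])                # raises UnrecoverableError
```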
@@ -5,6 +5,17 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  Fluent::Test.setup
  end

+ def is_ruby2?
+ RUBY_VERSION.to_i < 3
+ end
+
+ def build_args(args)
+ if is_ruby2?
+ args << {}
+ end
+ args
+ end
+
  SCHEMA_PATH = File.join(File.dirname(__FILE__), "testdata", "apache.schema")

  CONFIG = %[
@@ -123,11 +134,12 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  driver = create_driver

  stub_writer do |writer|
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', {
+ args = build_args(['yourproject_id', 'yourdataset_id', 'foo', {
  rows: [{json: hash_including(entry)}],
  skip_invalid_rows: false,
  ignore_unknown_values: false
- }, {}) do
+ }])
+ mock(writer.client).insert_all_table_data(*args) do
  s = stub!
  s.insert_errors { nil }
  s
@@ -188,11 +200,12 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase

  entry = {a: "b"}
  stub_writer do |writer|
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', {
+ args = build_args(['yourproject_id', 'yourdataset_id', 'foo', {
  rows: [{json: hash_including(entry)}],
  skip_invalid_rows: false,
  ignore_unknown_values: false
- }, {}) do
+ }])
+ mock(writer.client).insert_all_table_data(*args) do
  ex = Google::Apis::ServerError.new("error", status_code: d["status_code"])
  raise ex
  end
@@ -247,11 +260,12 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase

  entry = {a: "b"}
  stub_writer do |writer|
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', {
+ args = build_args(['yourproject_id', 'yourdataset_id', 'foo', {
  rows: [{json: hash_including(entry)}],
  skip_invalid_rows: false,
  ignore_unknown_values: false
- }, {}) do
+ }])
+ mock(writer.client).insert_all_table_data(*args) do
  ex = Google::Apis::ServerError.new("error", status_code: 501)
  def ex.reason
  "invalid"
@@ -269,7 +283,7 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  assert_raise Fluent::BigQuery::UnRetryableError do
  driver.instance.write(chunk)
  end
- assert_in_delta driver.instance.retry.secondary_transition_at , Time.now, 0.1
+ assert_in_delta driver.instance.retry.secondary_transition_at , Time.now, 0.2
  driver.instance_shutdown
  end

@@ -292,11 +306,15 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  CONFIG

  stub_writer do |writer|
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo_2014_08_20', {
- rows: [entry[0]],
- skip_invalid_rows: false,
- ignore_unknown_values: false
- }, {}) { stub!.insert_errors { nil } }
+ args = ['yourproject_id', 'yourdataset_id', 'foo_2014_08_20', {
+ rows: [entry[0]],
+ skip_invalid_rows: false,
+ ignore_unknown_values: false
+ }]
+ if RUBY_VERSION.to_i < 3
+ args << {}
+ end
+ mock(writer.client).insert_all_table_data(*args) { stub!.insert_errors { nil } }
  end

  driver.run do
@@ -354,19 +372,21 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  skip_invalid_rows: false,
  ignore_unknown_values: false,
  }
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', body, {}) do
+ args = build_args(['yourproject_id', 'yourdataset_id', 'foo', body])
+ mock(writer.client).insert_all_table_data(*args) do
  raise Google::Apis::ClientError.new("notFound: Not found: Table yourproject_id:yourdataset_id.foo", status_code: 404)
  end.at_least(1)
  mock(writer).sleep(instance_of(Numeric)) { nil }.at_least(1)

- mock(writer.client).insert_table('yourproject_id', 'yourdataset_id', {
+ args = build_args(['yourproject_id', 'yourdataset_id', {
  table_reference: {
  table_id: 'foo',
  },
  schema: {
  fields: schema_fields,
  },
- }, {})
+ }])
+ mock(writer.client).insert_table(*args)
  end

  assert_raise(RuntimeError) do
@@ -432,12 +452,13 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  skip_invalid_rows: false,
  ignore_unknown_values: false,
  }
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', body, {}) do
+ args = build_args(['yourproject_id', 'yourdataset_id', 'foo', body])
+ mock(writer.client).insert_all_table_data(*args) do
  raise Google::Apis::ClientError.new("notFound: Not found: Table yourproject_id:yourdataset_id.foo", status_code: 404)
  end.at_least(1)
  mock(writer).sleep(instance_of(Numeric)) { nil }.at_least(1)

- mock(writer.client).insert_table('yourproject_id', 'yourdataset_id', {
+ args = build_args(['yourproject_id', 'yourdataset_id', {
  table_reference: {
  table_id: 'foo',
  },
@@ -450,7 +471,8 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  expiration_ms: 3600000,
  },
  require_partition_filter: true,
- }, {})
+ }])
+ mock(writer.client).insert_table(*args)
  end

  assert_raise(RuntimeError) do
@@ -519,12 +541,13 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  skip_invalid_rows: false,
  ignore_unknown_values: false,
  }
- mock(writer.client).insert_all_table_data('yourproject_id', 'yourdataset_id', 'foo', body, {}) do
+ args = build_args(['yourproject_id', 'yourdataset_id', 'foo', body])
+ mock(writer.client).insert_all_table_data(*args) do
  raise Google::Apis::ClientError.new("notFound: Not found: Table yourproject_id:yourdataset_id.foo", status_code: 404)
  end.at_least(1)
  mock(writer).sleep(instance_of(Numeric)) { nil }.at_least(1)

- mock(writer.client).insert_table('yourproject_id', 'yourdataset_id', {
+ args = build_args(['yourproject_id', 'yourdataset_id', {
  table_reference: {
  table_id: 'foo',
  },
@@ -542,7 +565,8 @@ class BigQueryInsertOutputTest < Test::Unit::TestCase
  'vhost',
  ],
  },
- }, {})
+ }])
+ mock(writer.client).insert_table(*args)
  end

  assert_raise(RuntimeError) do
@@ -64,7 +64,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
  max_bad_records: 0,
  }
  }
- }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+ }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
  stub!.job_reference.stub!.job_id { "dummy_job_id" }
  end
  end
@@ -117,7 +117,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
  },
  },
  job_reference: {project_id: 'yourproject_id', job_id: satisfy { |x| x =~ /fluentd_job_.*/}} ,
- }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+ }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
  stub!.job_reference.stub!.job_id { "dummy_job_id" }
  end
  end
@@ -154,7 +154,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
  max_bad_records: 0,
  }
  }
- }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+ }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
  stub!.job_reference.stub!.job_id { "dummy_job_id" }
  end

@@ -237,7 +237,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
  max_bad_records: 0,
  }
  }
- }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+ }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
  stub!.job_reference.stub!.job_id { "dummy_job_id" }
  end

@@ -317,7 +317,7 @@ class BigQueryLoadOutputTest < Test::Unit::TestCase
  },
  }
  }
- }, {upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream"}) do
+ }, upload_source: duck_type(:write, :sync, :rewind), content_type: "application/octet-stream") do
  stub!.job_reference.stub!.job_id { "dummy_job_id" }
  end
  end
metadata CHANGED
@@ -1,15 +1,15 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-bigquery
  version: !ruby/object:Gem::Version
- version: 2.3.0
+ version: 3.0.0
  platform: ruby
  authors:
  - Naoya Ito
  - joker1007
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-02-08 00:00:00.000000000 Z
+ date: 2022-10-05 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake
@@ -148,6 +148,12 @@ files:
  - Rakefile
  - fluent-plugin-bigquery.gemspec
  - gemfiles/activesupport-4.gemfile
+ - integration/README.md
+ - integration/create_table.sh
+ - integration/dummer_insert.rb
+ - integration/dummer_load.rb
+ - integration/fluent.conf
+ - integration/schema.json
  - lib/fluent/plugin/bigquery/errors.rb
  - lib/fluent/plugin/bigquery/helper.rb
  - lib/fluent/plugin/bigquery/schema.rb
@@ -169,7 +175,7 @@ homepage: https://github.com/kaizenplatform/fluent-plugin-bigquery
  licenses:
  - Apache-2.0
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
@@ -184,8 +190,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.1.4
- signing_key:
+ rubygems_version: 3.3.7
+ signing_key:
  specification_version: 4
  summary: Fluentd plugin to store data on Google BigQuery
  test_files: