fluent-plugin-bigquery 0.4.3 → 0.4.4

This diff compares the contents of two publicly released versions of the package as they appear in their public registries, and is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 02a2cc7715e7a0815c97f43634b6575fe9520156
- data.tar.gz: fd1a8e9e4aeec1e3e687268391a5fb95a26cf6ec
+ metadata.gz: fbc5b978030f084d076e9123adcbf28fda708023
+ data.tar.gz: 7c99e2fa8af4ba7f211f5890ab9247732217b421
  SHA512:
- metadata.gz: 98120b81fbf0cfa4fc625d102b76d774a8cd0c154ce4526ce4fa86e25fe59cfb88f0698fe0d9025ff313a7165ba0cfd12059fbbc58f03fd39fc33a8df723a1d8
- data.tar.gz: 52b234401a2e38712741e5de56b09d0bc07dcb21f1e5c044d7562087857a1a72e5c42c8acacd0a6a03470d1b2c22e011f5866a7a6276723250930eaec295ecf1
+ metadata.gz: d6e90f8a800bf58514a6d919188cc900a2b1cfa1a6f6138fc1087280e9fd899a4766f6971cdbd53bb00064ff2371a62ea444af4ed833906771109a1003be9a9d
+ data.tar.gz: 34ccca10960087ab60d0221e84223b8c725c1a7ba7b216a4e1b830db640e9bf821534384d02c8fbdaac4b766d36096f3a52eabcdc744f4c2b08f3a9fb33a04eb
data/README.md CHANGED
@@ -66,6 +66,7 @@ Because embbeded gem dependency sometimes restricts ruby environment.
  | replace_record_key_regexp{1-10} | string | no | nil | see examples. |
  | convert_hash_to_json (deprecated) | bool | no | false | If true, converts Hash value of record to JSON String. |
  | insert_id_field | string | no | nil | Use key as `insert_id` of Streaming Insert API parameter. |
+ | add_insert_timestamp | string | no | nil | Adds a timestamp column just before sending the rows to BigQuery, so that buffering time is not taken into account. Gives a field in BigQuery which represents the insert time of the row. |
  | allow_retry_insert_errors | bool | no | false | Retry to insert rows when an insertErrors occurs. There is a possibility that rows are inserted in duplicate. |
  | request_timeout_sec | integer | no | nil | Bigquery API response timeout |
  | request_open_timeout_sec | integer | no | 60 | Bigquery API connection, and request timeout. If you send big data to Bigquery, set large value. |
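
The new `add_insert_timestamp` option documented above names a column that the plugin fills with the current UTC time immediately before the streaming insert, so the value reflects insert time rather than event time or time spent in the buffer. Below is a minimal sketch of how it might be enabled in a Fluentd match section, assuming a typical `out_bigquery` streaming-insert setup; the project, dataset, table, credential path, and the `insert_timestamp` column name are placeholders, and the named column would presumably need to exist as a TIMESTAMP field in the table schema.

```
<match dummy>
  @type bigquery
  method insert

  auth_method json_key
  # placeholder credential path
  json_key /path/to/key.json

  # placeholder project/dataset/table
  project yourproject_id
  dataset yourdataset_id
  table   tablename
  fetch_schema true

  # New in 0.4.4: stamp every row with its insert time (UTC) in this column.
  add_insert_timestamp insert_timestamp
</match>
```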
data/lib/fluent/plugin/bigquery/version.rb CHANGED
@@ -1,5 +1,5 @@
  module Fluent
  module BigQueryPlugin
- VERSION = "0.4.3".freeze
+ VERSION = "0.4.4".freeze
  end
  end
data/lib/fluent/plugin/out_bigquery.rb CHANGED
@@ -119,6 +119,12 @@ module Fluent
  # prevent_duplicate_load (only load)
  config_param :prevent_duplicate_load, :bool, default: false

+ # add_insert_timestamp (only insert)
+ # adds a timestamp just before sending the rows to bigquery, so that
+ # buffering time is not taken into account. Gives a field in bigquery
+ # which represents the insert time of the row.
+ config_param :add_insert_timestamp, :string, default: nil
+
  config_param :method, :enum, list: [:insert, :load], default: :insert, skip_accessor: true

  # allow_retry_insert_errors (only insert)
@@ -418,9 +424,11 @@ module Fluent
  end

  def _write(chunk, table_format, template_suffix_format)
+ now = Time.now.utc.strftime("%Y-%m-%d %H:%M:%S.%6N") if @add_insert_timestamp
  rows = []
  chunk.msgpack_each do |row_object|
  # TODO: row size limit
+ row_object["json"][@add_insert_timestamp] = now if @add_insert_timestamp
  rows << row_object.deep_symbolize_keys
  end
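
The two `+` lines above are the whole feature at runtime: one UTC timestamp, formatted with microsecond precision for BigQuery's TIMESTAMP type, is computed per buffer chunk and copied into every row's `"json"` payload before the rows are symbolized and handed to the insert request. The following standalone Ruby sketch reproduces that behaviour; the helper name and sample rows are hypothetical, not the plugin's own code.

```ruby
# Stamp each buffered row with a single per-chunk insert timestamp,
# mirroring the logic added to _write in 0.4.4.
def stamp_rows(row_objects, add_insert_timestamp)
  # One timestamp per chunk, so all rows flushed together share the same value.
  now = Time.now.utc.strftime("%Y-%m-%d %H:%M:%S.%6N") if add_insert_timestamp

  row_objects.each do |row_object|
    row_object["json"][add_insert_timestamp] = now if add_insert_timestamp
  end
end

# Example: two buffered rows receive the same insert time.
rows = [
  { "json" => { "message" => "a" } },
  { "json" => { "message" => "b" } },
]
stamp_rows(rows, "insert_timestamp")
p rows
# Both rows now carry an "insert_timestamp" value such as "2017-04-18 12:34:56.789012".
```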
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-bigquery
  version: !ruby/object:Gem::Version
- version: 0.4.3
+ version: 0.4.4
  platform: ruby
  authors:
  - Naoya Ito
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-04-10 00:00:00.000000000 Z
+ date: 2017-04-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake