fluent-plugin-bigquery 0.4.3 → 0.4.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +1 -0
- data/lib/fluent/plugin/bigquery/version.rb +1 -1
- data/lib/fluent/plugin/out_bigquery.rb +8 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: fbc5b978030f084d076e9123adcbf28fda708023
|
4
|
+
data.tar.gz: 7c99e2fa8af4ba7f211f5890ab9247732217b421
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: d6e90f8a800bf58514a6d919188cc900a2b1cfa1a6f6138fc1087280e9fd899a4766f6971cdbd53bb00064ff2371a62ea444af4ed833906771109a1003be9a9d
|
7
|
+
data.tar.gz: 34ccca10960087ab60d0221e84223b8c725c1a7ba7b216a4e1b830db640e9bf821534384d02c8fbdaac4b766d36096f3a52eabcdc744f4c2b08f3a9fb33a04eb
|
data/README.md
CHANGED
@@ -66,6 +66,7 @@ Because embbeded gem dependency sometimes restricts ruby environment.
|
|
66
66
|
| replace_record_key_regexp{1-10} | string | no | nil | see examples. |
|
67
67
|
| convert_hash_to_json (deprecated) | bool | no | false | If true, converts Hash value of record to JSON String. |
|
68
68
|
| insert_id_field | string | no | nil | Use key as `insert_id` of Streaming Insert API parameter. |
|
69
|
+
| add_insert_timestamp | string | no | nil | Adds a timestamp column just before sending the rows to BigQuery, so that buffering time is not taken into account. Gives a field in BigQuery which represents the insert time of the row. |
|
69
70
|
| allow_retry_insert_errors | bool | no | false | Retry to insert rows when an insertErrors occurs. There is a possibility that rows are inserted in duplicate. |
|
70
71
|
| request_timeout_sec | integer | no | nil | Bigquery API response timeout |
|
71
72
|
| request_open_timeout_sec | integer | no | 60 | Bigquery API connection, and request timeout. If you send big data to Bigquery, set large value. |
|
@@ -119,6 +119,12 @@ module Fluent
|
|
119
119
|
# prevent_duplicate_load (only load)
|
120
120
|
config_param :prevent_duplicate_load, :bool, default: false
|
121
121
|
|
122
|
+
# add_insert_timestamp (only insert)
|
123
|
+
# adds a timestamp just before sending the rows to bigquery, so that
|
124
|
+
# buffering time is not taken into account. Gives a field in bigquery
|
125
|
+
# which represents the insert time of the row.
|
126
|
+
config_param :add_insert_timestamp, :string, default: nil
|
127
|
+
|
122
128
|
config_param :method, :enum, list: [:insert, :load], default: :insert, skip_accessor: true
|
123
129
|
|
124
130
|
# allow_retry_insert_errors (only insert)
|
@@ -418,9 +424,11 @@ module Fluent
|
|
418
424
|
end
|
419
425
|
|
420
426
|
def _write(chunk, table_format, template_suffix_format)
|
427
|
+
now = Time.now.utc.strftime("%Y-%m-%d %H:%M:%S.%6N") if @add_insert_timestamp
|
421
428
|
rows = []
|
422
429
|
chunk.msgpack_each do |row_object|
|
423
430
|
# TODO: row size limit
|
431
|
+
row_object["json"][@add_insert_timestamp] = now if @add_insert_timestamp
|
424
432
|
rows << row_object.deep_symbolize_keys
|
425
433
|
end
|
426
434
|
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: fluent-plugin-bigquery
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.4.3
|
4
|
+
version: 0.4.4
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Naoya Ito
|
@@ -9,7 +9,7 @@ authors:
|
|
9
9
|
autorequire:
|
10
10
|
bindir: bin
|
11
11
|
cert_chain: []
|
12
|
-
date: 2017-04-
|
12
|
+
date: 2017-04-18 00:00:00.000000000 Z
|
13
13
|
dependencies:
|
14
14
|
- !ruby/object:Gem::Dependency
|
15
15
|
name: rake
|