fluentd 1.16.5 → 1.17.1
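If you are applying this upgrade with Bundler, bumping the pin and updating the lockfile is all that is needed (the version numbers come from the header above; the surrounding Gemfile content is illustrative):

```ruby
# Gemfile — move the fluentd pin from the old release to the new one
gem 'fluentd', '1.17.1'   # previously '1.16.5'
```

followed by `bundle update fluentd`. The files changed between the two releases, with lines added and removed per file: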
- checksums.yaml +4 -4
- data/CHANGELOG.md +88 -0
- data/README.md +2 -1
- data/Rakefile +1 -1
- data/SECURITY.md +2 -2
- data/fluent.conf +14 -14
- data/lib/fluent/command/binlog_reader.rb +1 -1
- data/lib/fluent/command/cap_ctl.rb +4 -4
- data/lib/fluent/compat/call_super_mixin.rb +3 -3
- data/lib/fluent/compat/propagate_default.rb +4 -4
- data/lib/fluent/config/configure_proxy.rb +2 -2
- data/lib/fluent/config/types.rb +1 -1
- data/lib/fluent/config/yaml_parser/parser.rb +4 -0
- data/lib/fluent/configurable.rb +2 -2
- data/lib/fluent/counter/mutex_hash.rb +1 -1
- data/lib/fluent/fluent_log_event_router.rb +0 -2
- data/lib/fluent/log/console_adapter.rb +4 -2
- data/lib/fluent/plugin/buf_file.rb +1 -1
- data/lib/fluent/plugin/buffer/file_chunk.rb +1 -1
- data/lib/fluent/plugin/buffer/file_single_chunk.rb +2 -3
- data/lib/fluent/plugin/filter_parser.rb +26 -8
- data/lib/fluent/plugin/in_exec.rb +14 -2
- data/lib/fluent/plugin/in_http.rb +19 -54
- data/lib/fluent/plugin/in_sample.rb +13 -7
- data/lib/fluent/plugin/in_tail.rb +99 -25
- data/lib/fluent/plugin/out_copy.rb +1 -1
- data/lib/fluent/plugin/out_file.rb +8 -0
- data/lib/fluent/plugin/out_http.rb +137 -13
- data/lib/fluent/plugin/owned_by_mixin.rb +0 -1
- data/lib/fluent/plugin/parser_json.rb +26 -17
- data/lib/fluent/plugin/parser_msgpack.rb +24 -3
- data/lib/fluent/plugin_helper/http_server/server.rb +1 -1
- data/lib/fluent/plugin_helper/metrics.rb +2 -2
- data/lib/fluent/registry.rb +6 -6
- data/lib/fluent/test/output_test.rb +1 -1
- data/lib/fluent/unique_id.rb +1 -1
- data/lib/fluent/version.rb +1 -1
- data/templates/new_gem/fluent-plugin.gemspec.erb +6 -5
- metadata +109 -459
- data/.github/ISSUE_TEMPLATE/bug_report.yml +0 -71
- data/.github/ISSUE_TEMPLATE/config.yml +0 -5
- data/.github/ISSUE_TEMPLATE/feature_request.yml +0 -39
- data/.github/ISSUE_TEMPLATE.md +0 -17
- data/.github/PULL_REQUEST_TEMPLATE.md +0 -14
- data/.github/workflows/stale-actions.yml +0 -24
- data/.github/workflows/test.yml +0 -32
- data/.gitignore +0 -30
- data/Gemfile +0 -9
- data/fluentd.gemspec +0 -54
- data/test/command/test_binlog_reader.rb +0 -362
- data/test/command/test_ca_generate.rb +0 -70
- data/test/command/test_cap_ctl.rb +0 -100
- data/test/command/test_cat.rb +0 -128
- data/test/command/test_ctl.rb +0 -56
- data/test/command/test_fluentd.rb +0 -1291
- data/test/command/test_plugin_config_formatter.rb +0 -397
- data/test/command/test_plugin_generator.rb +0 -109
- data/test/compat/test_calls_super.rb +0 -166
- data/test/compat/test_parser.rb +0 -92
- data/test/config/assertions.rb +0 -42
- data/test/config/test_config_parser.rb +0 -551
- data/test/config/test_configurable.rb +0 -1784
- data/test/config/test_configure_proxy.rb +0 -604
- data/test/config/test_dsl.rb +0 -415
- data/test/config/test_element.rb +0 -518
- data/test/config/test_literal_parser.rb +0 -309
- data/test/config/test_plugin_configuration.rb +0 -56
- data/test/config/test_section.rb +0 -191
- data/test/config/test_system_config.rb +0 -195
- data/test/config/test_types.rb +0 -408
- data/test/counter/test_client.rb +0 -563
- data/test/counter/test_error.rb +0 -44
- data/test/counter/test_mutex_hash.rb +0 -179
- data/test/counter/test_server.rb +0 -589
- data/test/counter/test_store.rb +0 -258
- data/test/counter/test_validator.rb +0 -137
- data/test/helper.rb +0 -155
- data/test/helpers/fuzzy_assert.rb +0 -89
- data/test/helpers/process_extenstion.rb +0 -33
- data/test/log/test_console_adapter.rb +0 -110
- data/test/plugin/data/2010/01/20100102-030405.log +0 -0
- data/test/plugin/data/2010/01/20100102-030406.log +0 -0
- data/test/plugin/data/2010/01/20100102.log +0 -0
- data/test/plugin/data/log/bar +0 -0
- data/test/plugin/data/log/foo/bar.log +0 -0
- data/test/plugin/data/log/foo/bar2 +0 -0
- data/test/plugin/data/log/test.log +0 -0
- data/test/plugin/data/sd_file/config +0 -11
- data/test/plugin/data/sd_file/config.json +0 -17
- data/test/plugin/data/sd_file/config.yaml +0 -11
- data/test/plugin/data/sd_file/config.yml +0 -11
- data/test/plugin/data/sd_file/invalid_config.yml +0 -7
- data/test/plugin/in_tail/test_fifo.rb +0 -121
- data/test/plugin/in_tail/test_io_handler.rb +0 -150
- data/test/plugin/in_tail/test_position_file.rb +0 -346
- data/test/plugin/out_forward/test_ack_handler.rb +0 -140
- data/test/plugin/out_forward/test_connection_manager.rb +0 -145
- data/test/plugin/out_forward/test_handshake_protocol.rb +0 -112
- data/test/plugin/out_forward/test_load_balancer.rb +0 -106
- data/test/plugin/out_forward/test_socket_cache.rb +0 -174
- data/test/plugin/test_bare_output.rb +0 -131
- data/test/plugin/test_base.rb +0 -247
- data/test/plugin/test_buf_file.rb +0 -1314
- data/test/plugin/test_buf_file_single.rb +0 -898
- data/test/plugin/test_buf_memory.rb +0 -42
- data/test/plugin/test_buffer.rb +0 -1493
- data/test/plugin/test_buffer_chunk.rb +0 -209
- data/test/plugin/test_buffer_file_chunk.rb +0 -871
- data/test/plugin/test_buffer_file_single_chunk.rb +0 -611
- data/test/plugin/test_buffer_memory_chunk.rb +0 -339
- data/test/plugin/test_compressable.rb +0 -87
- data/test/plugin/test_file_util.rb +0 -96
- data/test/plugin/test_filter.rb +0 -368
- data/test/plugin/test_filter_grep.rb +0 -697
- data/test/plugin/test_filter_parser.rb +0 -731
- data/test/plugin/test_filter_record_transformer.rb +0 -577
- data/test/plugin/test_filter_stdout.rb +0 -207
- data/test/plugin/test_formatter_csv.rb +0 -136
- data/test/plugin/test_formatter_hash.rb +0 -38
- data/test/plugin/test_formatter_json.rb +0 -61
- data/test/plugin/test_formatter_ltsv.rb +0 -70
- data/test/plugin/test_formatter_msgpack.rb +0 -28
- data/test/plugin/test_formatter_out_file.rb +0 -116
- data/test/plugin/test_formatter_single_value.rb +0 -44
- data/test/plugin/test_formatter_tsv.rb +0 -76
- data/test/plugin/test_in_debug_agent.rb +0 -49
- data/test/plugin/test_in_exec.rb +0 -261
- data/test/plugin/test_in_forward.rb +0 -1178
- data/test/plugin/test_in_gc_stat.rb +0 -62
- data/test/plugin/test_in_http.rb +0 -1102
- data/test/plugin/test_in_monitor_agent.rb +0 -922
- data/test/plugin/test_in_object_space.rb +0 -66
- data/test/plugin/test_in_sample.rb +0 -190
- data/test/plugin/test_in_syslog.rb +0 -505
- data/test/plugin/test_in_tail.rb +0 -3288
- data/test/plugin/test_in_tcp.rb +0 -328
- data/test/plugin/test_in_udp.rb +0 -296
- data/test/plugin/test_in_unix.rb +0 -181
- data/test/plugin/test_input.rb +0 -137
- data/test/plugin/test_metadata.rb +0 -89
- data/test/plugin/test_metrics.rb +0 -294
- data/test/plugin/test_metrics_local.rb +0 -96
- data/test/plugin/test_multi_output.rb +0 -204
- data/test/plugin/test_out_copy.rb +0 -308
- data/test/plugin/test_out_exec.rb +0 -312
- data/test/plugin/test_out_exec_filter.rb +0 -606
- data/test/plugin/test_out_file.rb +0 -1038
- data/test/plugin/test_out_forward.rb +0 -1349
- data/test/plugin/test_out_http.rb +0 -429
- data/test/plugin/test_out_null.rb +0 -105
- data/test/plugin/test_out_relabel.rb +0 -28
- data/test/plugin/test_out_roundrobin.rb +0 -146
- data/test/plugin/test_out_secondary_file.rb +0 -458
- data/test/plugin/test_out_stdout.rb +0 -205
- data/test/plugin/test_out_stream.rb +0 -103
- data/test/plugin/test_output.rb +0 -1334
- data/test/plugin/test_output_as_buffered.rb +0 -2024
- data/test/plugin/test_output_as_buffered_backup.rb +0 -363
- data/test/plugin/test_output_as_buffered_compress.rb +0 -179
- data/test/plugin/test_output_as_buffered_overflow.rb +0 -250
- data/test/plugin/test_output_as_buffered_retries.rb +0 -966
- data/test/plugin/test_output_as_buffered_secondary.rb +0 -882
- data/test/plugin/test_output_as_standard.rb +0 -374
- data/test/plugin/test_owned_by.rb +0 -35
- data/test/plugin/test_parser.rb +0 -399
- data/test/plugin/test_parser_apache.rb +0 -42
- data/test/plugin/test_parser_apache2.rb +0 -47
- data/test/plugin/test_parser_apache_error.rb +0 -45
- data/test/plugin/test_parser_csv.rb +0 -200
- data/test/plugin/test_parser_json.rb +0 -138
- data/test/plugin/test_parser_labeled_tsv.rb +0 -160
- data/test/plugin/test_parser_multiline.rb +0 -111
- data/test/plugin/test_parser_nginx.rb +0 -88
- data/test/plugin/test_parser_none.rb +0 -52
- data/test/plugin/test_parser_regexp.rb +0 -284
- data/test/plugin/test_parser_syslog.rb +0 -650
- data/test/plugin/test_parser_tsv.rb +0 -122
- data/test/plugin/test_sd_file.rb +0 -228
- data/test/plugin/test_sd_srv.rb +0 -230
- data/test/plugin/test_storage.rb +0 -167
- data/test/plugin/test_storage_local.rb +0 -335
- data/test/plugin/test_string_util.rb +0 -26
- data/test/plugin_helper/data/cert/cert-key.pem +0 -27
- data/test/plugin_helper/data/cert/cert-with-CRLF.pem +0 -19
- data/test/plugin_helper/data/cert/cert-with-no-newline.pem +0 -19
- data/test/plugin_helper/data/cert/cert.pem +0 -19
- data/test/plugin_helper/data/cert/cert_chains/ca-cert-key.pem +0 -27
- data/test/plugin_helper/data/cert/cert_chains/ca-cert.pem +0 -20
- data/test/plugin_helper/data/cert/cert_chains/cert-key.pem +0 -27
- data/test/plugin_helper/data/cert/cert_chains/cert.pem +0 -40
- data/test/plugin_helper/data/cert/empty.pem +0 -0
- data/test/plugin_helper/data/cert/generate_cert.rb +0 -125
- data/test/plugin_helper/data/cert/with_ca/ca-cert-key-pass.pem +0 -30
- data/test/plugin_helper/data/cert/with_ca/ca-cert-key.pem +0 -27
- data/test/plugin_helper/data/cert/with_ca/ca-cert-pass.pem +0 -20
- data/test/plugin_helper/data/cert/with_ca/ca-cert.pem +0 -20
- data/test/plugin_helper/data/cert/with_ca/cert-key-pass.pem +0 -30
- data/test/plugin_helper/data/cert/with_ca/cert-key.pem +0 -27
- data/test/plugin_helper/data/cert/with_ca/cert-pass.pem +0 -21
- data/test/plugin_helper/data/cert/with_ca/cert.pem +0 -21
- data/test/plugin_helper/data/cert/without_ca/cert-key-pass.pem +0 -30
- data/test/plugin_helper/data/cert/without_ca/cert-key.pem +0 -27
- data/test/plugin_helper/data/cert/without_ca/cert-pass.pem +0 -20
- data/test/plugin_helper/data/cert/without_ca/cert.pem +0 -20
- data/test/plugin_helper/http_server/test_app.rb +0 -65
- data/test/plugin_helper/http_server/test_route.rb +0 -32
- data/test/plugin_helper/service_discovery/test_manager.rb +0 -93
- data/test/plugin_helper/service_discovery/test_round_robin_balancer.rb +0 -21
- data/test/plugin_helper/test_cert_option.rb +0 -25
- data/test/plugin_helper/test_child_process.rb +0 -862
- data/test/plugin_helper/test_compat_parameters.rb +0 -358
- data/test/plugin_helper/test_event_emitter.rb +0 -80
- data/test/plugin_helper/test_event_loop.rb +0 -52
- data/test/plugin_helper/test_extract.rb +0 -194
- data/test/plugin_helper/test_formatter.rb +0 -255
- data/test/plugin_helper/test_http_server_helper.rb +0 -372
- data/test/plugin_helper/test_inject.rb +0 -561
- data/test/plugin_helper/test_metrics.rb +0 -137
- data/test/plugin_helper/test_parser.rb +0 -264
- data/test/plugin_helper/test_record_accessor.rb +0 -238
- data/test/plugin_helper/test_retry_state.rb +0 -1006
- data/test/plugin_helper/test_server.rb +0 -1895
- data/test/plugin_helper/test_service_discovery.rb +0 -165
- data/test/plugin_helper/test_socket.rb +0 -146
- data/test/plugin_helper/test_storage.rb +0 -542
- data/test/plugin_helper/test_thread.rb +0 -164
- data/test/plugin_helper/test_timer.rb +0 -130
- data/test/scripts/exec_script.rb +0 -32
- data/test/scripts/fluent/plugin/formatter1/formatter_test1.rb +0 -7
- data/test/scripts/fluent/plugin/formatter2/formatter_test2.rb +0 -7
- data/test/scripts/fluent/plugin/formatter_known.rb +0 -8
- data/test/scripts/fluent/plugin/out_test.rb +0 -81
- data/test/scripts/fluent/plugin/out_test2.rb +0 -80
- data/test/scripts/fluent/plugin/parser_known.rb +0 -4
- data/test/test_capability.rb +0 -74
- data/test/test_clock.rb +0 -164
- data/test/test_config.rb +0 -369
- data/test/test_configdsl.rb +0 -148
- data/test/test_daemonizer.rb +0 -91
- data/test/test_engine.rb +0 -203
- data/test/test_event.rb +0 -531
- data/test/test_event_router.rb +0 -348
- data/test/test_event_time.rb +0 -199
- data/test/test_file_wrapper.rb +0 -53
- data/test/test_filter.rb +0 -121
- data/test/test_fluent_log_event_router.rb +0 -99
- data/test/test_formatter.rb +0 -369
- data/test/test_input.rb +0 -31
- data/test/test_log.rb +0 -1076
- data/test/test_match.rb +0 -148
- data/test/test_mixin.rb +0 -351
- data/test/test_msgpack_factory.rb +0 -50
- data/test/test_oj_options.rb +0 -55
- data/test/test_output.rb +0 -278
- data/test/test_plugin.rb +0 -251
- data/test/test_plugin_classes.rb +0 -370
- data/test/test_plugin_helper.rb +0 -81
- data/test/test_plugin_id.rb +0 -119
- data/test/test_process.rb +0 -14
- data/test/test_root_agent.rb +0 -951
- data/test/test_static_config_analysis.rb +0 -177
- data/test/test_supervisor.rb +0 -821
- data/test/test_test_drivers.rb +0 -136
- data/test/test_time_formatter.rb +0 -301
- data/test/test_time_parser.rb +0 -362
- data/test/test_tls.rb +0 -65
- data/test/test_unique_id.rb +0 -47
- data/test/test_variable_store.rb +0 -65
@@ -1,871 +0,0 @@
|
|
1
|
-
require_relative '../helper'
|
2
|
-
require 'fluent/plugin/buffer/file_chunk'
|
3
|
-
require 'fluent/plugin/compressable'
|
4
|
-
require 'fluent/unique_id'
|
5
|
-
|
6
|
-
require 'fileutils'
|
7
|
-
require 'msgpack'
|
8
|
-
require 'time'
|
9
|
-
require 'timecop'
|
10
|
-
|
11
|
-
class BufferFileChunkTest < Test::Unit::TestCase
|
12
|
-
include Fluent::Plugin::Compressable
|
13
|
-
|
14
|
-
setup do
|
15
|
-
@klass = Fluent::Plugin::Buffer::FileChunk
|
16
|
-
@chunkdir = File.expand_path('../../tmp/buffer_file_chunk', __FILE__)
|
17
|
-
FileUtils.rm_r @chunkdir rescue nil
|
18
|
-
FileUtils.mkdir_p @chunkdir
|
19
|
-
end
|
20
|
-
teardown do
|
21
|
-
Timecop.return
|
22
|
-
end
|
23
|
-
|
24
|
-
Metadata = Fluent::Plugin::Buffer::Metadata
|
25
|
-
|
26
|
-
def gen_metadata(timekey: nil, tag: nil, variables: nil)
|
27
|
-
Metadata.new(timekey, tag, variables)
|
28
|
-
end
|
29
|
-
|
30
|
-
def read_metadata_file(path)
|
31
|
-
File.open(path, 'rb') do |f|
|
32
|
-
chunk = f.read
|
33
|
-
if chunk.size <= 6 # size of BUFFER_HEADER (2) + size of data(4)
|
34
|
-
return nil
|
35
|
-
end
|
36
|
-
|
37
|
-
data = nil
|
38
|
-
if chunk.slice(0, 2) == Fluent::Plugin::Buffer::FileChunk::BUFFER_HEADER
|
39
|
-
size = chunk.slice(2, 4).unpack('N').first
|
40
|
-
if size
|
41
|
-
data = MessagePack.unpack(chunk.slice(6, size), symbolize_keys: true)
|
42
|
-
end
|
43
|
-
else
|
44
|
-
# old type
|
45
|
-
data = MessagePack.unpack(chunk, symbolize_keys: true)
|
46
|
-
end
|
47
|
-
|
48
|
-
data
|
49
|
-
end
|
50
|
-
end
|
51
|
-
|
52
|
-
def gen_path(path)
|
53
|
-
File.join(@chunkdir, path)
|
54
|
-
end
|
55
|
-
|
56
|
-
def gen_test_chunk_id
|
57
|
-
require 'time'
|
58
|
-
now = Time.parse('2016-04-07 14:31:33 +0900')
|
59
|
-
u1 = ((now.to_i * 1000 * 1000 + now.usec) << 12 | 1725) # 1725 is one of `rand(0xfff)`
|
60
|
-
u3 = 2979763054 # one of rand(0xffffffff)
|
61
|
-
u4 = 438020492 # ditto
|
62
|
-
[u1 >> 32, u1 & 0xffffffff, u3, u4].pack('NNNN')
|
63
|
-
# unique_id.unpack('N*').map{|n| n.to_s(16)}.join => "52fde6425d7406bdb19b936e1a1ba98c"
|
64
|
-
end
|
65
|
-
|
66
|
-
def hex_id(id)
|
67
|
-
id.unpack('N*').map{|n| n.to_s(16)}.join
|
68
|
-
end
|
69
|
-
|
70
|
-
sub_test_case 'classmethods' do
|
71
|
-
data(
|
72
|
-
correct_staged: ['/mydir/mypath/myfile.b00ff.log', :staged],
|
73
|
-
correct_queued: ['/mydir/mypath/myfile.q00ff.log', :queued],
|
74
|
-
incorrect_staged: ['/mydir/mypath/myfile.b00ff.log/unknown', :unknown],
|
75
|
-
incorrect_queued: ['/mydir/mypath/myfile.q00ff.log/unknown', :unknown],
|
76
|
-
output_file: ['/mydir/mypath/myfile.20160716.log', :unknown],
|
77
|
-
)
|
78
|
-
test 'can .assume_chunk_state' do |data|
|
79
|
-
path, expected = data
|
80
|
-
assert_equal expected, @klass.assume_chunk_state(path)
|
81
|
-
end
|
82
|
-
|
83
|
-
test '.generate_stage_chunk_path generates path with staged mark & chunk unique_id' do
|
84
|
-
assert_equal gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), @klass.generate_stage_chunk_path(gen_path("mychunk.*.log"), gen_test_chunk_id)
|
85
|
-
assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
|
86
|
-
@klass.generate_stage_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
|
87
|
-
end
|
88
|
-
assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
|
89
|
-
@klass.generate_stage_chunk_path(gen_path("mychunk.*"), gen_test_chunk_id)
|
90
|
-
end
|
91
|
-
assert_raise RuntimeError.new("BUG: buffer chunk path on stage MUST have '.*.'") do
|
92
|
-
@klass.generate_stage_chunk_path(gen_path("*.log"), gen_test_chunk_id)
|
93
|
-
end
|
94
|
-
end
|
95
|
-
|
96
|
-
test '.generate_queued_chunk_path generates path with enqueued mark for staged chunk path' do
|
97
|
-
assert_equal(
|
98
|
-
gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"),
|
99
|
-
@klass.generate_queued_chunk_path(gen_path("mychunk.b52fde6425d7406bdb19b936e1a1ba98c.log"), gen_test_chunk_id)
|
100
|
-
)
|
101
|
-
end
|
102
|
-
|
103
|
-
test '.generate_queued_chunk_path generates special path with chunk unique_id for non staged chunk path' do
|
104
|
-
assert_equal(
|
105
|
-
gen_path("mychunk.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
|
106
|
-
@klass.generate_queued_chunk_path(gen_path("mychunk.log"), gen_test_chunk_id)
|
107
|
-
)
|
108
|
-
assert_equal(
|
109
|
-
gen_path("mychunk.q55555555555555555555555555555555.log.q52fde6425d7406bdb19b936e1a1ba98c.chunk"),
|
110
|
-
@klass.generate_queued_chunk_path(gen_path("mychunk.q55555555555555555555555555555555.log"), gen_test_chunk_id)
|
111
|
-
)
|
112
|
-
end
|
113
|
-
|
114
|
-
test '.unique_id_from_path recreates unique_id from file path to assume unique_id for v0.12 chunks' do
|
115
|
-
assert_equal gen_test_chunk_id, @klass.unique_id_from_path(gen_path("mychunk.q52fde6425d7406bdb19b936e1a1ba98c.log"))
|
116
|
-
end
|
117
|
-
end
|
118
|
-
|
119
|
-
sub_test_case 'newly created chunk' do
|
120
|
-
setup do
|
121
|
-
@chunk_path = File.join(@chunkdir, 'test.*.log')
|
122
|
-
@c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :create)
|
123
|
-
end
|
124
|
-
|
125
|
-
def gen_chunk_path(prefix, unique_id)
|
126
|
-
File.join(@chunkdir, "test.#{prefix}#{Fluent::UniqueId.hex(unique_id)}.log")
|
127
|
-
end
|
128
|
-
|
129
|
-
teardown do
|
130
|
-
if @c
|
131
|
-
@c.purge rescue nil
|
132
|
-
end
|
133
|
-
if File.exist? @chunk_path
|
134
|
-
File.unlink @chunk_path
|
135
|
-
end
|
136
|
-
end
|
137
|
-
|
138
|
-
test 'creates new files for chunk and metadata with specified path & permission' do
|
139
|
-
assert{ @c.unique_id.size == 16 }
|
140
|
-
assert_equal gen_chunk_path('b', @c.unique_id), @c.path
|
141
|
-
|
142
|
-
assert File.exist?(gen_chunk_path('b', @c.unique_id))
|
143
|
-
assert{ File.stat(gen_chunk_path('b', @c.unique_id)).mode.to_s(8).end_with?(Fluent::DEFAULT_FILE_PERMISSION.to_s(8)) }
|
144
|
-
|
145
|
-
assert File.exist?(gen_chunk_path('b', @c.unique_id) + '.meta')
|
146
|
-
assert{ File.stat(gen_chunk_path('b', @c.unique_id) + '.meta').mode.to_s(8).end_with?(Fluent::DEFAULT_FILE_PERMISSION.to_s(8)) }
|
147
|
-
|
148
|
-
assert_equal :unstaged, @c.state
|
149
|
-
assert @c.empty?
|
150
|
-
end
|
151
|
-
|
152
|
-
test 'can #append, #commit and #read it' do
|
153
|
-
assert @c.empty?
|
154
|
-
|
155
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
156
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
157
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
158
|
-
@c.append(data)
|
159
|
-
@c.commit
|
160
|
-
|
161
|
-
content = @c.read
|
162
|
-
ds = content.split("\n").select{|d| !d.empty? }
|
163
|
-
|
164
|
-
assert_equal 2, ds.size
|
165
|
-
assert_equal d1, JSON.parse(ds[0])
|
166
|
-
assert_equal d2, JSON.parse(ds[1])
|
167
|
-
|
168
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
169
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
170
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
171
|
-
@c.commit
|
172
|
-
|
173
|
-
content = @c.read
|
174
|
-
ds = content.split("\n").select{|d| !d.empty? }
|
175
|
-
|
176
|
-
assert_equal 4, ds.size
|
177
|
-
assert_equal d1, JSON.parse(ds[0])
|
178
|
-
assert_equal d2, JSON.parse(ds[1])
|
179
|
-
assert_equal d3, JSON.parse(ds[2])
|
180
|
-
assert_equal d4, JSON.parse(ds[3])
|
181
|
-
end
|
182
|
-
|
183
|
-
test 'can #concat, #commit and #read it' do
|
184
|
-
assert @c.empty?
|
185
|
-
|
186
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
187
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
188
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"].join
|
189
|
-
@c.concat(data, 2)
|
190
|
-
@c.commit
|
191
|
-
|
192
|
-
content = @c.read
|
193
|
-
ds = content.split("\n").select{|d| !d.empty? }
|
194
|
-
|
195
|
-
assert_equal 2, ds.size
|
196
|
-
assert_equal d1, JSON.parse(ds[0])
|
197
|
-
assert_equal d2, JSON.parse(ds[1])
|
198
|
-
|
199
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
200
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
201
|
-
@c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
|
202
|
-
@c.commit
|
203
|
-
|
204
|
-
content = @c.read
|
205
|
-
ds = content.split("\n").select{|d| !d.empty? }
|
206
|
-
|
207
|
-
assert_equal 4, ds.size
|
208
|
-
assert_equal d1, JSON.parse(ds[0])
|
209
|
-
assert_equal d2, JSON.parse(ds[1])
|
210
|
-
assert_equal d3, JSON.parse(ds[2])
|
211
|
-
assert_equal d4, JSON.parse(ds[3])
|
212
|
-
end
|
213
|
-
|
214
|
-
test 'has its contents in binary (ascii-8bit)' do
|
215
|
-
data1 = "aaa bbb ccc".force_encoding('utf-8')
|
216
|
-
@c.append([data1])
|
217
|
-
@c.commit
|
218
|
-
assert_equal Encoding::ASCII_8BIT, @c.instance_eval{ @chunk.external_encoding }
|
219
|
-
|
220
|
-
content = @c.read
|
221
|
-
assert_equal Encoding::ASCII_8BIT, content.encoding
|
222
|
-
end
|
223
|
-
|
224
|
-
test 'has #bytesize and #size' do
|
225
|
-
assert @c.empty?
|
226
|
-
|
227
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
228
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
229
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
230
|
-
@c.append(data)
|
231
|
-
|
232
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
|
233
|
-
assert_equal 2, @c.size
|
234
|
-
|
235
|
-
@c.commit
|
236
|
-
|
237
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
|
238
|
-
assert_equal 2, @c.size
|
239
|
-
|
240
|
-
first_bytesize = @c.bytesize
|
241
|
-
|
242
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
243
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
244
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
245
|
-
|
246
|
-
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
|
247
|
-
assert_equal 4, @c.size
|
248
|
-
|
249
|
-
@c.commit
|
250
|
-
|
251
|
-
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
|
252
|
-
assert_equal 4, @c.size
|
253
|
-
end
|
254
|
-
|
255
|
-
test 'can #rollback to revert non-committed data' do
|
256
|
-
assert @c.empty?
|
257
|
-
|
258
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
259
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
260
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
261
|
-
@c.append(data)
|
262
|
-
|
263
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
|
264
|
-
assert_equal 2, @c.size
|
265
|
-
|
266
|
-
@c.rollback
|
267
|
-
|
268
|
-
assert @c.empty?
|
269
|
-
|
270
|
-
assert_equal '', File.open(@c.path, 'rb'){|f| f.read }
|
271
|
-
|
272
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
273
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
274
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
275
|
-
@c.append(data)
|
276
|
-
@c.commit
|
277
|
-
|
278
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
|
279
|
-
assert_equal 2, @c.size
|
280
|
-
|
281
|
-
first_bytesize = @c.bytesize
|
282
|
-
|
283
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
284
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
285
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
286
|
-
|
287
|
-
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
|
288
|
-
assert_equal 4, @c.size
|
289
|
-
|
290
|
-
@c.rollback
|
291
|
-
|
292
|
-
assert_equal first_bytesize, @c.bytesize
|
293
|
-
assert_equal 2, @c.size
|
294
|
-
|
295
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.open(@c.path, 'rb'){|f| f.read }
|
296
|
-
end
|
297
|
-
|
298
|
-
test 'can #rollback to revert non-committed data from #concat' do
|
299
|
-
assert @c.empty?
|
300
|
-
|
301
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
302
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
303
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"].join
|
304
|
-
@c.concat(data, 2)
|
305
|
-
|
306
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
|
307
|
-
assert_equal 2, @c.size
|
308
|
-
|
309
|
-
@c.rollback
|
310
|
-
|
311
|
-
assert @c.empty?
|
312
|
-
|
313
|
-
assert_equal '', File.open(@c.path, 'rb'){|f| f.read }
|
314
|
-
|
315
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
316
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
317
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
318
|
-
@c.append(data)
|
319
|
-
@c.commit
|
320
|
-
|
321
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n").bytesize, @c.bytesize
|
322
|
-
assert_equal 2, @c.size
|
323
|
-
|
324
|
-
first_bytesize = @c.bytesize
|
325
|
-
|
326
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
327
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
328
|
-
@c.concat([d3.to_json + "\n", d4.to_json + "\n"].join, 2)
|
329
|
-
|
330
|
-
assert_equal first_bytesize + (d3.to_json + "\n" + d4.to_json + "\n").bytesize, @c.bytesize
|
331
|
-
assert_equal 4, @c.size
|
332
|
-
|
333
|
-
@c.rollback
|
334
|
-
|
335
|
-
assert_equal first_bytesize, @c.bytesize
|
336
|
-
assert_equal 2, @c.size
|
337
|
-
|
338
|
-
assert_equal (d1.to_json + "\n" + d2.to_json + "\n"), File.open(@c.path, 'rb'){|f| f.read }
|
339
|
-
end
|
340
|
-
|
341
|
-
test 'can store its data by #close' do
|
342
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
343
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
344
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
345
|
-
@c.append(data)
|
346
|
-
@c.commit
|
347
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
348
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
349
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
350
|
-
@c.commit
|
351
|
-
|
352
|
-
content = @c.read
|
353
|
-
|
354
|
-
unique_id = @c.unique_id
|
355
|
-
size = @c.size
|
356
|
-
created_at = @c.created_at
|
357
|
-
modified_at = @c.modified_at
|
358
|
-
|
359
|
-
@c.close
|
360
|
-
|
361
|
-
assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
|
362
|
-
|
363
|
-
stored_meta = {
|
364
|
-
timekey: nil, tag: nil, variables: nil, seq: 0,
|
365
|
-
id: unique_id,
|
366
|
-
s: size,
|
367
|
-
c: created_at.to_i,
|
368
|
-
m: modified_at.to_i,
|
369
|
-
}
|
370
|
-
|
371
|
-
assert_equal stored_meta, read_metadata_file(@c.path + '.meta')
|
372
|
-
end
|
373
|
-
|
374
|
-
test 'deletes all data by #purge' do
|
375
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
376
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
377
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
378
|
-
@c.append(data)
|
379
|
-
@c.commit
|
380
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
381
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
382
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
383
|
-
@c.commit
|
384
|
-
|
385
|
-
@c.purge
|
386
|
-
|
387
|
-
assert @c.empty?
|
388
|
-
assert_equal 0, @c.bytesize
|
389
|
-
assert_equal 0, @c.size
|
390
|
-
|
391
|
-
assert !File.exist?(@c.path)
|
392
|
-
assert !File.exist?(@c.path + '.meta')
|
393
|
-
end
|
394
|
-
|
395
|
-
test 'can #open its contents as io' do
|
396
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
397
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
398
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
399
|
-
@c.append(data)
|
400
|
-
@c.commit
|
401
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
402
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
403
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
404
|
-
@c.commit
|
405
|
-
|
406
|
-
lines = []
|
407
|
-
@c.open do |io|
|
408
|
-
assert io
|
409
|
-
io.readlines.each do |l|
|
410
|
-
lines << l
|
411
|
-
end
|
412
|
-
end
|
413
|
-
|
414
|
-
assert_equal d1.to_json + "\n", lines[0]
|
415
|
-
assert_equal d2.to_json + "\n", lines[1]
|
416
|
-
assert_equal d3.to_json + "\n", lines[2]
|
417
|
-
assert_equal d4.to_json + "\n", lines[3]
|
418
|
-
end
|
419
|
-
|
420
|
-
test '#write_metadata tries to store metadata on file' do
|
421
|
-
d1 = {"f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
422
|
-
d2 = {"f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
423
|
-
data = [d1.to_json + "\n", d2.to_json + "\n"]
|
424
|
-
@c.append(data)
|
425
|
-
@c.commit
|
426
|
-
|
427
|
-
expected = {
|
428
|
-
timekey: nil, tag: nil, variables: nil, seq: 0,
|
429
|
-
id: @c.unique_id,
|
430
|
-
s: @c.size,
|
431
|
-
c: @c.created_at.to_i,
|
432
|
-
m: @c.modified_at.to_i,
|
433
|
-
}
|
434
|
-
assert_equal expected, read_metadata_file(@c.path + '.meta')
|
435
|
-
|
436
|
-
d3 = {"f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
437
|
-
d4 = {"f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
438
|
-
@c.append([d3.to_json + "\n", d4.to_json + "\n"])
|
439
|
-
# append does write_metadata
|
440
|
-
|
441
|
-
dummy_now = Time.parse('2016-04-07 16:59:59 +0900')
|
442
|
-
Timecop.freeze(dummy_now)
|
443
|
-
@c.write_metadata
|
444
|
-
|
445
|
-
expected = {
|
446
|
-
timekey: nil, tag: nil, variables: nil, seq: 0,
|
447
|
-
id: @c.unique_id,
|
448
|
-
s: @c.size,
|
449
|
-
c: @c.created_at.to_i,
|
450
|
-
m: dummy_now.to_i,
|
451
|
-
}
|
452
|
-
assert_equal expected, read_metadata_file(@c.path + '.meta')
|
453
|
-
|
454
|
-
@c.commit
|
455
|
-
|
456
|
-
expected = {
|
457
|
-
timekey: nil, tag: nil, variables: nil, seq: 0,
|
458
|
-
id: @c.unique_id,
|
459
|
-
s: @c.size,
|
460
|
-
c: @c.created_at.to_i,
|
461
|
-
m: @c.modified_at.to_i,
|
462
|
-
}
|
463
|
-
assert_equal expected, read_metadata_file(@c.path + '.meta')
|
464
|
-
|
465
|
-
content = @c.read
|
466
|
-
|
467
|
-
unique_id = @c.unique_id
|
468
|
-
size = @c.size
|
469
|
-
created_at = @c.created_at
|
470
|
-
modified_at = @c.modified_at
|
471
|
-
|
472
|
-
@c.close
|
473
|
-
|
474
|
-
assert_equal content, File.open(@c.path, 'rb'){|f| f.read }
|
475
|
-
|
476
|
-
stored_meta = {
|
477
|
-
timekey: nil, tag: nil, variables: nil, seq: 0,
|
478
|
-
id: unique_id,
|
479
|
-
s: size,
|
480
|
-
c: created_at.to_i,
|
481
|
-
m: modified_at.to_i,
|
482
|
-
}
|
483
|
-
|
484
|
-
assert_equal stored_meta, read_metadata_file(@c.path + '.meta')
|
485
|
-
end
|
486
|
-
end
|
487
|
-
|
488
|
-
test 'ensure to remove metadata file if #write_metadata raise an error because of disk full' do
|
489
|
-
chunk_path = File.join(@chunkdir, 'test.*.log')
|
490
|
-
stub(Fluent::UniqueId).hex(anything) { 'id' } # to fix chunk id
|
491
|
-
|
492
|
-
any_instance_of(Fluent::Plugin::Buffer::FileChunk) do |klass|
|
493
|
-
stub(klass).write_metadata(anything) do |v|
|
494
|
-
raise 'disk full'
|
495
|
-
end
|
496
|
-
end
|
497
|
-
|
498
|
-
err = assert_raise(Fluent::Plugin::Buffer::BufferOverflowError) do
|
499
|
-
Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, chunk_path, :create)
|
500
|
-
end
|
501
|
-
|
502
|
-
assert_false File.exist?(File.join(@chunkdir, 'test.bid.log.meta'))
|
503
|
-
assert_match(/create buffer metadata/, err.message)
|
504
|
-
end
|
505
|
-
|
506
|
-
sub_test_case 'chunk with file for staged chunk' do
|
507
|
-
setup do
|
508
|
-
@chunk_id = gen_test_chunk_id
|
509
|
-
@chunk_path = File.join(@chunkdir, "test_staged.b#{hex_id(@chunk_id)}.log")
|
510
|
-
@enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")
|
511
|
-
|
512
|
-
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
513
|
-
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
514
|
-
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
515
|
-
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
516
|
-
@d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
|
517
|
-
File.open(@chunk_path, 'wb') do |f|
|
518
|
-
f.write @d
|
519
|
-
end
|
520
|
-
|
521
|
-
@metadata = {
|
522
|
-
timekey: nil, tag: 'testing', variables: {k: "x"}, seq: 0,
|
523
|
-
id: @chunk_id,
|
524
|
-
s: 4,
|
525
|
-
c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
|
526
|
-
m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
|
527
|
-
}
|
528
|
-
File.open(@chunk_path + '.meta', 'wb') do |f|
|
529
|
-
f.write @metadata.to_msgpack
|
530
|
-
end
|
531
|
-
|
532
|
-
@c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
|
533
|
-
end
|
534
|
-
|
535
|
-
teardown do
|
536
|
-
if @c
|
537
|
-
@c.purge rescue nil
|
538
|
-
end
|
539
|
-
[@chunk_path, @chunk_path + '.meta', @enqueued_path, @enqueued_path + '.meta'].each do |path|
|
540
|
-
File.unlink path if File.exist? path
|
541
|
-
end
|
542
|
-
end
|
543
|
-
|
544
|
-
test 'can load as staged chunk from file with metadata' do
|
545
|
-
assert_equal @chunk_path, @c.path
|
546
|
-
assert_equal :staged, @c.state
|
547
|
-
|
548
|
-
assert_nil @c.metadata.timekey
|
549
|
-
assert_equal 'testing', @c.metadata.tag
|
550
|
-
assert_equal({k: "x"}, @c.metadata.variables)
|
551
|
-
|
552
|
-
assert_equal 4, @c.size
|
553
|
-
assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
|
554
|
-
assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at
|
555
|
-
|
556
|
-
content = @c.read
|
557
|
-
assert_equal @d, content
|
558
|
-
end
|
559
|
-
|
560
|
-
test 'can be enqueued' do
|
561
|
-
stage_path = @c.path
|
562
|
-
queue_path = @enqueued_path
|
563
|
-
assert File.exist?(stage_path)
|
564
|
-
assert File.exist?(stage_path + '.meta')
|
565
|
-
assert !File.exist?(queue_path)
|
566
|
-
assert !File.exist?(queue_path + '.meta')
|
567
|
-
|
568
|
-
@c.enqueued!
|
569
|
-
|
570
|
-
assert_equal queue_path, @c.path
|
571
|
-
|
572
|
-
assert !File.exist?(stage_path)
|
573
|
-
assert !File.exist?(stage_path + '.meta')
|
574
|
-
assert File.exist?(queue_path)
|
575
|
-
assert File.exist?(queue_path + '.meta')
|
576
|
-
|
577
|
-
assert_nil @c.metadata.timekey
|
578
|
-
assert_equal 'testing', @c.metadata.tag
|
579
|
-
assert_equal({k: "x"}, @c.metadata.variables)
|
580
|
-
|
581
|
-
assert_equal 4, @c.size
|
582
|
-
assert_equal Time.parse('2016-04-07 17:44:00 +0900'), @c.created_at
|
583
|
-
assert_equal Time.parse('2016-04-07 17:44:13 +0900'), @c.modified_at
|
584
|
-
|
585
|
-
assert_equal @d, File.open(@c.path, 'rb'){|f| f.read }
|
586
|
-
assert_equal @metadata, read_metadata_file(@c.path + '.meta')
|
587
|
-
end
|
588
|
-
|
589
|
-
test '#write_metadata tries to store metadata on file with non-committed data' do
|
590
|
-
d5 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
591
|
-
d5s = d5.to_json + "\n"
|
592
|
-
@c.append([d5s])
|
593
|
-
|
594
|
-
metadata = {
|
595
|
-
timekey: nil, tag: 'testing', variables: {k: "x"}, seq: 0,
|
596
|
-
id: @chunk_id,
|
597
|
-
s: 4,
|
598
|
-
c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
|
599
|
-
m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
|
600
|
-
}
|
601
|
-
assert_equal metadata, read_metadata_file(@c.path + '.meta')
|
602
|
-
|
603
|
-
@c.write_metadata
|
604
|
-
|
605
|
-
metadata = {
|
606
|
-
timekey: nil, tag: 'testing', variables: {k: "x"}, seq: 0,
|
607
|
-
id: @chunk_id,
|
608
|
-
s: 5,
|
609
|
-
c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
|
610
|
-
m: Time.parse('2016-04-07 17:44:38 +0900').to_i,
|
611
|
-
}
|
612
|
-
|
613
|
-
dummy_now = Time.parse('2016-04-07 17:44:38 +0900')
|
614
|
-
Timecop.freeze(dummy_now)
|
615
|
-
@c.write_metadata
|
616
|
-
|
617
|
-
assert_equal metadata, read_metadata_file(@c.path + '.meta')
|
618
|
-
end
|
619
|
-
|
620
|
-
test '#file_rename can rename chunk files even in windows, and call callback with file size' do
|
621
|
-
data = "aaaaaaaaaaaaaaaaaaaaabbbbbbbbbbbbbbbbbbbbbbbbbccccccccccccccccccccccccccccc"
|
622
|
-
|
623
|
-
testing_file1 = gen_path('rename1.test')
|
624
|
-
testing_file2 = gen_path('rename2.test')
|
625
|
-
f = File.open(testing_file1, 'wb', @c.permission)
|
626
|
-
f.set_encoding(Encoding::ASCII_8BIT)
|
627
|
-
f.sync = true
|
628
|
-
f.binmode
|
629
|
-
f.write data
|
630
|
-
pos = f.pos
|
631
|
-
|
632
|
-
assert f.binmode?
|
633
|
-
assert f.sync
|
634
|
-
assert_equal data.bytesize, f.size
|
635
|
-
|
636
|
-
io = nil
|
637
|
-
@c.file_rename(f, testing_file1, testing_file2, ->(new_io){ io = new_io })
|
638
|
-
assert io
|
639
|
-
if Fluent.windows?
|
640
|
-
assert{ f != io }
|
641
|
-
else
|
642
|
-
assert_equal f, io
|
643
|
-
end
|
644
|
-
assert_equal Encoding::ASCII_8BIT, io.external_encoding
|
645
|
-
assert io.sync
|
646
|
-
assert io.binmode?
|
647
|
-
assert_equal data.bytesize, io.size
|
648
|
-
|
649
|
-
assert_equal pos, io.pos
|
650
|
-
|
651
|
-
assert_equal '', io.read
|
652
|
-
|
653
|
-
io.rewind
|
654
|
-
assert_equal data, io.read
|
655
|
-
end
|
656
|
-
end
|
657
|
-
|
658
|
-
sub_test_case 'chunk with file for enqueued chunk' do
|
659
|
-
setup do
|
660
|
-
@chunk_id = gen_test_chunk_id
|
661
|
-
@enqueued_path = File.join(@chunkdir, "test_staged.q#{hex_id(@chunk_id)}.log")
|
662
|
-
|
663
|
-
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
664
|
-
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
665
|
-
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
666
|
-
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
667
|
-
@d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
|
668
|
-
File.open(@enqueued_path, 'wb') do |f|
|
669
|
-
f.write @d
|
670
|
-
end
|
671
|
-
|
672
|
-
@dummy_timekey = Time.parse('2016-04-07 17:40:00 +0900').to_i
|
673
|
-
|
674
|
-
@metadata = {
|
675
|
-
timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"}, seq: 0,
|
676
|
-
id: @chunk_id,
|
677
|
-
s: 4,
|
678
|
-
c: Time.parse('2016-04-07 17:44:00 +0900').to_i,
|
679
|
-
m: Time.parse('2016-04-07 17:44:13 +0900').to_i,
|
680
|
-
}
|
681
|
-
File.open(@enqueued_path + '.meta', 'wb') do |f|
|
682
|
-
f.write @metadata.to_msgpack
|
683
|
-
end
|
684
|
-
|
685
|
-
@c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @enqueued_path, :queued)
|
686
|
-
end
|
687
|
-
|
688
|
-
teardown do
|
689
|
-
if @c
|
690
|
-
@c.purge rescue nil
|
691
|
-
end
|
692
|
-
[@enqueued_path, @enqueued_path + '.meta'].each do |path|
|
693
|
-
File.unlink path if File.exist? path
|
694
|
-
end
|
695
|
-
end
|
696
|
-
|
697
|
-
test 'can load as queued chunk (read only) with metadata' do
|
698
|
-
assert @c
|
699
|
-
assert_equal @chunk_id, @c.unique_id
|
700
|
-
assert_equal :queued, @c.state
|
701
|
-
assert_equal gen_metadata(timekey: @dummy_timekey, tag: 'testing', variables: {k: "x"}), @c.metadata
|
702
|
-
assert_equal Time.at(@metadata[:c]), @c.created_at
|
703
|
-
assert_equal Time.at(@metadata[:m]), @c.modified_at
|
704
|
-
assert_equal @metadata[:s], @c.size
|
705
|
-
assert_equal @d.bytesize, @c.bytesize
|
706
|
-
assert_equal @d, @c.read
|
707
|
-
|
708
|
-
assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
|
709
|
-
@c.append(["queued chunk is read only"])
|
710
|
-
end
|
711
|
-
assert_raise IOError do
|
712
|
-
@c.instance_eval{ @chunk }.write "chunk io is opened as read only"
|
713
|
-
end
|
714
|
-
end
|
715
|
-
end
|
716
|
-
|
717
|
-
sub_test_case 'chunk with queued chunk file of v0.12, without metadata' do
|
718
|
-
setup do
|
719
|
-
@chunk_id = gen_test_chunk_id
|
720
|
-
@chunk_path = File.join(@chunkdir, "test_v12.2016040811.q#{hex_id(@chunk_id)}.log")
|
721
|
-
|
722
|
-
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
723
|
-
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
724
|
-
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
725
|
-
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
726
|
-
@d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
|
727
|
-
File.open(@chunk_path, 'wb') do |f|
|
728
|
-
f.write @d
|
729
|
-
end
|
730
|
-
|
731
|
-
@c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :queued)
|
732
|
-
end
|
733
|
-
|
734
|
-
teardown do
|
735
|
-
if @c
|
736
|
-
@c.purge rescue nil
|
737
|
-
end
|
738
|
-
File.unlink @chunk_path if File.exist? @chunk_path
|
739
|
-
end
|
740
|
-
|
741
|
-
test 'can load as queued chunk from file without metadata' do
|
742
|
-
assert @c
|
743
|
-
assert_equal :queued, @c.state
|
744
|
-
assert_equal @chunk_id, @c.unique_id
|
745
|
-
assert_equal gen_metadata, @c.metadata
|
746
|
-
assert_equal @d.bytesize, @c.bytesize
|
747
|
-
assert_equal 0, @c.size
|
748
|
-
assert_equal @d, @c.read
|
749
|
-
|
750
|
-
assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
|
751
|
-
@c.append(["queued chunk is read only"])
|
752
|
-
end
|
753
|
-
assert_raise IOError do
|
754
|
-
@c.instance_eval{ @chunk }.write "chunk io is opened as read only"
|
755
|
-
end
|
756
|
-
end
|
757
|
-
end
|
758
|
-
|
759
|
-
sub_test_case 'chunk with staged chunk file of v0.12, without metadata' do
|
760
|
-
setup do
|
761
|
-
@chunk_id = gen_test_chunk_id
|
762
|
-
@chunk_path = File.join(@chunkdir, "test_v12.2016040811.b#{hex_id(@chunk_id)}.log")
|
763
|
-
|
764
|
-
@d1 = {"k" => "x", "f1" => 'v1', "f2" => 'v2', "f3" => 'v3'}
|
765
|
-
@d2 = {"k" => "x", "f1" => 'vv1', "f2" => 'vv2', "f3" => 'vv3'}
|
766
|
-
@d3 = {"k" => "x", "f1" => 'x', "f2" => 'y', "f3" => 'z'}
|
767
|
-
@d4 = {"k" => "x", "f1" => 'a', "f2" => 'b', "f3" => 'c'}
|
768
|
-
@d = [@d1,@d2,@d3,@d4].map{|d| d.to_json + "\n" }.join
|
769
|
-
File.open(@chunk_path, 'wb') do |f|
|
770
|
-
f.write @d
|
771
|
-
end
|
772
|
-
|
773
|
-
@c = Fluent::Plugin::Buffer::FileChunk.new(gen_metadata, @chunk_path, :staged)
|
774
|
-
end
|
775
|
-
|
776
|
-
teardown do
|
777
|
-
if @c
|
778
|
-
@c.purge rescue nil
|
779
|
-
end
|
780
|
-
File.unlink @chunk_path if File.exist? @chunk_path
|
781
|
-
end
|
782
|
-
|
783
|
-
test 'can load as queued chunk from file without metadata even if it was loaded as staged chunk' do
|
784
|
-
assert @c
|
785
|
-
assert_equal :queued, @c.state
|
786
|
-
assert_equal @chunk_id, @c.unique_id
|
787
|
-
assert_equal gen_metadata, @c.metadata
|
788
|
-
assert_equal @d.bytesize, @c.bytesize
|
789
|
-
assert_equal 0, @c.size
|
790
|
-
assert_equal @d, @c.read
|
791
|
-
|
792
|
-
assert_raise RuntimeError.new("BUG: concatenating to unwritable chunk, now 'queued'") do
|
793
|
-
@c.append(["queued chunk is read only"])
|
794
|
-
end
|
795
|
-
assert_raise IOError do
|
796
|
-
@c.instance_eval{ @chunk }.write "chunk io is opened as read only"
|
797
|
-
end
|
798
|
-
end
|
799
|
-
end
|
800
|
-
|
801
|
-
sub_test_case 'compressed buffer' do
|
802
|
-
setup do
|
803
|
-
@src = 'text data for compressing' * 5
|
804
|
-
@gzipped_src = compress(@src)
|
805
|
-
end
|
806
|
-
|
807
|
-
test '#append with compress option writes compressed data to chunk when compress is gzip' do
|
808
|
-
c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
|
809
|
-
c.append([@src, @src], compress: :gzip)
|
810
|
-
c.commit
|
811
|
-
|
812
|
-
# check chunk is compressed
|
813
|
-
assert c.read(compressed: :gzip).size < [@src, @src].join("").size
|
814
|
-
|
815
|
-
assert_equal @src + @src, c.read
|
816
|
-
end
|
817
|
-
|
818
|
-
test '#open passes io object having decompressed data to a block when compress is gzip' do
|
819
|
-
c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
|
820
|
-
c.concat(@gzipped_src, @src.size)
|
821
|
-
c.commit
|
822
|
-
|
823
|
-
decomressed_data = c.open do |io|
|
824
|
-
v = io.read
|
825
|
-
assert_equal @src, v
|
826
|
-
v
|
827
|
-
end
|
828
|
-
assert_equal @src, decomressed_data
|
829
|
-
end
|
830
|
-
|
831
|
-
test '#open with compressed option passes io object having decompressed data to a block when compress is gzip' do
|
832
|
-
c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
|
833
|
-
c.concat(@gzipped_src, @src.size)
|
834
|
-
c.commit
|
835
|
-
|
836
|
-
comressed_data = c.open(compressed: :gzip) do |io|
|
837
|
-
v = io.read
|
838
|
-
assert_equal @gzipped_src, v
|
839
|
-
v
|
840
|
-
end
|
841
|
-
assert_equal @gzipped_src, comressed_data
|
842
|
-
end
|
843
|
-
|
844
|
-
test '#write_to writes decompressed data when compress is gzip' do
|
845
|
-
c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
|
846
|
-
c.concat(@gzipped_src, @src.size)
|
847
|
-
c.commit
|
848
|
-
|
849
|
-
assert_equal @src, c.read
|
850
|
-
assert_equal @gzipped_src, c.read(compressed: :gzip)
|
851
|
-
|
852
|
-
io = StringIO.new
|
853
|
-
c.write_to(io)
|
854
|
-
assert_equal @src, io.string
|
855
|
-
end
|
856
|
-
|
857
|
-
test '#write_to with compressed option writes compressed data when compress is gzip' do
|
858
|
-
c = @klass.new(gen_metadata, File.join(@chunkdir,'test.*.log'), :create, compress: :gzip)
|
859
|
-
c.concat(@gzipped_src, @src.size)
|
860
|
-
c.commit
|
861
|
-
|
862
|
-
assert_equal @src, c.read
|
863
|
-
assert_equal @gzipped_src, c.read(compressed: :gzip)
|
864
|
-
|
865
|
-
io = StringIO.new
|
866
|
-
io.set_encoding(Encoding::ASCII_8BIT)
|
867
|
-
c.write_to(io, compressed: :gzip)
|
868
|
-
assert_equal @gzipped_src, io.string
|
869
|
-
end
|
870
|
-
end
|
871
|
-
end
|