fluentd 0.12.0.pre.1 → 0.12.0.pre.2
- checksums.yaml +4 -4
- data/.gitignore +1 -1
- data/.travis.yml +1 -0
- data/ChangeLog +21 -0
- data/README.md +10 -2
- data/Rakefile +4 -13
- data/example/v1_literal_example.conf +36 -0
- data/fluentd.gemspec +4 -1
- data/lib/fluent/buffer.rb +73 -46
- data/lib/fluent/command/fluentd.rb +7 -2
- data/lib/fluent/config/basic_parser.rb +5 -0
- data/lib/fluent/config/element.rb +2 -5
- data/lib/fluent/config/literal_parser.rb +26 -7
- data/lib/fluent/config/section.rb +2 -0
- data/lib/fluent/config/v1_parser.rb +9 -2
- data/lib/fluent/formatter.rb +2 -1
- data/lib/fluent/mixin.rb +22 -7
- data/lib/fluent/output.rb +17 -8
- data/lib/fluent/parser.rb +14 -3
- data/lib/fluent/plugin/buf_file.rb +30 -15
- data/lib/fluent/plugin/filter_grep.rb +69 -0
- data/lib/fluent/plugin/filter_record_transformer.rb +183 -0
- data/lib/fluent/plugin/in_exec.rb +6 -0
- data/lib/fluent/plugin/in_forward.rb +34 -4
- data/lib/fluent/plugin/in_http.rb +1 -1
- data/lib/fluent/plugin/out_exec.rb +1 -1
- data/lib/fluent/plugin/out_exec_filter.rb +8 -1
- data/lib/fluent/plugin/out_forward.rb +82 -4
- data/lib/fluent/supervisor.rb +1 -1
- data/lib/fluent/timezone.rb +131 -0
- data/lib/fluent/version.rb +1 -1
- data/test/config/assertions.rb +42 -0
- data/test/config/test_config_parser.rb +385 -0
- data/test/config/test_configurable.rb +530 -0
- data/test/config/test_configure_proxy.rb +99 -0
- data/test/config/test_dsl.rb +237 -0
- data/test/config/test_literal_parser.rb +293 -0
- data/test/config/test_section.rb +112 -0
- data/test/config/test_system_config.rb +49 -0
- data/test/helper.rb +25 -0
- data/test/plugin/test_buf_file.rb +604 -0
- data/test/plugin/test_buf_memory.rb +204 -0
- data/test/plugin/test_filter_grep.rb +124 -0
- data/test/plugin/test_filter_record_transformer.rb +251 -0
- data/test/plugin/test_in_exec.rb +1 -0
- data/test/plugin/test_in_forward.rb +205 -2
- data/test/plugin/test_in_gc_stat.rb +1 -0
- data/test/plugin/test_in_http.rb +58 -2
- data/test/plugin/test_in_object_space.rb +1 -0
- data/test/plugin/test_in_status.rb +1 -0
- data/test/plugin/test_in_stream.rb +1 -1
- data/test/plugin/test_in_syslog.rb +1 -1
- data/test/plugin/test_in_tail.rb +1 -0
- data/test/plugin/test_in_tcp.rb +1 -1
- data/test/plugin/test_in_udp.rb +1 -1
- data/test/plugin/test_out_copy.rb +1 -0
- data/test/plugin/test_out_exec.rb +1 -0
- data/test/plugin/test_out_exec_filter.rb +1 -0
- data/test/plugin/test_out_file.rb +36 -0
- data/test/plugin/test_out_forward.rb +279 -8
- data/test/plugin/test_out_roundrobin.rb +1 -0
- data/test/plugin/test_out_stdout.rb +1 -0
- data/test/plugin/test_out_stream.rb +1 -1
- data/test/test_buffer.rb +530 -0
- data/test/test_config.rb +1 -1
- data/test/test_configdsl.rb +1 -1
- data/test/test_formatter.rb +223 -0
- data/test/test_match.rb +1 -2
- data/test/test_mixin.rb +74 -2
- data/test/test_parser.rb +7 -1
- metadata +88 -35
- data/lib/fluent/plugin/buf_zfile.rb +0 -75
- data/spec/config/config_parser_spec.rb +0 -314
- data/spec/config/configurable_spec.rb +0 -524
- data/spec/config/configure_proxy_spec.rb +0 -96
- data/spec/config/dsl_spec.rb +0 -239
- data/spec/config/helper.rb +0 -49
- data/spec/config/literal_parser_spec.rb +0 -222
- data/spec/config/section_spec.rb +0 -97
- data/spec/config/system_config_spec.rb +0 -49
- data/spec/spec_helper.rb +0 -60
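The deleted files below show that this release drops the RSpec suite under data/spec/ in favour of test-unit style tests under data/test/ (see test/config/test_configure_proxy.rb and test/config/assertions.rb in the list above). As a rough illustration of that migration — a hypothetical sketch, not the actual contents of the new test files — an expectation from the deleted configure_proxy_spec.rb might be rewritten along these lines:

require 'test/unit'
require 'fluent/config/configure_proxy'

# Hypothetical migrated test case; the real test/config/test_configure_proxy.rb
# shipped in 0.12.0.pre.2 may be organised differently.
class ConfigureProxyTest < Test::Unit::TestCase
  def test_initialize_has_default_values
    proxy = Fluent::Config::ConfigureProxy.new(:section)

    # RSpec: expect(proxy.name).to eql(:section)
    assert_equal(:section, proxy.name)
    assert_equal(:section, proxy.param_name)

    # RSpec: expect(proxy.required).to be_nil / expect(proxy.required?).to be false
    assert_nil(proxy.required)
    assert_false(proxy.required?)

    # RSpec: expect(proxy.multi).to be_nil / expect(proxy.multi?).to be true
    assert_nil(proxy.multi)
    assert_true(proxy.multi?)
  end
end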
data/spec/config/configure_proxy_spec.rb
DELETED
@@ -1,96 +0,0 @@
-require 'fluent/config/configure_proxy'
-
-describe Fluent::Config::ConfigureProxy do
-  context 'to generate a instance' do
-    describe '#initialize' do
-      it 'has default values' do
-        proxy = Fluent::Config::ConfigureProxy.new('section')
-        expect(proxy.name).to eql(:section)
-
-        proxy = Fluent::Config::ConfigureProxy.new(:section)
-        expect(proxy.name).to eql(:section)
-        expect(proxy.param_name).to eql(:section)
-        expect(proxy.required).to be_nil
-        expect(proxy.required?).to be false
-        expect(proxy.multi).to be_nil
-        expect(proxy.multi?).to be true
-      end
-
-      it 'can specify param_name/required/multi with optional arguments' do
-        proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: 'sections', required: false, multi: true)
-        expect(proxy.name).to eql(:section)
-        expect(proxy.param_name).to eql(:sections)
-        expect(proxy.required).to be false
-        expect(proxy.required?).to be false
-        expect(proxy.multi).to be true
-        expect(proxy.multi?).to be true
-
-        proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, required: true, multi: false)
-        expect(proxy.name).to eql(:section)
-        expect(proxy.param_name).to eql(:sections)
-        expect(proxy.required).to be true
-        expect(proxy.required?).to be true
-        expect(proxy.multi).to be false
-        expect(proxy.multi?).to be false
-      end
-    end
-
-    describe '#merge' do
-      it 'generate a new instance which values are overwritten by the argument object' do
-        proxy = p1 = Fluent::Config::ConfigureProxy.new(:section)
-        expect(proxy.name).to eql(:section)
-        expect(proxy.param_name).to eql(:section)
-        expect(proxy.required).to be_nil
-        expect(proxy.required?).to be false
-        expect(proxy.multi).to be_nil
-        expect(proxy.multi?).to be true
-
-        p2 = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, required: true, multi: false)
-        proxy = p1.merge(p2)
-        expect(proxy.name).to eql(:section)
-        expect(proxy.param_name).to eql(:sections)
-        expect(proxy.required).to be true
-        expect(proxy.required?).to be true
-        expect(proxy.multi).to be false
-        expect(proxy.multi?).to be false
-      end
-
-      it 'does not overwrite with argument object without any specifications of required/multi' do
-        p1 = Fluent::Config::ConfigureProxy.new(:section1)
-        p2 = Fluent::Config::ConfigureProxy.new(:section2, param_name: :sections, required: true, multi: false)
-        p3 = Fluent::Config::ConfigureProxy.new(:section3)
-        proxy = p1.merge(p2).merge(p3)
-        expect(proxy.name).to eql(:section3)
-        expect(proxy.param_name).to eql(:section3)
-        expect(proxy.required).to be true
-        expect(proxy.required?).to be true
-        expect(proxy.multi).to be false
-        expect(proxy.multi?).to be false
-      end
-    end
-
-    describe '#config_param / #config_set_default / #config_argument' do
-      it 'does not permit config_set_default for param w/ :default option' do
-        proxy = Fluent::Config::ConfigureProxy.new(:section)
-        proxy.config_param(:name, :string, default: "name1")
-        expect{ proxy.config_set_default(:name, "name2") }.to raise_error(ArgumentError)
-      end
-
-      it 'does not permit default value specification twice' do
-        proxy = Fluent::Config::ConfigureProxy.new(:section)
-        proxy.config_param(:name, :string)
-        proxy.config_set_default(:name, "name1")
-        expect{ proxy.config_set_default(:name, "name2") }.to raise_error(ArgumentError)
-      end
-
-      it 'does not permit default value specification twice, even on config_argument' do
-        proxy = Fluent::Config::ConfigureProxy.new(:section)
-        proxy.config_param(:name, :string)
-        proxy.config_set_default(:name, "name1")
-
-        proxy.config_argument(:name)
-        expect{ proxy.config_argument(:name, default: "name2") }.to raise_error(ArgumentError)
-      end
-    end
-  end
-end
data/spec/config/dsl_spec.rb
DELETED
@@ -1,239 +0,0 @@
-require_relative "./helper"
-
-require 'fluent/config/element'
-require "fluent/config/dsl"
-
-DSL_CONFIG_EXAMPLE = %q[
-worker {
-  hostname = "myhostname"
-
-  (0..9).each { |i|
-    source {
-      type :tail
-      path "/var/log/httpd/access.part#{i}.log"
-
-      filter ('bar.**') {
-        type :hoge
-        val1 "moge"
-        val2 ["foo", "bar", "baz"]
-        val3 10
-        id :hoge
-
-        subsection {
-          foo "bar"
-        }
-        subsection {
-          foo "baz"
-        }
-      }
-
-      filter ('foo.**') {
-        type "pass"
-      }
-
-      match ('{foo,bar}.**') {
-        type "file"
-        path "/var/log/httpd/access.#{hostname}.#{i}.log"
-      }
-    }
-  }
-}
-]
-
-DSL_CONFIG_EXAMPLE_WITHOUT_WORKER = %q[
-hostname = "myhostname"
-
-source {
-  type :tail
-  path "/var/log/httpd/access.part.log"
-
-  element {
-    name "foo"
-  }
-
-  match ('{foo,bar}.**') {
-    type "file"
-    path "/var/log/httpd/access.full.log"
-  }
-}
-]
-
-DSL_CONFIG_RETURNS_NON_ELEMENT = %q[
-worker {
-}
-[]
-]
-
-DSL_CONFIG_WRONG_SYNTAX1 = %q[
-match
-]
-DSL_CONFIG_WRONG_SYNTAX2 = %q[
-match('aa','bb'){
-  type :null
-}
-]
-DSL_CONFIG_WRONG_SYNTAX3 = %q[
-match('aa','bb')
-]
-
-describe Fluent::Config::DSL::Parser do
-  include_context 'config_helper'
-
-  context 'with worker tag on top level' do
-    root = nil
-
-    describe '.parse' do
-      it 'makes root element' do
-        root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE, 'dsl_config.rb')
-
-        expect(root.name).to eql('ROOT')
-        expect(root.arg).to be_empty
-        expect(root.keys.size).to eql(0)
-      end
-
-      it 'makes worker element for worker tag' do
-        expect(root.elements.size).to eql(1)
-
-        worker = root.elements.first
-
-        expect(worker.name).to eql('worker')
-        expect(worker.arg).to be_empty
-        expect(worker.keys.size).to eql(0)
-        expect(worker.elements.size).to eql(10)
-      end
-
-      it 'makes subsections for blocks, with variable substitution' do
-        ele4 = root.elements.first.elements[4]
-
-        expect(ele4.name).to eql('source')
-        expect(ele4.arg).to be_empty
-        expect(ele4.keys.size).to eql(2)
-        expect(ele4['type']).to eql('tail')
-        expect(ele4['path']).to eql("/var/log/httpd/access.part4.log")
-      end
-
-      it 'makes user-defined sections with blocks' do
-        filter0 = root.elements.first.elements[4].elements.first
-
-        expect(filter0.name).to eql('filter')
-        expect(filter0.arg).to eql('bar.**')
-        expect(filter0['type']).to eql('hoge')
-        expect(filter0['val1']).to eql('moge')
-        expect(filter0['val2']).to eql(JSON.dump(['foo', 'bar', 'baz']))
-        expect(filter0['val3']).to eql('10')
-        expect(filter0['id']).to eql('hoge')
-
-        expect(filter0.elements.size).to eql(2)
-        expect(filter0.elements[0].name).to eql('subsection')
-        expect(filter0.elements[0]['foo']).to eql('bar')
-        expect(filter0.elements[1].name).to eql('subsection')
-        expect(filter0.elements[1]['foo']).to eql('baz')
-      end
-
-      it 'makes values with user-assigned variable substitutions' do
-        match0 = root.elements.first.elements[4].elements.last
-
-        expect(match0.name).to eql('match')
-        expect(match0.arg).to eql('{foo,bar}.**')
-        expect(match0['type']).to eql('file')
-        expect(match0['path']).to eql('/var/log/httpd/access.myhostname.4.log')
-      end
-    end
-  end
-
-  context 'without worker tag on top level' do
-    root = nil
-
-    describe '.parse' do
-      it 'makes root element' do
-        root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_WITHOUT_WORKER, 'dsl_config_without_worker.rb')
-
-        expect(root.name).to eql('ROOT')
-        expect(root.arg).to be_empty
-        expect(root.keys.size).to eql(0)
-      end
-
-      it 'does not make worker element implicitly because DSL configuration does not support v10 compat mode' do
-        expect(root.elements.size).to eql(1)
-        expect(root.elements.first.name).to eql('source')
-        expect(root.elements.find{|e| e.name == 'worker'}).to be_falsey
-      end
-    end
-  end
-
-  context 'with configuration that returns non element on top' do
-    describe '.parse' do
-      it 'does not crash' do
-        root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_RETURNS_NON_ELEMENT, 'dsl_config_returns_non_element.rb')
-      end
-    end
-  end
-
-  context 'with configuration with wrong arguments for specific elements' do
-    describe '.parse' do
-      it 'raises ArgumentError correctly' do
-        expect{ Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX1, 'dsl_config_wrong_syntax1') }.to raise_error(ArgumentError)
-        expect{ Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX2, 'dsl_config_wrong_syntax1') }.to raise_error(ArgumentError)
-        expect{ Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX3, 'dsl_config_wrong_syntax1') }.to raise_error(ArgumentError)
-      end
-    end
-  end
-
-  context 'with ruby keyword, that provides ruby Kernel module features' do
-    describe '.parse' do
-      it 'can get result of Kernel.open() by ruby.open()' do
-        uname_string = `uname -a`
-        root = Fluent::Config::DSL::Parser.parse(<<DSL)
-worker {
-  uname_str = ruby.open('|uname -a'){|out| out.read}
-  source {
-    uname uname_str
-  }
-}
-DSL
-        worker = root.elements.first
-        expect(worker.name).to eql('worker')
-        source = worker.elements.first
-        expect(source.name).to eql('source')
-        expect(source.keys.size).to eql(1)
-        expect(source['uname']).to eql(uname_string)
-      end
-
-      it 'accepts ruby keyword with block, which allow to use methods included from ::Kernel' do
-        root = Fluent::Config::DSL::Parser.parse(<<DSL)
-worker {
-  ruby_version = ruby {
-    require 'erb'
-    ERB.new('<%= RUBY_VERSION %> from erb').result
-  }
-  source {
-    version ruby_version
-  }
-}
-DSL
-        worker = root.elements.first
-        expect(worker.name).to eql('worker')
-        source = worker.elements.first
-        expect(source.name).to eql('source')
-        expect(source.keys.size).to eql(1)
-        expect(source['version']).to eql("#{RUBY_VERSION} from erb")
-      end
-
-      it 'raises NoMethodError when configuration DSL elements are written in ruby block' do
-        conf = <<DSL
-worker {
-  ruby {
-    source {
-      type "tail"
-    }
-  }
-  source {
-    uname uname_str
-  }
-}
-DSL
-        expect{ Fluent::Config::DSL::Parser.parse(conf) }.to raise_error(NoMethodError)
-      end
-    end
-  end
-end
data/spec/config/helper.rb
DELETED
@@ -1,49 +0,0 @@
-shared_context 'config_helper' do
-  RSpec::Matchers.define :be_parsed_as do |obj|
-    match do |text|
-      v = parse_text(text)
-      if obj.is_a?(Float)
-        v.is_a?(Float) && (v == obj || (v.nan? && obj.nan?) || (v - obj).abs < 0.000001)
-      else
-        v == obj
-      end
-    end
-
-    failure_message do |text|
-      msg = parse_text(text).inspect rescue 'failed'
-      "expected that #{text.inspect} would be a parsed as #{obj.inspect} but got #{msg}"
-    end
-  end
-
-  RSpec::Matchers.define :be_parsed_as_json do |obj|
-    match do |text|
-      v = JSON.parse(parse_text(text))
-      v == obj
-    end
-
-    failure_message do |text|
-      msg = parse_text(text).inspect rescue 'failed'
-      "expected that #{text.inspect} would be a parsed as #{obj.inspect} but got #{msg}"
-    end
-  end
-
-  RSpec::Matchers.define :be_parse_error do |obj|
-    match do |text|
-      begin
-        parse_text(text)
-        false
-      rescue Fluent::ConfigParseError
-        true
-      end
-    end
-
-    failure_message do |text|
-      begin
-        msg = parse_text(text).inspect
-      rescue
-        msg = $!.inspect
-      end
-      "expected that #{text.inspect} would cause a parse error but got #{msg}"
-    end
-  end
-end
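The shared RSpec matchers deleted above (be_parsed_as, be_parsed_as_json, be_parse_error) have no direct counterpart in test-unit; the new data/test/helper.rb and test/config/assertions.rb listed earlier presumably provide plain assertion helpers instead. The following is a hypothetical sketch of such helpers under that assumption — the names and exact behaviour are not taken from the new files:

require 'test/unit'
require 'json'

# Hypothetical replacements for the deleted RSpec matchers; the helpers actually
# shipped in test/config/assertions.rb may differ. parse_text is assumed to be
# defined by the including test, as it was in the original specs.
module ConfigParseAssertions
  include Test::Unit::Assertions

  # be_parsed_as(obj)  ->  assert_parsed_as(obj, text)
  # (the original matcher's NaN special case is omitted here for brevity)
  def assert_parsed_as(expected, text)
    actual = parse_text(text)
    if expected.is_a?(Float)
      assert_in_delta(expected, actual, 0.000001)
    else
      assert_equal(expected, actual)
    end
  end

  # be_parsed_as_json(obj)  ->  assert_parsed_as_json(obj, text)
  def assert_parsed_as_json(expected, text)
    assert_equal(expected, JSON.parse(parse_text(text)))
  end

  # be_parse_error  ->  assert_parse_error(text)
  def assert_parse_error(text)
    assert_raise(Fluent::ConfigParseError) { parse_text(text) }
  end
end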
data/spec/config/literal_parser_spec.rb
DELETED
@@ -1,222 +0,0 @@
-require "config/helper"
-require "fluent/config/error"
-require "fluent/config/literal_parser"
-require "fluent/config/v1_parser"
-
-describe Fluent::Config::LiteralParser do
-  include_context 'config_helper'
-
-  TestLiteralParserContext = Struct.new(:v1, :v2, :v3)
-
-  let(:v1) { :test }
-  let(:v2) { true }
-  let(:v3) { nil }
-
-  let(:eval_context) { TestLiteralParserContext.new(v1, v2, v3) }
-
-  def parse_text(text)
-    basepath = File.expand_path(File.dirname(__FILE__)+'/../../')
-    ss = StringScanner.new(text)
-    parser = Fluent::Config::V1Parser.new(ss, basepath, "(test)", eval_context)
-    parser.parse_literal
-  end
-
-  describe 'boolean parsing' do
-    it { expect('true').to be_parsed_as("true") }
-    it { expect('false').to be_parsed_as("false") }
-    it { expect('trueX').to be_parsed_as("trueX") }
-    it { expect('falseX').to be_parsed_as("falseX") }
-  end
-
-  describe 'integer parsing' do
-    it { expect('0').to be_parsed_as("0") }
-    it { expect('1').to be_parsed_as("1") }
-    it { expect('10').to be_parsed_as("10") }
-    it { expect('-1').to be_parsed_as("-1") }
-    it { expect('-10').to be_parsed_as("-10") }
-    it { expect('0 ').to be_parsed_as("0") }
-    it { expect(' -1 ').to be_parsed_as("-1") }
-    # string
-    it { expect('01').to be_parsed_as("01") }
-    it { expect('00').to be_parsed_as("00") }
-    it { expect('-01').to be_parsed_as("-01") }
-    it { expect('-00').to be_parsed_as("-00") }
-    it { expect('0x61').to be_parsed_as("0x61") }
-    it { expect('0s').to be_parsed_as("0s") }
-  end
-
-  describe 'float parsing' do
-    it { expect('1.1').to be_parsed_as("1.1") }
-    it { expect('0.1').to be_parsed_as("0.1") }
-    it { expect('0.0').to be_parsed_as("0.0") }
-    it { expect('-1.1').to be_parsed_as("-1.1") }
-    it { expect('-0.1').to be_parsed_as("-0.1") }
-    it { expect('1.10').to be_parsed_as("1.10") }
-    # string
-    it { expect('12e8').to be_parsed_as("12e8") }
-    it { expect('12.1e7').to be_parsed_as("12.1e7") }
-    it { expect('-12e8').to be_parsed_as("-12e8") }
-    it { expect('-12.1e7').to be_parsed_as("-12.1e7") }
-    it { expect('.0').to be_parsed_as(".0") }
-    it { expect('.1').to be_parsed_as(".1") }
-    it { expect('0.').to be_parsed_as("0.") }
-    it { expect('1.').to be_parsed_as("1.") }
-    it { expect('.0a').to be_parsed_as(".0a") }
-    it { expect('1.a').to be_parsed_as("1.a") }
-    it { expect('0@').to be_parsed_as("0@") }
-  end
-
-  describe 'float keywords parsing' do
-    it { expect('NaN').to be_parsed_as("NaN") }
-    it { expect('Infinity').to be_parsed_as("Infinity") }
-    it { expect('-Infinity').to be_parsed_as("-Infinity") }
-    it { expect('NaNX').to be_parsed_as("NaNX") }
-    it { expect('InfinityX').to be_parsed_as("InfinityX") }
-    it { expect('-InfinityX').to be_parsed_as("-InfinityX") }
-  end
-
-  describe 'quoted string' do
-    it { expect('""').to be_parsed_as("") }
-    it { expect('"text"').to be_parsed_as("text") }
-    it { expect('"\\""').to be_parsed_as("\"") }
-    it { expect('"\\t"').to be_parsed_as("\t") }
-    it { expect('"\\n"').to be_parsed_as("\n") }
-    it { expect('"\\r\\n"').to be_parsed_as("\r\n") }
-    it { expect('"\\f\\b"').to be_parsed_as("\f\b") }
-    it { expect('"\\.t"').to be_parsed_as(".t") }
-    it { expect('"\\$t"').to be_parsed_as("$t") }
-    it { expect('"\\#t"').to be_parsed_as("#t") }
-    it { expect('"\\z"').to be_parse_error } # unknown escaped character
-    it { expect('"\\0"').to be_parse_error } # unknown escaped character
-    it { expect('"\\1"').to be_parse_error } # unknown escaped character
-    it { expect('"t').to be_parse_error } # non-terminated quoted character
-    it { expect('t"').to be_parsed_as('t"') }
-    it { expect('"."').to be_parsed_as('.') }
-    it { expect('"*"').to be_parsed_as('*') }
-    it { expect('"@"').to be_parsed_as('@') }
-    it { expect('"\\#{test}"').to be_parsed_as("\#{test}") }
-    it { expect('"$"').to be_parsed_as('$') }
-    it { expect('"$t"').to be_parsed_as('$t') }
-    it { expect('"$}"').to be_parsed_as('$}') }
-  end
-
-  describe 'nonquoted string parsing' do
-    # empty
-    it { expect('').to be_parsed_as(nil) }
-
-    it { expect('t').to be_parsed_as('t') }
-    it { expect('T').to be_parsed_as('T') }
-    it { expect('_').to be_parsed_as('_') }
-    it { expect('T1').to be_parsed_as('T1') }
-    it { expect('_2').to be_parsed_as('_2') }
-    it { expect('t0').to be_parsed_as('t0') }
-    it { expect('t@').to be_parsed_as('t@') }
-    it { expect('t-').to be_parsed_as('t-') }
-    it { expect('t.').to be_parsed_as('t.') }
-    it { expect('t+').to be_parsed_as('t+') }
-    it { expect('t/').to be_parsed_as('t/') }
-    it { expect('t=').to be_parsed_as('t=') }
-    it { expect('t,').to be_parsed_as('t,') }
-    it { expect('0t').to be_parsed_as("0t") }
-    it { expect('@1t').to be_parsed_as('@1t') }
-    it { expect('-1t').to be_parsed_as('-1t') }
-    it { expect('.1t').to be_parsed_as('.1t') }
-    it { expect(',1t').to be_parsed_as(',1t') }
-    it { expect('.t').to be_parsed_as('.t') }
-    it { expect('*t').to be_parsed_as('*t') }
-    it { expect('@t').to be_parsed_as('@t') }
-    it { expect('$t').to be_parsed_as('$t') }
-    it { expect('{t').to be_parse_error } # '{' begins map
-    it { expect('t{').to be_parsed_as('t{') }
-    it { expect('}t').to be_parsed_as('}t') }
-    it { expect('[t').to be_parse_error } # '[' begins array
-    it { expect('t[').to be_parsed_as('t[') }
-    it { expect(']t').to be_parsed_as(']t') }
-    it { expect('$t').to be_parsed_as('$t') }
-    it { expect('t:').to be_parsed_as('t:') }
-    it { expect('t;').to be_parsed_as('t;') }
-    it { expect('t?').to be_parsed_as('t?') }
-    it { expect('t^').to be_parsed_as('t^') }
-    it { expect('t`').to be_parsed_as('t`') }
-    it { expect('t~').to be_parsed_as('t~') }
-    it { expect('t|').to be_parsed_as('t|') }
-    it { expect('t>').to be_parsed_as('t>') }
-    it { expect('t<').to be_parsed_as('t<') }
-    it { expect('t(').to be_parsed_as('t(') }
-    it { expect('t[').to be_parsed_as('t[') }
-  end
-
-  describe 'embedded ruby code parsing' do
-    it { expect('"#{v1}"').to be_parsed_as("#{v1}") }
-    it { expect('"#{v2}"').to be_parsed_as("#{v2}") }
-    it { expect('"#{v3}"').to be_parsed_as("#{v3}") }
-    it { expect('"#{1+1}"').to be_parsed_as("2") }
-    it { expect('"#{}"').to be_parsed_as("") }
-    it { expect('"t#{v1}"').to be_parsed_as("t#{v1}") }
-    it { expect('"t#{v1}t"').to be_parsed_as("t#{v1}t") }
-    it { expect('"#{"}"}"').to be_parsed_as("}") }
-    it { expect('"#{#}"').to be_parse_error } # error in embedded ruby code
-    it { expect("\"\#{\n=begin\n}\"").to be_parse_error } # error in embedded ruby code
-  end
-
-  describe 'array parsing' do
-    it { expect('[]').to be_parsed_as_json([]) }
-    it { expect('[1]').to be_parsed_as_json([1]) }
-    it { expect('[1,2]').to be_parsed_as_json([1,2]) }
-    it { expect('[1, 2]').to be_parsed_as_json([1,2]) }
-    it { expect('[ 1 , 2 ]').to be_parsed_as_json([1,2]) }
-    it { expect('[1,2,]').to be_parse_error } # TODO: Need trailing commas support?
-    it { expect("[\n1\n,\n2\n]").to be_parsed_as_json([1,2]) }
-    it { expect('["a"]').to be_parsed_as_json(["a"]) }
-    it { expect('["a","b"]').to be_parsed_as_json(["a","b"]) }
-    it { expect('[ "a" , "b" ]').to be_parsed_as_json(["a","b"]) }
-    it { expect("[\n\"a\"\n,\n\"b\"\n]").to be_parsed_as_json(["a","b"]) }
-    it { expect('["ab","cd"]').to be_parsed_as_json(["ab","cd"]) }
-    json_array_with_js_comment = <<EOA
-  [
-    "a", // this is a
-    "b", // this is b
-    "c" // this is c
-  ]
-EOA
-    it { expect(json_array_with_js_comment).to be_parsed_as_json(["a","b","c"]) }
-    json_array_with_comment = <<EOA
-  [
-    "a", # this is a
-    "b", # this is b
-    "c" # this is c
-  ]
-EOA
-    it { expect(json_array_with_comment).to be_parsed_as_json(["a","b","c"]) }
-    json_array_with_tailing_comma = <<EOA
-  [
-    "a", # this is a
-    "b", # this is b
-    "c", # this is c
-  ]
-EOA
-    it { expect(json_array_with_tailing_comma).to be_parse_error }
-  end
-
-  describe 'map parsing' do
-    it { expect('{}').to be_parsed_as_json({}) }
-    it { expect('{"a":1}').to be_parsed_as_json({"a"=>1}) }
-    it { expect('{"a":1,"b":2}').to be_parsed_as_json({"a"=>1,"b"=>2}) }
-    it { expect('{ "a" : 1 , "b" : 2 }').to be_parsed_as_json({"a"=>1,"b"=>2}) }
-    it { expect('{"a":1,"b":2,}').to be_parse_error } # TODO: Need trailing commas support?
-    it { expect("{\n\"a\"\n:\n1\n,\n\"b\"\n:\n2\n}").to be_parsed_as_json({"a"=>1,"b"=>2}) }
-    it { expect('{"a":"b"}').to be_parsed_as_json({"a"=>"b"}) }
-    it { expect('{"a":"b","c":"d"}').to be_parsed_as_json({"a"=>"b","c"=>"d"}) }
-    it { expect('{ "a" : "b" , "c" : "d" }').to be_parsed_as_json({"a"=>"b","c"=>"d"}) }
-    it { expect("{\n\"a\"\n:\n\"b\"\n,\n\"c\"\n:\n\"d\"\n}").to be_parsed_as_json({"a"=>"b","c"=>"d"}) }
-    json_hash_with_comment = <<EOH
-  {
-    "a": 1, # this is a
-    "b": 2, # this is b
-    "c": 3 # this is c
-  }
-EOH
-    it { expect(json_hash_with_comment).to be_parsed_as_json({"a"=>1,"b"=>2,"c"=>3}) }
-  end
-end
-