fluentd 0.10.45 → 0.10.46

Sign up to get free protection for your applications and to get access to all the features.

Potentially problematic release.


This version of fluentd might be problematic. Click here for more details.

Files changed (63)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +3 -1
  3. data/ChangeLog +13 -0
  4. data/Rakefile +18 -2
  5. data/fluentd.gemspec +3 -1
  6. data/lib/fluent/command/fluentd.rb +5 -0
  7. data/lib/fluent/config.rb +17 -333
  8. data/lib/fluent/config/basic_parser.rb +108 -0
  9. data/lib/fluent/config/configure_proxy.rb +145 -0
  10. data/lib/fluent/{config_dsl.rb → config/dsl.rb} +5 -1
  11. data/lib/fluent/config/element.rb +82 -0
  12. data/lib/fluent/config/error.rb +7 -0
  13. data/lib/fluent/config/literal_parser.rb +158 -0
  14. data/lib/fluent/config/parser.rb +96 -0
  15. data/lib/fluent/config/section.rb +115 -0
  16. data/lib/fluent/config/types.rb +86 -0
  17. data/lib/fluent/config/v1_parser.rb +156 -0
  18. data/lib/fluent/configurable.rb +108 -0
  19. data/lib/fluent/engine.rb +4 -3
  20. data/lib/fluent/load.rb +0 -1
  21. data/lib/fluent/parser.rb +15 -5
  22. data/lib/fluent/plugin/buf_memory.rb +13 -5
  23. data/lib/fluent/plugin/in_forward.rb +18 -5
  24. data/lib/fluent/plugin/in_http.rb +4 -2
  25. data/lib/fluent/plugin/in_tail.rb +1 -1
  26. data/lib/fluent/plugin/out_forward.rb +33 -29
  27. data/lib/fluent/registry.rb +76 -0
  28. data/lib/fluent/supervisor.rb +2 -1
  29. data/lib/fluent/test/base.rb +3 -1
  30. data/lib/fluent/version.rb +1 -1
  31. data/spec/config/config_parser_spec.rb +176 -0
  32. data/spec/config/configurable_spec.rb +373 -0
  33. data/spec/config/configure_proxy_spec.rb +96 -0
  34. data/spec/config/dsl_spec.rb +239 -0
  35. data/spec/config/helper.rb +50 -0
  36. data/spec/config/literal_parser_spec.rb +190 -0
  37. data/spec/config/section_spec.rb +97 -0
  38. data/spec/spec_helper.rb +60 -0
  39. data/test/plugin/{in_exec.rb → test_in_exec.rb} +0 -0
  40. data/test/plugin/{in_forward.rb → test_in_forward.rb} +5 -0
  41. data/test/plugin/{in_gc_stat.rb → test_in_gc_stat.rb} +0 -0
  42. data/test/plugin/{in_http.rb → test_in_http.rb} +0 -0
  43. data/test/plugin/{in_object_space.rb → test_in_object_space.rb} +0 -0
  44. data/test/plugin/{in_status.rb → test_in_status.rb} +0 -0
  45. data/test/plugin/{in_stream.rb → test_in_stream.rb} +0 -0
  46. data/test/plugin/{in_syslog.rb → test_in_syslog.rb} +0 -0
  47. data/test/plugin/{in_tail.rb → test_in_tail.rb} +0 -0
  48. data/test/plugin/{out_copy.rb → test_out_copy.rb} +0 -0
  49. data/test/plugin/{out_exec.rb → test_out_exec.rb} +0 -0
  50. data/test/plugin/{out_exec_filter.rb → test_out_exec_filter.rb} +0 -0
  51. data/test/plugin/{out_file.rb → test_out_file.rb} +0 -0
  52. data/test/plugin/{out_forward.rb → test_out_forward.rb} +15 -0
  53. data/test/plugin/{out_roundrobin.rb → test_out_roundrobin.rb} +0 -0
  54. data/test/plugin/{out_stdout.rb → test_out_stdout.rb} +0 -0
  55. data/test/plugin/{out_stream.rb → test_out_stream.rb} +0 -0
  56. data/test/scripts/fluent/plugin/parser_known.rb +3 -0
  57. data/test/{config.rb → test_config.rb} +1 -0
  58. data/test/{configdsl.rb → test_configdsl.rb} +1 -1
  59. data/test/{match.rb → test_match.rb} +0 -0
  60. data/test/{mixin.rb → test_mixin.rb} +0 -0
  61. data/test/{output.rb → test_output.rb} +0 -0
  62. data/test/{parser.rb → test_parser.rb} +22 -5
  63. metadata +114 -51
@@ -0,0 +1,96 @@
1
+ require 'fluent/config/configure_proxy'
2
+
3
+ describe Fluent::Config::ConfigureProxy do
4
+ context 'to generate an instance' do
5
+ describe '#initialize' do
6
+ it 'has default values' do
7
+ proxy = Fluent::Config::ConfigureProxy.new('section')
8
+ expect(proxy.name).to eql(:section)
9
+
10
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
11
+ expect(proxy.name).to eql(:section)
12
+ expect(proxy.param_name).to eql(:section)
13
+ expect(proxy.required).to be_nil
14
+ expect(proxy.required?).to be_false
15
+ expect(proxy.multi).to be_nil
16
+ expect(proxy.multi?).to be_true
17
+ end
18
+
19
+ it 'can specify param_name/required/multi with optional arguments' do
20
+ proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: 'sections', required: false, multi: true)
21
+ expect(proxy.name).to eql(:section)
22
+ expect(proxy.param_name).to eql(:sections)
23
+ expect(proxy.required).to be_false
24
+ expect(proxy.required?).to be_false
25
+ expect(proxy.multi).to be_true
26
+ expect(proxy.multi?).to be_true
27
+
28
+ proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, required: true, multi: false)
29
+ expect(proxy.name).to eql(:section)
30
+ expect(proxy.param_name).to eql(:sections)
31
+ expect(proxy.required).to be_true
32
+ expect(proxy.required?).to be_true
33
+ expect(proxy.multi).to be_false
34
+ expect(proxy.multi?).to be_false
35
+ end
36
+ end
37
+
38
+ describe '#merge' do
39
+ it 'generates a new instance whose values are overwritten by the argument object' do
40
+ proxy = p1 = Fluent::Config::ConfigureProxy.new(:section)
41
+ expect(proxy.name).to eql(:section)
42
+ expect(proxy.param_name).to eql(:section)
43
+ expect(proxy.required).to be_nil
44
+ expect(proxy.required?).to be_false
45
+ expect(proxy.multi).to be_nil
46
+ expect(proxy.multi?).to be_true
47
+
48
+ p2 = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, required: true, multi: false)
49
+ proxy = p1.merge(p2)
50
+ expect(proxy.name).to eql(:section)
51
+ expect(proxy.param_name).to eql(:sections)
52
+ expect(proxy.required).to be_true
53
+ expect(proxy.required?).to be_true
54
+ expect(proxy.multi).to be_false
55
+ expect(proxy.multi?).to be_false
56
+ end
57
+
58
+ it 'does not overwrite with argument object without any specifications of required/multi' do
59
+ p1 = Fluent::Config::ConfigureProxy.new(:section1)
60
+ p2 = Fluent::Config::ConfigureProxy.new(:section2, param_name: :sections, required: true, multi: false)
61
+ p3 = Fluent::Config::ConfigureProxy.new(:section3)
62
+ proxy = p1.merge(p2).merge(p3)
63
+ expect(proxy.name).to eql(:section3)
64
+ expect(proxy.param_name).to eql(:section3)
65
+ expect(proxy.required).to be_true
66
+ expect(proxy.required?).to be_true
67
+ expect(proxy.multi).to be_false
68
+ expect(proxy.multi?).to be_false
69
+ end
70
+ end
71
+
72
+ describe '#config_param / #config_set_default / #config_argument' do
73
+ it 'does not permit config_set_default for param w/ :default option' do
74
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
75
+ proxy.config_param(:name, :string, default: "name1")
76
+ expect{ proxy.config_set_default(:name, "name2") }.to raise_error(ArgumentError)
77
+ end
78
+
79
+ it 'does not permit default value specification twice' do
80
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
81
+ proxy.config_param(:name, :string)
82
+ proxy.config_set_default(:name, "name1")
83
+ expect{ proxy.config_set_default(:name, "name2") }.to raise_error(ArgumentError)
84
+ end
85
+
86
+ it 'does not permit default value specification twice, even on config_argument' do
87
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
88
+ proxy.config_param(:name, :string)
89
+ proxy.config_set_default(:name, "name1")
90
+
91
+ proxy.config_argument(:name)
92
+ expect{ proxy.config_argument(:name, default: "name2") }.to raise_error(ArgumentError)
93
+ end
94
+ end
95
+ end
96
+ end
@@ -0,0 +1,239 @@
1
+ require_relative "./helper"
2
+
3
+ require 'fluent/config/element'
4
+ require "fluent/config/dsl"
5
+
6
+ DSL_CONFIG_EXAMPLE = %q[
7
+ worker {
8
+ hostname = "myhostname"
9
+
10
+ (0..9).each { |i|
11
+ source {
12
+ type :tail
13
+ path "/var/log/httpd/access.part#{i}.log"
14
+
15
+ filter ('bar.**') {
16
+ type :hoge
17
+ val1 "moge"
18
+ val2 ["foo", "bar", "baz"]
19
+ val3 10
20
+ id :hoge
21
+
22
+ subsection {
23
+ foo "bar"
24
+ }
25
+ subsection {
26
+ foo "baz"
27
+ }
28
+ }
29
+
30
+ filter ('foo.**') {
31
+ type "pass"
32
+ }
33
+
34
+ match ('{foo,bar}.**') {
35
+ type "file"
36
+ path "/var/log/httpd/access.#{hostname}.#{i}.log"
37
+ }
38
+ }
39
+ }
40
+ }
41
+ ]
42
+
43
+ DSL_CONFIG_EXAMPLE_WITHOUT_WORKER = %q[
44
+ hostname = "myhostname"
45
+
46
+ source {
47
+ type :tail
48
+ path "/var/log/httpd/access.part.log"
49
+
50
+ element {
51
+ name "foo"
52
+ }
53
+
54
+ match ('{foo,bar}.**') {
55
+ type "file"
56
+ path "/var/log/httpd/access.full.log"
57
+ }
58
+ }
59
+ ]
60
+
61
+ DSL_CONFIG_RETURNS_NON_ELEMENT = %q[
62
+ worker {
63
+ }
64
+ []
65
+ ]
66
+
67
+ DSL_CONFIG_WRONG_SYNTAX1 = %q[
68
+ match
69
+ ]
70
+ DSL_CONFIG_WRONG_SYNTAX2 = %q[
71
+ match('aa','bb'){
72
+ type :null
73
+ }
74
+ ]
75
+ DSL_CONFIG_WRONG_SYNTAX3 = %q[
76
+ match('aa','bb')
77
+ ]
78
+
79
+ describe Fluent::Config::DSL::Parser do
80
+ include_context 'config_helper'
81
+
82
+ context 'with worker tag on top level' do
83
+ root = nil
84
+
85
+ describe '.parse' do
86
+ it 'makes root element' do
87
+ root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE, 'dsl_config.rb')
88
+
89
+ expect(root.name).to eql('ROOT')
90
+ expect(root.arg).to be_empty
91
+ expect(root.keys.size).to eql(0)
92
+ end
93
+
94
+ it 'makes worker element for worker tag' do
95
+ expect(root.elements.size).to eql(1)
96
+
97
+ worker = root.elements.first
98
+
99
+ expect(worker.name).to eql('worker')
100
+ expect(worker.arg).to be_empty
101
+ expect(worker.keys.size).to eql(0)
102
+ expect(worker.elements.size).to eql(10)
103
+ end
104
+
105
+ it 'makes subsections for blocks, with variable substitution' do
106
+ ele4 = root.elements.first.elements[4]
107
+
108
+ expect(ele4.name).to eql('source')
109
+ expect(ele4.arg).to be_empty
110
+ expect(ele4.keys.size).to eql(2)
111
+ expect(ele4['type']).to eql('tail')
112
+ expect(ele4['path']).to eql("/var/log/httpd/access.part4.log")
113
+ end
114
+
115
+ it 'makes user-defined sections with blocks' do
116
+ filter0 = root.elements.first.elements[4].elements.first
117
+
118
+ expect(filter0.name).to eql('filter')
119
+ expect(filter0.arg).to eql('bar.**')
120
+ expect(filter0['type']).to eql('hoge')
121
+ expect(filter0['val1']).to eql('moge')
122
+ expect(filter0['val2']).to eql(JSON.dump(['foo', 'bar', 'baz']))
123
+ expect(filter0['val3']).to eql('10')
124
+ expect(filter0['id']).to eql('hoge')
125
+
126
+ expect(filter0.elements.size).to eql(2)
127
+ expect(filter0.elements[0].name).to eql('subsection')
128
+ expect(filter0.elements[0]['foo']).to eql('bar')
129
+ expect(filter0.elements[1].name).to eql('subsection')
130
+ expect(filter0.elements[1]['foo']).to eql('baz')
131
+ end
132
+
133
+ it 'makes values with user-assigned variable substitutions' do
134
+ match0 = root.elements.first.elements[4].elements.last
135
+
136
+ expect(match0.name).to eql('match')
137
+ expect(match0.arg).to eql('{foo,bar}.**')
138
+ expect(match0['type']).to eql('file')
139
+ expect(match0['path']).to eql('/var/log/httpd/access.myhostname.4.log')
140
+ end
141
+ end
142
+ end
143
+
144
+ context 'without worker tag on top level' do
145
+ root = nil
146
+
147
+ describe '.parse' do
148
+ it 'makes root element' do
149
+ root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_WITHOUT_WORKER, 'dsl_config_without_worker.rb')
150
+
151
+ expect(root.name).to eql('ROOT')
152
+ expect(root.arg).to be_empty
153
+ expect(root.keys.size).to eql(0)
154
+ end
155
+
156
+ it 'does not make worker element implicitly because DSL configuration does not support v10 compat mode' do
157
+ expect(root.elements.size).to eql(1)
158
+ expect(root.elements.first.name).to eql('source')
159
+ expect(root.elements.find{|e| e.name == 'worker'}).to be_false
160
+ end
161
+ end
162
+ end
163
+
164
+ context 'with configuration that returns non element on top' do
165
+ describe '.parse' do
166
+ it 'does not crash' do
167
+ root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_RETURNS_NON_ELEMENT, 'dsl_config_returns_non_element.rb')
168
+ end
169
+ end
170
+ end
171
+
172
+ context 'with configuration with wrong arguments for specific elements' do
173
+ describe '.parse' do
174
+ it 'raises ArgumentError correctly' do
175
+ expect{ Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX1, 'dsl_config_wrong_syntax1') }.to raise_error(ArgumentError)
176
+ expect{ Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX2, 'dsl_config_wrong_syntax1') }.to raise_error(ArgumentError)
177
+ expect{ Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX3, 'dsl_config_wrong_syntax1') }.to raise_error(ArgumentError)
178
+ end
179
+ end
180
+ end
181
+
182
+ context 'with ruby keyword, that provides ruby Kernel module features' do
183
+ describe '.parse' do
184
+ it 'can get result of Kernel.open() by ruby.open()' do
185
+ uname_string = `uname -a`
186
+ root = Fluent::Config::DSL::Parser.parse(<<DSL)
187
+ worker {
188
+ uname_str = ruby.open('|uname -a'){|out| out.read}
189
+ source {
190
+ uname uname_str
191
+ }
192
+ }
193
+ DSL
194
+ worker = root.elements.first
195
+ expect(worker.name).to eql('worker')
196
+ source = worker.elements.first
197
+ expect(source.name).to eql('source')
198
+ expect(source.keys.size).to eql(1)
199
+ expect(source['uname']).to eql(uname_string)
200
+ end
201
+
202
+ it 'accepts ruby keyword with block, which allow to use methods included from ::Kernel' do
203
+ root = Fluent::Config::DSL::Parser.parse(<<DSL)
204
+ worker {
205
+ ruby_version = ruby {
206
+ require 'erb'
207
+ ERB.new('<%= RUBY_VERSION %> from erb').result
208
+ }
209
+ source {
210
+ version ruby_version
211
+ }
212
+ }
213
+ DSL
214
+ worker = root.elements.first
215
+ expect(worker.name).to eql('worker')
216
+ source = worker.elements.first
217
+ expect(source.name).to eql('source')
218
+ expect(source.keys.size).to eql(1)
219
+ expect(source['version']).to eql("#{RUBY_VERSION} from erb")
220
+ end
221
+
222
+ it 'raises NoMethodError when configuration DSL elements are written in ruby block' do
223
+ conf = <<DSL
224
+ worker {
225
+ ruby {
226
+ source {
227
+ type "tail"
228
+ }
229
+ }
230
+ source {
231
+ uname uname_str
232
+ }
233
+ }
234
+ DSL
235
+ expect{ Fluent::Config::DSL::Parser.parse(conf) }.to raise_error(NoMethodError)
236
+ end
237
+ end
238
+ end
239
+ end
@@ -0,0 +1,50 @@
1
+ shared_context 'config_helper' do
2
+ RSpec::Matchers.define :be_parsed_as do |obj|
3
+ match do |text|
4
+ v = parse_text(text)
5
+ if obj.is_a?(Float)
6
+ v.is_a?(Float) && (v == obj || (v.nan? && obj.nan?) || (v - obj).abs < 0.000001)
7
+ else
8
+ v == obj
9
+ end
10
+ end
11
+
12
+ failure_message_for_should do |text|
13
+ msg = parse_text(text).inspect rescue 'failed'
14
+ "expected that #{text.inspect} would be parsed as #{obj.inspect} but got #{msg}"
15
+ end
16
+ end
17
+
18
+ RSpec::Matchers.define :be_parsed_as_json do |obj|
19
+ match do |text|
20
+ v = JSON.parse(parse_text(text))
21
+ v == obj
22
+ end
23
+
24
+ failure_message_for_should do |text|
25
+ msg = parse_text(text).inspect rescue 'failed'
26
+ "expected that #{text.inspect} would be parsed as #{obj.inspect} but got #{msg}"
27
+ end
28
+ end
29
+
30
+ RSpec::Matchers.define :be_parse_error do |obj|
31
+ match do |text|
32
+ begin
33
+ parse_text(text)
34
+ false
35
+ rescue Fluent::ConfigParseError
36
+ true
37
+ end
38
+ end
39
+
40
+ failure_message_for_should do |text|
41
+ begin
42
+ msg = parse_text(text).inspect
43
+ rescue
44
+ msg = $!.inspect
45
+ end
46
+ "expected that #{text.inspect} would cause a parse error but got #{msg}"
47
+ end
48
+ end
49
+ end
50
+
@@ -0,0 +1,190 @@
1
+ require "config/helper"
2
+ require "fluent/config/error"
3
+ require "fluent/config/literal_parser"
4
+ require "fluent/config/v1_parser"
5
+
6
+ describe Fluent::Config::LiteralParser do
7
+ include_context 'config_helper'
8
+
9
+ TestLiteralParserContext = Struct.new(:v1, :v2, :v3)
10
+
11
+ let(:v1) { :test }
12
+ let(:v2) { true }
13
+ let(:v3) { nil }
14
+
15
+ let(:eval_context) { TestLiteralParserContext.new(v1, v2, v3) }
16
+
17
+ def parse_text(text)
18
+ basepath = File.expand_path(File.dirname(__FILE__)+'/../../')
19
+ ss = StringScanner.new(text)
20
+ parser = Fluent::Config::V1Parser.new(ss, basepath, "(test)", eval_context)
21
+ parser.parse_literal
22
+ end
23
+
24
+ describe 'boolean parsing' do
25
+ it { 'true'.should be_parsed_as("true") }
26
+ it { 'false'.should be_parsed_as("false") }
27
+ it { 'trueX'.should be_parsed_as("trueX") }
28
+ it { 'falseX'.should be_parsed_as("falseX") }
29
+ end
30
+
31
+ describe 'integer parsing' do
32
+ it { '0'.should be_parsed_as("0") }
33
+ it { '1'.should be_parsed_as("1") }
34
+ it { '10'.should be_parsed_as("10") }
35
+ it { '-1'.should be_parsed_as("-1") }
36
+ it { '-10'.should be_parsed_as("-10") }
37
+ it { '0 '.should be_parsed_as("0") }
38
+ it { ' -1 '.should be_parsed_as("-1") }
39
+ # string
40
+ it { '01'.should be_parsed_as("01") }
41
+ it { '00'.should be_parsed_as("00") }
42
+ it { '-01'.should be_parsed_as("-01") }
43
+ it { '-00'.should be_parsed_as("-00") }
44
+ it { '0x61'.should be_parsed_as("0x61") }
45
+ it { '0s'.should be_parsed_as("0s") }
46
+ end
47
+
48
+ describe 'float parsing' do
49
+ it { '1.1'.should be_parsed_as("1.1") }
50
+ it { '0.1'.should be_parsed_as("0.1") }
51
+ it { '0.0'.should be_parsed_as("0.0") }
52
+ it { '-1.1'.should be_parsed_as("-1.1") }
53
+ it { '-0.1'.should be_parsed_as("-0.1") }
54
+ it { '1.10'.should be_parsed_as("1.10") }
55
+ # string
56
+ it { '12e8'.should be_parsed_as("12e8") }
57
+ it { '12.1e7'.should be_parsed_as("12.1e7") }
58
+ it { '-12e8'.should be_parsed_as("-12e8") }
59
+ it { '-12.1e7'.should be_parsed_as("-12.1e7") }
60
+ it { '.0'.should be_parsed_as(".0") }
61
+ it { '.1'.should be_parsed_as(".1") }
62
+ it { '0.'.should be_parsed_as("0.") }
63
+ it { '1.'.should be_parsed_as("1.") }
64
+ it { '.0a'.should be_parsed_as(".0a") }
65
+ it { '1.a'.should be_parsed_as("1.a") }
66
+ it { '0@'.should be_parsed_as("0@") }
67
+ end
68
+
69
+ describe 'float keywords parsing' do
70
+ it { 'NaN'.should be_parsed_as("NaN") }
71
+ it { 'Infinity'.should be_parsed_as("Infinity") }
72
+ it { '-Infinity'.should be_parsed_as("-Infinity") }
73
+ it { 'NaNX'.should be_parsed_as("NaNX") }
74
+ it { 'InfinityX'.should be_parsed_as("InfinityX") }
75
+ it { '-InfinityX'.should be_parsed_as("-InfinityX") }
76
+ end
77
+
78
+ describe 'quoted string' do
79
+ it { '""'.should be_parsed_as("") }
80
+ it { '"text"'.should be_parsed_as("text") }
81
+ it { '"\\""'.should be_parsed_as("\"") }
82
+ it { '"\\t"'.should be_parsed_as("\t") }
83
+ it { '"\\n"'.should be_parsed_as("\n") }
84
+ it { '"\\r\\n"'.should be_parsed_as("\r\n") }
85
+ it { '"\\f\\b"'.should be_parsed_as("\f\b") }
86
+ it { '"\\.t"'.should be_parsed_as(".t") }
87
+ it { '"\\$t"'.should be_parsed_as("$t") }
88
+ it { '"\\#t"'.should be_parsed_as("#t") }
89
+ it { '"\\z"'.should be_parse_error } # unknown escaped character
90
+ it { '"\\0"'.should be_parse_error } # unknown escaped character
91
+ it { '"\\1"'.should be_parse_error } # unknown escaped character
92
+ it { '"t'.should be_parse_error } # non-terminated quoted character
93
+ it { 't"'.should be_parsed_as('t"') }
94
+ it { '"."'.should be_parsed_as('.') }
95
+ it { '"*"'.should be_parsed_as('*') }
96
+ it { '"@"'.should be_parsed_as('@') }
97
+ it { '"\\#{test}"'.should be_parsed_as("\#{test}") }
98
+ it { '"$"'.should be_parsed_as('$') }
99
+ it { '"$t"'.should be_parsed_as('$t') }
100
+ it { '"$}"'.should be_parsed_as('$}') }
101
+ end
102
+
103
+ describe 'nonquoted string parsing' do
104
+ # empty
105
+ it { ''.should be_parsed_as(nil) }
106
+
107
+ it { 't'.should be_parsed_as('t') }
108
+ it { 'T'.should be_parsed_as('T') }
109
+ it { '_'.should be_parsed_as('_') }
110
+ it { 'T1'.should be_parsed_as('T1') }
111
+ it { '_2'.should be_parsed_as('_2') }
112
+ it { 't0'.should be_parsed_as('t0') }
113
+ it { 't@'.should be_parsed_as('t@') }
114
+ it { 't-'.should be_parsed_as('t-') }
115
+ it { 't.'.should be_parsed_as('t.') }
116
+ it { 't+'.should be_parsed_as('t+') }
117
+ it { 't/'.should be_parsed_as('t/') }
118
+ it { 't='.should be_parsed_as('t=') }
119
+ it { 't,'.should be_parsed_as('t,') }
120
+ it { '0t'.should be_parsed_as("0t") }
121
+ it { '@1t'.should be_parsed_as('@1t') }
122
+ it { '-1t'.should be_parsed_as('-1t') }
123
+ it { '.1t'.should be_parsed_as('.1t') }
124
+ it { ',1t'.should be_parsed_as(',1t') }
125
+ it { '.t'.should be_parsed_as('.t') }
126
+ it { '*t'.should be_parsed_as('*t') }
127
+ it { '@t'.should be_parsed_as('@t') }
128
+ it { '$t'.should be_parsed_as('$t') }
129
+ it { '{t'.should be_parse_error } # '{' begins map
130
+ it { 't{'.should be_parsed_as('t{') }
131
+ it { '}t'.should be_parsed_as('}t') }
132
+ it { '[t'.should be_parse_error } # '[' begins array
133
+ it { 't['.should be_parsed_as('t[') }
134
+ it { ']t'.should be_parsed_as(']t') }
135
+ it { '$t'.should be_parsed_as('$t') }
136
+ it { 't:'.should be_parsed_as('t:') }
137
+ it { 't;'.should be_parsed_as('t;') }
138
+ it { 't?'.should be_parsed_as('t?') }
139
+ it { 't^'.should be_parsed_as('t^') }
140
+ it { 't`'.should be_parsed_as('t`') }
141
+ it { 't~'.should be_parsed_as('t~') }
142
+ it { 't|'.should be_parsed_as('t|') }
143
+ it { 't>'.should be_parsed_as('t>') }
144
+ it { 't<'.should be_parsed_as('t<') }
145
+ it { 't('.should be_parsed_as('t(') }
146
+ it { 't['.should be_parsed_as('t[') }
147
+ end
148
+
149
+ describe 'embedded ruby code parsing' do
150
+ it { '"#{v1}"'.should be_parsed_as("#{v1}") }
151
+ it { '"#{v2}"'.should be_parsed_as("#{v2}") }
152
+ it { '"#{v3}"'.should be_parsed_as("#{v3}") }
153
+ it { '"#{1+1}"'.should be_parsed_as("2") }
154
+ it { '"#{}"'.should be_parsed_as("") }
155
+ it { '"t#{v1}"'.should be_parsed_as("t#{v1}") }
156
+ it { '"t#{v1}t"'.should be_parsed_as("t#{v1}t") }
157
+ it { '"#{"}"}"'.should be_parsed_as("}") }
158
+ it { '"#{#}"'.should be_parse_error } # error in embedded ruby code
159
+ it { "\"\#{\n=begin\n}\"".should be_parse_error } # error in embedded ruby code
160
+ end
161
+
162
+ describe 'array parsing' do
163
+ it { '[]'.should be_parsed_as_json([]) }
164
+ it { '[1]'.should be_parsed_as_json([1]) }
165
+ it { '[1,2]'.should be_parsed_as_json([1,2]) }
166
+ it { '[1, 2]'.should be_parsed_as_json([1,2]) }
167
+ it { '[ 1 , 2 ]'.should be_parsed_as_json([1,2]) }
168
+ it { '[1,2,]'.should be_parse_error } # TODO: Need trailing commas support?
169
+ it { "[\n1\n,\n2\n]".should be_parsed_as_json([1,2]) }
170
+ it { '["a"]'.should be_parsed_as_json(["a"]) }
171
+ it { '["a","b"]'.should be_parsed_as_json(["a","b"]) }
172
+ it { '[ "a" , "b" ]'.should be_parsed_as_json(["a","b"]) }
173
+ it { "[\n\"a\"\n,\n\"b\"\n]".should be_parsed_as_json(["a","b"]) }
174
+ it { '["ab","cd"]'.should be_parsed_as_json(["ab","cd"]) }
175
+ end
176
+
177
+ describe 'map parsing' do
178
+ it { '{}'.should be_parsed_as_json({}) }
179
+ it { '{"a":1}'.should be_parsed_as_json({"a"=>1}) }
180
+ it { '{"a":1,"b":2}'.should be_parsed_as_json({"a"=>1,"b"=>2}) }
181
+ it { '{ "a" : 1 , "b" : 2 }'.should be_parsed_as_json({"a"=>1,"b"=>2}) }
182
+ it { '{"a":1,"b":2,}'.should be_parse_error } # TODO: Need trailing commas support?
183
+ it { "{\n\"a\"\n:\n1\n,\n\"b\"\n:\n2\n}".should be_parsed_as_json({"a"=>1,"b"=>2}) }
184
+ it { '{"a":"b"}'.should be_parsed_as_json({"a"=>"b"}) }
185
+ it { '{"a":"b","c":"d"}'.should be_parsed_as_json({"a"=>"b","c"=>"d"}) }
186
+ it { '{ "a" : "b" , "c" : "d" }'.should be_parsed_as_json({"a"=>"b","c"=>"d"}) }
187
+ it { "{\n\"a\"\n:\n\"b\"\n,\n\"c\"\n:\n\"d\"\n}".should be_parsed_as_json({"a"=>"b","c"=>"d"}) }
188
+ end
189
+ end
190
+