fluentd 0.12.0.pre.1 → 0.12.0.pre.2

Potentially problematic release.

This version of fluentd might be problematic.

Files changed (81)
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -1
  3. data/.travis.yml +1 -0
  4. data/ChangeLog +21 -0
  5. data/README.md +10 -2
  6. data/Rakefile +4 -13
  7. data/example/v1_literal_example.conf +36 -0
  8. data/fluentd.gemspec +4 -1
  9. data/lib/fluent/buffer.rb +73 -46
  10. data/lib/fluent/command/fluentd.rb +7 -2
  11. data/lib/fluent/config/basic_parser.rb +5 -0
  12. data/lib/fluent/config/element.rb +2 -5
  13. data/lib/fluent/config/literal_parser.rb +26 -7
  14. data/lib/fluent/config/section.rb +2 -0
  15. data/lib/fluent/config/v1_parser.rb +9 -2
  16. data/lib/fluent/formatter.rb +2 -1
  17. data/lib/fluent/mixin.rb +22 -7
  18. data/lib/fluent/output.rb +17 -8
  19. data/lib/fluent/parser.rb +14 -3
  20. data/lib/fluent/plugin/buf_file.rb +30 -15
  21. data/lib/fluent/plugin/filter_grep.rb +69 -0
  22. data/lib/fluent/plugin/filter_record_transformer.rb +183 -0
  23. data/lib/fluent/plugin/in_exec.rb +6 -0
  24. data/lib/fluent/plugin/in_forward.rb +34 -4
  25. data/lib/fluent/plugin/in_http.rb +1 -1
  26. data/lib/fluent/plugin/out_exec.rb +1 -1
  27. data/lib/fluent/plugin/out_exec_filter.rb +8 -1
  28. data/lib/fluent/plugin/out_forward.rb +82 -4
  29. data/lib/fluent/supervisor.rb +1 -1
  30. data/lib/fluent/timezone.rb +131 -0
  31. data/lib/fluent/version.rb +1 -1
  32. data/test/config/assertions.rb +42 -0
  33. data/test/config/test_config_parser.rb +385 -0
  34. data/test/config/test_configurable.rb +530 -0
  35. data/test/config/test_configure_proxy.rb +99 -0
  36. data/test/config/test_dsl.rb +237 -0
  37. data/test/config/test_literal_parser.rb +293 -0
  38. data/test/config/test_section.rb +112 -0
  39. data/test/config/test_system_config.rb +49 -0
  40. data/test/helper.rb +25 -0
  41. data/test/plugin/test_buf_file.rb +604 -0
  42. data/test/plugin/test_buf_memory.rb +204 -0
  43. data/test/plugin/test_filter_grep.rb +124 -0
  44. data/test/plugin/test_filter_record_transformer.rb +251 -0
  45. data/test/plugin/test_in_exec.rb +1 -0
  46. data/test/plugin/test_in_forward.rb +205 -2
  47. data/test/plugin/test_in_gc_stat.rb +1 -0
  48. data/test/plugin/test_in_http.rb +58 -2
  49. data/test/plugin/test_in_object_space.rb +1 -0
  50. data/test/plugin/test_in_status.rb +1 -0
  51. data/test/plugin/test_in_stream.rb +1 -1
  52. data/test/plugin/test_in_syslog.rb +1 -1
  53. data/test/plugin/test_in_tail.rb +1 -0
  54. data/test/plugin/test_in_tcp.rb +1 -1
  55. data/test/plugin/test_in_udp.rb +1 -1
  56. data/test/plugin/test_out_copy.rb +1 -0
  57. data/test/plugin/test_out_exec.rb +1 -0
  58. data/test/plugin/test_out_exec_filter.rb +1 -0
  59. data/test/plugin/test_out_file.rb +36 -0
  60. data/test/plugin/test_out_forward.rb +279 -8
  61. data/test/plugin/test_out_roundrobin.rb +1 -0
  62. data/test/plugin/test_out_stdout.rb +1 -0
  63. data/test/plugin/test_out_stream.rb +1 -1
  64. data/test/test_buffer.rb +530 -0
  65. data/test/test_config.rb +1 -1
  66. data/test/test_configdsl.rb +1 -1
  67. data/test/test_formatter.rb +223 -0
  68. data/test/test_match.rb +1 -2
  69. data/test/test_mixin.rb +74 -2
  70. data/test/test_parser.rb +7 -1
  71. metadata +88 -35
  72. data/lib/fluent/plugin/buf_zfile.rb +0 -75
  73. data/spec/config/config_parser_spec.rb +0 -314
  74. data/spec/config/configurable_spec.rb +0 -524
  75. data/spec/config/configure_proxy_spec.rb +0 -96
  76. data/spec/config/dsl_spec.rb +0 -239
  77. data/spec/config/helper.rb +0 -49
  78. data/spec/config/literal_parser_spec.rb +0 -222
  79. data/spec/config/section_spec.rb +0 -97
  80. data/spec/config/system_config_spec.rb +0 -49
  81. data/spec/spec_helper.rb +0 -60
data/test/config/test_configure_proxy.rb
@@ -0,0 +1,99 @@
+ require 'helper'
+ require 'fluent/config/configure_proxy'
+
+ module Fluent::Config
+ class TestConfigureProxy < ::Test::Unit::TestCase
+ sub_test_case 'to generate a instance' do
+ sub_test_case '#initialize' do
+ test 'has default values' do
+ proxy = Fluent::Config::ConfigureProxy.new('section')
+ assert_equal(:section, proxy.name)
+
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
+ assert_equal(:section, proxy.name)
+ assert_equal(:section, proxy.param_name)
+ assert_nil(proxy.required)
+ assert_false(proxy.required?)
+ assert_nil(proxy.multi)
+ assert_true(proxy.multi?)
+ end
+
+ test 'can specify param_name/required/multi with optional arguments' do
+ proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: 'sections', required: false, multi: true)
+ assert_equal(:section, proxy.name)
+ assert_equal(:sections, proxy.param_name)
+ assert_false(proxy.required)
+ assert_false(proxy.required?)
+ assert_true(proxy.multi)
+ assert_true(proxy.multi?)
+
+ proxy = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, required: true, multi: false)
+ assert_equal(:section, proxy.name)
+ assert_equal(:sections, proxy.param_name)
+ assert_true(proxy.required)
+ assert_true(proxy.required?)
+ assert_false(proxy.multi)
+ assert_false(proxy.multi?)
+ end
+ end
+
+ sub_test_case '#merge' do
+ test 'generate a new instance which values are overwritten by the argument object' do
+ proxy = p1 = Fluent::Config::ConfigureProxy.new(:section)
+ assert_equal(:section, proxy.name)
+ assert_equal(:section, proxy.param_name)
+ assert_nil(proxy.required)
+ assert_false(proxy.required?)
+ assert_nil(proxy.multi)
+ assert_true(proxy.multi?)
+
+ p2 = Fluent::Config::ConfigureProxy.new(:section, param_name: :sections, required: true, multi: false)
+ proxy = p1.merge(p2)
+ assert_equal(:section, proxy.name)
+ assert_equal(:sections, proxy.param_name)
+ assert_true(proxy.required)
+ assert_true(proxy.required?)
+ assert_false(proxy.multi)
+ assert_false(proxy.multi?)
+ end
+
+ test 'does not overwrite with argument object without any specifications of required/multi' do
+ p1 = Fluent::Config::ConfigureProxy.new(:section1)
+ p2 = Fluent::Config::ConfigureProxy.new(:section2, param_name: :sections, required: true, multi: false)
+ p3 = Fluent::Config::ConfigureProxy.new(:section3)
+ proxy = p1.merge(p2).merge(p3)
+ assert_equal(:section3, proxy.name)
+ assert_equal(:section3, proxy.param_name)
+ assert_true(proxy.required)
+ assert_true(proxy.required?)
+ assert_false(proxy.multi)
+ assert_false(proxy.multi?)
+ end
+ end
+
+ sub_test_case '#config_param / #config_set_default / #config_argument' do
+ test 'does not permit config_set_default for param w/ :default option' do
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
+ proxy.config_param(:name, :string, default: "name1")
+ assert_raise(ArgumentError) { proxy.config_set_default(:name, "name2") }
+ end
+
+ test 'does not permit default value specification twice' do
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
+ proxy.config_param(:name, :string)
+ proxy.config_set_default(:name, "name1")
+ assert_raise(ArgumentError) { proxy.config_set_default(:name, "name2") }
+ end
+
+ test 'does not permit default value specification twice, even on config_argument' do
+ proxy = Fluent::Config::ConfigureProxy.new(:section)
+ proxy.config_param(:name, :string)
+ proxy.config_set_default(:name, "name1")
+
+ proxy.config_argument(:name)
+ assert_raise(ArgumentError) { proxy.config_argument(:name, default: "name2") }
+ end
+ end
+ end
+ end
+ end
data/test/config/test_dsl.rb
@@ -0,0 +1,237 @@
+ require 'helper'
+ require 'fluent/config/element'
+ require "fluent/config/dsl"
+
+ DSL_CONFIG_EXAMPLE = %q[
+ worker {
+ hostname = "myhostname"
+
+ (0..9).each { |i|
+ source {
+ type :tail
+ path "/var/log/httpd/access.part#{i}.log"
+
+ filter ('bar.**') {
+ type :hoge
+ val1 "moge"
+ val2 ["foo", "bar", "baz"]
+ val3 10
+ id :hoge
+
+ subsection {
+ foo "bar"
+ }
+ subsection {
+ foo "baz"
+ }
+ }
+
+ filter ('foo.**') {
+ type "pass"
+ }
+
+ match ('{foo,bar}.**') {
+ type "file"
+ path "/var/log/httpd/access.#{hostname}.#{i}.log"
+ }
+ }
+ }
+ }
+ ]
+
+ DSL_CONFIG_EXAMPLE_WITHOUT_WORKER = %q[
+ hostname = "myhostname"
+
+ source {
+ type :tail
+ path "/var/log/httpd/access.part.log"
+
+ element {
+ name "foo"
+ }
+
+ match ('{foo,bar}.**') {
+ type "file"
+ path "/var/log/httpd/access.full.log"
+ }
+ }
+ ]
+
+ DSL_CONFIG_RETURNS_NON_ELEMENT = %q[
+ worker {
+ }
+ []
+ ]
+
+ DSL_CONFIG_WRONG_SYNTAX1 = %q[
+ match
+ ]
+ DSL_CONFIG_WRONG_SYNTAX2 = %q[
+ match('aa','bb'){
+ type :null
+ }
+ ]
+ DSL_CONFIG_WRONG_SYNTAX3 = %q[
+ match('aa','bb')
+ ]
+
+ module Fluent::Config
+ class TestDSLParser < ::Test::Unit::TestCase
+ sub_test_case 'with worker tag on top level' do
+ def setup
+ @root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE, 'dsl_config.rb')
+ end
+
+ sub_test_case '.parse' do
+ test 'makes root element' do
+ assert_equal('ROOT', @root.name)
+ assert_predicate(@root.arg, :empty?)
+ assert_equal(0, @root.keys.size)
+ end
+
+ test 'makes worker element for worker tag' do
+ assert_equal(1, @root.elements.size)
+
+ worker = @root.elements.first
+ assert_equal('worker', worker.name)
+ assert_predicate(worker.arg, :empty?)
+ assert_equal(0, worker.keys.size)
+ assert_equal(10, worker.elements.size)
+ end
+
+ test 'makes subsections for blocks, with variable substitution' do
+ ele4 = @root.elements.first.elements[4]
+
+ assert_equal('source', ele4.name)
+ assert_predicate(ele4.arg, :empty?)
+ assert_equal(2, ele4.keys.size)
+ assert_equal('tail', ele4['type'])
+ assert_equal("/var/log/httpd/access.part4.log", ele4['path'])
+ end
+
+ test 'makes user-defined sections with blocks' do
+ filter0 = @root.elements.first.elements[4].elements.first
+
+ assert_equal('filter', filter0.name)
+ assert_equal('bar.**', filter0.arg)
+ assert_equal('hoge', filter0['type'])
+ assert_equal('moge', filter0['val1'])
+ assert_equal(JSON.dump(['foo', 'bar', 'baz']), filter0['val2'])
+ assert_equal('10', filter0['val3'])
+ assert_equal('hoge', filter0['id'])
+
+ assert_equal(2, filter0.elements.size)
+ assert_equal('subsection', filter0.elements[0].name)
+ assert_equal('bar', filter0.elements[0]['foo'])
+ assert_equal('subsection', filter0.elements[1].name)
+ assert_equal('baz', filter0.elements[1]['foo'])
+ end
+
+ test 'makes values with user-assigned variable substitutions' do
+ match0 = @root.elements.first.elements[4].elements.last
+
+ assert_equal('match', match0.name)
+ assert_equal('{foo,bar}.**', match0.arg)
+ assert_equal('file', match0['type'])
+ assert_equal('/var/log/httpd/access.myhostname.4.log', match0['path'])
+ end
+ end
+ end
+
+ sub_test_case 'without worker tag on top level' do
+ def setup
+ @root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_EXAMPLE_WITHOUT_WORKER, 'dsl_config_without_worker.rb')
+ end
+
+ sub_test_case '.parse' do
+ test 'makes root element' do
+ assert_equal('ROOT', @root.name)
+ assert_predicate(@root.arg, :empty?)
+ assert_equal(0, @root.keys.size)
+ end
+
+ test 'does not make worker element implicitly because DSL configuration does not support v10 compat mode' do
+ assert_equal(1, @root.elements.size)
+ assert_equal('source', @root.elements.first.name)
+ refute(@root.elements.find { |e| e.name == 'worker' })
+ end
+ end
+ end
+
+ sub_test_case 'with configuration that returns non element on top' do
+ sub_test_case '.parse' do
+ test 'does not crash' do
+ root = Fluent::Config::DSL::Parser.parse(DSL_CONFIG_RETURNS_NON_ELEMENT, 'dsl_config_returns_non_element.rb')
+ end
+ end
+ end
+
+ sub_test_case 'with configuration with wrong arguments for specific elements' do
+ sub_test_case '.parse' do
+ test 'raises ArgumentError correctly' do
+ assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX1, 'dsl_config_wrong_syntax1') }
+ assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX2, 'dsl_config_wrong_syntax1') }
+ assert_raise(ArgumentError) { Fluent::Config::DSL::Parser.parse(DSL_CONFIG_WRONG_SYNTAX3, 'dsl_config_wrong_syntax1') }
+ end
+ end
+ end
+
+ sub_test_case 'with ruby keyword, that provides ruby Kernel module features' do
+ sub_test_case '.parse' do
+ test 'can get result of Kernel.open() by ruby.open()' do
+ uname_string = `uname -a`
+ root = Fluent::Config::DSL::Parser.parse(<<DSL)
+ worker {
+ uname_str = ruby.open('|uname -a'){|out| out.read}
+ source {
+ uname uname_str
+ }
+ }
+ DSL
+ worker = root.elements.first
+ assert_equal('worker', worker.name)
+ source = worker.elements.first
+ assert_equal('source', source.name)
+ assert_equal(1, source.keys.size)
+ assert_equal(uname_string, source['uname'])
+ end
+
+ test 'accepts ruby keyword with block, which allow to use methods included from ::Kernel' do
+ root = Fluent::Config::DSL::Parser.parse(<<DSL)
+ worker {
+ ruby_version = ruby {
+ require 'erb'
+ ERB.new('<%= RUBY_VERSION %> from erb').result
+ }
+ source {
+ version ruby_version
+ }
+ }
+ DSL
+ worker = root.elements.first
+ assert_equal('worker', worker.name)
+ source = worker.elements.first
+ assert_equal('source', source.name)
+ assert_equal(1, source.keys.size)
+ assert_equal("#{RUBY_VERSION} from erb", source['version'])
+ end
+
+ test 'raises NoMethodError when configuration DSL elements are written in ruby block' do
+ conf = <<DSL
+ worker {
+ ruby {
+ source {
+ type "tail"
+ }
+ }
+ source {
+ uname uname_str
+ }
+ }
+ DSL
+ assert_raise(NoMethodError) { Fluent::Config::DSL::Parser.parse(conf) }
+ end
+ end
+ end
+ end
+ end
data/test/config/test_literal_parser.rb
@@ -0,0 +1,293 @@
+ require "helper"
+ require 'config/assertions'
+ require "fluent/config/error"
+ require "fluent/config/literal_parser"
+ require "fluent/config/v1_parser"
+ require 'json'
+
+ module Fluent::Config
+ class TestLiteralParser < ::Test::Unit::TestCase
+ def parse_text(text)
+ basepath = File.expand_path(File.dirname(__FILE__)+'/../../')
+ ss = StringScanner.new(text)
+ parser = Fluent::Config::V1Parser.new(ss, basepath, "(test)", eval_context)
+ parser.parse_literal
+ end
+
+ TestLiteralParserContext = Struct.new(:v1, :v2, :v3)
+
+ def v1
+ :test
+ end
+
+ def v2
+ true
+ end
+
+ def v3
+ nil
+ end
+
+ def eval_context
+ @eval_context ||= TestLiteralParserContext.new(v1, v2, v3)
+ end
+
+ sub_test_case 'boolean parsing' do
+ def test_true
+ assert_text_parsed_as('true', "true")
+ end
+ def test_false
+ assert_text_parsed_as('false', "false")
+ end
+ def test_trueX
+ assert_text_parsed_as('trueX', "trueX")
+ end
+ def test_falseX
+ assert_text_parsed_as('falseX', "falseX")
+ end
+ end
+
+ sub_test_case 'integer parsing' do
+ test('0') { assert_text_parsed_as('0', "0") }
+ test('1') { assert_text_parsed_as('1', "1") }
+ test('10') { assert_text_parsed_as('10', "10") }
+ test('-1') { assert_text_parsed_as('-1', "-1") }
+ test('-10') { assert_text_parsed_as('-10', "-10") }
+ test('0 ') { assert_text_parsed_as('0', "0 ") }
+ test(' -1 ') { assert_text_parsed_as("-1", ' -1 ') }
+ # string
+ test('01') { assert_text_parsed_as('01', "01") }
+ test('00') { assert_text_parsed_as('00', "00") }
+ test('-01') { assert_text_parsed_as('-01', "-01") }
+ test('-00') { assert_text_parsed_as('-00', "-00") }
+ test('0x61') { assert_text_parsed_as('0x61', "0x61") }
+ test('0s') { assert_text_parsed_as('0s', "0s") }
+ end
+
+ sub_test_case 'float parsing' do
+ test('1.1') { assert_text_parsed_as('1.1', "1.1") }
+ test('0.1') { assert_text_parsed_as('0.1', "0.1") }
+ test('0.0') { assert_text_parsed_as('0.0', "0.0") }
+ test('-1.1') { assert_text_parsed_as('-1.1', "-1.1") }
+ test('-0.1') { assert_text_parsed_as('-0.1', "-0.1") }
+ test('1.10') { assert_text_parsed_as('1.10', "1.10") }
+ # string
+ test('12e8') { assert_text_parsed_as('12e8', "12e8") }
+ test('12.1e7') { assert_text_parsed_as('12.1e7', "12.1e7") }
+ test('-12e8') { assert_text_parsed_as('-12e8', "-12e8") }
+ test('-12.1e7') { assert_text_parsed_as('-12.1e7', "-12.1e7") }
+ test('.0') { assert_text_parsed_as('.0', ".0") }
+ test('.1') { assert_text_parsed_as('.1', ".1") }
+ test('0.') { assert_text_parsed_as('0.', "0.") }
+ test('1.') { assert_text_parsed_as('1.', "1.") }
+ test('.0a') { assert_text_parsed_as('.0a', ".0a") }
+ test('1.a') { assert_text_parsed_as('1.a', "1.a") }
+ test('0@') { assert_text_parsed_as('0@', "0@") }
+ end
+
+ sub_test_case 'float keywords parsing' do
+ test('NaN') { assert_text_parsed_as('NaN', "NaN") }
+ test('Infinity') { assert_text_parsed_as('Infinity', "Infinity") }
+ test('-Infinity') { assert_text_parsed_as('-Infinity', "-Infinity") }
+ test('NaNX') { assert_text_parsed_as('NaNX', "NaNX") }
+ test('InfinityX') { assert_text_parsed_as('InfinityX', "InfinityX") }
+ test('-InfinityX') { assert_text_parsed_as('-InfinityX', "-InfinityX") }
+ end
+
+ sub_test_case 'double quoted string' do
+ test('""') { assert_text_parsed_as("", '""') }
+ test('"text"') { assert_text_parsed_as("text", '"text"') }
+ test('"\\""') { assert_text_parsed_as("\"", '"\\""') }
+ test('"\\t"') { assert_text_parsed_as("\t", '"\\t"') }
+ test('"\\n"') { assert_text_parsed_as("\n", '"\\n"') }
+ test('"\\r\\n"') { assert_text_parsed_as("\r\n", '"\\r\\n"') }
+ test('"\\f\\b"') { assert_text_parsed_as("\f\b", '"\\f\\b"') }
+ test('"\\.t"') { assert_text_parsed_as(".t", '"\\.t"') }
+ test('"\\$t"') { assert_text_parsed_as("$t", '"\\$t"') }
+ test('"\\#t"') { assert_text_parsed_as("#t", '"\\#t"') }
+ test('"\\z"') { assert_parse_error('"\\z"') } # unknown escaped character
+ test('"\\0"') { assert_parse_error('"\\0"') } # unknown escaped character
+ test('"\\1"') { assert_parse_error('"\\1"') } # unknown escaped character
+ test('"t') { assert_parse_error('"t') } # non-terminated quoted character
+ test('t"') { assert_text_parsed_as('t"', 't"') }
+ test('"."') { assert_text_parsed_as('.', '"."') }
+ test('"*"') { assert_text_parsed_as('*', '"*"') }
+ test('"@"') { assert_text_parsed_as('@', '"@"') }
+ test('"\\#{test}"') { assert_text_parsed_as("\#{test}", '"\\#{test}"') }
+ test('"$"') { assert_text_parsed_as('$', '"$"') }
+ test('"$t"') { assert_text_parsed_as('$t', '"$t"') }
+ test('"$}"') { assert_text_parsed_as('$}', '"$}"') }
+ test('"\\\\"') { assert_text_parsed_as("\\", '"\\\\"') }
+ test('"\\["') { assert_text_parsed_as("[", '"\\["') }
+ end
+
+ sub_test_case 'single quoted string' do
+ test("''") { assert_text_parsed_as("", "''") }
+ test("'text'") { assert_text_parsed_as("text", "'text'") }
+ test("'\\''") { assert_text_parsed_as('\'', "'\\''") }
+ test("'\\t'") { assert_text_parsed_as('\t', "'\\t'") }
+ test("'\\n'") { assert_text_parsed_as('\n', "'\\n'") }
+ test("'\\r\\n'") { assert_text_parsed_as('\r\n', "'\\r\\n'") }
+ test("'\\f\\b'") { assert_text_parsed_as('\f\b', "'\\f\\b'") }
+ test("'\\.t'") { assert_text_parsed_as('\.t', "'\\.t'") }
+ test("'\\$t'") { assert_text_parsed_as('\$t', "'\\$t'") }
+ test("'\\#t'") { assert_text_parsed_as('\#t', "'\\#t'") }
+ test("'\\z'") { assert_text_parsed_as('\z', "'\\z'") }
+ test("'\\0'") { assert_text_parsed_as('\0', "'\\0'") }
+ test("'\\1'") { assert_text_parsed_as('\1', "'\\1'") }
+ test("'t") { assert_parse_error("'t") } # non-terminated quoted character
+ test("t'") { assert_text_parsed_as("t'", "t'") }
+ test("'.'") { assert_text_parsed_as('.', "'.'") }
+ test("'*'") { assert_text_parsed_as('*', "'*'") }
+ test("'@'") { assert_text_parsed_as('@', "'@'") }
+ test(%q['#{test}']) { assert_text_parsed_as('#{test}', %q['#{test}']) }
+ test("'$'") { assert_text_parsed_as('$', "'$'") }
+ test("'$t'") { assert_text_parsed_as('$t', "'$t'") }
+ test("'$}'") { assert_text_parsed_as('$}', "'$}'") }
+ test("'\\\\'") { assert_text_parsed_as('\\', "'\\\\'") }
+ test("'\\['") { assert_text_parsed_as('\[', "'\\['") }
+ end
+
+ sub_test_case 'nonquoted string parsing' do
+ test("''") { assert_text_parsed_as(nil, '') }
+ test('text') { assert_text_parsed_as('text', 'text') }
+ test('\"') { assert_text_parsed_as('\"', '\"') }
+ test('\t') { assert_text_parsed_as('\t', '\t') }
+ test('\n') { assert_text_parsed_as('\n', '\n') }
+ test('\r\n') { assert_text_parsed_as('\r\n', '\r\n') }
+ test('\f\b') { assert_text_parsed_as('\f\b', '\f\b') }
+ test('\.t') { assert_text_parsed_as('\.t', '\.t') }
+ test('\$t') { assert_text_parsed_as('\$t', '\$t') }
+ test('\#t') { assert_text_parsed_as('\#t', '\#t') }
+ test('\z') { assert_text_parsed_as('\z', '\z') }
+ test('\0') { assert_text_parsed_as('\0', '\0') }
+ test('\1') { assert_text_parsed_as('\1', '\1') }
+ test('.') { assert_text_parsed_as('.', '.') }
+ test('*') { assert_text_parsed_as('*', '*') }
+ test('@') { assert_text_parsed_as('@', '@') }
+ test('#{test}') { assert_text_parsed_as('#{test}', '#{test}') }
+ test('$') { assert_text_parsed_as('$', '$') }
+ test('$t') { assert_text_parsed_as('$t', '$t') }
+ test('$}') { assert_text_parsed_as('$}', '$}') }
+ test('\\\\') { assert_text_parsed_as('\\\\', '\\\\') }
+ test('\[') { assert_text_parsed_as('\[', '\[') }
+ test('#foo') { assert_text_parsed_as('#foo', '#foo') } # not comment out
+ test('foo#bar') { assert_text_parsed_as('foo#bar', 'foo#bar') } # not comment out
+ test(' text') { assert_text_parsed_as('text', ' text') } # remove starting spaces
+ test(' #foo') { assert_text_parsed_as('#foo', ' #foo') } # remove starting spaces
+ test('foo #bar') { assert_text_parsed_as('foo', 'foo #bar') } # comment out
+ test('foo\t#bar') { assert_text_parsed_as('foo', "foo\t#bar") } # comment out
+
+ test('t') { assert_text_parsed_as('t', 't') }
+ test('T') { assert_text_parsed_as('T', 'T') }
+ test('_') { assert_text_parsed_as('_', '_') }
+ test('T1') { assert_text_parsed_as('T1', 'T1') }
+ test('_2') { assert_text_parsed_as('_2', '_2') }
+ test('t0') { assert_text_parsed_as('t0', 't0') }
+ test('t@') { assert_text_parsed_as('t@', 't@') }
+ test('t-') { assert_text_parsed_as('t-', 't-') }
+ test('t.') { assert_text_parsed_as('t.', 't.') }
+ test('t+') { assert_text_parsed_as('t+', 't+') }
+ test('t/') { assert_text_parsed_as('t/', 't/') }
+ test('t=') { assert_text_parsed_as('t=', 't=') }
+ test('t,') { assert_text_parsed_as('t,', 't,') }
+ test('0t') { assert_text_parsed_as('0t', "0t") }
+ test('@1t') { assert_text_parsed_as('@1t', '@1t') }
+ test('-1t') { assert_text_parsed_as('-1t', '-1t') }
+ test('.1t') { assert_text_parsed_as('.1t', '.1t') }
+ test(',1t') { assert_text_parsed_as(',1t', ',1t') }
+ test('.t') { assert_text_parsed_as('.t', '.t') }
+ test('*t') { assert_text_parsed_as('*t', '*t') }
+ test('@t') { assert_text_parsed_as('@t', '@t') }
+ test('{t') { assert_parse_error('{t') } # '{' begins map
+ test('t{') { assert_text_parsed_as('t{', 't{') }
+ test('}t') { assert_text_parsed_as('}t', '}t') }
+ test('[t') { assert_parse_error('[t') } # '[' begins array
+ test('t[') { assert_text_parsed_as('t[', 't[') }
+ test(']t') { assert_text_parsed_as(']t', ']t') }
+ test('t:') { assert_text_parsed_as('t:', 't:') }
+ test('t;') { assert_text_parsed_as('t;', 't;') }
+ test('t?') { assert_text_parsed_as('t?', 't?') }
+ test('t^') { assert_text_parsed_as('t^', 't^') }
+ test('t`') { assert_text_parsed_as('t`', 't`') }
+ test('t~') { assert_text_parsed_as('t~', 't~') }
+ test('t|') { assert_text_parsed_as('t|', 't|') }
+ test('t>') { assert_text_parsed_as('t>', 't>') }
+ test('t<') { assert_text_parsed_as('t<', 't<') }
+ test('t(') { assert_text_parsed_as('t(', 't(') }
+ end
+
+ sub_test_case 'embedded ruby code parsing' do
+ test('"#{v1}"') { assert_text_parsed_as("#{v1}", '"#{v1}"') }
+ test('"#{v2}"') { assert_text_parsed_as("#{v2}", '"#{v2}"') }
+ test('"#{v3}"') { assert_text_parsed_as("#{v3}", '"#{v3}"') }
+ test('"#{1+1}"') { assert_text_parsed_as("2", '"#{1+1}"') }
+ test('"#{}"') { assert_text_parsed_as("", '"#{}"') }
+ test('"t#{v1}"') { assert_text_parsed_as("t#{v1}", '"t#{v1}"') }
+ test('"t#{v1}t"') { assert_text_parsed_as("t#{v1}t", '"t#{v1}t"') }
+ test('"#{"}"}"') { assert_text_parsed_as("}", '"#{"}"}"') }
+ test('"#{#}"') { assert_parse_error('"#{#}"') } # error in embedded ruby code
+ test("\"\#{\n=begin\n}\"") { assert_parse_error("\"\#{\n=begin\n}\"") } # error in embedded ruby code
+ end
+
+ sub_test_case 'array parsing' do
+ test('[]') { assert_text_parsed_as_json([], '[]') }
+ test('[1]') { assert_text_parsed_as_json([1], '[1]') }
+ test('[1,2]') { assert_text_parsed_as_json([1,2], '[1,2]') }
+ test('[1, 2]') { assert_text_parsed_as_json([1,2], '[1, 2]') }
+ test('[ 1 , 2 ]') { assert_text_parsed_as_json([1,2], '[ 1 , 2 ]') }
+ test('[1,2,]') { assert_parse_error('[1,2,]') } # TODO: Need trailing commas support?
+ test("[\n1\n,\n2\n]") { assert_text_parsed_as_json([1,2], "[\n1\n,\n2\n]") }
+ test('["a"]') { assert_text_parsed_as_json(["a"], '["a"]') }
+ test('["a","b"]') { assert_text_parsed_as_json(["a","b"], '["a","b"]') }
+ test('[ "a" , "b" ]') { assert_text_parsed_as_json(["a","b"], '[ "a" , "b" ]') }
+ test("[\n\"a\"\n,\n\"b\"\n]") { assert_text_parsed_as_json(["a","b"], "[\n\"a\"\n,\n\"b\"\n]") }
+ test('["ab","cd"]') { assert_text_parsed_as_json(["ab","cd"], '["ab","cd"]') }
+ json_array_with_js_comment = <<EOA
+ [
+ "a", // this is a
+ "b", // this is b
+ "c" // this is c
+ ]
+ EOA
+ test(json_array_with_js_comment) { assert_text_parsed_as_json(["a","b","c"], json_array_with_js_comment) }
+ json_array_with_comment = <<EOA
+ [
+ "a", # this is a
+ "b", # this is b
+ "c" # this is c
+ ]
+ EOA
+ test(json_array_with_comment) { assert_text_parsed_as_json(["a","b","c"], json_array_with_comment) }
+ json_array_with_tailing_comma = <<EOA
+ [
+ "a", # this is a
+ "b", # this is b
+ "c", # this is c
+ ]
+ EOA
+ test(json_array_with_tailing_comma) { assert_parse_error(json_array_with_tailing_comma) }
+ end
+
+ sub_test_case 'map parsing' do
+ test('{}') { assert_text_parsed_as_json({}, '{}') }
+ test('{"a":1}') { assert_text_parsed_as_json({"a"=>1}, '{"a":1}') }
+ test('{"a":1,"b":2}') { assert_text_parsed_as_json({"a"=>1,"b"=>2}, '{"a":1,"b":2}') }
+ test('{ "a" : 1 , "b" : 2 }') { assert_text_parsed_as_json({"a"=>1,"b"=>2}, '{ "a" : 1 , "b" : 2 }') }
+ test('{"a":1,"b":2,}') { assert_parse_error('{"a":1,"b":2,}') } # TODO: Need trailing commas support?
+ test('{\n\"a\"\n:\n1\n,\n\"b\"\n:\n2\n}') { assert_text_parsed_as_json({"a"=>1,"b"=>2}, "{\n\"a\"\n:\n1\n,\n\"b\"\n:\n2\n}") }
+ test('{"a":"b"}') { assert_text_parsed_as_json({"a"=>"b"}, '{"a":"b"}') }
+ test('{"a":"b","c":"d"}') { assert_text_parsed_as_json({"a"=>"b","c"=>"d"}, '{"a":"b","c":"d"}') }
+ test('{ "a" : "b" , "c" : "d" }') { assert_text_parsed_as_json({"a"=>"b","c"=>"d"}, '{ "a" : "b" , "c" : "d" }') }
+ test('{\n\"a\"\n:\n\"b\"\n,\n\"c\"\n:\n\"d\"\n}') { assert_text_parsed_as_json({"a"=>"b","c"=>"d"}, "{\n\"a\"\n:\n\"b\"\n,\n\"c\"\n:\n\"d\"\n}") }
+ json_hash_with_comment = <<EOH
+ {
+ "a": 1, # this is a
+ "b": 2, # this is b
+ "c": 3 # this is c
+ }
+ EOH
+ test(json_hash_with_comment) { assert_text_parsed_as_json({"a"=>1,"b"=>2,"c"=>3}, json_hash_with_comment) }
+ end
+ end
+ end