logstash-core 2.2.4.snapshot2-java → 2.3.0-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of logstash-core might be problematic.
- checksums.yaml +4 -4
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash/agent.rb +248 -140
- data/lib/logstash/config/defaults.rb +8 -0
- data/lib/logstash/config/loader.rb +90 -0
- data/lib/logstash/config/mixin.rb +44 -15
- data/lib/logstash/output_delegator.rb +1 -1
- data/lib/logstash/pipeline.rb +29 -28
- data/lib/logstash/runner.rb +5 -0
- data/lib/logstash/shutdown_watcher.rb +3 -2
- data/lib/logstash/special_agent.rb +8 -0
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +16 -6
- data/logstash-core.gemspec +2 -2
- data/spec/logstash/agent_spec.rb +278 -34
- data/spec/logstash/config/loader_spec.rb +36 -0
- data/spec/logstash/config/mixin_spec.rb +76 -4
- data/spec/logstash/json_spec.rb +15 -0
- data/spec/logstash/pipeline_spec.rb +2 -2
- data/spec/logstash/plugin_spec.rb +1 -1
- data/spec/logstash/runner_spec.rb +13 -22
- data/spec/logstash/shutdown_watcher_spec.rb +4 -0
- metadata +36 -32
data/logstash-core.gemspec
CHANGED
@@ -17,9 +17,9 @@ Gem::Specification.new do |gem|
   gem.require_paths = ["lib"]
   gem.version = LOGSTASH_CORE_VERSION

-  gem.add_runtime_dependency "logstash-core-event", "
+  gem.add_runtime_dependency "logstash-core-event-java", "2.3.0"

-  gem.add_runtime_dependency "cabin", "~> 0.
+  gem.add_runtime_dependency "cabin", "~> 0.8.0" #(Apache 2.0 license)
   gem.add_runtime_dependency "pry", "~> 0.10.1" #(Ruby license)
   gem.add_runtime_dependency "stud", "~> 0.0.19" #(Apache 2.0 license)
   gem.add_runtime_dependency "clamp", "~> 0.6.5" #(MIT license) for command line args/flags
data/spec/logstash/agent_spec.rb
CHANGED
@@ -1,41 +1,246 @@
 # encoding: utf-8
 require 'spec_helper'
+require 'stud/temporary'
+require 'stud/task'

 describe LogStash::Agent do
-  subject { LogStash::Agent.new('') }
-  let(:dummy_config) { 'input {}' }

-
-
-
+  let(:logger) { double("logger") }
+  let(:agent_args) { [] }
+  subject { LogStash::Agent.new("", "") }

-
-
-
-
+  before :each do
+    [:log, :info, :warn, :error, :fatal, :debug].each do |level|
+      allow(logger).to receive(level)
+    end
+    [:info?, :warn?, :error?, :fatal?, :debug?].each do |level|
+      allow(logger).to receive(level)
+    end
+    allow(logger).to receive(:level=)
+    allow(logger).to receive(:subscribe)
+    subject.parse(agent_args)
+    subject.instance_variable_set("@reload_interval", 0.01)
+    subject.instance_variable_set("@logger", logger)
+  end
+
+  describe "register_pipeline" do
+    let(:pipeline_id) { "main" }
+    let(:config_string) { "input { } filter { } output { }" }
+    let(:settings) { {
+      :config_string => config_string,
+      :pipeline_workers => 4
+    } }
+
+    it "should delegate settings to new pipeline" do
+      expect(LogStash::Pipeline).to receive(:new).with(settings[:config_string], hash_including(settings))
+      subject.register_pipeline(pipeline_id, settings)
+    end
+  end
+
+  describe "#execute" do
+    let(:sample_config) { "input { generator { count => 100000 } } output { }" }
+    let(:config_file) { Stud::Temporary.pathname }
+
+    before :each do
+      File.open(config_file, "w") {|f| f.puts sample_config }
+    end
+
+    after :each do
+      File.unlink(config_file)
+    end
+
+    context "when auto_reload is false" do
+      let(:agent_args) { [ "--config", config_file] } #reload_interval => 0.01, :config_path => } }
+      let(:pipeline_id) { "main" }
+      let(:pipeline_settings) { { :config_path => config_file } }
+
+      before(:each) do
+        subject.register_pipeline(pipeline_id, pipeline_settings)
+      end
+
+      context "if state is clean" do
+        before :each do
+          allow(subject).to receive(:running_pipelines?).and_return(true)
+          allow(subject).to receive(:sleep)
+          allow(subject).to receive(:clean_state?).and_return(false)
+        end
+
+        it "should not reload_state!" do
+          expect(subject).to_not receive(:reload_state!)
+          t = Thread.new { subject.execute }
+          sleep 0.01 until subject.running_pipelines? && subject.pipelines.values.first.ready?
+          sleep 0.1
+          Stud.stop!(t)
+          t.join
         end
       end

-      context "
-
-
-
+      context "when calling reload_state!" do
+        context "with a config that contains reload incompatible plugins" do
+          let(:second_pipeline_config) { "input { stdin {} } filter { } output { }" }
+
+          it "does not reload if new config contains reload incompatible plugins" do
+            t = Thread.new { subject.execute }
+            sleep 0.01 until subject.running_pipelines? && subject.pipelines.values.first.ready?
+            expect(subject).to_not receive(:upgrade_pipeline)
+            File.open(config_file, "w") { |f| f.puts second_pipeline_config }
+            subject.send(:reload_state!)
+            sleep 0.1
+            Stud.stop!(t)
+            t.join
+          end
+        end
+
+        context "with a config that does not contain reload incompatible plugins" do
+          let(:second_pipeline_config) { "input { generator { } } filter { } output { }" }
+
+          it "does not reload if new config contains reload incompatible plugins" do
+            t = Thread.new { subject.execute }
+            sleep 0.01 until subject.running_pipelines? && subject.pipelines.values.first.ready?
+            expect(subject).to receive(:upgrade_pipeline)
+            File.open(config_file, "w") { |f| f.puts second_pipeline_config }
+            subject.send(:reload_state!)
+            sleep 0.1
+            Stud.stop!(t)
+            t.join
+          end
         end
       end
     end

-      context "when
-
-
+    context "when auto_reload is true" do
+      let(:agent_args) { [ "--auto-reload", "--config", config_file] } #reload_interval => 0.01, :config_path => } }
+      let(:pipeline_id) { "main" }
+      let(:pipeline_settings) { { :config_path => config_file } }
+
+      before(:each) do
+        subject.register_pipeline(pipeline_id, pipeline_settings)
+      end

-
-      it
-        expect(subject
+      context "if state is clean" do
+        it "should periodically reload_state" do
+          expect(subject).to receive(:reload_state!).at_least(3).times
+          t = Thread.new(subject) {|subject| subject.execute }
+          sleep 0.01 until (subject.running_pipelines? && subject.pipelines.values.first.ready?)
+          # now that the pipeline has started, give time for reload_state! to happen a few times
+          sleep 0.1
+          Stud.stop!(t)
+          t.join
+        end
+      end
+
+      context "when calling reload_state!" do
+        context "with a config that contains reload incompatible plugins" do
+          let(:second_pipeline_config) { "input { stdin {} } filter { } output { }" }
+
+          it "does not reload if new config contains reload incompatible plugins" do
+            t = Thread.new { subject.execute }
+            sleep 0.01 until subject.running_pipelines? && subject.pipelines.values.first.ready?
+            expect(subject).to_not receive(:upgrade_pipeline)
+            File.open(config_file, "w") { |f| f.puts second_pipeline_config }
+            sleep 0.1
+            Stud.stop!(t)
+            t.join
+          end
+        end
+
+        context "with a config that does not contain reload incompatible plugins" do
+          let(:second_pipeline_config) { "input { generator { } } filter { } output { }" }
+
+          it "does not reload if new config contains reload incompatible plugins" do
+            t = Thread.new { subject.execute }
+            sleep 0.01 until subject.running_pipelines? && subject.pipelines.values.first.ready?
+            expect(subject).to receive(:upgrade_pipeline).at_least(2).times
+            File.open(config_file, "w") { |f| f.puts second_pipeline_config }
+            sleep 0.1
+            Stud.stop!(t)
+            t.join
+          end
         end
       end
     end
   end

+  describe "#reload_state!" do
+    let(:pipeline_id) { "main" }
+    let(:first_pipeline_config) { "input { } filter { } output { }" }
+    let(:second_pipeline_config) { "input { generator {} } filter { } output { }" }
+    let(:pipeline_settings) { {
+      :config_string => first_pipeline_config,
+      :pipeline_workers => 4
+    } }
+
+    before(:each) do
+      subject.register_pipeline(pipeline_id, pipeline_settings)
+    end
+
+    context "when fetching a new state" do
+      it "upgrades the state" do
+        expect(subject).to receive(:fetch_config).and_return(second_pipeline_config)
+        expect(subject).to receive(:upgrade_pipeline).with(pipeline_id, kind_of(LogStash::Pipeline))
+        subject.send(:reload_state!)
+      end
+    end
+    context "when fetching the same state" do
+      it "doesn't upgrade the state" do
+        expect(subject).to receive(:fetch_config).and_return(first_pipeline_config)
+        expect(subject).to_not receive(:upgrade_pipeline)
+        subject.send(:reload_state!)
+      end
+    end
+  end
+
+  describe "#upgrade_pipeline" do
+    let(:pipeline_id) { "main" }
+    let(:pipeline_config) { "input { } filter { } output { }" }
+    let(:pipeline_settings) { {
+      :config_string => pipeline_config,
+      :pipeline_workers => 4
+    } }
+    let(:new_pipeline_config) { "input { generator {} } output { }" }
+
+    before(:each) do
+      subject.register_pipeline(pipeline_id, pipeline_settings)
+    end
+
+    context "when the upgrade fails" do
+      before :each do
+        allow(subject).to receive(:fetch_config).and_return(new_pipeline_config)
+        allow(subject).to receive(:create_pipeline).and_return(nil)
+        allow(subject).to receive(:stop_pipeline)
+      end
+
+      it "leaves the state untouched" do
+        subject.send(:reload_state!)
+        expect(subject.pipelines[pipeline_id].config_str).to eq(pipeline_config)
+      end
+
+      context "and current state is empty" do
+        it "should not start a pipeline" do
+          expect(subject).to_not receive(:start_pipeline)
+          subject.send(:reload_state!)
+        end
+      end
+    end
+
+    context "when the upgrade succeeds" do
+      let(:new_config) { "input { generator { count => 1 } } output { }" }
+      before :each do
+        allow(subject).to receive(:fetch_config).and_return(new_config)
+        allow(subject).to receive(:stop_pipeline)
+      end
+      it "updates the state" do
+        subject.send(:reload_state!)
+        expect(subject.pipelines[pipeline_id].config_str).to eq(new_config)
+      end
+      it "starts the pipeline" do
+        expect(subject).to receive(:stop_pipeline)
+        expect(subject).to receive(:start_pipeline)
+        subject.send(:reload_state!)
+      end
+    end
+  end
+
   context "--pluginpath" do
     let(:single_path) { "/some/path" }
     let(:multiple_paths) { ["/some/path1", "/some/path2"] }
@@ -59,27 +264,66 @@ describe LogStash::Agent do
     end
   end

-  describe "
-    let(:
+  describe "#fetch_config" do
+    let(:file_config) { "input { generator { count => 100 } } output { }" }
+    let(:cli_config) { "filter { drop { } } " }
+    let(:tmp_config_path) { Stud::Temporary.pathname }
+    let(:agent_args) { [ "-e", "filter { drop { } } ", "-f", tmp_config_path ] }
+
+    before :each do
+      IO.write(tmp_config_path, file_config)
+    end
+
+    after :each do
+      File.unlink(tmp_config_path)
+    end
+
+    it "should join the config string and config path content" do
+      settings = { :config_path => tmp_config_path, :config_string => cli_config }
+      fetched_config = subject.send(:fetch_config, settings)
+      expect(fetched_config.strip).to eq(cli_config + IO.read(tmp_config_path))
+    end
+  end
+
+  context "--pluginpath" do
+    let(:single_path) { "/some/path" }
+    let(:multiple_paths) { ["/some/path1", "/some/path2"] }
+
+    it "should fail with single invalid dir path" do
+      expect(File).to receive(:directory?).and_return(false)
+      expect(LogStash::Environment).not_to receive(:add_plugin_path)
+      expect{subject.configure_plugin_paths(single_path)}.to raise_error(LogStash::ConfigurationError)
+    end
+  end
+
+  describe "pipeline settings" do
+    let(:pipeline_string) { "input { stdin {} } output { stdout {} }" }
+    let(:main_pipeline_settings) { { :pipeline_id => "main" } }
     let(:pipeline) { double("pipeline") }

     before(:each) do
-
+      task = Stud::Task.new { 1 }
+      allow(pipeline).to receive(:run).and_return(task)
+      allow(pipeline).to receive(:shutdown)
     end
-
-
-
-        and_return(pipeline)
-
-
+
+    context "when :pipeline_workers is not defined by the user" do
+      it "should not pass the value to the pipeline" do
+        expect(LogStash::Pipeline).to receive(:new).once.with(pipeline_string, hash_excluding(:pipeline_workers)).and_return(pipeline)
+        args = ["-e", pipeline_string]
+        subject.run(args)
+      end
     end

-
-
-
-        .and_return(pipeline)
-
-
+    context "when :pipeline_workers is defined by the user" do
+      it "should pass the value to the pipeline" do
+        main_pipeline_settings[:pipeline_workers] = 2
+        expect(LogStash::Pipeline).to receive(:new).with(pipeline_string, hash_including(main_pipeline_settings)).and_return(pipeline)
+        args = ["-w", "2", "-e", pipeline_string]
+        subject.run(args)
+      end
     end
   end
+
 end
+
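
The spec above exercises the reworked Agent API in this release (register_pipeline, periodic reload_state!, and the --auto-reload argument). As a rough sketch of how those calls fit together outside RSpec, assuming a Logstash 2.3.0 environment on the load path (the empty constructor arguments and settings keys mirror the spec; the pipeline config string is a placeholder):

# Sketch only: mirrors the calls made in agent_spec.rb above.
require "logstash/agent"

agent = LogStash::Agent.new("", "")   # constructor arity as used in the spec
agent.register_pipeline("main",
  :config_string    => "input { generator { count => 1 } } output { }",
  :pipeline_workers => 4)

# reload_state! is private; the spec drives it via send to force a config
# re-check, while running with --auto-reload triggers it periodically from execute.
agent.send(:reload_state!)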
data/spec/logstash/config/loader_spec.rb
ADDED
@@ -0,0 +1,36 @@
+# encoding: utf-8
+require "spec_helper"
+require "logstash/config/loader"
+
+describe LogStash::Config::Loader do
+  subject { described_class.new(Cabin::Channel.get) }
+  context "when local" do
+    before { expect(subject).to receive(:local_config).with(path) }
+
+    context "unix" do
+      let(:path) { './test.conf' }
+      it 'works with relative path' do
+        subject.load_config(path)
+      end
+    end
+
+    context "windows" do
+      let(:path) { '.\test.conf' }
+      it 'work with relative windows path' do
+        subject.load_config(path)
+      end
+    end
+  end
+
+  context "when remote" do
+    context 'supported scheme' do
+      let(:path) { "http://test.local/superconfig.conf" }
+      let(:dummy_config) { 'input {}' }
+
+      before { expect(Net::HTTP).to receive(:get) { dummy_config } }
+      it 'works with http' do
+        expect(subject.load_config(path)).to eq("#{dummy_config}\n")
+      end
+    end
+  end
+end
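
This new spec covers the LogStash::Config::Loader extracted in this release (see data/lib/logstash/config/loader.rb in the file list above). A minimal usage sketch, based only on the calls shown in the spec and assuming a Logstash 2.3.0 environment; the paths are placeholders:

# Sketch only: mirrors loader_spec.rb above.
require "cabin"
require "logstash/config/loader"

loader = LogStash::Config::Loader.new(Cabin::Channel.get)
loader.load_config("./test.conf")                         # local relative path (unix or windows style)
loader.load_config("http://test.local/superconfig.conf")  # supported remote scheme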
data/spec/logstash/config/mixin_spec.rb
CHANGED
@@ -96,10 +96,6 @@ describe LogStash::Config::Mixin do
       clone = subject.class.new(subject.params)
       expect(clone.password.value).to(be == secret)
     end
-
-    it "should obfuscate original_params" do
-      expect(subject.original_params['password']).to(be_a(LogStash::Util::Password))
-    end
   end

   describe "obsolete settings" do
@@ -155,4 +151,80 @@ describe LogStash::Config::Mixin do
       expect(subject.params).to include("password")
     end
   end
+
+  context "environment variable evaluation" do
+    let(:plugin_class) do
+      Class.new(LogStash::Filters::Base) do
+        config_name "one_plugin"
+        config :oneString, :validate => :string
+        config :oneBoolean, :validate => :boolean
+        config :oneNumber, :validate => :number
+        config :oneArray, :validate => :array
+        config :oneHash, :validate => :hash
+      end
+    end
+
+    context "when an environment variable is not set" do
+      context "and no default is given" do
+        before do
+          # Canary. Just in case somehow this is set.
+          expect(ENV["NoSuchVariable"]).to be_nil
+        end
+
+        it "should raise a configuration error" do
+          expect do
+            plugin_class.new("oneString" => "${NoSuchVariable}")
+          end.to raise_error(LogStash::ConfigurationError)
+        end
+      end
+
+      context "and a default is given" do
+        subject do
+          plugin_class.new(
+            "oneString" => "${notExistingVar:foo}",
+            "oneBoolean" => "${notExistingVar:true}",
+            "oneArray" => [ "first array value", "${notExistingVar:foo}", "${notExistingVar:}", "${notExistingVar: }", "${notExistingVar:foo bar}" ],
+            "oneHash" => { "key" => "${notExistingVar:foo}" }
+          )
+        end
+
+        it "should use the default" do
+          expect(subject.oneString).to(be == "foo")
+          expect(subject.oneBoolean).to be_truthy
+          expect(subject.oneArray).to(be == ["first array value", "foo", "", " ", "foo bar"])
+          expect(subject.oneHash).to(be == { "key" => "foo" })
+        end
+      end
+    end
+
+    context "when an environment variable is set" do
+      before do
+        ENV["FunString"] = "fancy"
+        ENV["FunBool"] = "true"
+      end
+
+      after do
+        ENV.delete("FunString")
+        ENV.delete("FunBool")
+      end
+
+      subject do
+        plugin_class.new(
+          "oneString" => "${FunString:foo}",
+          "oneBoolean" => "${FunBool:false}",
+          "oneArray" => [ "first array value", "${FunString:foo}" ],
+          "oneHash" => { "key1" => "${FunString:foo}", "key2" => "$FunString is ${FunBool}", "key3" => "${FunBool:false} or ${funbool:false}" }
+        )
+      end
+
+      it "should use the value in the variable" do
+        expect(subject.oneString).to(be == "fancy")
+        expect(subject.oneBoolean).to(be_truthy)
+        expect(subject.oneArray).to(be == [ "first array value", "fancy" ])
+        expect(subject.oneHash).to(be == { "key1" => "fancy", "key2" => "fancy is true", "key3" => "true or false" })
+      end
+
+    end
+  end
+
 end
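
The new examples above document the environment-variable evaluation added to the config mixin: plugin option values of the form ${VAR} or ${VAR:default} are resolved when the plugin is constructed, and an unset variable with no default raises LogStash::ConfigurationError. A minimal sketch mirroring the spec's anonymous plugin_class, assuming a Logstash 2.3.0 environment (the filter class below is hypothetical):

# Sketch only: mirrors the behavior asserted in mixin_spec.rb above.
require "logstash/filters/base"

class LogStash::Filters::OnePlugin < LogStash::Filters::Base
  config_name "one_plugin"
  config :oneString, :validate => :string

  def filter(event); end
end

ENV["FunString"] = "fancy"
plugin = LogStash::Filters::OnePlugin.new("oneString" => "${FunString:foo}")
plugin.oneString  # => "fancy"; falls back to "foo" when FunString is unset
# With no default and no variable set, construction raises:
# LogStash::Filters::OnePlugin.new("oneString" => "${NoSuchVariable}")  # => LogStash::ConfigurationError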