logstash-input-ow 0.1.2
- checksums.yaml +7 -0
- data/Gemfile +4 -0
- data/LICENSE +202 -0
- data/README.md +122 -0
- data/lib/logstash/inputs/openwhisk.rb +302 -0
- data/logstash-input-openwhisk.gemspec +31 -0
- data/spec/inputs/openwhisk_spec.rb +541 -0
- metadata +192 -0
data/logstash-input-openwhisk.gemspec
@@ -0,0 +1,31 @@
Gem::Specification.new do |s|
  s.name = 'logstash-input-ow'
  s.version = '0.1.2'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Retrieve OpenWhisk logs with Logstash."
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  s.authors = [ "James Thomas" ]
  s.email = 'cdbduque@gmail.com'
  s.homepage = "https://github.com/cduque89/logstash-input-openwhiskk"
  s.require_paths = ["lib"]

  # Files
  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }

  # Gem dependencies
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
  s.add_runtime_dependency 'logstash-codec-plain'
  s.add_runtime_dependency 'logstash-mixin-http_client', ">= 6.0.0", "< 7.0.0"
  s.add_runtime_dependency 'stud', "~> 0.0.22"
  s.add_runtime_dependency 'rufus-scheduler', "~>3.0.9"

  s.add_development_dependency 'logstash-codec-json'
  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'flores'
  s.add_development_dependency 'timecop'
end
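Note that this release publishes the gem as logstash-input-ow even though the gemspec file is named logstash-input-openwhisk.gemspec, so the install command referenced in the description becomes $LS_HOME/bin/logstash-plugin install logstash-input-ow. As a quick, hypothetical sanity check (not part of this release), the gemspec can be loaded and the fields the Logstash plugin manager relies on inspected:

# Hypothetical check, not part of the gem. Filename taken from the file listing
# above; adjust the path if running from a different directory.
require "rubygems"

spec = Gem::Specification.load("logstash-input-openwhisk.gemspec")
abort("gemspec failed to load") if spec.nil?

puts spec.name                              # => "logstash-input-ow"
puts spec.version                           # => "0.1.2"
puts spec.metadata["logstash_plugin"]       # => "true"  (marks the gem as a Logstash plugin)
puts spec.metadata["logstash_group"]        # => "input"
puts spec.runtime_dependencies.map(&:name)  # logstash-core-plugin-api, logstash-mixin-http_client, ...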
data/spec/inputs/openwhisk_spec.rb
@@ -0,0 +1,541 @@
require "logstash/devutils/rspec/spec_helper"
require 'logstash/inputs/openwhisk'
require 'flores/random'
require "timecop"

describe LogStash::Inputs::OpenWhisk do
  let(:metadata_target) { "_openwhisk_metadata" }
  let(:queue) { Queue.new }
  let(:default_schedule) {
    { "cron" => "* * * * * UTC" }
  }
  let(:default_name) { "openwhisk" }
  let(:default_hostname) { "localhost" }
  let(:default_username) { "user@email.com" }
  let(:default_password) { "my_password" }
  let(:default_namespace) { "user_namespace" }
  let(:default_opts) {
    {
      "schedule" => default_schedule,
      "hostname" => default_hostname,
      "username" => default_username,
      "password" => default_password,
      "namespace" => default_namespace,
      "codec" => "json",
      "metadata_target" => metadata_target
    }
  }
  let(:klass) { LogStash::Inputs::OpenWhisk }

  describe "instances" do
    subject { klass.new(default_opts) }

    before do
      subject.register
    end

    describe "#register" do
      it "should set logs since to time since epoch" do
        expect(subject.instance_variable_get("@logs_since")).to eql(Time.now.to_i * 1000)
      end
    end

    describe "#run" do
      it "should setup a scheduler" do
        runner = Thread.new do
          subject.run(double("queue"))
          expect(subject.instance_variable_get("@scheduler")).to be_a_kind_of(Rufus::Scheduler)
        end
        runner.kill
        runner.join
      end
    end

    describe "#run_once" do
      it "should issue an async request for each url" do
        constructed_request = subject.send(:construct_request, default_opts)
        expect(subject).to receive(:request_async).with(queue, default_name, constructed_request).once

        subject.send(:run_once, queue) # :run_once is a private method
      end
    end

    describe "#update_logs_since" do
      context "given current time less than five minutes ahead of last poll activation" do
        let(:now) { Time.now.to_i * 1000 }
        let(:previous) {
          now - (5 * 60 * 1000) + 1
        }
        before do
          subject.instance_variable_set("@logs_since", previous)
          subject.send(:update_logs_since, now)
        end

        it "should not update logs since" do
          expect(subject.instance_variable_get("@logs_since")).to eql(previous)
        end
      end

      context "given current time more than five minutes ahead of last poll activation" do
        let(:now) { Time.now.to_i * 1000 }
        let(:previous) {
          now - (5 * 60 * 1000) - 1
        }
        before do
          subject.instance_variable_set("@logs_since", previous)
          subject.send(:update_logs_since, now)
        end

        it "should update logs since" do
          expect(subject.instance_variable_get("@logs_since")).to eql(now - 5 * 60 * 1000)
        end
      end
    end

    describe "constructor" do
      context "given options missing hostname" do
        let(:opts) {
          opts = default_opts.clone
          opts.delete("hostname")
          opts
        }

        it "should raise ConfigurationError" do
          expect { klass.new(opts) }.to raise_error(LogStash::ConfigurationError)
        end
      end

      context "given options missing username" do
        let(:opts) {
          opts = default_opts.clone
          opts.delete("username")
          opts
        }

        it "should raise ConfigurationError" do
          expect { klass.new(opts) }.to raise_error(LogStash::ConfigurationError)
        end
      end

      context "given options missing password" do
        let(:opts) {
          opts = default_opts.clone
          opts.delete("password")
          opts
        }

        it "should raise ConfigurationError" do
          expect { klass.new(opts) }.to raise_error(LogStash::ConfigurationError)
        end
      end

      context "given options missing namespace" do
        let(:opts) {
          opts = default_opts.clone
          opts.delete("namespace")
          opts
        }

        it "should use default namespace" do
          instance = klass.new(opts)
          expect(instance.namespace).to eql("_")
        end
      end

      context "given options with namespace" do
        it "should use options namespace" do
          instance = klass.new(default_opts)
          expect(instance.namespace).to eql(default_namespace)
        end
      end
    end

    describe "construct request spec" do
      context "with normal options" do
        let(:result) { subject.send(:construct_request, default_opts) }

        it "should set method correctly" do
          expect(result[0]).to eql(:get)
        end

        it "should set url correctly" do
          expect(result[1]).to eql("https://#{default_hostname}/api/v1/namespaces/#{default_namespace}/activations")
        end

        it "should set auth correctly" do
          expect(result[2][:auth]).to eql({user: default_username, pass: default_password})
        end

        it "should set query string correctly" do
          expect(result[2][:query]).to eql({docs: true, limit: 0, skip: 0, since: subject.instance_variable_get('@logs_since')})
        end
      end
    end

    describe "#structure_request" do
      it "should turn a simple request into the expected structured request" do
        expected = {"url" => "http://example.net", "method" => "get"}
        expect(subject.send(:structure_request, ["get", "http://example.net"])).to eql(expected)
      end

      it "should turn a complex request into the expected structured one" do
        headers = {
          "X-Fry" => " Like a balloon, and... something bad happens! "
        }
        expected = {
          "url" => "http://example.net",
          "method" => "get",
          "headers" => headers
        }
        expect(subject.send(:structure_request, ["get", "http://example.net", {"headers" => headers}])).to eql(expected)
      end
    end
  end

  describe "scheduler configuration" do
    context "given an interval" do
      let(:opts) {
        {
          "interval" => 2,
          "hostname" => default_hostname,
          "username" => default_username,
          "password" => default_password,
          "codec" => "json",
          "metadata_target" => metadata_target
        }
      }
      it "should run once in each interval" do
        instance = klass.new(opts)
        instance.register
        queue = Queue.new
        runner = Thread.new do
          instance.run(queue)
        end
        #T       0123456
        #events  x x x x
        #expects 3 events at T=5
        sleep 5
        instance.stop
        runner.kill
        runner.join
        expect(queue.size).to eq(3)
      end
    end

    context "given both interval and schedule options" do
      let(:opts) {
        {
          "interval" => 1,
          "schedule" => { "every" => "5s" },
          "hostname" => default_hostname,
          "username" => default_username,
          "password" => default_password,
          "codec" => "json",
          "metadata_target" => metadata_target
        }
      }
      it "should raise ConfigurationError" do
        instance = klass.new(opts)
        instance.register
        queue = Queue.new
        runner = Thread.new do
          expect{instance.run(queue)}.to raise_error(LogStash::ConfigurationError)
        end
        instance.stop
        runner.kill
        runner.join
      end
    end

    context "given 'cron' expression" do
      let(:opts) {
        {
          "schedule" => { "cron" => "* * * * * UTC" },
          "hostname" => default_hostname,
          "username" => default_username,
          "password" => default_password,
          "codec" => "json",
          "metadata_target" => metadata_target
        }
      }
      it "should run at the schedule" do
        instance = klass.new(opts)
        instance.register
        Timecop.travel(Time.new(2000,1,1,0,0,0,'+00:00'))
        Timecop.scale(60)
        queue = Queue.new
        runner = Thread.new do
          instance.run(queue)
        end
        sleep 3
        instance.stop
        runner.kill
        runner.join
        expect(queue.size).to eq(2)
        Timecop.return
      end
    end

    context "given 'at' expression" do
      let(:opts) {
        {
          "schedule" => { "at" => "2000-01-01 00:05:00 +0000"},
          "hostname" => default_hostname,
          "username" => default_username,
          "password" => default_password,
          "codec" => "json",
          "metadata_target" => metadata_target
        }
      }
      it "should run at the schedule" do
        instance = klass.new(opts)
        instance.register
        Timecop.travel(Time.new(2000,1,1,0,0,0,'+00:00'))
        Timecop.scale(60 * 5)
        queue = Queue.new
        runner = Thread.new do
          instance.run(queue)
        end
        sleep 2
        instance.stop
        runner.kill
        runner.join
        expect(queue.size).to eq(1)
        Timecop.return
      end
    end

    context "given 'every' expression" do
      let(:opts) {
        {
          "schedule" => { "every" => "2s"},
          "hostname" => default_hostname,
          "username" => default_username,
          "password" => default_password,
          "codec" => "json",
          "metadata_target" => metadata_target
        }
      }
      it "should run at the schedule" do
        instance = klass.new(opts)
        instance.register
        queue = Queue.new
        runner = Thread.new do
          instance.run(queue)
        end
        #T       0123456
        #events  x x x x
        #expects 3 events at T=5
        sleep 5
        instance.stop
        runner.kill
        runner.join
        expect(queue.size).to eq(3)
      end
    end

    context "given 'in' expression" do
      let(:opts) {
        {
          "schedule" => { "in" => "2s"},
          "hostname" => default_hostname,
          "username" => default_username,
          "password" => default_password,
          "codec" => "json",
          "metadata_target" => metadata_target
        }
      }
      it "should run at the schedule" do
        instance = klass.new(opts)
        instance.register
        queue = Queue.new
        runner = Thread.new do
          instance.run(queue)
        end
        sleep 3
        instance.stop
        runner.kill
        runner.join
        expect(queue.size).to eq(1)
      end
    end
  end

  describe "events" do
    shared_examples("matching metadata") {
      let(:metadata) { event.get(metadata_target) }

      it "should have the correct name" do
        expect(metadata["name"]).to eql(name)
      end

      it "should have the correct request hostname" do
        expect(metadata["hostname"]).to eql(hostname)
      end

      it "should have the correct code" do
        expect(metadata["code"]).to eql(code)
      end
    }

    shared_examples "unprocessable_requests" do
      let(:poller) { LogStash::Inputs::OpenWhisk.new(settings) }
      subject(:event) {
        poller.send(:run_once, queue)
        queue.pop(true)
      }

      before do
        poller.register
        allow(poller).to receive(:handle_failure).and_call_original
        allow(poller).to receive(:handle_success)
        event # materialize the subject
      end

      it "should enqueue a message" do
        expect(event).to be_a(LogStash::Event)
      end

      it "should enqueue a message with 'http_request_failure' set" do
        expect(event.get("http_request_failure")).to be_a(Hash)
      end

      it "should tag the event with '_http_request_failure'" do
        expect(event.get("tags")).to include('_http_request_failure')
      end

      it "should invoke handle failure exactly once" do
        expect(poller).to have_received(:handle_failure).once
      end

      it "should not invoke handle success at all" do
        expect(poller).not_to have_received(:handle_success)
      end

      include_examples("matching metadata")
    end

    context "with a non-responsive server" do
      context "due to a non-existent hostname" do # Fail with handlers
        let(:name) { default_name }
        let(:hostname) { "http://thouetnhoeu89ueoueohtueohtneuohn" }
        let(:code) { nil } # no response expected

        let(:settings) { default_opts.merge("hostname" => hostname) }

        include_examples("unprocessable_requests")
      end
    end

    describe "a valid request and decoded response" do
      let(:payload) { [{"start" => 1476818509288, "end" => 1476818509888, "activationId" => "some_id", "annotations" => []}] }
      let(:opts) { default_opts }
      let(:instance) {
        klass.new(opts)
      }
      let(:name) { default_name }
      let(:code) { 202 }
      let(:hostname) { default_hostname }

      subject(:event) {
        queue.pop(true)
      }

      before do
        instance.register
        instance.instance_variable_set("@logs_since", 0)
        # match any response
        instance.client.stub(%r{.},
          :body => LogStash::Json.dump(payload),
          :code => code
        )
        allow(instance).to receive(:decorate)
        instance.send(:run_once, queue)
      end

      it "should have a matching message" do
        expect(event.to_hash).to include(payload[0])
      end

      it "should decorate the event" do
        expect(instance).to have_received(:decorate).once
      end

      it "should update the time since" do
        expect(instance.instance_variable_get("@logs_since")).to eql(payload[0]["end"] - (5 * 60 * 1000))
      end

      it "should retain activation ids" do
        expect(instance.instance_variable_get("@activation_ids")).to eql(Set.new ["some_id"])
      end

      include_examples("matching metadata")

      context "with metadata omitted" do
        let(:opts) {
          opts = default_opts.clone
          opts.delete("metadata_target")
          opts
        }

        it "should not have any metadata on the event" do
          instance.send(:run_once, queue)
          expect(event.get(metadata_target)).to be_nil
        end
      end

      context "with a specified target" do
        let(:target) { "mytarget" }
        let(:opts) { default_opts.merge("target" => target) }

        it "should store the event info in the target" do
          # When events go through the pipeline they are java-ified
          # this normalizes the payload to java types
          payload_normalized = LogStash::Json.load(LogStash::Json.dump(payload))
          expect(event.get(target)).to include(payload_normalized[0])
        end
      end

      context "with annotations" do
        let(:annotations) { [{"key" => "a", "value" => { "child": "val" } }, {"key" => "b", "value" => "some_string"}] }
        let(:payload) { [{"start" => 1476818509288, "end" => 1476818509888, "activationId" => "some_id", "annotations" => annotations}] }

        it "should serialise annotations to JSON strings" do
          expect(event.to_hash["annotations"]).to eql([{"key" => "a", "value" => '{"child":"val"}'}, {"key" => "b", "value" => "\"some_string\""}])
        end
      end

      context "with multiple activations" do
        let(:payload) { [{"end" => 1476818509288, "activationId" => "1", "annotations" => []},{"end" => 1476818509289, "activationId" => "2", "annotations" => []},{"end" => 1476818509287, "activationId" => "3", "annotations" => []} ] }

        it "should update logs since to latest epoch" do
          instance.instance_variable_set("@logs_since", 0)
          instance.instance_variable_set("@activation_ids", Set.new)
          instance.send(:run_once, queue)
          expect(instance.instance_variable_get("@logs_since")).to eql(payload[1]["end"] - (5 * 60 * 1000))
          expect(instance.instance_variable_get("@activation_ids")).to eql(Set.new ["1", "2", "3"])
        end
      end

      context "with previous activations" do
        let(:payload) { [{"end" => 1476818509288, "activationId" => "some_id", "annotations" => []}] }

        subject(:size) {
          queue.size()
        }
        it "should not add activation to queue" do
          instance.instance_variable_set("@activation_ids", Set.new(["some_id"]))
          queue.clear()
          instance.send(:run_once, queue)
          expect(subject).to eql(0)
        end
      end

    end
  end

  describe "stopping" do
    let(:config) { default_opts }
    it_behaves_like "an interruptible input plugin"
  end
end
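The "construct request spec" examples above pin down what each poll looks like on the wire: a GET to https://<hostname>/api/v1/namespaces/<namespace>/activations with basic auth and a docs/limit/skip/since query string, where since is the @logs_since epoch-millisecond watermark. Purely as an illustration of those expectations, the sketch below reproduces the same call with plain net/http; the plugin itself issues the request through logstash-mixin-http_client, and the hostname and credentials here are placeholders.

# Illustrative sketch only -- not the plugin's code. Reconstructs the GET that
# the spec's construct_request expectations describe, using net/http with
# placeholder host and credentials.
require "net/http"
require "uri"
require "json"

hostname   = "openwhisk.example.com"          # the spec's default_hostname is "localhost"
namespace  = "user_namespace"
logs_since = (Time.now.to_i - 5 * 60) * 1000  # epoch milliseconds, mirroring the @logs_since watermark

uri = URI("https://#{hostname}/api/v1/namespaces/#{namespace}/activations")
uri.query = URI.encode_www_form(docs: true, limit: 0, skip: 0, since: logs_since)

request = Net::HTTP::Get.new(uri)
request.basic_auth("user@email.com", "my_password")  # the spec's :auth => { user:, pass: }

response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) { |http| http.request(request) }
activations = JSON.parse(response.body)  # array of activation records: "end", "activationId", "annotations", ...
puts "#{response.code}: #{activations.length} activations"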