perfectqueue 0.8.44.1 → 0.8.45
- checksums.yaml +4 -4
- data/ChangeLog +5 -2
- data/lib/perfectqueue/application/decider.rb +2 -2
- data/lib/perfectqueue/application/router.rb +1 -1
- data/lib/perfectqueue/backend/rdb_compat.rb +40 -58
- data/lib/perfectqueue/engine.rb +2 -5
- data/lib/perfectqueue/multiprocess/thread_processor.rb +1 -1
- data/lib/perfectqueue/task.rb +2 -1
- data/lib/perfectqueue/task_metadata.rb +2 -4
- data/lib/perfectqueue/version.rb +1 -1
- data/perfectqueue.gemspec +1 -2
- data/spec/application/base_spec.rb +81 -0
- data/spec/application/decider_spec.rb +56 -0
- data/spec/application/dispatch_spec.rb +22 -0
- data/spec/application/router_spec.rb +48 -0
- data/spec/backend_spec.rb +9 -0
- data/spec/blocking_flag_spec.rb +103 -0
- data/spec/client_spec.rb +28 -0
- data/spec/daemons_logger_spec.rb +35 -0
- data/spec/engine_spec.rb +159 -0
- data/spec/multiprocess/child_process_monitor_spec.rb +300 -0
- data/spec/multiprocess/child_process_spec.rb +160 -0
- data/spec/multiprocess/fork_processor_spec.rb +170 -0
- data/spec/multiprocess/thread_processor_spec.rb +52 -0
- data/spec/queue_spec.rb +73 -68
- data/spec/rdb_compat_backend_spec.rb +481 -19
- data/spec/runner_spec.rb +32 -0
- data/spec/signal_thread_spec.rb +43 -0
- data/spec/stress.rb +1 -1
- data/spec/supervisor_spec.rb +188 -33
- data/spec/task_metadata_spec.rb +69 -0
- data/spec/task_monitor_spec.rb +42 -0
- data/spec/task_spec.rb +70 -0
- metadata +40 -19
data/spec/rdb_compat_backend_spec.rb

@@ -15,21 +15,21 @@ describe Backend::RDBCompatBackend do
   it 'backward compatibility 1' do
     backend.db["INSERT INTO test_tasks (id, timeout, data, created_at, resource) VALUES (?, ?, ?, ?, ?)", "merge_type.1339801200", 1339801201, {'url'=>nil}.to_json, 1339801201, "1"].insert
     ts = backend.acquire(60, 1, {:now=>1339801203})
-    ts.
+    expect(ts).not_to eq(nil)
     t = ts[0]
-    t.data.
-    t.type.
-    t.key.
+    expect(t.data).to eq({'url'=>nil})
+    expect(t.type).to eq('merge_type')
+    expect(t.key).to eq('merge_type.1339801200')
   end
 
   it 'backward compatibility 2' do
     backend.db["INSERT INTO test_tasks (id, timeout, data, created_at, resource) VALUES (?, ?, ?, ?, ?)", "query.379474", 1339801201, {'query_id'=>32}.to_json, 1339801201, nil].insert
     ts = backend.acquire(60, 1, {:now=>1339801203})
-    ts.
+    expect(ts).not_to eq(nil)
     t = ts[0]
-    t.data.
-    t.type.
-    t.key.
+    expect(t.data).to eq({'query_id'=>32})
+    expect(t.type).to eq('query')
+    expect(t.key).to eq('query.379474')
   end
 
   it 'resource limit' do
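The removed assertion lines are cut off in this view; the added lines convert the spec to the RSpec 3 expect syntax. A minimal sketch of the conversion pattern, assuming the removed lines used the legacy should-based matchers (their full text is not shown here):

    # legacy "should" style (assumed form of the truncated removed lines)
    ts.should_not == nil
    t.data.should == {'url' => nil}

    # RSpec 3 "expect" style, as added in this release
    expect(ts).not_to eq(nil)
    expect(t.data).to eq({'url' => nil})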
@@ -41,25 +41,25 @@
     queue.submit("test_5", 'user02', {}, :now=>time, :user=>'u2', :max_running=>2)
 
     task1 = queue.poll(:now=>time+10)
-    task1.
-    task1.type.
+    expect(task1).not_to eq(nil)
+    expect(task1.type).to eq('user01')
 
     task2 = queue.poll(:now=>time+10)
-    task2.
-    task2.type.
+    expect(task2).not_to eq(nil)
+    expect(task2.type).to eq('user02')
 
     task3 = queue.poll(:now=>time+10)
-    task3.
-    task3.type.
+    expect(task3).not_to eq(nil)
+    expect(task3.type).to eq('user01')
 
     task4 = queue.poll(:now=>time+10)
-    task4.
+    expect(task4).to eq(nil)
 
     task1.finish!
 
     task5 = queue.poll(:now=>time+10)
-    task5.
-    task5.type.
+    expect(task5).not_to eq(nil)
+    expect(task5.type).to eq('user01')
   end
 
   it 'gzip data compression' do
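The resource-limit hunk exercises per-resource concurrency: tasks are submitted with :user and :max_running=>2, so a poll returns nil once two tasks for that resource are already running, and finishing one frees a slot. A minimal sketch of that flow against an already-configured queue (the queue variable and key names here are illustrative, not part of the diff):

    time = Time.now.to_i
    # at most two tasks for resource 'u1' may run at once
    queue.submit("job_1", 'user01', {}, :now=>time, :user=>'u1', :max_running=>2)
    queue.submit("job_2", 'user01', {}, :now=>time, :user=>'u1', :max_running=>2)
    queue.submit("job_3", 'user01', {}, :now=>time, :user=>'u1', :max_running=>2)

    t1 = queue.poll(:now=>time+10)   # acquired
    t2 = queue.poll(:now=>time+10)   # acquired
    t3 = queue.poll(:now=>time+10)   # nil: the resource limit is reached
    t1.finish!                       # releases one slot
    t4 = queue.poll(:now=>time+10)   # acquires the remaining task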
@@ -67,8 +67,470 @@ describe Backend::RDBCompatBackend do
     queue.submit("test", 'user01', {'data'=>'test'}, :now=>time, :user=>'u1', :max_running=>2, :compression=>'gzip')
 
     task1 = queue.poll(:now=>time+10)
-    task1.
-    task1.data.
+    expect(task1).not_to eq(nil)
+    expect(task1.data).to eq({'data'=>'test'})
   end
 end
 
+describe Backend::RDBCompatBackend do
+  let (:now){ Time.now.to_i }
+  let (:client){ double('client') }
+  let (:table){ 'test_queues' }
+  let (:config){ {url: 'mysql://root:@localhost/perfectqueue_test', table: table} }
+  let (:db) do
+    d = Backend::RDBCompatBackend.new(client, config)
+    s = d.db
+    s.tables.each{|t| s.drop_table(t) }
+    d.init_database({})
+    d
+  end
+
+  context '.new' do
+    let (:client){ double('client') }
+    let (:table){ double('table') }
+    it 'raises error unless url' do
+      expect{Backend::RDBCompatBackend.new(client, {})}.to raise_error(ConfigError)
+    end
+    it 'raises error unless table' do
+      expect{Backend::RDBCompatBackend.new(client, {url: ''})}.to raise_error(ConfigError)
+    end
+    it 'supports mysql' do
+      expect(Backend::RDBCompatBackend.new(client, config)).to be_an_instance_of(Backend::RDBCompatBackend)
+      expect(db.instance_variable_get(:@sql)).to include('max_running')
+    end
+    it 'doesn\'t support postgres' do
+      config = {url: 'postgres://localhost', table: table}
+      expect{Backend::RDBCompatBackend.new(client, config)}.to raise_error(ConfigError)
+    end
+    it 'with use_connection_pooling' do
+      config = {url: 'mysql://root:@localhost/perfectqueue_test', table: table, use_connection_pooling: true}
+      db = Backend::RDBCompatBackend.new(client, config)
+      expect(db.instance_variable_get(:@use_connection_pooling)).to eq true
+    end
+    it 'disable_resource_limit' do
+      config = {url: 'mysql://root:@localhost/perfectqueue_test', table: table, disable_resource_limit: true}
+      db = Backend::RDBCompatBackend.new(client, config)
+      expect(db.instance_variable_get(:@sql)).not_to include('max_running')
+    end
+  end
+
+  context '#init_database' do
+    let (:db) do
+      d = Backend::RDBCompatBackend.new(client, config)
+      s = d.db
+      s.tables.each{|t| s.drop_table(t) }
+      d
+    end
+    it 'creates the table' do
+      db.init_database({})
+    end
+    it 'raises DatabaseError if already exists' do
+      expect(STDERR).to receive(:puts)
+      db.init_database({})
+      expect{db.init_database({})}.to raise_error(Sequel::DatabaseError)
+    end
+    it 'drops the table if force: true' do
+      db.init_database({})
+      db.init_database({force: true})
+    end
+  end
+
+  context '#get_task_metadata' do
+    before do
+      db.submit('key', 'test', nil, {})
+    end
+    it 'fetches a metadata' do
+      expect(db.get_task_metadata('key', {})).to be_an_instance_of(TaskMetadata)
+    end
+    it 'raises error if non exist key' do
+      expect(STDERR).to receive(:puts)
+      expect{db.get_task_metadata('nonexistent', {})}.to raise_error(NotFoundError)
+    end
+  end
+
+  context '#preempt' do
+    subject { db.preempt(nil, nil, nil) }
+    it { expect{ subject }.to raise_error(NotSupportedError) }
+  end
+
+  context '#list' do
+    before do
+      db.submit('key', 'test', nil, {})
+    end
+    it 'lists a metadata' do
+      db.list({}) do |x|
+        expect(x).to be_an_instance_of(TaskWithMetadata)
+        expect(x.key).to eq('key')
+      end
+    end
+  end
+
+  context '#submit' do
+    it 'adds task' do
+      db.submit('key', 'test', nil, {})
+    end
+    it 'gzip' do
+      db.submit('key', 'test', nil, {compression: 'gzip'})
+    end
+  end
+
+  context '#acquire' do
+    let (:key){ 'key' }
+    let (:task_token){ Backend::RDBCompatBackend::Token.new(key) }
+    let (:alive_time){ 42 }
+    let (:max_acquire){ 42 }
+    context 'no tasks' do
+      it 'returns nil' do
+        expect(db.acquire(alive_time, max_acquire, {})).to be_nil
+      end
+    end
+    context 'some tasks' do
+      before do
+        db.submit(key, 'test', nil, {})
+      end
+      it 'returns a task' do
+        ary = db.acquire(alive_time, max_acquire, {})
+        expect(ary).to be_an_instance_of(Array)
+        expect(ary.size).to eq(1)
+        expect(ary[0]).to be_an_instance_of(AcquiredTask)
+      end
+    end
+    context 'some tasks' do
+      let :t0 do now - 100 end
+      before do
+        db.submit('key1', 'test1', nil, {now: t0})
+        db.submit('key2', 'test2', nil, {now: t0})
+        db.submit('key3', 'test3', nil, {now: t0})
+      end
+      it 'returns 3 tasks' do
+        now0 = Time.at(t0)
+        expect(now0).to receive(:to_time).exactly(3).times.and_call_original
+        db.list({}) do |task|
+          expect(task.timeout).to eq now0.to_time
+        end
+        ary = db.acquire(alive_time, max_acquire, {})
+        expect(ary).to be_an_instance_of(Array)
+        expect(ary.size).to eq(3)
+        expect(ary[0]).to be_an_instance_of(AcquiredTask)
+        expect(ary[1]).to be_an_instance_of(AcquiredTask)
+        expect(ary[2]).to be_an_instance_of(AcquiredTask)
+
+        now1 = Time.at(now + alive_time)
+        expect(now1).to receive(:to_time).exactly(3).times.and_call_original
+        db.list({}){|task| expect(task.timeout).to eq now1.to_time }
+      end
+      it 'returns 2 tasks' do
+        db.instance_variable_set(:@prefetch_break_types, 'test2')
+        ary = db.acquire(alive_time, max_acquire, {})
+        expect(ary).to be_an_instance_of(Array)
+        expect(ary.size).to eq(2)
+        expect(ary[0]).to be_an_instance_of(AcquiredTask)
+        expect(ary[1]).to be_an_instance_of(AcquiredTask)
+      end
+    end
+  end
+
+  context '#cancel_request' do
+    let (:key){ 'key' }
+    let (:task_token){ Backend::RDBCompatBackend::Token.new(key) }
+    let (:retention_time) { 42 }
+    let (:delete_timeout){ now + retention_time }
+    let (:options){ {now: now} }
+    context 'have a task' do
+      before do
+        db.submit(key, 'test', nil, {})
+      end
+      it 'returns nil' do
+        expect(db.cancel_request(key, options)).to be_nil
+        row = db.db.fetch("SELECT created_at FROM `#{table}` WHERE id=? LIMIT 1", key).first
+        expect(row[:created_at]).to eq 0
+      end
+    end
+    context 'already finished' do
+      before do
+        expect(STDERR).to receive(:puts)
+        db.submit(key, 'test', nil, {})
+        db.finish(task_token, retention_time, options)
+      end
+      it 'raises AlreadyFinishedError' do
+        expect{db.cancel_request(key, options)}.to raise_error(AlreadyFinishedError)
+      end
+    end
+  end
+
+  context '#force_finish' do
+    let (:key){ double('key') }
+    let (:token){ double('token') }
+    let (:retention_time){ double('retention_time') }
+    let (:options){ double('options') }
+    let (:ret){ double('ret') }
+    before { expect(Backend::RDBCompatBackend::Token).to receive(:new).with(key).and_return(token) }
+    it 'calls #finish' do
+      expect(db).to receive(:finish).with(token, retention_time, options).exactly(:once).and_return(ret)
+      expect(db.force_finish(key, retention_time, options)).to eq ret
+    end
+  end
+
+  context '#finish' do
+    let (:key){ 'key' }
+    let (:task_token){ Backend::RDBCompatBackend::Token.new(key) }
+    let (:retention_time) { 42 }
+    let (:delete_timeout){ now + retention_time }
+    let (:options){ {now: now} }
+    context 'have the task' do
+      before do
+        db.submit(key, 'test', nil, {})
+        expect(db.db).to receive(:[]).with(kind_of(String), delete_timeout, key).and_call_original
+      end
+      it 'returns nil' do
+        expect(db.finish(task_token, retention_time, options)).to be_nil
+        row = db.db.fetch("SELECT created_at FROM `#{table}` WHERE id=? LIMIT 1", key).first
+        expect(row[:created_at]).to be_nil
+      end
+    end
+    context 'already finished' do
+      it 'raises IdempotentAlreadyFinishedError' do
+        expect(STDERR).to receive(:puts)
+        expect{db.finish(task_token, retention_time, options)}.to raise_error(IdempotentAlreadyFinishedError)
+      end
+    end
+  end
+
+  context '#heartbeat' do
+    let (:key){ 'key' }
+    let (:task_token){ Backend::RDBCompatBackend::Token.new(key) }
+    let (:retention_time) { 42 }
+    let (:delete_timeout){ now + retention_time }
+    let (:options){ {now: now} }
+    before{ allow(STDERR).to receive(:puts) }
+    context 'have a queueuled task' do
+      before do
+        db.submit(key, 'test', nil, {})
+      end
+      it 'returns nil if next_run_time is not updated' do
+        expect(db.heartbeat(task_token, 0, {now: now})).to be_nil
+      end
+      it 'returns nil even if next_run_time is updated' do
+        expect(db.heartbeat(task_token, 1, {})).to be_nil
+      end
+    end
+    context 'no tasks' do
+      it 'raises PreemptedError' do
+        expect{db.heartbeat(task_token, 0, {})}.to raise_error(PreemptedError)
+      end
+    end
+    context 'finished task' do
+      before do
+        db.submit(key, 'test', nil, {})
+        db.finish(task_token, retention_time, options)
+      end
+      it 'raises PreemptedError' do
+        expect{db.heartbeat(task_token, 0, {})}.to raise_error(PreemptedError)
+      end
+    end
+    context 'canceled task' do
+      before do
+        db.submit(key, 'test', nil, {})
+        db.cancel_request(key, options)
+      end
+      it 'returns nil' do
+        expect( db.heartbeat(task_token, 0, {}) ).to be_nil
+      end
+    end
+  end
+
+  context '#connect' do
+    context 'normal' do
+      it 'returns now' do
+        expect(db.__send__(:connect){ }).to eq(now)
+      end
+    end
+    context 'error' do
+      it 'returns block result' do
+        expect(RuntimeError).to receive(:new).exactly(Backend::RDBCompatBackend::MAX_RETRY).and_call_original
+        allow(STDERR).to receive(:puts)
+        allow(db).to receive(:sleep)
+        expect do
+          db.__send__(:connect) do
+            raise RuntimeError.new('try restarting transaction')
+          end
+        end.to raise_error(RuntimeError)
+      end
+    end
+  end
+
+  context '#create_attributes' do
+    let (:data){ Hash.new }
+    let (:row) do
+      r = double('row')
+      allow(r).to receive(:[]){|k| data[k] }
+      r
+    end
+    it 'returns a hash consisting the data of the row' do
+      data[:timezone] = timezone = double('timezone')
+      data[:delay] = delay = double('delay')
+      data[:cron] = cron = double('cron')
+      data[:next_time] = next_time = double('next_time')
+      data[:timeout] = timeout = double('timeout')
+      data[:data] = '{"type":"foo.bar","a":"b"}'
+      data[:id] = 'hoge'
+      expect(db.__send__(:create_attributes, now, row)).to eq(
+        status: :finished,
+        created_at: nil,
+        data: {"a"=>"b"},
+        user: nil,
+        timeout: timeout,
+        max_running: nil,
+        type: 'foo.bar',
+        message: nil,
+        node: nil,
+        compression: nil,
+      )
+    end
+    it 'returns {} if data\'s JSON is broken' do
+      data[:data] = '}{'
+      data[:id] = 'foo.bar.baz'
+      expect(db.__send__(:create_attributes, now, row)).to eq(
+        status: :finished,
+        created_at: nil,
+        data: {},
+        user: nil,
+        timeout: nil,
+        max_running: nil,
+        type: 'foo',
+        message: nil,
+        node: nil,
+        compression: nil,
+      )
+    end
+    it 'uses id[/\A[^.]*/] if type is empty string' do
+      data[:data] = '{"type":""}'
+      data[:id] = 'foo.bar.baz'
+      expect(db.__send__(:create_attributes, now, row)).to eq(
+        status: :finished,
+        created_at: nil,
+        data: {},
+        user: nil,
+        timeout: nil,
+        max_running: nil,
+        type: 'foo',
+        message: nil,
+        node: nil,
+        compression: nil,
+      )
+    end
+    it 'uses id[/\A[^.]*/] if type is nil' do
+      data[:id] = 'foo.bar.baz'
+      expect(db.__send__(:create_attributes, now, row)).to eq(
+        status: :finished,
+        created_at: nil,
+        data: {},
+        user: nil,
+        timeout: nil,
+        max_running: nil,
+        type: 'foo',
+        message: nil,
+        node: nil,
+        compression: nil,
+      )
+    end
+  end
+
+  context '#connect_locked' do
+    let (:ret){ double('ret') }
+    before do
+    end
+    it 'ensures to unlock on error with use_connection_pooling' do
+      #expect(STDERR).to receive(:puts)
+      config = {url: 'mysql://root:@localhost/perfectqueue_test', table: table, use_connection_pooling: true}
+      db1 = Backend::RDBCompatBackend.new(client, config)
+      #expect{ db.__send__(:connect_locked){ raise } }.to raise_error(RuntimeError)
+      db1.__send__(:connect_locked){ ret }
+      stub_const('PerfectQueue::Backend::RDBCompatBackend::LOCK_WAIT_TIMEOUT', 5)
+      db2 = Backend::RDBCompatBackend.new(client, config)
+      Timeout.timeout(3) do
+        expect( db2.__send__(:connect_locked){ ret }).to eq ret
+      end
+    end
+  end
+
+  context '#create_attributes' do
+    let (:data){ {data: '{"type":"foo"}'} }
+    let (:timeout){ double('timeout') }
+    let (:row) do
+      r = double('row')
+      allow(r).to receive(:[]){|k| data[k] }
+      r
+    end
+    context 'created_at is nil' do
+      it 'returns a hash consisting the data of the row' do
+        data[:resource] = user = double('user')
+        data[:max_running] = max_running = double('max_running')
+        data[:cron] = cron = double('cron')
+        data[:next_time] = next_time = double('next_time')
+        data[:timeout] = timeout
+        data[:data] = '{"type":"foo.bar","a":"b"}'
+        data[:id] = 'hoge'
+        expect(db.__send__(:create_attributes, now, row)).to eq(
+          status: TaskStatus::FINISHED,
+          created_at: nil,
+          data: {"a"=>"b"},
+          type: 'foo.bar',
+          user: user,
+          timeout: timeout,
+          max_running: max_running,
+          message: nil,
+          node: nil,
+          compression: nil,
+        )
+      end
+      it 'returns {} if data\'s JSON is broken' do
+        data[:data] = '}{'
+        data[:id] = 'foo.bar.baz'
+        r = db.__send__(:create_attributes, now, row)
+        expect(r[:type]).to eq 'foo'
+      end
+      it 'uses id[/\A[^.]*/] if type is empty string' do
+        data[:data] = '{"type":""}'
+        data[:id] = 'foo.bar.baz'
+        r = db.__send__(:create_attributes, now, row)
+        expect(r[:type]).to eq 'foo'
+      end
+      it 'uses id[/\A[^.]*/] if type is nil' do
+        data[:id] = 'foo.bar.baz'
+        r = db.__send__(:create_attributes, now, row)
+        expect(r[:type]).to eq 'foo'
+      end
+      context 'created_at is nil' do
+        it 'status is :finished' do
+          data[:created_at] = nil
+          r = db.__send__(:create_attributes, now, row)
+          expect(r[:status]).to eq TaskStatus::FINISHED
+        end
+      end
+    end
+    context 'created_at is 0' do
+      it 'status is :cancel_requested' do
+        data[:created_at] = 0
+        r = db.__send__(:create_attributes, now, row)
+        expect(r[:status]).to eq TaskStatus::CANCEL_REQUESTED
+      end
+    end
+    context 'created_at > 0' do
+      context 'timeout' do
+        it 'status is :waiting' do
+          data[:created_at] = 1
+          data[:timeout] = 0
+          r = db.__send__(:create_attributes, now, row)
+          expect(r[:status]).to eq TaskStatus::WAITING
+        end
+      end
+      it 'status is :running' do
+        data[:created_at] = 1
+        data[:timeout] = now+100
+        r = db.__send__(:create_attributes, now, row)
+        expect(r[:status]).to eq TaskStatus::RUNNING
+      end
+    end
+  end
+end
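The new examples for RDBCompatBackend run against a live MySQL database rather than pure mocks. A minimal sketch of the setup they assume: a local MySQL server with an empty perfectqueue_test database and the mysql2 adapter installed (nil stands in for the client double used by the specs):

    require 'perfectqueue'
    require 'perfectqueue/backend/rdb_compat'

    config  = {url: 'mysql://root:@localhost/perfectqueue_test', table: 'test_queues'}
    backend = PerfectQueue::Backend::RDBCompatBackend.new(nil, config)

    backend.init_database({})                # creates the queue table; raises if it already exists
    backend.submit('key', 'test', nil, {})   # enqueue one task
    tasks = backend.acquire(60, 1, {now: Time.now.to_i})   # => array of AcquiredTask, or nil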