dynflow 0.7.7 → 0.7.8
- data/README.md +1 -1
- data/doc/pages/Gemfile +1 -2
- data/doc/pages/Rakefile +3 -2
- data/lib/dynflow/action.rb +6 -1
- data/lib/dynflow/action/polling.rb +9 -0
- data/lib/dynflow/action/timeouts.rb +13 -0
- data/lib/dynflow/persistence.rb +4 -0
- data/lib/dynflow/persistence_adapters/abstract.rb +9 -1
- data/lib/dynflow/persistence_adapters/sequel.rb +20 -7
- data/lib/dynflow/testing/dummy_executor.rb +4 -0
- data/lib/dynflow/testing/factories.rb +3 -2
- data/lib/dynflow/testing/managed_clock.rb +13 -14
- data/lib/dynflow/version.rb +1 -1
- data/test/action_test.rb +112 -59
- data/test/persistence_test.rb +191 -0
- metadata +5 -4
- data/test/persistance_adapters_test.rb +0 -173
data/README.md
CHANGED
@@ -6,7 +6,7 @@
 ![Gem version](https://img.shields.io/gem/v/dynflow.svg?style=flat)
 ![License](https://img.shields.io/badge/license-MIT-brightgreen.svg?style=flat)
 
-**Note:** *There is a project page and documentation being build in url <http://
+**Note:** *There is a project page and documentation being build in url <http://dynflow.github.io/>.
 It's still work in progress but you may find useful information there. It'll eventually replace
 this README.*
 
data/doc/pages/Gemfile
CHANGED
data/doc/pages/Rakefile
CHANGED
@@ -17,9 +17,10 @@ task :publish do
     system 'git fetch --all'
     system 'git reset --hard origin/master'
   end
-
+  sh 'jekyll build'
   Dir.chdir(root + '/public') do
-    system 'git
+    system 'git add -A .'
+    system 'git commit -m Update'
     system 'git push'
   end
 end
data/lib/dynflow/action.rb
CHANGED
@@ -371,9 +371,14 @@ module Dynflow
 
     # DSL for run phase
 
+    def suspended_action
+      phase! Run
+      @suspended_action ||= Action::Suspended.new(self)
+    end
+
     def suspend(&block)
       phase! Run
-      block.call
+      block.call suspended_action if block
       throw SUSPEND, SUSPEND
     end
 
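Illustrative note (not part of the diff): with the new `suspended_action` accessor, the block given to `suspend` now receives the `Action::Suspended` handle, so the run phase can pass it to whatever will deliver an event later. A minimal sketch of that pattern; `ExternalService.on_done` is a hypothetical callback, only `suspend`, `suspended_action` and `<<` come from Dynflow:

    class WaitForExternalTask < Dynflow::Action
      def run(event = nil)
        case event
        when nil
          suspend do |suspended_action|
            # hand the handle out so the external system can wake the action up later
            ExternalService.on_done { |result| suspended_action << result }
          end
        else
          output[:result] = event
        end
      end
    end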
data/lib/dynflow/action/polling.rb
CHANGED
@@ -1,6 +1,12 @@
+require 'dynflow/action/timeouts'
+
 module Dynflow
   module Action::Polling
 
+    def self.included(base)
+      base.send :include, Action::Timeouts
+    end
+
     Poll = Algebrick.atom
 
     def run(event = nil)
@@ -13,6 +19,9 @@ module Dynflow
         end
       when Poll
         poll_external_task_with_rescue
+      when Action::Timeouts::Timeout
+        process_timeout
+        suspend
       else
         raise "unrecognized event #{event}"
       end
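Illustrative note (not part of the diff): because `Action::Timeouts` is now mixed into every polling action, a polling action can arm a timeout when it starts its external task, as `TestTimeoutAction` does in the test changes further down. A rough sketch under these assumptions: `service` and its field names are placeholders, while `invoke_external_task`, `poll_external_task`, `done?` and `schedule_timeout` are the hooks this release exercises:

    class DeploySomething < Dynflow::Action
      include Dynflow::Action::Polling

      def invoke_external_task
        schedule_timeout(60)                # ask for a Timeout event in 60 seconds
        service.start_deploy(input[:host])  # placeholder call starting the external work
      end

      def poll_external_task
        service.deploy_status(external_task[:id])  # placeholder status check
      end

      def done?
        external_task[:state] == 'finished'
      end
    end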
data/lib/dynflow/persistence.rb
CHANGED
@@ -34,6 +34,10 @@ module Dynflow
       end
     end
 
+    def delete_execution_plans(filters, batch_size = 1000)
+      adapter.delete_execution_plans(filters, batch_size)
+    end
+
     def load_execution_plan(id)
       execution_plan_hash = adapter.load_execution_plan(id)
       ExecutionPlan.new_from_hash(execution_plan_hash, @world)
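Illustrative note (not part of the diff): a short usage sketch of the new deletion API, assuming a configured Dynflow `world`; the filters mirror the ones exercised by the new persistence tests:

    # delete all stopped plans, iterating in the default batches of 1000
    deleted_count = world.persistence.delete_execution_plans('state' => 'stopped')

    # or delete a single plan by uuid, using smaller batches
    world.persistence.delete_execution_plans({ 'uuid' => plan_id }, 100)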
data/lib/dynflow/persistence_adapters/abstract.rb
CHANGED
@@ -30,12 +30,20 @@ module Dynflow
     # @option options [Integer] per_page the number of the items on page
     # @option options [Symbol] order_by name of the column to use for ordering
     # @option options [true, false] desc set to true if order should be descending
-    # @option options [Hash{
+    # @option options [Hash{ String => Object,Array<object> }] filters hash represents
     #   set of allowed values for a given key representing column
     def find_execution_plans(options = {})
       raise NotImplementedError
     end
 
+    # @param filters [Hash{ String => Object }] filters to determine
+    #   what to delete
+    # @param batch_size the size of the chunks to iterate over when
+    #   performing the deletion
+    def delete_execution_plans(filters, batch_size = 1000)
+      raise NotImplementedError
+    end
+
     def load_execution_plan(execution_plan_id)
       raise NotImplementedError
     end
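Illustrative note (not part of the diff): adapters built on `Abstract` outside this gem now have to provide `delete_execution_plans` themselves. A purely hypothetical in-memory sketch; the `@plans` hash and the filter matching are invented for illustration, only the method signature and the returned count come from the diff:

    class InMemoryAdapter < Dynflow::PersistenceAdapters::Abstract
      def delete_execution_plans(filters, batch_size = 1000)
        # select the plans whose columns match every filter value (scalar or array)
        matching = @plans.values.select do |plan|
          filters.all? { |key, value| Array(value).include?(plan[key.to_s]) }
        end
        matching.each_slice(batch_size) do |batch|
          batch.each { |plan| @plans.delete(plan['id']) }
        end
        matching.size # callers (and the shared tests) expect the number of deleted plans back
      end
    end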
data/lib/dynflow/persistence_adapters/sequel.rb
CHANGED
@@ -36,13 +36,26 @@ module Dynflow
     end
 
     def find_execution_plans(options = {})
-      data_set = filter(order(paginate(table(:execution_plan), options), options), options)
+      data_set = filter(order(paginate(table(:execution_plan), options), options), options[:filters])
 
       data_set.map do |record|
        HashWithIndifferentAccess.new(MultiJson.load(record[:data]))
       end
     end
 
+    def delete_execution_plans(filters, batch_size = 1000)
+      count = 0
+      filter(table(:execution_plan), filters).each_slice(batch_size) do |plans|
+        uuids = plans.map { |p| p.fetch(:uuid) }
+        @db.transaction do
+          table(:step).where(execution_plan_uuid: uuids).delete
+          table(:action).where(execution_plan_uuid: uuids).delete
+          count += table(:execution_plan).where(uuid: uuids).delete
+        end
+      end
+      return count
+    end
+
     def load_execution_plan(execution_plan_id)
       load :execution_plan, uuid: execution_plan_id
     end
@@ -68,9 +81,9 @@ module Dynflow
     end
 
     def to_hash
-      { execution_plans: table(:execution_plan).all,
-        steps: table(:step).all,
-        actions: table(:action).all }
+      { execution_plans: table(:execution_plan).all.to_a,
+        steps: table(:step).all.to_a,
+        actions: table(:action).all.to_a }
     end
 
     private
@@ -148,10 +161,10 @@ module Dynflow
       data_set.order_by options[:desc] ? ::Sequel.desc(order_by) : order_by
     end
 
-    def filter(data_set,
-
+    def filter(data_set, filters)
+      Type! filters, NilClass, Hash
       return data_set if filters.nil?
-      unknown = filters.keys - META_DATA.fetch(:execution_plan) - %w[caller_execution_plan_id caller_action_id]
+      unknown = filters.keys - META_DATA.fetch(:execution_plan) - %w[uuid caller_execution_plan_id caller_action_id]
 
       if filters.key?('caller_action_id') && !filters.key?('caller_execution_plan_id')
         raise ArgumentError, "caller_action_id given but caller_execution_plan_id missing"
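Illustrative note (not part of the diff): the widened whitelist in `filter` also means `uuid` is now accepted as a search filter, so a specific plan can be looked up through the generic listing API, e.g.:

    adapter.find_execution_plans(filters: { 'uuid' => ['plan1'] })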
data/lib/dynflow/testing/dummy_executor.rb
CHANGED
@@ -12,11 +12,15 @@ module Dynflow
       @events_to_process << [execution_plan_id, step_id, event, future]
     end
 
+    # returns true if some event was processed.
     def progress
       events = @events_to_process.dup
       clear
       events.each do |execution_plan_id, step_id, event, future|
         future.resolve true
+        if event && world.action.state != :suspended
+          return false
+        end
         world.action.execute event
       end
     end
data/lib/dynflow/testing/factories.rb
CHANGED
@@ -97,8 +97,9 @@ module Dynflow
 
     def progress_action_time action
       Match! action.phase, Action::Run
-      action.world.clock.progress
-
+      if action.world.clock.progress
+        return action.world.executor.progress
+      end
     end
   end
 end
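Illustrative note (not part of the diff): `progress_action_time` now reports whether moving the test clock forward actually delivered an event to the action, which is what lets the new timeout test below simply loop until nothing more happens:

    iterations = 0
    while progress_action_time(action)
      iterations += 1
    end
    # the loop ends once no further ping is delivered (e.g. after the timeout fired)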
data/lib/dynflow/testing/managed_clock.rb
CHANGED
@@ -5,30 +5,29 @@ module Dynflow
     attr_reader :pending_pings
 
     include Algebrick::Types
-    Timer = Algebrick.type do
-      fields! who: Object, # to ping back
-              when: type { variants Time, Numeric }, # to deliver
-              what: Maybe[Object], # to send
-              where: Symbol # it should be delivered, which method
-    end
-
-    module Timer
-      include Clock::Timer
-    end
 
     def initialize
       @pending_pings = []
     end
 
     def ping(who, time, with_what = nil, where = :<<)
+      time = current_time + time if time.is_a? Numeric
       with = with_what.nil? ? None : Some[Object][with_what]
-      @pending_pings << Timer[who, time, with, where]
+      @pending_pings << Clock::Timer[who, time, with, where]
+      @pending_pings.sort!
     end
 
     def progress
-
-
-
+      if next_ping = @pending_pings.shift
+        # we are testing an isolated system = we can move in time
+        # without actually waiting
+        @current_time = next_ping.when
+        next_ping.apply
+      end
+    end
+
+    def current_time
+      @current_time ||= Time.now
     end
 
     def clear
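Illustrative note (not part of the diff): the rewritten ManagedClock keeps its pending pings sorted and, on `progress`, jumps its own notion of time to the next ping and delivers it. Roughly, in a test:

    clock = Dynflow::Testing::ManagedClock.new
    clock.ping(receiver, 10, :wake_up)  # a Numeric time becomes current_time + 10
    clock.progress                      # advances current_time and delivers :wake_up to receiver

Here `receiver` stands for any object responding to `<<`, the default delivery method.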
data/lib/dynflow/version.rb
CHANGED
data/test/action_test.rb
CHANGED
@@ -172,85 +172,138 @@ module Dynflow
       end
     end
 
-
-
-
-
-    before do
-      TestPollingAction.config = TestPollingAction::Config.new
+    class NonRunningExternalService < ExternalService
+      def poll(id)
+        return { message: 'nothing changed' }
+      end
     end
 
-
-
-
+    class TestTimeoutAction < TestPollingAction
+      class Config < TestPollingAction::Config
+        def initialize
+          super
+          @external_service = NonRunningExternalService.new
+        end
+      end
 
-
-
+      def done?
+        self.state == :error
+      end
 
-
+      def invoke_external_task
+        schedule_timeout(5)
+        super
+      end
     end
 
-
-
+    describe 'without timeout' do
+      let(:plan) do
+        create_and_plan_action TestPollingAction, { task_args: 'do something' }
+      end
 
-
-
-
-      action.state.must_equal :suspended
+      before do
+        TestPollingAction.config = TestPollingAction::Config.new
+      end
 
-
-
-
-      action.state.must_equal :success
-    end
+      def next_ping(action)
+        action.world.clock.pending_pings.first
+      end
 
-
-
-      action.output[:task][:progress].must_equal 0
-      run_action action
-      action.output[:task][:progress].must_equal 10
-    end
+      it 'initiates the external task' do
+        action = run_action plan
 
-
-
-      action.world.silence_logger!
-      action.external_service.will_fail
-      action.output[:task][:progress].must_equal 0
-      run_action action
-      action.output[:task][:progress].must_equal 0
-    end
+        action.output[:task][:task_id].must_equal 123
+      end
 
-
-
-      action.external_service.will_fail
-      action.world.silence_logger!
+      it 'polls till the task is done' do
+        action = run_action plan
 
-
-
-        progress_action_time action
-        action.poll_attempts[:failed].must_equal attempt
+        9.times { progress_action_time action }
+        action.done?.must_equal false
         next_ping(action).wont_be_nil
         action.state.must_equal :suspended
+
+        progress_action_time action
+        action.done?.must_equal true
+        next_ping(action).must_be_nil
+        action.state.must_equal :success
       end
 
-
-
-
-
-
+      it 'tries to poll for the old task when resuming' do
+        action = run_action plan
+        action.output[:task][:progress].must_equal 0
+        run_action action
+        action.output[:task][:progress].must_equal 10
+      end
+
+      it 'invokes the external task again when polling on the old one fails' do
+        action = run_action plan
+        action.world.silence_logger!
+        action.external_service.will_fail
+        action.output[:task][:progress].must_equal 0
+        run_action action
+        action.output[:task][:progress].must_equal 0
+      end
+
+      it 'tolerates some failure while polling' do
+        action = run_action plan
+        action.external_service.will_fail
+        action.world.silence_logger!
+
+        TestPollingAction.config.poll_max_retries = 3
+        (1..2).each do |attempt|
+          progress_action_time action
+          action.poll_attempts[:failed].must_equal attempt
+          next_ping(action).wont_be_nil
+          action.state.must_equal :suspended
+        end
+
+        progress_action_time action
+        action.poll_attempts[:failed].must_equal 3
+        next_ping(action).must_be_nil
+        action.state.must_equal :error
+      end
 
-
-
-
+      it 'allows increasing poll interval in a time' do
+        TestPollingAction.config.poll_intervals = [1, 2]
+        TestPollingAction.config.attempts_before_next_interval = 2
 
-
-
-
-
-
-
+        action = run_action plan
+        pings = []
+        pings << next_ping(action)
+        progress_action_time action
+        pings << next_ping(action)
+        progress_action_time action
+        pings << next_ping(action)
+        progress_action_time action
+        (pings[1].when - pings[0].when).must_be_close_to 1
+        (pings[2].when - pings[1].when).must_be_close_to 2
+      end
     end
 
+    describe 'with timeout' do
+      let(:plan) do
+        create_and_plan_action TestTimeoutAction, { task_args: 'do something' }
+      end
+
+      before do
+        TestTimeoutAction.config = TestTimeoutAction::Config.new
+        TestTimeoutAction.config.poll_intervals = [2]
+      end
+
+      it 'timesout' do
+        action = run_action plan
+        iterations = 0
+        while progress_action_time action
+          # we count the number of iterations till the timeout occurs
+          iterations += 1
+        end
+        action.state.must_equal :error
+        # two polls in 2 seconds intervals untill the 5 seconds
+        # timeout appears
+        iterations.must_equal 3
+      end
+    end
   end
 
   describe Action::WithSubPlans do
data/test/persistence_test.rb
ADDED
@@ -0,0 +1,191 @@
+require_relative 'test_helper'
+require 'fileutils'
+
+module Dynflow
+  module PersistenceTest
+    describe 'persistence adapters' do
+
+      let :execution_plans_data do
+        [{ id: 'plan1', state: 'paused' },
+         { id: 'plan2', state: 'stopped' },
+         { id: 'plan3', state: 'paused' }]
+      end
+
+      let :action_data do
+        { id: 1, caller_execution_plan_id: nil, caller_action_id: nil }
+      end
+
+      let :step_data do
+        { id: 1,
+          state: 'success',
+          started_at: '2015-02-24 10:00',
+          ended_at: '2015-02-24 10:01',
+          real_time: 1.1,
+          execution_time: 0.1,
+          action_id: 1,
+          progress_done: 1,
+          progress_weight: 2.5 }
+      end
+
+      def prepare_plans
+        execution_plans_data.map do |h|
+          h.merge result: nil, started_at: (Time.now-20).to_s, ended_at: (Time.now-10).to_s,
+                  real_time: 0.0, execution_time: 0.0
+        end.tap do |plans|
+          plans.each { |plan| adapter.save_execution_plan(plan[:id], plan) }
+        end
+      end
+
+      def prepare_action(plan)
+        adapter.save_action(plan, action_data[:id], action_data)
+      end
+
+      def prepare_step(plan)
+        adapter.save_step(plan, step_data[:id], step_data)
+      end
+
+      def prepare_plans_with_actions
+        prepare_plans.each do |plan|
+          prepare_action(plan[:id])
+        end
+      end
+
+      def prepare_plans_with_steps
+        prepare_plans_with_actions.map do |plan|
+          prepare_step(plan[:id])
+        end
+      end
+
+      def self.it_acts_as_persistence_adapter
+        before do
+          # the tests expect clean field
+          adapter.delete_execution_plans({})
+        end
+        describe '#find_execution_plans' do
+          it 'supports pagination' do
+            prepare_plans
+            if adapter.pagination?
+              loaded_plans = adapter.find_execution_plans(page: 0, per_page: 1)
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan1']
+
+              loaded_plans = adapter.find_execution_plans(page: 1, per_page: 1)
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan2']
+            end
+          end
+
+          it 'supports ordering' do
+            prepare_plans
+            if adapter.ordering_by.include?(:state)
+              loaded_plans = adapter.find_execution_plans(order_by: 'state')
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan3', 'plan2']
+
+              loaded_plans = adapter.find_execution_plans(order_by: 'state', desc: true)
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan2', 'plan3', 'plan1']
+            end
+          end
+
+          it 'supports filtering' do
+            prepare_plans
+            if adapter.ordering_by.include?(:state)
+              loaded_plans = adapter.find_execution_plans(filters: { state: ['paused'] })
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan3']
+
+              loaded_plans = adapter.find_execution_plans(filters: { state: ['stopped'] })
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan2']
+
+              loaded_plans = adapter.find_execution_plans(filters: { state: [] })
+              loaded_plans.map { |h| h[:id] }.must_equal []
+
+              loaded_plans = adapter.find_execution_plans(filters: { state: ['stopped', 'paused'] })
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan2', 'plan3']
+
+              loaded_plans = adapter.find_execution_plans(filters: { 'state' => ['stopped', 'paused'] })
+              loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan2', 'plan3']
+            end
+          end
+        end
+
+        describe '#load_execution_plan and #save_execution_plan' do
+          it 'serializes/deserializes the plan data' do
+            -> { adapter.load_execution_plan('plan1') }.must_raise KeyError
+            prepare_plans
+            adapter.load_execution_plan('plan1')[:id].must_equal 'plan1'
+            adapter.load_execution_plan('plan1')['id'].must_equal 'plan1'
+            adapter.load_execution_plan('plan1').keys.size.must_equal 7
+
+            adapter.save_execution_plan('plan1', nil)
+            -> { adapter.load_execution_plan('plan1') }.must_raise KeyError
+          end
+        end
+
+        describe '#delete_execution_plans' do
+          it 'deletes selected execution plans, including steps and actions' do
+            prepare_plans_with_steps
+            adapter.delete_execution_plans('uuid' => 'plan1').must_equal 1
+            -> { adapter.load_execution_plan('plan1') }.must_raise KeyError
+            -> { adapter.load_action('plan1', action_data[:id]) }.must_raise KeyError
+            -> { adapter.load_step('plan1', step_data[:id]) }.must_raise KeyError
+
+            # testing that no other plans where affected
+            adapter.load_execution_plan('plan2')
+            adapter.load_action('plan2', action_data[:id])
+            adapter.load_step('plan2', step_data[:id])
+
+            prepare_plans_with_steps
+            adapter.delete_execution_plans('state' => 'paused').must_equal 2
+            -> { adapter.load_execution_plan('plan1') }.must_raise KeyError
+            adapter.load_execution_plan('plan2') # nothing raised
+            -> { adapter.load_execution_plan('plan3') }.must_raise KeyError
+          end
+        end
+
+        describe '#load_action and #save_action' do
+          it 'serializes/deserializes the action data' do
+            prepare_plans
+            action_id = action_data[:id]
+            -> { adapter.load_action('plan1', action_id) }.must_raise KeyError
+
+            prepare_action('plan1')
+            loaded_action = adapter.load_action('plan1', action_id)
+            loaded_action[:id].must_equal action_id
+            loaded_action.must_equal(action_data.stringify_keys)
+
+            adapter.save_action('plan1', action_id, nil)
+            -> { adapter.load_action('plan1', action_id) }.must_raise KeyError
+
+            adapter.save_execution_plan('plan1', nil)
+          end
+        end
+
+        describe '#load_step and #save_step' do
+          it 'serializes/deserializes the step data' do
+            prepare_plans_with_actions
+            step_id = step_data[:id]
+            prepare_step('plan1')
+            loaded_step = adapter.load_step('plan1', step_id)
+            loaded_step[:id].must_equal step_id
+            loaded_step.must_equal(step_data.stringify_keys)
+          end
+        end
+      end
+
+      describe Dynflow::PersistenceAdapters::Sequel do
+        let(:adapter) { Dynflow::PersistenceAdapters::Sequel.new 'sqlite:/' }
+
+        it_acts_as_persistence_adapter
+
+        it 'allows inspecting the persisted content' do
+          plans = prepare_plans
+
+          plans.each do |original|
+            stored = adapter.to_hash.fetch(:execution_plans).find { |ep| ep[:uuid].strip == original[:id] }
+            stored.each { |k, v| stored[k] = v.to_s if v.is_a? Time }
+            adapter.class::META_DATA.fetch(:execution_plan).each do |name|
+              stored.fetch(name.to_sym).must_equal original.fetch(name.to_sym)
+            end
+          end
+        end
+      end
+    end
+  end
+end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: dynflow
 version: !ruby/object:Gem::Version
-  version: 0.7.
+  version: 0.7.8
 prerelease:
 platform: ruby
 authors:
@@ -10,7 +10,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-
+date: 2015-05-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -348,6 +348,7 @@ files:
 - lib/dynflow/action/progress.rb
 - lib/dynflow/action/rescue.rb
 - lib/dynflow/action/suspended.rb
+- lib/dynflow/action/timeouts.rb
 - lib/dynflow/action/with_sub_plans.rb
 - lib/dynflow/clock.rb
 - lib/dynflow/daemon.rb
@@ -432,7 +433,7 @@ files:
 - test/execution_plan_test.rb
 - test/executor_test.rb
 - test/middleware_test.rb
-- test/
+- test/persistence_test.rb
 - test/remote_via_socket_test.rb
 - test/rescue_test.rb
 - test/support/code_workflow_example.rb
@@ -493,7 +494,7 @@ test_files:
 - test/execution_plan_test.rb
 - test/executor_test.rb
 - test/middleware_test.rb
-- test/
+- test/persistence_test.rb
 - test/remote_via_socket_test.rb
 - test/rescue_test.rb
 - test/support/code_workflow_example.rb
data/test/persistance_adapters_test.rb
DELETED
@@ -1,173 +0,0 @@
-require_relative 'test_helper'
-require 'fileutils'
-
-module PersistenceAdapterTest
-  def storage
-    raise NotImplementedError
-  end
-
-  def prepare_plans
-    proto_plans = [{ id: 'plan1', state: 'paused' },
-                   { id: 'plan2', state: 'stopped' },
-                   { id: 'plan3', state: 'paused' }]
-    proto_plans.map do |h|
-      h.merge result: nil, started_at: (Time.now-20).to_s, ended_at: (Time.now-10).to_s,
-              real_time: 0.0, execution_time: 0.0
-    end.tap do |plans|
-      plans.each { |plan| storage.save_execution_plan(plan[:id], plan) }
-    end
-  end
-
-  def test_load_execution_plans
-    plans = prepare_plans
-    loaded_plans = storage.find_execution_plans
-    loaded_plans.size.must_equal 3
-    loaded_plans.must_include plans[0].with_indifferent_access
-    loaded_plans.must_include plans[1].with_indifferent_access
-  end
-
-  def test_pagination
-    prepare_plans
-    if storage.pagination?
-      loaded_plans = storage.find_execution_plans(page: 0, per_page: 1)
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan1']
-
-      loaded_plans = storage.find_execution_plans(page: 1, per_page: 1)
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan2']
-    end
-  end
-
-  def test_ordering
-    prepare_plans
-    if storage.ordering_by.include?(:state)
-      loaded_plans = storage.find_execution_plans(order_by: 'state')
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan3', 'plan2']
-
-      loaded_plans = storage.find_execution_plans(order_by: 'state', desc: true)
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan2', 'plan3', 'plan1']
-    end
-  end
-
-  def test_filtering
-    prepare_plans
-    if storage.ordering_by.include?(:state)
-      loaded_plans = storage.find_execution_plans(filters: { state: ['paused'] })
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan3']
-
-      loaded_plans = storage.find_execution_plans(filters: { state: ['stopped'] })
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan2']
-
-      loaded_plans = storage.find_execution_plans(filters: { state: [] })
-      loaded_plans.map { |h| h[:id] }.must_equal []
-
-      loaded_plans = storage.find_execution_plans(filters: { state: ['stopped', 'paused'] })
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan2', 'plan3']
-
-      loaded_plans = storage.find_execution_plans(filters: { 'state' => ['stopped', 'paused'] })
-      loaded_plans.map { |h| h[:id] }.must_equal ['plan1', 'plan2', 'plan3']
-    end
-  end
-
-  def test_save_execution_plan
-    plan = { id: 'plan1', state: :pending, result: nil, started_at: nil, ended_at: nil,
-             real_time: 0.0, execution_time: 0.0 }
-    -> { storage.load_execution_plan('plan1') }.must_raise KeyError
-
-    storage.save_execution_plan('plan1', plan)
-    storage.load_execution_plan('plan1')[:id].must_equal 'plan1'
-    storage.load_execution_plan('plan1')['id'].must_equal 'plan1'
-    storage.load_execution_plan('plan1').keys.size.must_equal 7
-
-    storage.save_execution_plan('plan1', nil)
-    -> { storage.load_execution_plan('plan1') }.must_raise KeyError
-  end
-
-  def test_save_action
-    plan = { id: 'plan1', state: :pending, result: nil, started_at: nil, ended_at: nil,
-             real_time: 0.0, execution_time: 0.0 }
-    storage.save_execution_plan('plan1', plan)
-
-    action = { id: 1, caller_execution_plan_id: nil, caller_action_id: nil }
-    -> { storage.load_action('plan1', 1) }.must_raise KeyError
-
-    storage.save_action('plan1', 1, action)
-    storage.load_action('plan1', 1)[:id].must_equal 1
-    storage.load_action('plan1', 1)['id'].must_equal 1
-    storage.load_action('plan1', 1).keys.must_equal %w[id caller_execution_plan_id caller_action_id]
-
-    storage.save_action('plan1', 1, nil)
-    -> { storage.load_action('plan1', 1) }.must_raise KeyError
-
-    storage.save_execution_plan('plan1', nil)
-  end
-
-end
-
-class SequelTest < MiniTest::Spec
-  include PersistenceAdapterTest
-
-  def storage
-    @storage ||= Dynflow::PersistenceAdapters::Sequel.new 'sqlite:/'
-  end
-
-  def test_stores_meta_data
-    plans = prepare_plans
-
-    plans.each do |original|
-      stored = storage.to_hash.fetch(:execution_plans).find { |ep| ep[:uuid] == original[:id] }
-      stored.each { |k, v| stored[k] = v.to_s if v.is_a? Time }
-      storage.class::META_DATA.fetch(:execution_plan).each do |name|
-        stored.fetch(name.to_sym).must_equal original.fetch(name.to_sym)
-      end
-    end
-  end
-end
-
-#class MemoryTest < MiniTest::Unit::TestCase
-#  include PersistenceAdapterTest
-#
-#  def storage
-#    @storage ||= Dynflow::PersistenceAdapters::Memory.new
-#  end
-#end
-#
-#class SimpleFileStorageTest < MiniTest::Unit::TestCase
-#  include PersistenceAdapterTest
-#
-#  def storage_path
-#    "#{File.dirname(__FILE__)}/simple_file_storage"
-#  end
-#
-#  def setup
-#    Dir.mkdir storage_path
-#  end
-#
-#  def storage
-#    @storage ||= begin
-#      Dynflow::PersistenceAdapters::SimpleFileStorage.new storage_path
-#    end
-#  end
-#
-#  def teardown
-#    FileUtils.rm_rf storage_path
-#  end
-#end
-#
-#require 'dynflow/persistence_adapters/active_record'
-#
-#class ActiveRecordTest < MiniTest::Unit::TestCase
-#  include PersistenceAdapterTest
-#
-#  def setup
-#    ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')
-#    ::ActiveRecord::Migrator.migrate Dynflow::PersistenceAdapters::ActiveRecord.migrations_path
-#  end
-#
-#  def storage
-#    @storage ||= begin
-#      Dynflow::PersistenceAdapters::ActiveRecord.new
-#    end
-#  end
-#end
-
-