rworkflow 0.6.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/MIT-LICENSE +20 -0
- data/Rakefile +117 -0
- data/lib/rworkflow/flow.rb +450 -0
- data/lib/rworkflow/flow_registry.rb +69 -0
- data/lib/rworkflow/lifecycle.rb +102 -0
- data/lib/rworkflow/minitest/test.rb +53 -0
- data/lib/rworkflow/minitest/worker.rb +17 -0
- data/lib/rworkflow/minitest.rb +8 -0
- data/lib/rworkflow/sidekiq_flow.rb +186 -0
- data/lib/rworkflow/sidekiq_helper.rb +84 -0
- data/lib/rworkflow/sidekiq_lifecycle.rb +8 -0
- data/lib/rworkflow/sidekiq_state.rb +42 -0
- data/lib/rworkflow/state.rb +104 -0
- data/lib/rworkflow/state_error.rb +13 -0
- data/lib/rworkflow/transition_error.rb +9 -0
- data/lib/rworkflow/version.rb +3 -0
- data/lib/rworkflow/worker.rb +62 -0
- data/lib/rworkflow.rb +15 -0
- data/lib/tasks/rworkflow_tasks.rake +4 -0
- data/test/dummy/README.rdoc +28 -0
- data/test/dummy/Rakefile +6 -0
- data/test/dummy/app/assets/javascripts/application.js +13 -0
- data/test/dummy/app/assets/stylesheets/application.css +15 -0
- data/test/dummy/app/controllers/application_controller.rb +5 -0
- data/test/dummy/app/helpers/application_helper.rb +2 -0
- data/test/dummy/app/views/layouts/application.html.erb +14 -0
- data/test/dummy/bin/bundle +3 -0
- data/test/dummy/bin/rails +4 -0
- data/test/dummy/bin/rake +4 -0
- data/test/dummy/config/application.rb +15 -0
- data/test/dummy/config/boot.rb +5 -0
- data/test/dummy/config/database.yml +25 -0
- data/test/dummy/config/environment.rb +5 -0
- data/test/dummy/config/environments/development.rb +37 -0
- data/test/dummy/config/environments/production.rb +83 -0
- data/test/dummy/config/environments/test.rb +41 -0
- data/test/dummy/config/initializers/backtrace_silencers.rb +7 -0
- data/test/dummy/config/initializers/cookies_serializer.rb +3 -0
- data/test/dummy/config/initializers/filter_parameter_logging.rb +4 -0
- data/test/dummy/config/initializers/inflections.rb +16 -0
- data/test/dummy/config/initializers/mime_types.rb +4 -0
- data/test/dummy/config/initializers/session_store.rb +3 -0
- data/test/dummy/config/initializers/wrap_parameters.rb +14 -0
- data/test/dummy/config/locales/en.yml +23 -0
- data/test/dummy/config/routes.rb +56 -0
- data/test/dummy/config/secrets.yml +22 -0
- data/test/dummy/config.ru +4 -0
- data/test/dummy/db/test.sqlite3 +0 -0
- data/test/dummy/log/test.log +516 -0
- data/test/dummy/public/404.html +67 -0
- data/test/dummy/public/422.html +67 -0
- data/test/dummy/public/500.html +66 -0
- data/test/dummy/public/favicon.ico +0 -0
- data/test/flow_test.rb +112 -0
- data/test/lifecycle_test.rb +81 -0
- data/test/rworkflow_test.rb +7 -0
- data/test/sidekiq_flow_test.rb +173 -0
- data/test/state_test.rb +99 -0
- data/test/test_helper.rb +32 -0
- metadata +199 -0
@@ -0,0 +1,102 @@
|
|
1
|
+
module Rworkflow
  # Describes a workflow as a set of named states plus the transitions
  # between them, together with the options used to build each state.
  class Lifecycle
    attr_reader :states
    attr_accessor :initial, :default, :state_class, :state_options

    CARDINALITY_ALL_STARTED = :all_started # Indicates a cardinality equal to the jobs pushed at the start of the workflow

    DEFAULT_STATE_OPTIONS = {
      cardinality: State::DEFAULT_CARDINALITY,
      priority: State::DEFAULT_PRIORITY,
      policy: State::STATE_POLICY_NO_WAIT
    }

    # @param state_class [Class] class used to instantiate states (State or a subclass)
    # @param state_options [Hash] defaults merged into every state's options
    # Yields self so callers can declare states/transitions in a block.
    def initialize(state_class: State, state_options: {})
      @state_options = DEFAULT_STATE_OPTIONS.merge(state_options)
      @state_class = state_class
      @states = {}
      @default = nil
      yield(self) if block_given?
    end

    # Define (or overwrite) the state +name+; yields the new state for
    # further configuration.
    # @return [State] the newly registered state
    def state(name, options = {})
      options = @state_options.merge(options)
      new_state = @state_class.new(**options)

      yield(new_state) if block_given?

      @states[name] = new_state
    end

    # Resolve transition +name+ out of state +from+, falling back to
    # @default when the state does not define it.
    # @raise [StateError] when +from+ is not a registered state
    def transition(from, name)
      from_state = @states[from]
      fail(StateError, from) if from_state.nil?

      return from_state.perform(name, @default)
    end

    # Append +lifecycle+ onto this one: merge its states in (key conflicts
    # resolved by +state_merge_handler+, defaulting to State#merge where the
    # incoming state's options win) and wire transition +name+ from state
    # +from+ to the appended lifecycle's initial state.
    # @return [Lifecycle] self
    def concat!(from, name, lifecycle, &state_merge_handler)
      state_merge_handler ||= lambda do |_, original_state, concat_state|
        original_state.merge(concat_state)
      end

      @states.merge!(lifecycle.states, &state_merge_handler)

      next_state = lifecycle.initial
      @states[from].transition(name, next_state)
      return self
    end

    # Rename a state key while keeping its State object.
    #
    # BUG FIX: the original called @states.delete(old_state), deleting by the
    # State *object* rather than its name, so the state stayed registered
    # under both the old and the new key.
    # NOTE(review): transitions in other states that still point at
    # old_state_name are not rewritten here — confirm callers don't rely on it.
    def rename_state(old_state_name, new_state_name)
      return if old_state_name == new_state_name
      @states[new_state_name] = @states.delete(old_state_name)

      @initial = new_state_name if @initial == old_state_name
    end

    # Serializable hash representation (see .unserialize for the inverse).
    def to_h
      return {
        initial: @initial,
        default: @default,
        state_class: @state_class,
        state_options: @state_options,
        states: Hash[@states.map { |name, state| [name, state.to_h] }]
      }
    end

    # Render the whole lifecycle as a digraph of state transitions.
    def to_graph
      states = @states || [] # capture for the block: digraph rebinds context
      return digraph do
        states.each do |from, state|
          state.transitions.each do |transition, to|
            edge(from.to_s, to.to_s).label(transition.to_s)
          end
        end
      end
    end

    def serialize
      return self.class.serialize(self)
    end

    class << self
      def serialize(lifecycle)
        return lifecycle.to_h
      end

      # Rebuild a Lifecycle from the hash produced by #to_h / .serialize.
      def unserialize(hash)
        return self.new do |lf|
          lf.initial = hash[:initial]
          lf.default = hash[:default]
          lf.state_options = hash[:state_options]
          lf.state_class = hash[:state_class]

          hash[:states].each do |name, state_hash|
            lf.states[name] = lf.state_class.unserialize(state_hash)
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,53 @@
|
|
1
|
+
module Rworkflow
  module Minitest
    # Include in your test classes to add functionality for worker and workflow tests
    module Test
      def setup
        super
        rworkflow_setup
      end

      def teardown
        super
        rworkflow_teardown
      end

      # Per-test workflow setup hook; override in including classes.
      def rworkflow_setup
      end
      protected :rworkflow_setup

      # Per-test workflow teardown hook; override in including classes.
      def rworkflow_teardown
      end
      protected :rworkflow_teardown

      # Build a worker wired to a fresh workflow for unit testing.
      #
      # @param worker_class [Class] the worker class to instantiate
      # @param flow [Class] workflow class to instantiate; defaults to SidekiqFlow
      # @param name [String, nil] the state name; defaults to the worker class name
      # @param meta [Hash] key/value pairs stored on the workflow beforehand
      # @return [Array(Object, Object)] the worker and its workflow
      def rworkflow_worker(worker_class, flow: ::SidekiqFlow, name: nil, meta: {})
        state_name = name || worker_class.name
        worker = worker_class.new
        workflow = flow.new(state_name)
        meta.each_pair { |key, value| workflow.set(key, value) }

        # Wire the worker as if Worker#perform had loaded the workflow itself.
        worker.instance_variable_set(:@workflow, workflow)
        worker.instance_variable_set(:@state_name, state_name)

        # Short-circuit transitions so pushed objects stay inspectable.
        workflow.extend(WorkerUnitTestFlow)
        flexmock(workflow.class).should_receive(:terminal?).and_return(true) if defined?(flexmock)

        yield(workflow) if block_given?

        [worker, workflow]
      end
    end

    # Replaces Flow#transition for tests: instead of moving objects between
    # states, push them straight into the target state's list.
    module WorkerUnitTestFlow
      def transition(_, name, objects)
        push(objects, name)
      end
    end
  end
end
|
@@ -0,0 +1,17 @@
|
|
1
|
+
module Rworkflow
  # Test reopening of Worker: instead of re-enqueueing pushed-back objects
  # indefinitely, collect them locally so tests can assert on them.
  class Worker
    def initialize(*args)
      super
      @__pushed_back = []
    end

    # @return [Array] every object captured by #push_back so far
    def pushed_back
      @__pushed_back
    end

    # Capture the objects instead of returning them to the workflow state.
    def push_back(objects)
      @__pushed_back.push(*objects)
    end
  end
end
|
@@ -0,0 +1,186 @@
|
|
1
|
+
module Rworkflow
  # Flow backed by Sidekiq: whenever objects are pushed into a state, the
  # matching number of Sidekiq jobs is enqueued to process them. Adds
  # pause/continue support and "gated" states whose jobs are held back
  # until the gate is explicitly opened.
  class SidekiqFlow < Flow

    STATE_POLICY_GATED = :gated
    MAX_EXPECTED_DURATION = 4.hours
    # Priority labels ordered highest to lowest; nil is the default lane.
    PRIORITIES = [:critical, :high, nil, :low]

    def initialize(id)
      super(id)
      # Redis-backed set of state names whose gate has been opened.
      @open_gates = RedisRds::Set.new("#{@redis_key}__open_gates")
    end

    # Remove all flow data, including the gate bookkeeping.
    def cleanup
      super()
      @open_gates.delete()
    end

    # Push objects into +name+ and enqueue the jobs needed to process them.
    # NOTE(review): the explicit `return` inside `ensure` swallows any
    # exception raised by `super` (the method returns 0 instead of raising)
    # — confirm this best-effort behaviour is intentional.
    def push(objects, name)
      pushed = 0
      pushed = super(objects, name)
    ensure
      create_jobs(name, pushed) if pushed > 0
      return pushed
    end

    def expected_duration
      return MAX_EXPECTED_DURATION
    end

    # Paused while the :paused counter is positive (pause/continue nest).
    def paused?
      return @flow_data.get(:paused).to_i > 0
    end

    def status
      return (paused?) ? 'Paused' : super()
    end

    # Increment the pause counter; new jobs are not created while paused
    # (see #create_jobs). Errors are logged, not raised.
    def pause
      return if self.finished?
      @flow_data.incr(:paused)
    rescue StandardError => e
      Rails.logger.error("Error pausing flow #{self.id}: #{e.message}")
    end

    # for now assumes
    # Decrement the pause counter; when it reaches zero, re-enqueue jobs for
    # every non-terminal state that still holds objects.
    def continue
      return if self.finished? || !self.valid? || !self.paused?
      if @flow_data.decr(:paused) == 0
        workers = Hash[get_counters.select { |name, _| !self.class.terminal?(name) && name != :processing }]

        # enqueue jobs
        workers.each { |worker, num_objects| create_jobs(worker, num_objects) }
      end
    rescue StandardError => e
      Rails.logger.error("Error continuing flow #{self.id}: #{e.message}")
    end

    # Enqueue enough Sidekiq jobs for +num_objects+ waiting in +state_name+.
    # Skipped when paused, empty, terminal, or behind a closed gate.
    # The worker class comes from the state itself (SidekiqState) or, as a
    # fallback, by constantizing the state name.
    def create_jobs(state_name, num_objects)
      return if paused? || num_objects < 1 || self.class.terminal?(state_name) || gated?(state_name)
      state = @lifecycle.states[state_name]
      worker_class = if state.respond_to?(:worker_class)
        state.worker_class
      else
        begin
          state_name.constantize
        rescue NameError => _
          Rails.logger.error("Trying to push to a non existent worker class #{state_name} in workflow #{@id}")
          nil
        end
      end

      if !worker_class.nil?
        cardinality = get_state_cardinality(state_name)

        # WAIT policy: only start jobs for complete batches of `cardinality`
        # objects (floor); otherwise start enough jobs to cover everything
        # (ceil).
        if state.policy == State::STATE_POLICY_WAIT
          amount = ((num_objects + get_state_list(state_name).size) / cardinality.to_f).floor
        else
          amount = (num_objects / cardinality.to_f).ceil
        end

        # Flow-level priority (set at creation) overrides per-state priority.
        state_priority = self.priority || state.priority
        amount.times { worker_class.enqueue_job_with_priority(state_priority, @id, state_name) }
      end
    end

    # Memoized flow-level priority stored under :priority (nil when unset).
    def priority
      return @priority ||= begin self.get(:priority) end
    end

    # A state is gated when its policy is :gated and its gate has not been
    # opened yet.
    def gated?(state_name)
      state = @lifecycle.states[state_name]
      return state.policy == STATE_POLICY_GATED && !@open_gates.include?(state_name)
    end

    # Open the gate and immediately enqueue jobs for everything waiting.
    def open_gate(state_name)
      @open_gates.add(state_name)
      num_objects = count(state_name)
      create_jobs(state_name, num_objects)
    end

    def close_gate(state_name)
      @open_gates.remove(state_name)
    end

    class << self
      # Create a flow, persisting an optional :priority option on it.
      def create(lifecycle, name = '', options)
        workflow = super(lifecycle, name, options)
        workflow.set(:priority, options[:priority]) unless options[:priority].nil?

        return workflow
      end

      def get_manual_priority
        return :high
      end

      # Maintenance task: clean up flows that are invalid, finished-but-not-
      # public, or were created over a day ago and never started.
      def cleanup_broken_flows
        broken = []
        flows = self.all
        flows.each do |flow|
          if flow.valid?
            if flow.finished? && !flow.public?
              broken << [flow, 'finished']
            elsif !flow.started? && flow.created_at < 1.day.ago
              broken << [flow, 'never started']
            end
          else
            broken << [flow, 'invalid']
          end
        end

        broken.each do |flow_pair|
          flow_pair.first.cleanup
          puts "Cleaned up #{flow_pair.second} flow #{flow_pair.first.id}"
        end
        puts ">>> Cleaned up #{broken.size} broken flows <<<"
      end

      # Maintenance task: for every running flow, enqueue jobs for objects
      # that have no matching job sitting in the Sidekiq queues.
      def enqueue_missing_jobs
        queued_flow_map = build_flow_map
        running_flows = self.all.select { |f| f.valid? && !f.finished? && !f.paused? }
        running_flows.each do |flow|
          state_map = queued_flow_map.fetch(flow.id, {})
          create_missing_jobs(flow, state_map)
        end
      end

      # Scan all Sidekiq queues and count queued Rworkflow jobs,
      # keyed as flow_id => { state_name => job_count }.
      def build_flow_map
        flow_map = {}
        queues = SidekiqHelper.get_queue_sizes.keys
        queues.each do |queue_name|
          queue = Sidekiq::Queue.new(queue_name)
          queue.each do |job|
            klass = begin
              job.klass.constantize
            rescue NameError => _
              nil
            end

            if !klass.nil? && klass <= Rworkflow::Worker
              id = job.args.first
              state_name = job.args.second
              state_map = flow_map.fetch(id, {})
              state_map[state_name] = state_map.fetch(state_name, 0) + 1
              flow_map[id] = state_map
            end
          end
        end
        return flow_map
      end

      # Compare each state's object count against the jobs already queued
      # (jobs * cardinality) and enqueue jobs for the shortfall.
      def create_missing_jobs(flow, state_map)
        counters = flow.get_counters
        counters.each do |state, num_objects|
          next if flow.class.terminal?(state) || state == :processing
          enqueued = state_map.fetch(state, 0) * flow.get_state_cardinality(state)
          missing = num_objects - enqueued
          if missing > 0
            flow.create_jobs(state, missing)
            puts "Created #{missing} missing jobs for state #{state} in flow #{flow.id}"
          end
        end
      end
    end
  end
end
|
@@ -0,0 +1,84 @@
|
|
1
|
+
require 'sidekiq/api'

module Rworkflow
  # Mix-in that adds job-enqueueing helpers to Sidekiq worker classes, plus
  # module-level utilities for configuring and inspecting Sidekiq itself.
  module SidekiqHelper
    # Hook: grafts both the class-level and instance-level helpers onto the
    # including class.
    def self.included(klass)
      klass.send :extend, ClassMethods
      klass.send :include, InstanceMethods
    end

    module ClassMethods
      # Mix-in methods

      # Enqueue with the default (nil) priority lane.
      def enqueue_job(*params)
        enqueue_job_with_priority(nil, *params)
      end

      def enqueue_job_with_priority(priority, *params)
        return inline_perform(params) unless should_perform_job_async?

        self.perform_with_priority(priority, *params)
      end

      def enqueue_job_at(at_time, *params)
        return inline_perform(params) unless should_perform_job_async?

        self.perform_at(at_time, *params)
      end

      def enqueue_job_in(time_diff, *params)
        return inline_perform(params) unless should_perform_job_async?

        self.perform_in(time_diff, *params)
      end

      # Jobs only go through the Sidekiq queues in production; elsewhere
      # they run synchronously in-process via #inline_perform.
      def should_perform_job_async?
        Rails.env.production?
      end

      # Run the job synchronously, mimicking Sidekiq: arguments are
      # round-tripped through JSON (as Sidekiq would serialize them) and the
      # worker receives a generated jid.
      def inline_perform(params)
        worker = self.new
        serialized_args = JSON.parse(params.to_json)
        worker.jid = Digest::MD5.hexdigest((Time.now.to_f * 1000).to_i.to_s)
        worker.perform(*serialized_args)
      end
    end

    module InstanceMethods
    end

    # Static methods
    class << self
      def configure_server(host, port, db)
        Sidekiq.configure_server do |config|
          config.redis = { url: "redis://#{host}:#{port}/#{db}", namespace: 'sidekiq' }
          config.server_middleware do |chain|
            chain.add SidekiqServerMiddleware
          end
        end
      end

      def configure_client(host, port, db)
        Sidekiq.configure_client do |config|
          config.redis = { url: "redis://#{host}:#{port}/#{db}", namespace: 'sidekiq' }
        end
      end

      # @return [Hash] queue name => number of enqueued jobs
      def get_queue_sizes
        Sidekiq::Stats.new.queues
      end

      # @return [Integer] total enqueued jobs across all queues
      def get_queue_sizes_sum
        Sidekiq::Stats.new.enqueued
      end
    end
  end
end
|
@@ -0,0 +1,42 @@
|
|
1
|
+
module Rworkflow
  # State subclass that additionally carries the Sidekiq worker class
  # responsible for processing objects in this state.
  class SidekiqState < State
    attr_accessor :worker_class

    # @param worker [Class, nil] the worker class bound to this state
    def initialize(worker: nil, **options)
      super(**options)
      @worker_class = worker
    end

    # BUG FIX: must return self. State#merge is implemented as
    # `clone.merge!(other)`, so the original — which returned the last
    # assignment (or nil when +state+ has no worker_class) — made #merge
    # return the wrong object for this subclass.
    def merge!(state)
      super
      @worker_class = state.worker_class if state.respond_to?(:worker_class)
      return self
    end

    def clone
      cloned = super
      cloned.worker_class = @worker_class

      return cloned
    end

    # Equality additionally requires the same worker class. Guarded with
    # respond_to? so comparing against a plain State yields false instead of
    # raising NoMethodError.
    def ==(state)
      return super && state.respond_to?(:worker_class) && state.worker_class == @worker_class
    end

    def to_h
      h = super
      h[:worker_class] = @worker_class

      return h
    end

    class << self
      # Rebuild from the hash produced by #to_h (inverse of serialization).
      def unserialize(state_hash)
        state = super(state_hash)
        state.worker_class = state_hash[:worker_class]

        return state
      end
    end
  end
end
|
@@ -0,0 +1,104 @@
|
|
1
|
+
module Rworkflow
  # A node in a Lifecycle: carries execution options (cardinality, priority,
  # scheduling policy) and a map of named transitions to other state names.
  class State
    DEFAULT_CARDINALITY = 1
    DEFAULT_PRIORITY = nil

    # To be refactored into Policy objects
    STATE_POLICY_WAIT = :wait
    STATE_POLICY_NO_WAIT = :no_wait

    attr_accessor :cardinality, :priority, :policy
    attr_reader :transitions

    # Unknown keyword arguments are accepted and ignored so callers can pass
    # full option hashes (e.g. during unserialization).
    def initialize(cardinality: DEFAULT_CARDINALITY, priority: DEFAULT_PRIORITY, policy: STATE_POLICY_NO_WAIT, **_)
      @cardinality = cardinality
      @priority = priority
      @policy = policy

      @transitions = {}
    end

    # Register (or overwrite) transition +name+ as leading to +to+.
    def transition(name, to)
      @transitions[name] = to
    end

    # Resolve transition +name+, falling back to +default+ when undefined.
    # @raise [TransitionError] when neither resolves
    def perform(name, default = nil)
      destination = @transitions[name] || default
      raise TransitionError.new(name) if destination.nil?
      destination
    end

    # Default rule: new state overwrites old state when applicable
    def merge!(state)
      @cardinality = state.cardinality
      @priority = state.priority
      @policy = state.policy

      @transitions.merge!(state.transitions) { |_, _, theirs| theirs }

      self
    end

    # Non-destructive merge: returns a merged copy, leaving self untouched.
    def merge(state)
      clone.merge!(state)
    end

    # Copy of this state with its own transitions hash.
    def clone
      copy = self.class.new(cardinality: @cardinality, priority: @priority, policy: @policy)
      @transitions.each_pair { |name, to| copy.transition(name, to) }
      copy
    end

    # Value equality over all options and the transition map.
    def ==(state)
      @cardinality == state.cardinality &&
        @priority == state.priority &&
        @policy == state.policy &&
        @transitions == state.transitions
    end

    def to_h
      {
        transitions: @transitions,
        cardinality: @cardinality,
        priority: @priority,
        policy: @policy
      }
    end

    def to_graph
      edges = @transitions # need to capture for block, as digraph rebinds context

      digraph do
        edges.each_pair do |transition, to|
          edge('self', to.to_s).label(transition.to_s)
        end
      end
    end

    def inspect
      "[ Cardinality: #{@cardinality} ; Policy: #{@policy} ; Priority: #{@priority} ] -> #{to_graph.to_s}"
    end

    def serialize
      self.class.serialize(self)
    end

    class << self
      def serialize(state)
        state.to_h
      end

      # Inverse of .serialize: rebuild a state from its hash form.
      def unserialize(state_hash)
        rebuilt = self.new(**state_hash)

        state_hash[:transitions].each_pair do |from, to|
          rebuilt.transition(from, to)
        end

        rebuilt
      end
    end
  end
end
|
@@ -0,0 +1,62 @@
|
|
1
|
+
require 'sidekiq'

module Rworkflow
  # Base class for Sidekiq-backed workflow workers: each job loads the
  # workflow, fetches a batch of objects for its state and hands them to
  # #process (implemented by subclasses).
  class Worker
    include Sidekiq::Worker
    include SidekiqHelper

    sidekiq_options queue: :mysql

    # Sidekiq entry point. Silently skips when the workflow is missing,
    # invalid, or paused.
    def perform(id, state_name)
      @workflow = self.class.load_workflow(id)
      @state_name = state_name
      if !@workflow.nil?
        if !@workflow.paused?
          @workflow.fetch(self.jid, state_name) do |objects|
            if objects.present?
              Rails.logger.debug("Starting #{self.class}::process() (flow #{id})")
              process(objects)
              Rails.logger.debug("Finished #{self.class}::process() (flow #{id})")
            else
              Rails.logger.debug("No objects to process for #{self.class}")
            end
          end
        end
      end
    rescue Exception => e # rubocop:disable Lint/RescueException -- log absolutely everything, then re-raise for Sidekiq's retry machinery
      Rails.logger.error("Exception produced on #{@state_name} for flow #{id} on perform: #{e.message}\n#{e.backtrace}")
      raise e
    end

    # Move +objects+ from the current state through transition +to_state+.
    def transition(to_state, objects)
      @workflow.transition(@state_name, to_state, objects)
      Rails.logger.debug("State #{@state_name} transitioned #{Array.wrap(objects).size} objects to state #{to_state} (flow #{@workflow.id})")
    end

    # Return +objects+ to the current state for reprocessing.
    def push_back(objects)
      @workflow.push(objects, @state_name)
      Rails.logger.debug("State #{@state_name} pushed back #{Array.wrap(objects).size} objects (flow #{@workflow.id})")
    end

    # Subclasses implement the actual work on a batch of objects.
    def process(_objects)
      raise NotImplementedError
    end

    class << self
      # Build a single-state lifecycle whose only state is this worker class.
      #
      # BUG FIX: inside `class << self`, +self+ is already the worker class;
      # the original used `self.class`, which evaluates to +Class+, so the
      # lifecycle registered a state named "Class" with `worker: Class`.
      def generate_lifecycle(&block)
        return Rworkflow::Lifecycle.new do |lc|
          lc.state(self.name, worker: self, &block)
          lc.initial = self.name
        end
      end

      # Load and validate a workflow by id; returns nil (with a warning) when
      # it does not exist or is invalid.
      def load_workflow(id)
        workflow = Flow.load(id)
        return workflow if !workflow.nil? && workflow.valid?

        Rails.logger.warn("Worker #{self.name} tried to load non existent workflow #{id}")
        return nil
      end
    end
  end
end
|