dwf 0.1.7 → 0.1.11

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: fb0f6f6f30b7ed1ba19120a4a1f77e450940e45433153de51f47df9f108d0fae
- data.tar.gz: e0e86fdd169b7cbcdc811cf62f87b3043fbbd708bdf0f41fb006b392e4ab784b
+ metadata.gz: e545ff4c6f4b56071875f642f1262fee7762fabb63d86c816a0c943cb8e34657
+ data.tar.gz: dfe832f98724b81b8ff1198320cb66013869cca6ba2b2f89d47c4caf0b2108f9
  SHA512:
- metadata.gz: 4a3770d4d7f63b7ede02d59e1ce43cb2634b39cbd9e037c920e4f816acb7b393de0f9030fda735e8ea0873f3679da120ff405f725e87c60c1b9eb6a7fbf355e3
- data.tar.gz: 469e25cdf11441c59c80ff47197dcabcf40e0b086ab06babb6b39a0597483517c479ee9ea65767819b69afebf83db6dcdb5926a9839a2e40d02c03cf39a6eb57
+ metadata.gz: 7e77ca9abe614aa1da026818f1a49028bcc3878bd9bd5838e8cdfdf245ab09d124ab4300bd077eed1c3e8b07e46b321c1dd59d18face658fda3f6c636e28954f
+ data.tar.gz: 6a6ff8756a13fcec73966c41ded7b808cae24a73e1cec2bd301dacfdbfde6022812c7f27c6dacc12a5bb469aae241e59316eb544b2ca7ac1f0530e1c5f5ffa2c
data/.gitignore CHANGED
@@ -4,3 +4,4 @@ Gemfile.lock
  dwf-*.gem
  :w
  :W
+ coverage/
data/CHANGELOG.md CHANGED
@@ -1,5 +1,142 @@
  # Changelog
  All notable changes to this project will be documented in this file.
+ ## 0.1.11
+ ### Added
+ #### Subworkflow - Sidekiq Pro only
+ There might be a case when you want to reuse a workflow inside another workflow.
+
+ As an example, let's write a workflow that contains another workflow: the `SubWorkflow` workflow is expected to execute after `SecondItem`, and `ThirtItem` to execute after `SubWorkflow`.
+
+ ```ruby
+ gem 'dwf', '~> 0.1.11'
+ ```
+
+ ### Setup
+ ```ruby
+ class FirstItem < Dwf::Item
+   def perform
+     puts "Main flow: #{self.class.name} running"
+     puts "Main flow: #{self.class.name} finish"
+   end
+ end
+
+ SecondItem = Class.new(FirstItem)
+ ThirtItem = Class.new(FirstItem)
+
+ class FirstSubItem < Dwf::Item
+   def perform
+     puts "Sub flow: #{self.class.name} running"
+     puts "Sub flow: #{self.class.name} finish"
+   end
+ end
+
+ SecondSubItem = Class.new(FirstSubItem)
+
+ class SubWorkflow < Dwf::Workflow
+   def configure
+     run FirstSubItem
+     run SecondSubItem, after: FirstSubItem
+   end
+ end
+
+ class TestWf < Dwf::Workflow
+   def configure
+     run FirstItem
+     run SecondItem, after: FirstItem
+     run SubWorkflow, after: SecondItem
+     run ThirtItem, after: SubWorkflow
+   end
+ end
+
+ wf = TestWf.create
+ wf.start!
+ ```
+
+ ### Result
+ ```
+ Main flow: FirstItem running
+ Main flow: FirstItem finish
+ Main flow: SecondItem running
+ Main flow: SecondItem finish
+ Sub flow: FirstSubItem running
+ Sub flow: FirstSubItem finish
+ Sub flow: SecondSubItem running
+ Sub flow: SecondSubItem finish
+ Main flow: ThirtItem running
+ Main flow: ThirtItem finish
+ ```
+
+ ## 0.1.10
+ ### Added
+ - Allow passing arguments into a workflow and update the way the callback type is defined
+ ```ruby
+ class TestWf < Dwf::Workflow
+   def configure(arguments)
+     run A
+     run B, after: A, params: arguments
+     run C, after: A, params: arguments
+   end
+ end
+
+ wf = TestWf.create(arguments)
+ wf.callback_type = Dwf::Workflow::SK_BATCH
+ ```
+ - Support `find` and `reload` for workflows
+ ```ruby
+ wf = TestWf.create
+ Dwf::Workflow.find(wf.id)
+ wf.reload
+ ```
+
+ ## 0.1.9
+ ### Fixed
+ - Fix an incorrect argument in the configuration
+
+ ## 0.1.8
+ ### Added
+ - Add pipelining feature
+
+ ```ruby
+ class SendOutput < Dwf::Item
+   def perform
+     output('it works')
+   end
+ end
+ ```
+
+ The `output` method sends data from a job to its outgoing jobs.
+
+ ```ruby
+ class ReceiveOutput < Dwf::Item
+   def perform
+     message = payloads.first[:output] # 'it works'
+   end
+ end
+ ```
+
+ `payloads` is an array containing the outputs of incoming jobs:
+
+ ```
+ [
+   {
+     id: "SendOutput|1849a3f9-5fce-401e-a73a-91fc1048356",
+     class: "SendOutput",
+     output: 'it works'
+   }
+ ]
+ ```
+
+ Redis options and the namespace are configured via `Dwf.config`:
+ ```ruby
+ Dwf.config do |config|
+   config.opts = { url: 'redis://127.0.0.1:6379' }
+   config.namespace = 'dwf'
+ end
+ ```
+
  ## 0.1.7
  ### Added
  - Allow to config redis and queue
data/README.md CHANGED
@@ -4,22 +4,45 @@
  # Installation
  ## 1. Add `dwf` to Gemfile
  ```ruby
- gem 'dwf', '~> 0.1.6'
+ gem 'dwf', '~> 0.1.10'
  ```
- ## 2. Execute flow
+ ## 2. Execute flow example
  ### Declare jobs
 
  ```ruby
  require 'dwf'
 
- class A < Dwf::Item
+ class FirstItem < Dwf::Item
    def perform
-     puts "#{self.class.name} Working"
-     sleep 2
-     puts params
-     puts "#{self.class.name} Finished"
+     puts "#{self.class.name}: running"
+     puts "#{self.class.name}: finish"
    end
  end
+
+ class SecondItem < Dwf::Item
+   def perform
+     puts "#{self.class.name}: running"
+     output('Send to ThirdItem')
+     puts "#{self.class.name} finish"
+   end
+ end
+
+ class ThirdItem < Dwf::Item
+   def perform
+     puts "#{self.class.name}: running"
+     puts "#{self.class.name}: finish"
+   end
+ end
+
+ class FourthItem < Dwf::Item
+   def perform
+     puts "#{self.class.name}: running"
+     puts "payloads from incoming: #{payloads.inspect}"
+     puts "#{self.class.name}: finish"
+   end
+ end
+
+ FifthItem = Class.new(FirstItem)
  ```
 
  ### Declare flow
@@ -28,20 +51,23 @@ require 'dwf'
 
  class TestWf < Dwf::Workflow
    def configure
-     run A
-     run B, after: A
-     run C, after: A
-     run E, after: [B, C], params: 'E say hello'
-     run D, after: [E], params: 'D say hello'
-     run F, params: 'F say hello'
+     run FirstItem
+     run SecondItem, after: FirstItem
+     run ThirdItem, after: FirstItem
+     run FourthItem, after: [ThirdItem, SecondItem]
+     run FifthItem, after: FourthItem
    end
  end
  ```
-
+ ### Start background worker process
+ ```
+ bundle exec sidekiq -q dwf
+ ```
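The worker command above assumes Sidekiq can reach the same Redis that `dwf` uses. A minimal sketch, not from the gem's docs (the initializer path and URL are assumptions), using Sidekiq's standard configuration API:

```ruby
# config/initializers/sidekiq.rb (hypothetical path)
# Point both the Sidekiq server and client at the Redis instance dwf uses.
require 'sidekiq'

Sidekiq.configure_server do |config|
  config.redis = { url: 'redis://127.0.0.1:6379' }
end

Sidekiq.configure_client do |config|
  config.redis = { url: 'redis://127.0.0.1:6379' }
end
```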
 
  ### Execute flow
  ```ruby
- wf = TestWf.create(callback_type: Dwf::Workflow::SK_BATCH)
+ wf = TestWf.create
+ wf.callback_type = Dwf::Workflow::SK_BATCH
  wf.start!
  ```
 
@@ -54,35 +80,128 @@ By default `dwf` will use `Dwf::Workflow::BUILD_IN` callback.
 
  ### Output
  ```
- A Working
- F Working
- A Finished
- F say hello
- F Finished
- C Working
- B Working
- C Finished
- B Finished
- E Working
- E say hello
- E Finished
- D Working
- D say hello
- D Finished
+ FirstItem: running
+ FirstItem: finish
+ SecondItem: running
+ SecondItem finish
+ ThirdItem: running
+ ThirdItem: finish
+ FourthItem: running
+ FourthItem: finish
+ FifthItem: running
+ FifthItem: finish
  ```
 
  # Config redis and default queue
+ `dwf` uses Redis as its key-value storage through [redis-rb](https://github.com/redis/redis-rb), so you can pass Redis configuration via `redis_opts`.
  ```ruby
  Dwf.config do |config|
    SENTINELS = [
      { host: "127.0.0.1", port: 26380 },
      { host: "127.0.0.1", port: 26381 }
    ]
-   config.opts = { host: 'mymaster', sentinels: SENTINELS, role: :master }
+   config.redis_opts = { host: 'mymaster', sentinels: SENTINELS, role: :master }
    config.namespace = 'dwf'
  end
  ```
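For a single Redis instance without sentinels, a minimal sketch assuming `redis_opts` is handed straight to `Redis.new` (as the client code later in this diff does) could be:

```ruby
Dwf.config do |config|
  # redis_opts is passed to Redis.new, so any redis-rb option works here.
  config.redis_opts = { url: 'redis://127.0.0.1:6379' }
  config.namespace = 'dwf'
end
```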
+ # Advanced features
+ ## Pipelining
+ You can pass a job's results to its downstream nodes.
+
+ ```ruby
+ class SendOutput < Dwf::Item
+   def perform
+     output('it works')
+   end
+ end
+ ```
+
+ The `output` method sends data from a job to its outgoing jobs.
+
+ ```ruby
+ class ReceiveOutput < Dwf::Item
+   def perform
+     message = payloads.first[:output] # 'it works'
+   end
+ end
+ ```
+
+ `payloads` is an array containing the outputs of incoming jobs:
+
+ ```ruby
+ [
+   {
+     id: "SendOutput|1849a3f9-5fce-401e-a73a-91fc1048356",
+     class: "SendOutput",
+     output: 'it works'
+   }
+ ]
+ ```
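To make the data flow concrete, here is a minimal sketch (the `PipelineWf` class name is illustrative, not from the README) wiring the two items above with the `run ... after:` API so that `ReceiveOutput` sees `SendOutput`'s output:

```ruby
# ReceiveOutput runs after SendOutput, so its `payloads` array contains
# SendOutput's id, class, and output as shown above.
class PipelineWf < Dwf::Workflow
  def configure
    run SendOutput
    run ReceiveOutput, after: SendOutput
  end
end

wf = PipelineWf.create
wf.start!
```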
+ ## Subworkflow - Sidekiq Pro only
+ There might be a case when you want to reuse a workflow inside another workflow.
+
+ As an example, let's write a workflow that contains another workflow: the `SubWorkflow` workflow is expected to execute after `SecondItem`, and `ThirtItem` to execute after `SubWorkflow`.
+
+ ```ruby
+ gem 'dwf', '~> 0.1.11'
+ ```
+
+ ### Setup
+ ```ruby
+ class FirstItem < Dwf::Item
+   def perform
+     puts "Main flow: #{self.class.name} running"
+     puts "Main flow: #{self.class.name} finish"
+   end
+ end
+
+ SecondItem = Class.new(FirstItem)
+ ThirtItem = Class.new(FirstItem)
+
+ class FirstSubItem < Dwf::Item
+   def perform
+     puts "Sub flow: #{self.class.name} running"
+     puts "Sub flow: #{self.class.name} finish"
+   end
+ end
+
+ SecondSubItem = Class.new(FirstSubItem)
+
+ class SubWorkflow < Dwf::Workflow
+   def configure
+     run FirstSubItem
+     run SecondSubItem, after: FirstSubItem
+   end
+ end
+
+ class TestWf < Dwf::Workflow
+   def configure
+     run FirstItem
+     run SecondItem, after: FirstItem
+     run SubWorkflow, after: SecondItem
+     run ThirtItem, after: SubWorkflow
+   end
+ end
+
+ wf = TestWf.create
+ wf.start!
+ ```
 
+ ### Result
+ ```
+ Main flow: FirstItem running
+ Main flow: FirstItem finish
+ Main flow: SecondItem running
+ Main flow: SecondItem finish
+ Sub flow: FirstSubItem running
+ Sub flow: FirstSubItem finish
+ Sub flow: SecondSubItem running
+ Sub flow: SecondSubItem finish
+ Main flow: ThirtItem running
+ Main flow: ThirtItem finish
+ ```
 
  # Todo
  - [x] Make it work
@@ -90,9 +209,12 @@ end
  - [x] Support with build-in callback
  - [x] Add github workflow
  - [x] Redis configurable
- - [ ] [WIP] Test
- - [ ] Transfer output through each node
+ - [x] Pipelining
+ - [x] Test
+ - [ ] Subworkflow (WIP)
  - [ ] Support [Resque](https://github.com/resque/resque)
+ - [ ] Pluggable key-value store
+ - [ ] Research https://github.com/moneta-rb/moneta
 
  # References
  - https://github.com/chaps-io/gush
data/dwf.gemspec CHANGED
@@ -26,8 +26,9 @@ Gem::Specification.new do |spec|
  # guide at: https://bundler.io/guides/creating_gem.html
 
  spec.add_development_dependency 'byebug', '~> 11.1.3'
+ spec.add_development_dependency 'mock_redis', '~> 0.27.2'
  spec.add_dependency 'redis', '~> 4.2.0'
  spec.add_development_dependency 'rspec', '~> 3.2'
- spec.add_development_dependency 'mock_redis', '~> 0.27.2'
  spec.add_dependency 'sidekiq', '~> 6.2.0'
+ spec.add_development_dependency 'simplecov'
  end
data/lib/dwf/callback.rb CHANGED
@@ -9,8 +9,8 @@ module Dwf
  previous_job_names = options['names']
  workflow_id = options['workflow_id']
  processing_job_names = previous_job_names.map do |job_name|
- job = client.find_job(workflow_id, job_name)
- job.outgoing
+ node = client.find_node(job_name, workflow_id)
+ node.outgoing
  end.flatten.uniq
  return if processing_job_names.empty?
 
@@ -19,7 +19,7 @@ module Dwf
  end
 
  def start(job)
- job.outgoing.any? ? start_with_batch(job) : job.perform_async
+ job.outgoing.any? ? start_with_batch(job) : job.persist_and_perform_async!
  end
 
  private
@@ -40,11 +40,13 @@ module Dwf
  batch.on(
  :success,
  'Dwf::Callback#process_next_step',
- names: jobs.map(&:klass),
+ names: jobs.map(&:name),
  workflow_id: workflow_id
  )
  batch.jobs do
- jobs.each { |job| job.persist_and_perform_async! if job.ready_to_start? }
+ jobs.each do |job|
+ job.persist_and_perform_async! if job.ready_to_start?
+ end
  end
  end
 
@@ -61,7 +63,7 @@ module Dwf
 
  def fetch_jobs(processing_job_names, workflow_id)
  processing_job_names.map do |job_name|
- client.find_job(workflow_id, job_name)
+ client.find_node(job_name, workflow_id)
  end.compact
  end
 
@@ -71,15 +73,16 @@ module Dwf
  client.release_lock(workflow_id, job_name)
  end
 
- def start_with_batch(job)
+ def start_with_batch(node)
  batch = Sidekiq::Batch.new
+ workflow_id = node.is_a?(Dwf::Workflow) ? node.parent_id : node.workflow_id
  batch.on(
  :success,
  'Dwf::Callback#process_next_step',
- names: [job.name],
- workflow_id: job.workflow_id
+ names: [node.name],
+ workflow_id: workflow_id
  )
- batch.jobs { job.perform_async }
+ batch.jobs { node.persist_and_perform_async! }
  end
 
  def client
data/lib/dwf/client.rb CHANGED
@@ -1,3 +1,5 @@
+ require_relative 'errors'
+
  module Dwf
  class Client
  attr_reader :config
@@ -20,6 +22,43 @@ module Dwf
  Dwf::Item.from_hash(Dwf::Utils.symbolize_keys(data))
  end
 
+ def find_node(name, workflow_id)
+ if Utils.workflow_name?(name)
+ if name.include?('|')
+ _, id = name.split('|')
+ else
+ id = workflow_id(name, workflow_id)
+ end
+ find_workflow(id)
+ else
+ find_job(workflow_id, name)
+ end
+ end
+
+ def find_workflow(id)
+ key = redis.keys("dwf.workflows.#{id}*").first
+ data = redis.get(key)
+ raise WorkflowNotFound, "Workflow with given id doesn't exist" if data.nil?
+
+ hash = JSON.parse(data)
+ hash = Dwf::Utils.symbolize_keys(hash)
+ nodes = parse_nodes(id)
+ workflow_from_hash(hash, nodes)
+ end
+
+ def find_sub_workflow(name, parent_id)
+ find_workflow(workflow_id(name, parent_id))
+ end
+
+ def sub_workflows(id)
+ keys = redis.keys("dwf.workflows.*.*.#{id}")
+ keys.map do |key|
+ id = key.split('.')[2]
+
+ find_workflow(id)
+ end
+ end
+
  def persist_job(job)
  redis.hset("dwf.jobs.#{job.workflow_id}.#{job.klass}", job.id, job.as_json)
  end
@@ -39,7 +78,10 @@ module Dwf
  end
 
  def persist_workflow(workflow)
- redis.set("dwf.workflows.#{workflow.id}", workflow.as_json)
+ key = [
+ 'dwf', 'workflows', workflow.id, workflow.class.name, workflow.parent_id
+ ].compact.join('.')
+ redis.set(key, workflow.as_json)
  end
 
  def build_job_id(workflow_id, job_klass)
@@ -84,6 +126,13 @@ module Dwf
 
  private
 
+ def workflow_id(name, parent_id)
+ key = redis.keys("dwf.workflows.*.#{name}.#{parent_id}").first
+ return if key.nil?
+
+ key.split('.')[2]
+ end
+
  def find_job_by_klass_and_id(workflow_id, job_name)
  job_klass, job_id = job_name.split('|')
 
@@ -99,6 +148,32 @@ module Dwf
  job
  end
 
+ def parse_nodes(id)
+ keys = redis.scan_each(match: "dwf.jobs.#{id}.*")
+
+ items = keys.map do |key|
+ redis.hvals(key).map do |json|
+ node = Dwf::Utils.symbolize_keys JSON.parse(json)
+ Dwf::Item.from_hash(node)
+ end
+ end.flatten
+ workflows = sub_workflows(id)
+ items + workflows
+ end
+
+ def workflow_from_hash(hash, jobs = [])
+ flow = Module.const_get(hash[:klass]).new(*hash[:arguments])
+ flow.jobs = []
+ flow.outgoing = hash.fetch(:outgoing, [])
+ flow.parent_id = hash[:parent_id]
+ flow.incoming = hash.fetch(:incoming, [])
+ flow.stopped = hash.fetch(:stopped, false)
+ flow.callback_type = hash.fetch(:callback_type, Workflow::BUILD_IN)
+ flow.id = hash[:id]
+ flow.jobs = jobs
+ flow
+ end
+
  def redis
  @redis ||= Redis.new(config.redis_opts)
  end
@@ -0,0 +1,29 @@
+ module Dwf
+ module Concerns
+ module Checkable
+ def no_dependencies?
+ incoming.empty?
+ end
+
+ def leaf?
+ outgoing.empty?
+ end
+
+ def ready_to_start?
+ !running? && !enqueued? && !finished? && !failed? && parents_succeeded?
+ end
+
+ def succeeded?
+ finished? && !failed?
+ end
+
+ def running?
+ started? && !finished?
+ end
+
+ def started?
+ !!started_at
+ end
+ end
+ end
+ end
@@ -9,7 +9,7 @@ module Dwf
 
  def initialize(hash = {})
  @namespace = hash.fetch(:namespace, NAMESPACE)
- @redis_opts = hash.fetch(:redis_url, REDIS_OPTS)
+ @redis_opts = hash.fetch(:redis_opts, REDIS_OPTS)
  end
  end
  end
data/lib/dwf/errors.rb ADDED
@@ -0,0 +1,5 @@
+ module Dwf
+ class WorkflowNotFound < StandardError; end
+
+ class UnsupportCallback < StandardError; end
+ end
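A short usage sketch tying the new error class to the `find` API from the 0.1.10 changelog entry (the missing id below is illustrative): `Dwf::Client#find_workflow` raises `Dwf::WorkflowNotFound` when no workflow with the given id is persisted in Redis.

```ruby
begin
  wf = Dwf::Workflow.find('nonexistent-id')
rescue Dwf::WorkflowNotFound => e
  puts e.message # => "Workflow with given id doesn't exist"
end
```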