dwf 0.1.5 → 0.1.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/build_gem.yaml +2 -2
- data/.github/workflows/test.yaml +2 -2
- data/CHANGELOG.md +99 -0
- data/README.md +60 -10
- data/dwf.gemspec +2 -1
- data/lib/dwf/callback.rb +32 -9
- data/lib/dwf/client.rb +7 -1
- data/lib/dwf/configuration.rb +15 -0
- data/lib/dwf/item.rb +43 -17
- data/lib/dwf/version.rb +5 -0
- data/lib/dwf/worker.rb +3 -1
- data/lib/dwf.rb +9 -1
- data/spec/dwf/client_spec.rb +154 -0
- data/spec/dwf/configuration_spec.rb +12 -0
- data/spec/dwf/item_spec.rb +37 -1
- data/spec/dwf/worker_spec.rb +32 -0
- metadata +25 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d37bac690df59157001462d6017a8a63d3956a280689de789d9a99163a476832
+  data.tar.gz: 935d76e99ff5ca109f00be80c01950c20272969f556fc70d0e1ef5c026c024e4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c105d178cd44f12577ee9eeed5065768a23adc508da7d686e50c46d26e4eed9c160dc71b10942ab8041e38f6c732384742f40fa9d3076e0a924fbdfb1cd62845
+  data.tar.gz: ad9c6f1041f092847364e91408887be311435be62e0502ab54f2b408c92fc598a879792eabb00c81b05d5c97fb37ea4a19136765d54b0d4efe9dbdb9ad2f64b5
data/.github/workflows/test.yaml
CHANGED
data/CHANGELOG.md
ADDED
@@ -0,0 +1,99 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+## 0.1.9
+### Added
+### Fixed
+- fix incorrect argument at configuration
+
+## 0.1.8
+### Added
+- add pinlining feature
+
+```ruby
+class SendOutput < Dwf::Item
+  def perform
+    output('it works')
+  end
+end
+
+```
+
+`output` method used to output data from the job to add outgoing jobs
+
+```ruby
+class ReceiveOutput < Dwf::Item
+  def perform
+    message = payloads.first[:output] # it works
+  end
+end
+```
+
+`payloads` is an array that containing outputs from incoming jobs
+
+```
+[
+  {
+    id: "SendOutput|1849a3f9-5fce-401e-a73a-91fc1048356",
+    class: "SendOutput",
+    output: 'it works'
+  }
+]
+```
+
+```ruby
+Dwf.config do |config|
+  config.opts = { url 'redis://127.0.0.1:6379' }
+  config.namespace = 'dwf'
+end
+```
+
+## 0.1.7
+### Added
+- Allow to config redis and queue
+
+```ruby
+Dwf.config do |config|
+  config.opts = { url 'redis://127.0.0.1:6379' }
+  config.namespace = 'dwf'
+end
+```
+
+## 0.1.6
+### Added
+- Sidekiq batch callback: separate batches
+
+## 0.1.5
+### Added
+- add github action with build and public gem flow
+
+## 0.1.4
+### Added
+- Add testes
+- add github action
+
+### Fixed
+- Remove Sidekiq pro by default
+
+---
+## 0.1.3
+### Added
+- Support both build in and [Sidekiq batches](https://github.com/mperham/sidekiq/wiki/Batches) callback
+- Update readme
+
+### Fixed
+- Fix bug require development gem
+
+---
+## 0.1.2
+### Added
+- Support [Sidekiq batches](https://github.com/mperham/sidekiq/wiki/Batches) callback
+- Update readme
+
+### Fixed
+- fix typo and remove development gem
+
+---
+## 0.1.0
+### Added
+- init app with basic idea following [Gush](https://github.com/chaps-io/gush) concept
+- Support build in callback
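A note on the configuration snippet in the 0.1.7 and 0.1.8 changelog entries above: `config.opts = { url 'redis://127.0.0.1:6379' }` is not valid Ruby (the hash key is missing its colon), and `opts` does not match the `redis_opts` accessor that `Dwf::Configuration` defines later in this diff. A minimal corrected sketch, assuming only the API added in this release, would look like:

```ruby
require 'dwf' # assumes the gem is installed

# Hedged sketch based on lib/dwf/configuration.rb from this diff,
# not an excerpt from the published changelog.
Dwf.config do |config|
  config.redis_opts = { url: 'redis://127.0.0.1:6379' } # forwarded to Redis.new
  config.namespace  = 'dwf'                             # default Sidekiq queue fallback (see Dwf::Item#perform_async below)
end
```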
data/README.md
CHANGED
@@ -4,7 +4,7 @@
 # Installation
 ## 1. Add `dwf` to Gemfile
 ```ruby
-gem 'dwf', '~> 0.1.
+gem 'dwf', '~> 0.1.9'
 ```
 ## 2. Execute flow
 ### Declare jobs
@@ -38,19 +38,20 @@ class TestWf < Dwf::Workflow
 end
 ```

-#### Note
-`dwf` supports 2 type of callback `Dwf::Workflow::BUILD_IN` and `Dwf::Workflow::SK_BATCH`
-- `Dwf::Workflow::BUILD_IN` is a build-in callback
-- `Dwf::Workflow::SK_BATCH` is [sidekiq batch](https://github.com/mperham/sidekiq/wiki/Batches) callback which required [`sidekiq-pro`](https://sidekiq.org/products/pro.html)
-
-By default `dwf` will use `Dwf::Workflow::BUILD_IN` callback.

 ### Execute flow
 ```ruby
-wf = TestWf.create
+wf = TestWf.create(callback_type: Dwf::Workflow::SK_BATCH)
 wf.start!
 ```

+#### Note
+`dwf` supports 2 callback types `Dwf::Workflow::BUILD_IN` and `Dwf::Workflow::SK_BATCH`
+- `Dwf::Workflow::BUILD_IN` is a build-in callback
+- `Dwf::Workflow::SK_BATCH` is [sidekiq batch](https://github.com/mperham/sidekiq/wiki/Batches) callback which required [`sidekiq-pro`](https://sidekiq.org/products/pro.html)
+
+By default `dwf` will use `Dwf::Workflow::BUILD_IN` callback.
+
 ### Output
 ```
 A Working
@@ -70,13 +71,62 @@ D say hello
 D Finished
 ```

+# Config redis and default queue
+`dwf` uses redis as the key value stograge through [redis-rb](https://github.com/redis/redis-rb), So you can pass redis configuration by `redis_opts`
+```ruby
+Dwf.config do |config|
+  SENTINELS = [
+    { host: "127.0.0.1", port: 26380 },
+    { host: "127.0.0.1", port: 26381 }
+  ]
+  config.redis_opts = { host: 'mymaster', sentinels: SENTINELS, role: :master }
+  config.namespace = 'dwf'
+end
+```
+
+# Pinelining
+You can pass jobs result to next nodes
+
+```ruby
+class SendOutput < Dwf::Item
+  def perform
+    output('it works')
+  end
+end
+
+```
+
+`output` method used to output data from the job to add outgoing jobs
+
+```ruby
+class ReceiveOutput < Dwf::Item
+  def perform
+    message = payloads.first[:output] # it works
+  end
+end
+```
+
+`payloads` is an array that containing outputs from incoming jobs
+
+```ruby
+[
+  {
+    id: "SendOutput|1849a3f9-5fce-401e-a73a-91fc1048356",
+    class: "SendOutput",
+    output: 'it works'
+  }
+]
+```
+
 # Todo
 - [x] Make it work
 - [x] Support pass params
 - [x] Support with build-in callback
 - [x] Add github workflow
-- [
-- [
+- [x] Redis configurable
+- [x] Pinelining
+- [ ] [WIP] Test
+- [ ] Support [Resque](https://github.com/resque/resque)

 # References
 - https://github.com/chaps-io/gush
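The `payloads` structure documented in the README above lends itself to simple fan-in jobs. The following is an illustrative sketch only (the job class and numeric outputs are invented, not taken from the README); it assumes the `[{ id:, class:, output: }]` shape shown in the Pinelining section:

```ruby
require 'dwf' # assumes the gem is installed

# Hypothetical downstream job that aggregates the outputs of all incoming jobs.
class SumResults < Dwf::Item
  def perform
    # payloads => [{ id: "...", class: "...", output: <value> }, ...]
    total = payloads.sum { |payload| payload[:output].to_i }
    output(total) # forward the aggregate to this job's own outgoing jobs
  end
end
```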
data/dwf.gemspec
CHANGED
@@ -6,7 +6,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

 Gem::Specification.new do |spec|
   spec.name = "dwf"
-  spec.version = '0.1.
+  spec.version = '0.1.9'
   spec.authors = ["dthtien"]
   spec.email = ["tiendt2311@gmail.com"]

@@ -27,5 +27,6 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency 'byebug', '~> 11.1.3'
   spec.add_dependency 'redis', '~> 4.2.0'
   spec.add_development_dependency 'rspec', '~> 3.2'
+  spec.add_development_dependency 'mock_redis', '~> 0.27.2'
   spec.add_dependency 'sidekiq', '~> 6.2.0'
 end
data/lib/dwf/callback.rb
CHANGED
@@ -1,7 +1,10 @@
+# frozen_string_literal: true
 require_relative 'client'

 module Dwf
   class Callback
+    DEFAULT_KEY = 'default_key'
+
     def process_next_step(status, options)
       previous_job_names = options['names']
       workflow_id = options['workflow_id']
@@ -12,7 +15,7 @@ module Dwf
       return if processing_job_names.empty?

       overall = Sidekiq::Batch.new(status.parent_bid)
-      overall.jobs {
+      overall.jobs { setup_batches(processing_job_names, workflow_id) }
     end

     def start(job)
@@ -21,25 +24,45 @@ module Dwf

     private

-    def
+    def setup_batches(processing_job_names, workflow_id)
+      jobs = fetch_jobs(processing_job_names, workflow_id)
+      jobs_classification = classify_jobs jobs
+
+      jobs_classification.each do |key, batch_jobs|
+        with_lock workflow_id, key do
+          setup_batch(batch_jobs, workflow_id)
+        end
+      end
+    end
+
+    def setup_batch(jobs, workflow_id)
       batch = Sidekiq::Batch.new
       batch.on(
         :success,
         'Dwf::Callback#process_next_step',
-        names:
+        names: jobs.map(&:klass),
         workflow_id: workflow_id
       )
-
       batch.jobs do
-
+        jobs.each { |job| job.persist_and_perform_async! if job.ready_to_start? }
       end
     end

-    def
-
-
-
+    def classify_jobs(jobs)
+      hash = {}
+      jobs.each do |job|
+        outgoing_jobs = job.outgoing
+        key = outgoing_jobs.empty? ? DEFAULT_KEY : outgoing_jobs.join
+        hash[key] = hash[key].nil? ? [job] : hash[key].push(job)
       end
+
+      hash
+    end
+
+    def fetch_jobs(processing_job_names, workflow_id)
+      processing_job_names.map do |job_name|
+        client.find_job(workflow_id, job_name)
+      end.compact
     end

     def with_lock(workflow_id, job_name)
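The `classify_jobs` method introduced above groups pending jobs by their `outgoing` list (joined into a string key, with `default_key` for jobs that have no outgoing jobs), so that each group gets its own Sidekiq batch in `setup_batches`. A standalone sketch of that grouping, using a stand-in Struct rather than `Dwf::Item` and Ruby's equivalent `group_by`:

```ruby
# Stand-in for Dwf::Item; only the fields the grouping needs.
Job = Struct.new(:klass, :outgoing)

jobs = [
  Job.new('FetchA', ['Merge|1']),
  Job.new('FetchB', ['Merge|1']),
  Job.new('Notify', [])
]

# Equivalent to Dwf::Callback#classify_jobs: key by the joined outgoing list,
# or 'default_key' when there are no outgoing jobs.
groups = jobs.group_by { |job| job.outgoing.empty? ? 'default_key' : job.outgoing.join }

groups.each { |key, batch| puts "#{key}: #{batch.map(&:klass).join(', ')}" }
# Merge|1: FetchA, FetchB
# default_key: Notify
```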
data/lib/dwf/client.rb
CHANGED
@@ -1,5 +1,11 @@
 module Dwf
   class Client
+    attr_reader :config
+
+    def initialize(config = Dwf.configuration)
+      @config = config
+    end
+
     def find_job(workflow_id, job_name)
       job_name_match = /(?<klass>\w*[^-])-(?<identifier>.*)/.match(job_name)
       data = if job_name_match
@@ -94,7 +100,7 @@ module Dwf
     end

     def redis
-      @redis ||= Redis.new
+      @redis ||= Redis.new(config.redis_opts)
     end
   end
 end
data/lib/dwf/configuration.rb
ADDED
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+module Dwf
+  class Configuration
+    NAMESPACE = 'dwf'
+    REDIS_OPTS = { url: 'redis://localhost:6379' }.freeze
+
+    attr_accessor :redis_opts, :namespace
+
+    def initialize(hash = {})
+      @namespace = hash.fetch(:namespace, NAMESPACE)
+      @redis_opts = hash.fetch(:redis_opts, REDIS_OPTS)
+    end
+  end
+end
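For reference, a small sketch of how the defaults above resolve and how they reach Redis; the override values are invented for illustration:

```ruby
require 'dwf' # assumes the gem is installed

config = Dwf::Configuration.new
config.namespace   # => "dwf"
config.redis_opts  # => { url: "redis://localhost:6379" }

# #initialize uses Hash#fetch, so either attribute can be overridden at build time...
custom = Dwf::Configuration.new(namespace: 'reports')

# ...or globally via Dwf.config; Dwf::Client#redis then calls
# Redis.new(config.redis_opts) with whatever is configured here.
Dwf.config { |c| c.redis_opts = { url: 'redis://10.0.0.5:6379' } }
```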
data/lib/dwf/item.rb
CHANGED
@@ -1,27 +1,15 @@
 # frozen_string_literal: true
+
 require_relative 'client'

 module Dwf
   class Item
-    DEFAULT_QUEUE = 'default'
-
     attr_reader :workflow_id, :id, :params, :queue, :klass, :started_at,
-                :enqueued_at, :finished_at, :failed_at, :callback_type
+                :enqueued_at, :finished_at, :failed_at, :callback_type, :output_payload
     attr_accessor :incoming, :outgoing

     def initialize(options = {})
-
-      @id = options[:id]
-      @params = options[:params]
-      @queue = options[:queue] || DEFAULT_QUEUE
-      @incoming = options[:incoming] || []
-      @outgoing = options[:outgoing] || []
-      @klass = options[:klass] || self.class
-      @failed_at = options[:failed_at]
-      @finished_at = options[:finished_at]
-      @enqueued_at = options[:enqueued_at]
-      @started_at = options[:started_at]
-      @callback_type = options[:callback_type]
+      assign_attributes(options)
     end

     def self.from_hash(hash)
@@ -40,14 +28,24 @@ module Dwf
       callback_type == Dwf::Workflow::BUILD_IN
     end

+    def reload
+      item = client.find_job(workflow_id, name)
+      assign_attributes(item.to_hash)
+    end
+
     def perform_async
-      Dwf::Worker.set(queue: queue
+      Dwf::Worker.set(queue: queue || client.config.namespace)
+        .perform_async(workflow_id, name)
     end

     def name
       @name ||= "#{klass}|#{id}"
     end

+    def output(data)
+      @output_payload = data
+    end
+
     def no_dependencies?
       incoming.empty?
     end
@@ -58,6 +56,17 @@ module Dwf
       end
     end

+    def payloads
+      incoming.map do |job_name|
+        job = client.find_job(workflow_id, job_name)
+        {
+          id: job.name,
+          class: job.klass.to_s,
+          output: job.output_payload
+        }
+      end
+    end
+
     def enqueue!
       @enqueued_at = current_timestamp
       @started_at = nil
@@ -142,7 +151,8 @@ module Dwf
         failed_at: failed_at,
         params: params,
         workflow_id: workflow_id,
-        callback_type: callback_type
+        callback_type: callback_type,
+        output_payload: output_payload
       }
     end

@@ -159,5 +169,21 @@ module Dwf
     def client
       @client ||= Dwf::Client.new
     end
+
+    def assign_attributes(options)
+      @workflow_id = options[:workflow_id]
+      @id = options[:id]
+      @params = options[:params]
+      @queue = options[:queue]
+      @incoming = options[:incoming] || []
+      @outgoing = options[:outgoing] || []
+      @klass = options[:klass] || self.class
+      @failed_at = options[:failed_at]
+      @finished_at = options[:finished_at]
+      @enqueued_at = options[:enqueued_at]
+      @started_at = options[:started_at]
+      @callback_type = options[:callback_type]
+      @output_payload = options[:output_payload]
+    end
   end
 end
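Taken together, `output`, `to_hash`/`assign_attributes`, and `payloads` give the pipelining round trip: a finished job stores its `output_payload`, and each downstream job rebuilds its upstream items through `client.find_job` and reads their outputs. A hedged sketch with invented job names:

```ruby
require 'dwf' # assumes the gem is installed

# Hypothetical upstream job: #output stores the value in @output_payload,
# which #to_hash persists as :output_payload.
class FetchStatus < Dwf::Item
  def perform
    output(status: 200)
  end
end

# Hypothetical downstream job: #payloads maps each incoming job name to
# { id:, class:, output: } via client.find_job.
class ReportStatus < Dwf::Item
  def perform
    upstream = payloads.first
    puts "#{upstream[:class]} finished with #{upstream[:output][:status]}"
  end
end
```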
data/lib/dwf/version.rb
ADDED
data/lib/dwf/worker.rb
CHANGED
@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require 'sidekiq'
 require_relative 'client'

@@ -7,7 +9,7 @@ module Dwf

     def perform(workflow_id, job_name)
       job = client.find_job(workflow_id, job_name)
-      return job.enqueue_outgoing_jobs if job.
+      return job.enqueue_outgoing_jobs if job.succeeded?

       job.mark_as_started
       job.perform
data/lib/dwf.rb
CHANGED
@@ -1,4 +1,5 @@
 # frozen_string_literal: true
+
 require "bundler/setup"

 require 'sidekiq'
@@ -12,8 +13,15 @@ require_relative 'dwf/item'
 require_relative 'dwf/client'
 require_relative 'dwf/worker'
 require_relative 'dwf/callback'
+require_relative 'dwf/configuration'

 module Dwf
-
+  def self.configuration
+    @configuration ||= Configuration.new
+  end
+
+  def self.config
+    yield configuration
+  end
 end

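`Dwf.configuration` memoizes a single `Configuration` instance and `Dwf.config` only yields it, so settings applied in an initializer are visible to every `Dwf::Client` built afterwards (its `initialize` defaults to `Dwf.configuration`, as shown earlier in this diff). A quick illustrative check, with an invented namespace:

```ruby
require 'dwf' # assumes the gem is installed

Dwf.config { |c| c.namespace = 'analytics' } # illustrative value

Dwf.configuration.namespace                       # => "analytics"
Dwf::Client.new.config.equal?(Dwf.configuration)  # => true (same memoized object)
```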
data/spec/dwf/client_spec.rb
ADDED
@@ -0,0 +1,154 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'mock_redis'
+
+describe Dwf::Client, client: true do
+  let(:client) { described_class.new }
+  let(:workflow_id) { SecureRandom.uuid }
+  let(:id) { SecureRandom.uuid }
+  let(:redis) { Redis.new }
+  before do
+    redis_instance = MockRedis.new
+    allow(Redis).to receive(:new).and_return redis_instance
+  end
+
+  describe '#find_job' do
+    let!(:job) do
+      j = Dwf::Item.new(workflow_id: workflow_id, id: id)
+      j.persist!
+      j
+    end
+
+    context 'find by item class name' do
+      it {
+        item = client.find_job(workflow_id, Dwf::Item.name)
+        expect(item.workflow_id).to eq workflow_id
+        expect(item.id).to eq id
+        expect(item.name).to eq job.name
+      }
+    end
+
+    context 'find by item name' do
+      it {
+        item = client.find_job(workflow_id, job.name)
+        expect(item.workflow_id).to eq workflow_id
+        expect(item.id).to eq id
+        expect(item.name).to eq job.name
+      }
+    end
+  end
+
+  describe '#persist_job' do
+    let!(:job) { Dwf::Item.new(workflow_id: workflow_id, id: id) }
+
+    it do
+      expect(redis.exists?("dwf.jobs.#{job.workflow_id}.#{job.klass}"))
+        .to be_falsy
+
+      client.persist_job(job)
+
+      expect(redis.exists?("dwf.jobs.#{job.workflow_id}.#{job.klass}"))
+        .to be_truthy
+    end
+  end
+
+  describe '#persist_workflow' do
+    let(:workflow) { Dwf::Workflow.new }
+
+    it do
+      expect(redis.exists?("dwf.workflows.#{workflow.id}")).to be_falsy
+      client.persist_workflow(workflow)
+      expect(redis.exists?("dwf.workflows.#{workflow.id}")).to be_truthy
+    end
+  end
+
+  describe '#check_or_lock' do
+    before do
+      allow_any_instance_of(described_class).to receive(:sleep)
+    end
+
+    context 'job is running' do
+      let(:job_name) { 'ahihi' }
+
+      before do
+        allow(client).to receive(:set)
+        redis.set("wf_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}", 'running')
+        client.check_or_lock(workflow_id, job_name)
+      end
+
+      it { expect(client).not_to have_received(:set) }
+    end
+
+    context 'job is not running' do
+      let(:job_name) { 'ahihi' }
+
+      before do
+        allow(redis).to receive(:set)
+        client.check_or_lock(workflow_id, job_name)
+      end
+
+      it do
+        expect(redis).to have_received(:set)
+          .with("wf_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}", 'running')
+      end
+    end
+  end
+
+  describe '#release_lock' do
+    before do
+      allow(redis).to receive(:del)
+      client.release_lock(workflow_id, 'ahihi')
+    end
+
+    it do
+      expect(redis).to have_received(:del)
+        .with("dwf_enqueue_outgoing_jobs_#{workflow_id}-ahihi")
+    end
+  end
+
+  describe '#build_job_id' do
+    before do
+      allow(redis).to receive(:hexists)
+      client.build_job_id(workflow_id, 'ahihi')
+    end
+
+    it { expect(redis).to have_received(:hexists) }
+  end
+
+  describe '#build_workflow_id' do
+    before do
+      allow(redis).to receive(:exists?)
+      client.build_workflow_id
+    end
+
+    it { expect(redis).to have_received(:exists?) }
+  end
+
+  describe '#key_exists?' do
+    before do
+      allow(redis).to receive(:exists?)
+      client.key_exists?('ahihi')
+    end
+
+    it { expect(redis).to have_received(:exists?).with('ahihi') }
+  end
+
+  describe '#set' do
+    before do
+      allow(redis).to receive(:set)
+      client.set('ahihi', 'a')
+    end
+
+    it { expect(redis).to have_received(:set).with('ahihi', 'a') }
+  end
+
+  describe '#delete' do
+    before do
+      allow(redis).to receive(:del)
+      client.delete('ahihi')
+    end
+
+    it { expect(redis).to have_received(:del).with('ahihi') }
+  end
+end
data/spec/dwf/configuration_spec.rb
ADDED
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Dwf::Configuration, configuration: true do
+  let(:configuration) { described_class.new }
+
+  specify do
+    expect(configuration.namespace).to eq described_class::NAMESPACE
+    expect(configuration.redis_opts).to eq described_class::REDIS_OPTS
+  end
+end
data/spec/dwf/item_spec.rb
CHANGED
@@ -16,7 +16,7 @@ describe Dwf::Item, item: true do
       params: {},
       incoming: incoming,
       outgoing: outgoing,
-      queue: Dwf::
+      queue: Dwf::Configuration::NAMESPACE,
       klass: 'Dwf::Item',
       started_at: started_at,
       finished_at: finished_at,
@@ -181,4 +181,40 @@ describe Dwf::Item, item: true do
       it { expect(a_item).not_to have_received(:persist_and_perform_async!) }
     end
   end
+
+  describe '#output' do
+    before { item.output(1) }
+
+    it { expect(item.output_payload).to eq 1 }
+  end
+
+  describe '#payloads' do
+    let(:incoming) { ["A|#{SecureRandom.uuid}"] }
+    let(:client_double) { double(find_job: nil) }
+    let!(:a_item) do
+      described_class.new(
+        workflow_id: SecureRandom.uuid,
+        id: SecureRandom.uuid,
+        finished_at: finished_at,
+        output_payload: 1
+      )
+    end
+
+    before do
+      allow(Dwf::Client).to receive(:new).and_return client_double
+      allow(client_double)
+        .to receive(:find_job).and_return a_item
+    end
+
+    it do
+      expected_payload = [
+        {
+          class: a_item.class.name,
+          id: a_item.name,
+          output: 1
+        }
+      ]
+      expect(item.payloads).to eq expected_payload
+    end
+  end
 end
data/spec/dwf/worker_spec.rb
ADDED
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'mock_redis'
+require 'sidekiq/testing'
+
+describe Dwf::Worker, client: true do
+  let(:workflow_id) { SecureRandom.uuid }
+  let(:id) { SecureRandom.uuid }
+  let(:redis) { Redis.new }
+  let(:worker) { described_class.perform_async(workflow_id, job.name) }
+  before do
+    redis_instance = MockRedis.new
+    allow(Redis).to receive(:new).and_return redis_instance
+  end
+
+  describe '#find_job' do
+    let!(:job) do
+      j = Dwf::Item.new(workflow_id: workflow_id, id: id)
+      j.persist!
+      j
+    end
+
+    before do
+      worker
+      Sidekiq::Worker.drain_all
+      job.reload
+    end
+
+    it { expect(job.finished?).to be_truthy }
+  end
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: dwf
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.9
 platform: ruby
 authors:
 - dthtien
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-09-
+date: 2021-09-10 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: byebug
@@ -52,6 +52,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '3.2'
+- !ruby/object:Gem::Dependency
+  name: mock_redis
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.27.2
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.27.2
 - !ruby/object:Gem::Dependency
   name: sidekiq
   requirement: !ruby/object:Gem::Requirement
@@ -78,6 +92,7 @@ files:
 - ".gitignore"
 - ".rspec"
 - ".ruby-version"
+- CHANGELOG.md
 - Gemfile
 - LICENSE.txt
 - README.md
@@ -85,12 +100,17 @@ files:
 - lib/dwf.rb
 - lib/dwf/callback.rb
 - lib/dwf/client.rb
+- lib/dwf/configuration.rb
 - lib/dwf/item.rb
 - lib/dwf/utils.rb
+- lib/dwf/version.rb
 - lib/dwf/worker.rb
 - lib/dwf/workflow.rb
+- spec/dwf/client_spec.rb
+- spec/dwf/configuration_spec.rb
 - spec/dwf/item_spec.rb
 - spec/dwf/utils_spec.rb
+- spec/dwf/worker_spec.rb
 - spec/spec_helper.rb
 homepage: https://github.com/dthtien/wf
 licenses:
@@ -117,6 +137,9 @@ specification_version: 4
 summary: Gush cloned without ActiveJob but requried Sidekiq. This project is for researching
   DSL purpose
 test_files:
+- spec/dwf/client_spec.rb
+- spec/dwf/configuration_spec.rb
 - spec/dwf/item_spec.rb
 - spec/dwf/utils_spec.rb
+- spec/dwf/worker_spec.rb
 - spec/spec_helper.rb