gush 1.1.1 → 2.0.2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 9cfd59c5cadd225e30c41bcd7a5293893d78041056409c2e1fd15316d2cc4d75
4
- data.tar.gz: ef3e28f72d5ed90bb175f9452e189111fbc5bb95facc9585df1c660fd691038f
3
+ metadata.gz: 12fdb9a62d33353f827194c198c011ff42d7491669d0ab9717be0920a8066313
4
+ data.tar.gz: 7f3d4ada23215e818f0cb801c92f4752e80b21b344f3b48d9ab4b83787c029ec
5
5
  SHA512:
6
- metadata.gz: 38578ba5a3f159577d38d6af662137fc5c9b95a394c01f498b22d6ff2931fc3e78eb9634f9c0ae1da683d197412f0ee38c43efa6035d223f1ece1d09b483cb9d
7
- data.tar.gz: bf8954ea6d409a1fc9db19e24c4dae322f15cf2d3e9240df17d2ff847503318fbdcf0f50b27c08910273fb6dfc3223919f79679d1986670b0483b2b27f627384
6
+ metadata.gz: bec4bcb3e251bdb1a2e184b6b62ae896fd40af4a0cd4bad2ec7ee4925ab356e36a3388d2cb78ed21582c0f6cfdd429c304f0591a5ee3760c73255d03a2b1d80e
7
+ data.tar.gz: 67fba0b65b575449114233a4ff9cfaf1cb6ac3ff61633f94b7c595988165b3315fe7ae5d0744792315d9329795da0e42ae840789e3d4f72c84ef8956081c5fbd
data/.gitignore CHANGED
@@ -19,3 +19,5 @@ tmp
19
19
  test.rb
20
20
  /Gushfile
21
21
  dump.rdb
22
+ .ruby-version
23
+ .ruby-gemset
data/.travis.yml CHANGED
@@ -4,6 +4,9 @@ rvm:
4
4
  - 2.2.2
5
5
  - 2.3.4
6
6
  - 2.4.1
7
+ - 2.5
8
+ - 2.6
9
+ - 2.7
7
10
  services:
8
11
  - redis-server
9
12
  email:
data/CHANGELOG.md CHANGED
@@ -5,6 +5,24 @@ All notable changes to this project will be documented in this file.
5
5
  The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
6
6
  and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
7
7
 
8
+ ## 2.0.1
9
+
10
+ ### Fixed
11
+
12
+ - Fix bug when retried jobs didn't correctly reset their failed flag when ran again (Thanks to @theo-delaune-argus and @mickael-palma-argus! [See issue](https://github.com/chaps-io/gush/issues/61))
13
+
14
+ ## 2.0.0
15
+
16
+ ### Changed
17
+
18
+ - *[BREAKING]* Store gush jobs on redis hash instead of plain keys - this improves performance when retrieving keys (Thanks to @Saicheg! [See pull request](https://github.com/chaps-io/gush/pull/56))
19
+
20
+
21
+ ### Added
22
+
23
+ - Allow setting queue for each job via `:queue` option in `run` method (Thanks to @devilankur18! [See pull request](https://github.com/chaps-io/gush/pull/58))
24
+
25
+
8
26
  ## 1.1.1 - 2018-06-09
9
27
 
10
28
  ## Changed
data/README.md CHANGED
@@ -1,7 +1,5 @@
1
1
  # Gush [![Build Status](https://travis-ci.org/chaps-io/gush.svg?branch=master)](https://travis-ci.org/chaps-io/gush)
2
2
 
3
- ## [![](http://i.imgur.com/ya8Wnyl.png)](https://chaps.io) proudly made by [Chaps](https://chaps.io)
4
-
5
3
  Gush is a parallel workflow runner using only Redis as storage and [ActiveJob](http://guides.rubyonrails.org/v4.2/active_job_basics.html#introduction) for scheduling and executing jobs.
6
4
 
7
5
  ## Theory
data/gush.gemspec CHANGED
@@ -4,7 +4,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4
4
 
5
5
  Gem::Specification.new do |spec|
6
6
  spec.name = "gush"
7
- spec.version = "1.1.1"
7
+ spec.version = "2.0.2"
8
8
  spec.authors = ["Piotrek Okoński"]
9
9
  spec.email = ["piotrek@okonski.org"]
10
10
  spec.summary = "Fast and distributed workflow runner based on ActiveJob and Redis"
@@ -17,17 +17,18 @@ Gem::Specification.new do |spec|
17
17
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
18
18
  spec.require_paths = ["lib"]
19
19
 
20
- spec.add_dependency "activejob", ">= 4.2.7", "< 6.0"
21
- spec.add_dependency "connection_pool", "~> 2.2.1"
20
+ spec.add_dependency "activejob", ">= 4.2.7", "< 7.0"
21
+ spec.add_dependency "concurrent-ruby", "~> 1.0"
22
22
  spec.add_dependency "multi_json", "~> 1.11"
23
23
  spec.add_dependency "redis", ">= 3.2", "< 5"
24
+ spec.add_dependency "redis-mutex", "~> 4.0.1"
24
25
  spec.add_dependency "hiredis", "~> 0.6"
25
26
  spec.add_dependency "ruby-graphviz", "~> 1.2"
26
27
  spec.add_dependency "terminal-table", "~> 1.4"
27
28
  spec.add_dependency "colorize", "~> 0.7"
28
29
  spec.add_dependency "thor", "~> 0.19"
29
30
  spec.add_dependency "launchy", "~> 2.4"
30
- spec.add_development_dependency "bundler", "~> 1.5"
31
+ spec.add_development_dependency "bundler"
31
32
  spec.add_development_dependency "rake", "~> 10.4"
32
33
  spec.add_development_dependency "rspec", '~> 3.0'
33
34
  spec.add_development_dependency "pry", '~> 0.10'
data/lib/gush/client.rb CHANGED
@@ -1,9 +1,22 @@
1
- require 'connection_pool'
1
+ require 'redis'
2
+ require 'concurrent-ruby'
2
3
 
3
4
  module Gush
4
5
  class Client
5
6
  attr_reader :configuration
6
7
 
8
+ @@redis_connection = Concurrent::ThreadLocalVar.new(nil)
9
+
10
+ def self.redis_connection(config)
11
+ cached = (@@redis_connection.value ||= { url: config.redis_url, connection: nil })
12
+ return cached[:connection] if !cached[:connection].nil? && config.redis_url == cached[:url]
13
+
14
+ Redis.new(url: config.redis_url).tap do |instance|
15
+ RedisClassy.redis = instance
16
+ @@redis_connection.value = { url: config.redis_url, connection: instance }
17
+ end
18
+ end
19
+
7
20
  def initialize(config = Gush.configuration)
8
21
  @configuration = config
9
22
  end
@@ -42,28 +55,24 @@ module Gush
42
55
  persist_workflow(workflow)
43
56
  end
44
57
 
45
- def next_free_job_id(workflow_id,job_klass)
46
- job_identifier = nil
58
+ def next_free_job_id(workflow_id, job_klass)
59
+ job_id = nil
60
+
47
61
  loop do
48
- id = SecureRandom.uuid
49
- job_identifier = "#{job_klass}-#{id}"
50
- available = connection_pool.with do |redis|
51
- !redis.exists("gush.jobs.#{workflow_id}.#{job_identifier}")
52
- end
62
+ job_id = SecureRandom.uuid
63
+ available = !redis.hexists("gush.jobs.#{workflow_id}.#{job_klass}", job_id)
53
64
 
54
65
  break if available
55
66
  end
56
67
 
57
- job_identifier
68
+ job_id
58
69
  end
59
70
 
60
71
  def next_free_workflow_id
61
72
  id = nil
62
73
  loop do
63
74
  id = SecureRandom.uuid
64
- available = connection_pool.with do |redis|
65
- !redis.exists("gush.workflow.#{id}")
66
- end
75
+ available = !redis.exists("gush.workflow.#{id}")
67
76
 
68
77
  break if available
69
78
  end
@@ -72,58 +81,50 @@ module Gush
72
81
  end
73
82
 
74
83
  def all_workflows
75
- connection_pool.with do |redis|
76
- redis.scan_each(match: "gush.workflows.*").map do |key|
77
- id = key.sub("gush.workflows.", "")
78
- find_workflow(id)
79
- end
84
+ redis.scan_each(match: "gush.workflows.*").map do |key|
85
+ id = key.sub("gush.workflows.", "")
86
+ find_workflow(id)
80
87
  end
81
88
  end
82
89
 
83
90
  def find_workflow(id)
84
- connection_pool.with do |redis|
85
- data = redis.get("gush.workflows.#{id}")
86
-
87
- unless data.nil?
88
- hash = Gush::JSON.decode(data, symbolize_keys: true)
89
- keys = redis.scan_each(match: "gush.jobs.#{id}.*")
90
- nodes = redis.mget(*keys).map { |json| Gush::JSON.decode(json, symbolize_keys: true) }
91
- workflow_from_hash(hash, nodes)
92
- else
93
- raise WorkflowNotFound.new("Workflow with given id doesn't exist")
91
+ data = redis.get("gush.workflows.#{id}")
92
+
93
+ unless data.nil?
94
+ hash = Gush::JSON.decode(data, symbolize_keys: true)
95
+ keys = redis.scan_each(match: "gush.jobs.#{id}.*")
96
+
97
+ nodes = keys.each_with_object([]) do |key, array|
98
+ array.concat redis.hvals(key).map { |json| Gush::JSON.decode(json, symbolize_keys: true) }
94
99
  end
100
+
101
+ workflow_from_hash(hash, nodes)
102
+ else
103
+ raise WorkflowNotFound.new("Workflow with given id doesn't exist")
95
104
  end
96
105
  end
97
106
 
98
107
  def persist_workflow(workflow)
99
- connection_pool.with do |redis|
100
- redis.set("gush.workflows.#{workflow.id}", workflow.to_json)
101
- end
108
+ redis.set("gush.workflows.#{workflow.id}", workflow.to_json)
102
109
 
103
110
  workflow.jobs.each {|job| persist_job(workflow.id, job) }
104
111
  workflow.mark_as_persisted
112
+
105
113
  true
106
114
  end
107
115
 
108
116
  def persist_job(workflow_id, job)
109
- connection_pool.with do |redis|
110
- redis.set("gush.jobs.#{workflow_id}.#{job.name}", job.to_json)
111
- end
117
+ redis.hset("gush.jobs.#{workflow_id}.#{job.klass}", job.id, job.to_json)
112
118
  end
113
119
 
114
- def find_job(workflow_id, job_id)
115
- job_name_match = /(?<klass>\w*[^-])-(?<identifier>.*)/.match(job_id)
116
- hypen = '-' if job_name_match.nil?
117
-
118
- keys = connection_pool.with do |redis|
119
- redis.scan_each(match: "gush.jobs.#{workflow_id}.#{job_id}#{hypen}*").to_a
120
- end
120
+ def find_job(workflow_id, job_name)
121
+ job_name_match = /(?<klass>\w*[^-])-(?<identifier>.*)/.match(job_name)
121
122
 
122
- return nil if keys.nil?
123
-
124
- data = connection_pool.with do |redis|
125
- redis.get(keys.first)
126
- end
123
+ data = if job_name_match
124
+ find_job_by_klass_and_id(workflow_id, job_name)
125
+ else
126
+ find_job_by_klass(workflow_id, job_name)
127
+ end
127
128
 
128
129
  return nil if data.nil?
129
130
 
@@ -132,42 +133,50 @@ module Gush
132
133
  end
133
134
 
134
135
  def destroy_workflow(workflow)
135
- connection_pool.with do |redis|
136
- redis.del("gush.workflows.#{workflow.id}")
137
- end
136
+ redis.del("gush.workflows.#{workflow.id}")
138
137
  workflow.jobs.each {|job| destroy_job(workflow.id, job) }
139
138
  end
140
139
 
141
140
  def destroy_job(workflow_id, job)
142
- connection_pool.with do |redis|
143
- redis.del("gush.jobs.#{workflow_id}.#{job.name}")
144
- end
141
+ redis.del("gush.jobs.#{workflow_id}.#{job.klass}")
145
142
  end
146
143
 
147
144
  def expire_workflow(workflow, ttl=nil)
148
145
  ttl = ttl || configuration.ttl
149
- connection_pool.with do |redis|
150
- redis.expire("gush.workflows.#{workflow.id}", ttl)
151
- end
146
+ redis.expire("gush.workflows.#{workflow.id}", ttl)
152
147
  workflow.jobs.each {|job| expire_job(workflow.id, job, ttl) }
153
148
  end
154
149
 
155
150
  def expire_job(workflow_id, job, ttl=nil)
156
151
  ttl = ttl || configuration.ttl
157
- connection_pool.with do |redis|
158
- redis.expire("gush.jobs.#{workflow_id}.#{job.name}", ttl)
159
- end
152
+ redis.expire("gush.jobs.#{workflow_id}.#{job.klass}", ttl)
160
153
  end
161
154
 
162
155
  def enqueue_job(workflow_id, job)
163
156
  job.enqueue!
164
157
  persist_job(workflow_id, job)
158
+ queue = job.queue || configuration.namespace
165
159
 
166
- Gush::Worker.set(queue: configuration.namespace).perform_later(*[workflow_id, job.name])
160
+ Gush::Worker.set(queue: queue).perform_later(*[workflow_id, job.name])
167
161
  end
168
162
 
169
163
  private
170
164
 
165
+ def find_job_by_klass_and_id(workflow_id, job_name)
166
+ job_klass, job_id = job_name.split('|')
167
+
168
+ redis.hget("gush.jobs.#{workflow_id}.#{job_klass}", job_id)
169
+ end
170
+
171
+ def find_job_by_klass(workflow_id, job_name)
172
+ new_cursor, result = redis.hscan("gush.jobs.#{workflow_id}.#{job_name}", 0, count: 1)
173
+ return nil if result.empty?
174
+
175
+ job_id, job = *result[0]
176
+
177
+ job
178
+ end
179
+
171
180
  def workflow_from_hash(hash, nodes = [])
172
181
  flow = hash[:klass].constantize.new(*hash[:arguments])
173
182
  flow.jobs = []
@@ -181,12 +190,8 @@ module Gush
181
190
  flow
182
191
  end
183
192
 
184
- def build_redis
185
- Redis.new(url: configuration.redis_url)
186
- end
187
-
188
- def connection_pool
189
- @connection_pool ||= ConnectionPool.new(size: configuration.concurrency, timeout: 1) { build_redis }
193
+ def redis
194
+ self.class.redis_connection(configuration)
190
195
  end
191
196
  end
192
197
  end
data/lib/gush/job.rb CHANGED
@@ -1,8 +1,8 @@
1
1
  module Gush
2
2
  class Job
3
3
  attr_accessor :workflow_id, :incoming, :outgoing, :params,
4
- :finished_at, :failed_at, :started_at, :enqueued_at, :payloads, :klass
5
- attr_reader :name, :output_payload, :params
4
+ :finished_at, :failed_at, :started_at, :enqueued_at, :payloads, :klass, :queue
5
+ attr_reader :id, :klass, :output_payload, :params
6
6
 
7
7
  def initialize(opts = {})
8
8
  options = opts.dup
@@ -11,8 +11,9 @@ module Gush
11
11
 
12
12
  def as_json
13
13
  {
14
- name: name,
15
- klass: self.class.to_s,
14
+ id: id,
15
+ klass: klass.to_s,
16
+ queue: queue,
16
17
  incoming: incoming,
17
18
  outgoing: outgoing,
18
19
  finished_at: finished_at,
@@ -25,6 +26,10 @@ module Gush
25
26
  }
26
27
  end
27
28
 
29
+ def name
30
+ @name ||= "#{klass}|#{id}"
31
+ end
32
+
28
33
  def to_json(options = {})
29
34
  Gush::JSON.encode(as_json)
30
35
  end
@@ -42,6 +47,7 @@ module Gush
42
47
 
43
48
  def start!
44
49
  @started_at = current_timestamp
50
+ @failed_at = nil
45
51
  end
46
52
 
47
53
  def enqueue!
@@ -108,7 +114,7 @@ module Gush
108
114
  end
109
115
 
110
116
  def assign_variables(opts)
111
- @name = opts[:name]
117
+ @id = opts[:id]
112
118
  @incoming = opts[:incoming] || []
113
119
  @outgoing = opts[:outgoing] || []
114
120
  @failed_at = opts[:failed_at]
@@ -116,9 +122,10 @@ module Gush
116
122
  @started_at = opts[:started_at]
117
123
  @enqueued_at = opts[:enqueued_at]
118
124
  @params = opts[:params] || {}
119
- @klass = opts[:klass]
125
+ @klass = opts[:klass] || self.class
120
126
  @output_payload = opts[:output_payload]
121
127
  @workflow_id = opts[:workflow_id]
128
+ @queue = opts[:queue]
122
129
  end
123
130
  end
124
131
  end
data/lib/gush/worker.rb CHANGED
@@ -1,10 +1,17 @@
1
1
  require 'active_job'
2
+ require 'redis-mutex'
2
3
 
3
4
  module Gush
4
5
  class Worker < ::ActiveJob::Base
5
6
  def perform(workflow_id, job_id)
6
7
  setup_job(workflow_id, job_id)
7
8
 
9
+ if job.succeeded?
10
+ # Try to enqueue outgoing jobs again because the previous run failed with a Redis mutex lock error
11
+ enqueue_outgoing_jobs
12
+ return
13
+ end
14
+
8
15
  job.payloads = incoming_payloads
9
16
 
10
17
  error = nil
@@ -66,11 +73,16 @@ module Gush
66
73
 
67
74
  def enqueue_outgoing_jobs
68
75
  job.outgoing.each do |job_name|
69
- out = client.find_job(workflow_id, job_name)
70
- if out.ready_to_start?
71
- client.enqueue_job(workflow_id, out)
76
+ RedisMutex.with_lock("gush_enqueue_outgoing_jobs_#{workflow_id}-#{job_name}", sleep: 0.3, block: 2) do
77
+ out = client.find_job(workflow_id, job_name)
78
+
79
+ if out.ready_to_start?
80
+ client.enqueue_job(workflow_id, out)
81
+ end
72
82
  end
73
83
  end
84
+ rescue RedisMutex::LockError
85
+ Worker.set(wait: 2.seconds).perform_later(workflow_id, job.name)
74
86
  end
75
87
  end
76
88
  end
data/lib/gush/workflow.rb CHANGED
@@ -77,11 +77,13 @@ module Gush
77
77
 
78
78
  def find_job(name)
79
79
  match_data = /(?<klass>\w*[^-])-(?<identifier>.*)/.match(name.to_s)
80
+
80
81
  if match_data.nil?
81
- job = jobs.find { |node| node.class.to_s == name.to_s }
82
+ job = jobs.find { |node| node.klass.to_s == name.to_s }
82
83
  else
83
84
  job = jobs.find { |node| node.name.to_s == name.to_s }
84
85
  end
86
+
85
87
  job
86
88
  end
87
89
 
@@ -108,18 +110,21 @@ module Gush
108
110
  def run(klass, opts = {})
109
111
  node = klass.new({
110
112
  workflow_id: id,
111
- name: client.next_free_job_id(id, klass.to_s),
112
- params: opts.fetch(:params, {})
113
+ id: client.next_free_job_id(id, klass.to_s),
114
+ params: opts.fetch(:params, {}),
115
+ queue: opts[:queue]
113
116
  })
114
117
 
115
118
  jobs << node
116
119
 
117
120
  deps_after = [*opts[:after]]
121
+
118
122
  deps_after.each do |dep|
119
123
  @dependencies << {from: dep.to_s, to: node.name.to_s }
120
124
  end
121
125
 
122
126
  deps_before = [*opts[:before]]
127
+
123
128
  deps_before.each do |dep|
124
129
  @dependencies << {from: node.name.to_s, to: dep.to_s }
125
130
  end
@@ -15,6 +15,53 @@ describe "Workflows" do
15
15
  end
16
16
  end
17
17
 
18
+ context 'when one of the jobs fails initially' do
19
+ it 'succeeds when the job retries' do
20
+ FAIL_THEN_SUCCEED_SPY = double()
21
+ allow(FAIL_THEN_SUCCEED_SPY).to receive(:foo).and_return('failure', 'success')
22
+
23
+ class FailsThenSucceeds < Gush::Job
24
+ def perform
25
+ if FAIL_THEN_SUCCEED_SPY.foo == 'failure'
26
+ raise NameError
27
+ end
28
+ end
29
+ end
30
+
31
+ class SecondChanceWorkflow < Gush::Workflow
32
+ def configure
33
+ run Prepare
34
+ run FailsThenSucceeds, after: Prepare
35
+ run NormalizeJob, after: FailsThenSucceeds
36
+ end
37
+ end
38
+
39
+ flow = SecondChanceWorkflow.create
40
+ flow.start!
41
+
42
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['Prepare']))
43
+ perform_one
44
+
45
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['FailsThenSucceeds']))
46
+ expect do
47
+ perform_one
48
+ end.to raise_error(NameError)
49
+
50
+ expect(flow.reload).to be_failed
51
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['FailsThenSucceeds']))
52
+
53
+ # Retry the same job again, but this time succeeds
54
+ perform_one
55
+
56
+ expect(Gush::Worker).to have_jobs(flow.id, jobs_with_id(['NormalizeJob']))
57
+ perform_one
58
+
59
+ flow = flow.reload
60
+ expect(flow).to be_finished
61
+ expect(flow).to_not be_failed
62
+ end
63
+ end
64
+
18
65
  it "runs the whole workflow in proper order" do
19
66
  flow = TestWorkflow.create
20
67
  flow.start!
@@ -77,8 +124,6 @@ describe "Workflows" do
77
124
 
78
125
  perform_one
79
126
  expect(flow.reload.find_job("PrependJob").output_payload).to eq("A prefix: SOME TEXT")
80
-
81
-
82
127
  end
83
128
 
84
129
  it "passes payloads from workflow that runs multiple same class jobs with nameized payloads" do
@@ -158,4 +203,45 @@ describe "Workflows" do
158
203
  expect(flow).to be_finished
159
204
  expect(flow).to_not be_failed
160
205
  end
206
+
207
+ it 'executes job with multiple ancestors only once' do
208
+ NO_DUPS_INTERNAL_SPY = double('spy')
209
+ expect(NO_DUPS_INTERNAL_SPY).to receive(:some_method).exactly(1).times
210
+
211
+ class FirstAncestor < Gush::Job
212
+ def perform
213
+ end
214
+ end
215
+
216
+ class SecondAncestor < Gush::Job
217
+ def perform
218
+ end
219
+ end
220
+
221
+ class FinalJob < Gush::Job
222
+ def perform
223
+ NO_DUPS_INTERNAL_SPY.some_method
224
+ end
225
+ end
226
+
227
+ class NoDuplicatesWorkflow < Gush::Workflow
228
+ def configure
229
+ run FirstAncestor
230
+ run SecondAncestor
231
+
232
+ run FinalJob, after: [FirstAncestor, SecondAncestor]
233
+ end
234
+ end
235
+
236
+ flow = NoDuplicatesWorkflow.create
237
+ flow.start!
238
+
239
+ 5.times do
240
+ perform_one
241
+ end
242
+
243
+ flow = flow.reload
244
+ expect(flow).to be_finished
245
+ expect(flow).to_not be_failed
246
+ end
161
247
  end
@@ -95,12 +95,18 @@ describe Gush::Client do
95
95
  end
96
96
 
97
97
  describe "#expire_workflow" do
98
+ let(:ttl) { 2000 }
99
+
98
100
  it "sets TTL for all Redis keys related to the workflow" do
99
101
  workflow = TestWorkflow.create
100
102
 
101
- client.expire_workflow(workflow, -1)
103
+ client.expire_workflow(workflow, ttl)
104
+
105
+ expect(redis.ttl("gush.workflows.#{workflow.id}")).to eq(ttl)
102
106
 
103
- # => TODO - I believe fakeredis does not handle TTL the same.
107
+ workflow.jobs.each do |job|
108
+ expect(redis.ttl("gush.jobs.#{workflow.id}.#{job.klass}")).to eq(ttl)
109
+ end
104
110
  end
105
111
  end
106
112
 
@@ -52,19 +52,27 @@ describe Gush::Job do
52
52
  describe "#start!" do
53
53
  it "resets flags and marks as running" do
54
54
  job = described_class.new(name: "a-job")
55
+
56
+ job.enqueue!
57
+ job.fail!
58
+
59
+ now = Time.now.to_i
60
+ expect(job.started_at).to eq(nil)
61
+ expect(job.failed_at).to eq(now)
62
+
55
63
  job.start!
64
+
56
65
  expect(job.started_at).to eq(Time.now.to_i)
57
- expect(job.enqueued?).to eq(false)
58
- expect(job.running?).to eq(true)
66
+ expect(job.failed_at).to eq(nil)
59
67
  end
60
68
  end
61
69
 
62
70
  describe "#as_json" do
63
71
  context "finished and enqueued set to true" do
64
72
  it "returns correct hash" do
65
- job = described_class.new(workflow_id: 123, name: "a-job", finished_at: 123, enqueued_at: 120)
73
+ job = described_class.new(workflow_id: 123, id: "702bced5-bb72-4bba-8f6f-15a3afa358bd", finished_at: 123, enqueued_at: 120)
66
74
  expected = {
67
- name: "a-job",
75
+ id: '702bced5-bb72-4bba-8f6f-15a3afa358bd',
68
76
  klass: "Gush::Job",
69
77
  incoming: [],
70
78
  outgoing: [],
@@ -73,6 +81,7 @@ describe Gush::Job do
73
81
  finished_at: 123,
74
82
  enqueued_at: 120,
75
83
  params: {},
84
+ queue: nil,
76
85
  output_payload: nil,
77
86
  workflow_id: 123
78
87
  }
@@ -86,7 +95,7 @@ describe Gush::Job do
86
95
  job = described_class.from_hash(
87
96
  {
88
97
  klass: 'Gush::Job',
89
- name: 'gob',
98
+ id: '702bced5-bb72-4bba-8f6f-15a3afa358bd',
90
99
  incoming: ['a', 'b'],
91
100
  outgoing: ['c'],
92
101
  failed_at: 123,
@@ -96,7 +105,8 @@ describe Gush::Job do
96
105
  }
97
106
  )
98
107
 
99
- expect(job.name).to eq('gob')
108
+ expect(job.id).to eq('702bced5-bb72-4bba-8f6f-15a3afa358bd')
109
+ expect(job.name).to eq('Gush::Job|702bced5-bb72-4bba-8f6f-15a3afa358bd')
100
110
  expect(job.class).to eq(Gush::Job)
101
111
  expect(job.klass).to eq("Gush::Job")
102
112
  expect(job.finished?).to eq(true)
@@ -39,6 +39,18 @@ describe Gush::Worker do
39
39
  end
40
40
  end
41
41
 
42
+ context 'when job failed to enqueue outgoing jobs' do
43
+ it 'enqueues another job to handle enqueue_outgoing_jobs' do
44
+ allow(RedisMutex).to receive(:with_lock).and_raise(RedisMutex::LockError)
45
+ subject.perform(workflow.id, 'Prepare')
46
+ expect(Gush::Worker).to have_no_jobs(workflow.id, jobs_with_id(["FetchFirstJob", "FetchSecondJob"]))
47
+
48
+ allow(RedisMutex).to receive(:with_lock).and_call_original
49
+ perform_one
50
+ expect(Gush::Worker).to have_jobs(workflow.id, jobs_with_id(["FetchFirstJob", "FetchSecondJob"]))
51
+ end
52
+ end
53
+
42
54
  it "calls job.perform method" do
43
55
  SPY = double()
44
56
  expect(SPY).to receive(:some_method)
@@ -135,6 +135,7 @@ describe Gush::Workflow do
135
135
  klass1 = Class.new(Gush::Job)
136
136
  klass2 = Class.new(Gush::Job)
137
137
  klass3 = Class.new(Gush::Job)
138
+
138
139
  tree.run(klass1)
139
140
  tree.run(klass2, after: [klass1, klass3])
140
141
  tree.run(klass3)
data/spec/spec_helper.rb CHANGED
@@ -14,7 +14,7 @@ class PersistSecondJob < Gush::Job; end
14
14
  class NormalizeJob < Gush::Job; end
15
15
  class BobJob < Gush::Job; end
16
16
 
17
- GUSHFILE = Pathname.new(__FILE__).parent.join("Gushfile")
17
+ GUSHFILE = Pathname.new(__FILE__).parent.join("Gushfile")
18
18
 
19
19
  class TestWorkflow < Gush::Workflow
20
20
  def configure
@@ -26,7 +26,6 @@ class TestWorkflow < Gush::Workflow
26
26
  run FetchSecondJob, after: Prepare, before: NormalizeJob
27
27
 
28
28
  run PersistFirstJob, after: FetchFirstJob, before: NormalizeJob
29
-
30
29
  end
31
30
  end
32
31
 
@@ -62,7 +61,7 @@ module GushHelpers
62
61
  end
63
62
 
64
63
  def job_with_id(job_name)
65
- /#{job_name}-(?<identifier>.*)/
64
+ /#{job_name}\|(?<identifier>.*)/
66
65
  end
67
66
  end
68
67
 
@@ -79,6 +78,19 @@ RSpec::Matchers.define :have_jobs do |flow, jobs|
79
78
  end
80
79
  end
81
80
 
81
+ RSpec::Matchers.define :have_no_jobs do |flow, jobs|
82
+ match do |actual|
83
+ expected = jobs.map do |job|
84
+ hash_including(args: include(flow, job))
85
+ end
86
+ expect(ActiveJob::Base.queue_adapter.enqueued_jobs).not_to match_array(expected)
87
+ end
88
+
89
+ failure_message do |actual|
90
+ "expected queue to have no #{jobs}, but instead has: #{ActiveJob::Base.queue_adapter.enqueued_jobs.map{ |j| j[:args][1]}}"
91
+ end
92
+ end
93
+
82
94
  RSpec.configure do |config|
83
95
  config.include ActiveJob::TestHelper
84
96
  config.include GushHelpers
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: gush
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.1.1
4
+ version: 2.0.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Piotrek Okoński
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2018-06-09 00:00:00.000000000 Z
11
+ date: 2022-03-10 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: activejob
@@ -19,7 +19,7 @@ dependencies:
19
19
  version: 4.2.7
20
20
  - - "<"
21
21
  - !ruby/object:Gem::Version
22
- version: '6.0'
22
+ version: '7.0'
23
23
  type: :runtime
24
24
  prerelease: false
25
25
  version_requirements: !ruby/object:Gem::Requirement
@@ -29,21 +29,21 @@ dependencies:
29
29
  version: 4.2.7
30
30
  - - "<"
31
31
  - !ruby/object:Gem::Version
32
- version: '6.0'
32
+ version: '7.0'
33
33
  - !ruby/object:Gem::Dependency
34
- name: connection_pool
34
+ name: concurrent-ruby
35
35
  requirement: !ruby/object:Gem::Requirement
36
36
  requirements:
37
37
  - - "~>"
38
38
  - !ruby/object:Gem::Version
39
- version: 2.2.1
39
+ version: '1.0'
40
40
  type: :runtime
41
41
  prerelease: false
42
42
  version_requirements: !ruby/object:Gem::Requirement
43
43
  requirements:
44
44
  - - "~>"
45
45
  - !ruby/object:Gem::Version
46
- version: 2.2.1
46
+ version: '1.0'
47
47
  - !ruby/object:Gem::Dependency
48
48
  name: multi_json
49
49
  requirement: !ruby/object:Gem::Requirement
@@ -78,6 +78,20 @@ dependencies:
78
78
  - - "<"
79
79
  - !ruby/object:Gem::Version
80
80
  version: '5'
81
+ - !ruby/object:Gem::Dependency
82
+ name: redis-mutex
83
+ requirement: !ruby/object:Gem::Requirement
84
+ requirements:
85
+ - - "~>"
86
+ - !ruby/object:Gem::Version
87
+ version: 4.0.1
88
+ type: :runtime
89
+ prerelease: false
90
+ version_requirements: !ruby/object:Gem::Requirement
91
+ requirements:
92
+ - - "~>"
93
+ - !ruby/object:Gem::Version
94
+ version: 4.0.1
81
95
  - !ruby/object:Gem::Dependency
82
96
  name: hiredis
83
97
  requirement: !ruby/object:Gem::Requirement
@@ -166,16 +180,16 @@ dependencies:
166
180
  name: bundler
167
181
  requirement: !ruby/object:Gem::Requirement
168
182
  requirements:
169
- - - "~>"
183
+ - - ">="
170
184
  - !ruby/object:Gem::Version
171
- version: '1.5'
185
+ version: '0'
172
186
  type: :development
173
187
  prerelease: false
174
188
  version_requirements: !ruby/object:Gem::Requirement
175
189
  requirements:
176
- - - "~>"
190
+ - - ">="
177
191
  - !ruby/object:Gem::Version
178
- version: '1.5'
192
+ version: '0'
179
193
  - !ruby/object:Gem::Dependency
180
194
  name: rake
181
195
  requirement: !ruby/object:Gem::Requirement
@@ -293,8 +307,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
293
307
  - !ruby/object:Gem::Version
294
308
  version: '0'
295
309
  requirements: []
296
- rubyforge_project:
297
- rubygems_version: 2.7.6
310
+ rubygems_version: 3.1.4
298
311
  signing_key:
299
312
  specification_version: 4
300
313
  summary: Fast and distributed workflow runner based on ActiveJob and Redis