dirty_pipeline 0.5.0 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +1 -0
- data/dirty_pipeline.gemspec +8 -0
- data/lib/dirty_pipeline.rb +1 -0
- data/lib/dirty_pipeline/base.rb +17 -24
- data/lib/dirty_pipeline/event.rb +9 -21
- data/lib/dirty_pipeline/queue.rb +14 -18
- data/lib/dirty_pipeline/railway.rb +28 -18
- data/lib/dirty_pipeline/storage.rb +57 -63
- data/lib/dirty_pipeline/transaction.rb +15 -19
- data/lib/dirty_pipeline/transition.rb +7 -5
- data/lib/dirty_pipeline/version.rb +1 -1
- metadata +86 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e4eef96b779d46b3c749e665cf467f69577b8b1dc1bac3dd1e107be269619a3d
+  data.tar.gz: 0a42107025850c20d9d2f4bfbeb5fb4b3fd42610c9eae3aacc3ba80e3600ad6d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9fcb6bcf6655ab497f97fcf0f72d54a46b4a12407d5d2567d6027442e2652d4c901f0ef1a5031de184c0714821c27f466511c0db803e072082bd0d121968cbf3
+  data.tar.gz: '085965b95df37be6654977fb820b3d8937abd2097bc01a340119a8e99170206ce3670ffdcc2698723af7a484403fe76359b9b0e55548d6d11f1e2573c2ade88b'
data/.gitignore
CHANGED
data/dirty_pipeline.gemspec
CHANGED
@@ -21,7 +21,15 @@ Gem::Specification.new do |spec|
   spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
   spec.require_paths = ["lib"]

+  # temporary dependency
+  spec.add_runtime_dependency "sidekiq", "~> 5.0"
+  spec.add_runtime_dependency "redis", "~> 4.0"
+  spec.add_runtime_dependency "nanoid", "~> 0.2.0"
+
   spec.add_development_dependency "bundler", "~> 1.16"
   spec.add_development_dependency "rake", "~> 10.0"
   spec.add_development_dependency "rspec", "~> 3.0"
+  spec.add_development_dependency "dotenv", "~> 2.2"
+  spec.add_development_dependency "timecop", "~> 0.9"
+  spec.add_development_dependency "pry", "~> 0.11"
 end
data/lib/dirty_pipeline.rb
CHANGED
data/lib/dirty_pipeline/base.rb
CHANGED
@@ -22,7 +22,8 @@ module DirtyPipeline
     using StringCamelcase

     def transition(name, from:, to:, action: nil, attempts: 1)
-      action ||= const_get(name.to_s.camelcase)
+      action ||= const_get(name.to_s.camelcase(:upper)) rescue nil
+      action ||= method(name) if respond_to?(name)
       @transitions_map[name.to_s] = {
         action: action,
         from: Array(from).map(&:to_s),
@@ -55,7 +56,7 @@ module DirtyPipeline
     end

     def clear!
-      storage.
+      storage.reset!
       reset!
     end

@@ -66,7 +67,7 @@ module DirtyPipeline

     def call
       return self if (serialized_event = railway.next).nil?
-      execute(load_event(serialized_event))
+      execute(load_event(serialized_event), tx_method: :call)
     end
     alias :call_next :call

@@ -78,7 +79,7 @@ module DirtyPipeline

     def retry
       return unless (event = load_event(railway.queue.processing_event))
-      execute(event, :retry)
+      execute(event, tx_id: :retry)
     end

     def schedule_cleanup
@@ -116,8 +117,8 @@ module DirtyPipeline

     private

-    def execute(event,
-      transaction(event).public_send(
+    def execute(event, tx_method:)
+      transaction(event).public_send(tx_method) do |destination, action, *args|
         state_changes = process_action(action, event, *args)
         next if status.failure?
         Success(event, state_changes, destination)
@@ -134,9 +135,7 @@ module DirtyPipeline
     def process_action(action, event, *args)
       return catch(:success) do
         return if interupt_on_error(event) do
-
-          throw :success, run_operation(action, event, *args); nil
-        end
+          throw :success, run_operation(action, event, *args)
         end
         nil
       end
@@ -151,14 +150,10 @@ module DirtyPipeline
     end

     def interupt_on_error(event)
-      return
+      return unless (fail_cause = catch(:fail_operation) { yield; nil })
       Failure(event, fail_cause)
     end

-    def interupt_on_abort(event)
-      Abort(event) if catch(:abort) { yield; nil }
-    end
-
     def find_subject_args
       subject.id
     end
@@ -172,20 +167,18 @@ module DirtyPipeline
     end

     def transaction(event)
-
+      Transaction.new(self, event)
     end

     def Failure(event, cause)
-      event.failure!
-      railway.switch_to(:undo)
-      @status = Status.failure(cause, tag: :error)
-      throw :abort_transaction, true
-    end
-
-    def Abort(event)
-      event.failure!
       railway.switch_to(:undo)
-
+      if cause.eql?(:abort)
+        event.abort!
+        @status = Status.failure(subject, tag: :aborted)
+      else
+        event.failure!
+        @status = Status.failure(cause, tag: :error)
+      end
       throw :abort_transaction, true
     end

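
Note: the reworked action lookup in transition() now falls back from a constant to a same-named method. Below is a minimal sketch of that resolution order in plain Ruby; the Actions module and its members are hypothetical, and String#capitalize stands in for the gem's StringCamelcase refinement.

    module Actions
      Publish = ->(event, *) { "publishing #{event}" } # found via const_get

      def self.archive(event, *)                        # found via method()
        "archiving #{event}"
      end

      def self.resolve(name)
        action   = (const_get(name.to_s.capitalize) rescue nil)
        action ||= method(name) if respond_to?(name)
        action
      end
    end

    Actions.resolve(:publish) # => the Publish lambda
    Actions.resolve(:archive) # => #<Method: Actions.archive>
    Actions.resolve(:missing) # => nil
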
data/lib/dirty_pipeline/event.rb
CHANGED
@@ -5,8 +5,9 @@ module DirtyPipeline
     NEW = "new".freeze
     START = "started".freeze
     FAILURE = "failed".freeze
+    ABORT = "aborted".freeze
     RETRY = "retry".freeze
-    SUCCESS = "
+    SUCCESS = "succeeded".freeze

     def self.create(transition, *args, tx_id:)
       new(
@@ -19,33 +20,20 @@ module DirtyPipeline
       )
     end

-    def self.load(json)
-      return unless json
-      new(JSON.load(json))
-    end
-
-    def self.dump(event)
-      JSON.dump(event.to_h)
-    end
-
-    def dump
-      self.class.dump(self)
-    end
-
     attr_reader :id, :tx_id, :error, :data
     def initialize(options = {}, data: nil, error: nil)
       unless options.empty?
         options_hash = options.to_h
         data ||= options_hash["data"]
         error ||= options_hash["error"]
-        transition = options_hash["transition"]
-        args = options_hash["args"]
       end

       data_hash = data.to_h

-      @tx_id
-      @id
+      @tx_id = data_hash.fetch("transaction_uuid")
+      @id = data_hash.fetch("uuid")
+      transition = data_hash.fetch("transition")
+      args = data_hash.fetch("args").to_a
       @data = {
         "uuid" => @id,
         "transaction_uuid" => @tx_id,
@@ -67,7 +55,7 @@ module DirtyPipeline
       define_method("#{method_name}") { @data[method_name] }
     end

-    %w(new start retry failure).each do |method_name|
+    %w(new start retry failure success abort).each do |method_name|
       define_method("#{method_name}?") do
         @data["status"] == self.class.const_get(method_name.upcase)
       end
@@ -81,7 +69,7 @@ module DirtyPipeline
       @error = {
         "exception" => exception.class.to_s,
         "exception_message" => exception.message,
-        "created_at" => Time.
+        "created_at" => Time.now,
       }
       failure!
     end
@@ -90,7 +78,7 @@ module DirtyPipeline
       @data["attempts_count"].to_i
     end

-    def attempt_retry
+    def attempt_retry!
       @data["updated_at"] = Time.now
       @data["attempts_count"] = attempts_count + 1
     end
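
Note: the new ABORT and SUCCESS statuses also get predicates via the same define_method loop. A standalone sketch of that pattern, using a hypothetical EventStatus class that copies Event's status constants:

    class EventStatus
      NEW     = "new".freeze
      START   = "started".freeze
      FAILURE = "failed".freeze
      ABORT   = "aborted".freeze
      RETRY   = "retry".freeze
      SUCCESS = "succeeded".freeze

      def initialize(status)
        @status = status
      end

      # mirrors Event: abort? compares the current status against ABORT, etc.
      %w(new start retry failure success abort).each do |name|
        define_method("#{name}?") { @status == self.class.const_get(name.upcase) }
      end
    end

    EventStatus.new("aborted").abort?     # => true
    EventStatus.new("succeeded").success? # => true
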
data/lib/dirty_pipeline/queue.rb
CHANGED
@@ -1,8 +1,7 @@
 module DirtyPipeline
   class Queue
-
-
-    @root = "dirty-pipeline-queue:#{subject.class}:#{subject.id}:" \
+    def initialize(operation, subject_class, subject_id, transaction_id)
+      @root = "dirty-pipeline-queue:#{subject_class}:#{subject_id}:" \
               "op_#{operation}:txid_#{transaction_id}"
     end

@@ -14,37 +13,34 @@ module DirtyPipeline
     end

     def to_a
-      DirtyPipeline.with_redis
+      DirtyPipeline.with_redis do |r|
+        r.lrange(events_queue_key, 0, -1).map! do |packed_event|
+          unpack(packed_event)
+        end
+      end
     end

     def push(event)
       DirtyPipeline.with_redis { |r| r.rpush(events_queue_key, pack(event)) }
+      self
     end
     alias :<< :push

     def unshift(event)
       DirtyPipeline.with_redis { |r| r.lpush(events_queue_key, pack(event)) }
+      self
     end

-    def
+    def pop
       DirtyPipeline.with_redis do |r|
         data = r.lpop(events_queue_key)
         data.nil? ? r.del(active_event_key) : r.set(active_event_key, data)
-
-      end
-    end
-    alias :pop :dequeue
-
-    def event_in_progress?(event = nil)
-      if event.nil?
-        !processing_event.nil?
-      else
-        processing_event.id == event.id
+        unpack(data)
       end
     end

     def processing_event
-      DirtyPipeline.with_redis { |r| unpack
+      DirtyPipeline.with_redis { |r| unpack(r.get(active_event_key)) }
     end

     private
@@ -72,11 +68,11 @@ module DirtyPipeline
     end

     def events_queue_key
-      "#{root}:events"
+      "#{@root}:events"
     end

     def active_event_key
-      "#{root}:active"
+      "#{@root}:active"
     end
   end
 end
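
Note: the queue is now a pair of Redis keys per subject, operation and transaction. For reference, the same commands written out with redis-rb directly; the Order/42 subject, the transaction id and the JSON packing are assumptions for illustration (Queue's pack/unpack helpers are private and not shown in this diff).

    require "json"
    require "redis"

    redis = Redis.new
    root  = "dirty-pipeline-queue:Order:42:op_call:txid_abc123"

    # push appends to the operation's event list (unshift would use LPUSH instead)
    redis.rpush("#{root}:events", JSON.dump("transition" => "Create"))

    # to_a reads the whole list back
    redis.lrange("#{root}:events", 0, -1).map { |e| JSON.parse(e) } # => [{"transition"=>"Create"}]

    # pop takes the head of the list and remembers it as the active event
    packed = redis.lpop("#{root}:events")
    packed.nil? ? redis.del("#{root}:active") : redis.set("#{root}:active", packed)

    # processing_event reads the active event back
    JSON.parse(redis.get("#{root}:active")) # => {"transition"=>"Create"}
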
data/lib/dirty_pipeline/railway.rb
CHANGED
@@ -1,37 +1,45 @@
 module DirtyPipeline
   class Railway
-
+    DEFAULT_OPERATIONS = %w(call undo finalize)

     def initialize(subject, transaction_id)
       @tx_id = transaction_id
-      @
-
+      @subject_class = subject.class.to_s
+      @subject_id = subject.id.to_s
+      @root = "dirty-pipeline-rail:#{subject.class}:#{subject.id}:"
       @queues = Hash[
-
-        [operation,
+        DEFAULT_OPERATIONS.map do |operation|
+          [operation, create_queue(operation)]
         end
       ]
     end

     def clear!
       @queues.values.each(&:clear!)
-      DirtyPipeline.with_redis
+      DirtyPipeline.with_redis do |r|
+        r.multi do |mr|
+          mr.del(active_operation_key)
+          mr.del(active_transaction_key)
+        end
+      end
     end

     def next
       return if other_transaction_in_progress?
-      start_transaction!
+      start_transaction! unless running_transaction

       queue.pop.tap { |event| finish_transaction! if event.nil? }
     end

-    def queue(
-      @queues
+    def queue(operation_name = active)
+      @queues.fetch(operation_name.to_s) do
+        @queues.store(operation_name, create_queue(operation_name))
+      end
     end
     alias :[] :queue

     def switch_to(name)
-      raise ArgumentError unless
+      raise ArgumentError unless DEFAULT_OPERATIONS.include?(name.to_s)
       return if name.to_s == active
       DirtyPipeline.with_redis { |r| r.set(active_operation_key, name) }
     end
@@ -41,8 +49,16 @@ module DirtyPipeline
     end
     alias :operation :active

+    def running_transaction
+      DirtyPipeline.with_redis { |r| r.get(active_transaction_key) }
+    end
+
     private

+    def create_queue(operation_name)
+      Queue.new(operation_name, @subject_class, @subject_id, @tx_id)
+    end
+
     def active_transaction_key
       "#{@root}:active_transaction"
     end
@@ -52,20 +68,14 @@ module DirtyPipeline
     end

     def start_transaction!
-      switch_to(
+      switch_to(DEFAULT_OPERATIONS.first) unless active
       DirtyPipeline.with_redis { |r| r.set(active_transaction_key, @tx_id) }
     end

     def finish_transaction!
-
-      DirtyPipeline.with_redis { |r| r.del(active_transaction_key) }
-      @queues.values.each(&:clear!)
+      clear! if running_transaction == @tx_id
     end

-    def running_transaction
-      DirtyPipeline.with_redis { |r| r.get(active_transaction_key) }
-    end
-
     def other_transaction_in_progress?
       return false if running_transaction.nil?
       running_transaction != @tx_id
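
Note: per-operation queues are now created lazily. The Hash#fetch-with-block memoization used in Railway#queue, shown in isolation (build stands in for create_queue):

    registry = {}
    build    = ->(operation) { "queue for #{operation}" } # stands in for Queue.new(...)

    fetch_queue = ->(name) do
      registry.fetch(name.to_s) { registry.store(name.to_s, build.call(name)) }
    end

    fetch_queue.call(:finalize) # builds and memoizes the :finalize queue
    fetch_queue.call(:finalize) # second call returns the stored object
    registry                    # => {"finalize"=>"queue for finalize"}
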
data/lib/dirty_pipeline/storage.rb
CHANGED
@@ -1,68 +1,45 @@
 module DirtyPipeline
+  # Storage structure
+  # {
+  #   status: :errored,
+  #   state: {
+  #     field: "value",
+  #   },
+  #   errors: {
+  #     "<event_id>": {
+  #       error: "RuPost::API::Error",
+  #       error_message: "Timeout error",
+  #       created_at: 2018-01-01T13:22Z
+  #     },
+  #   },
+  #   events: {
+  #     <event_id>: {
+  #       transition: "Create",
+  #       args: ...,
+  #       changes: ...,
+  #       created_at: ...,
+  #       updated_at: ...,
+  #       attempts_count: 2,
+  #     },
+  #     <event_id>: {...},
+  #   }
+  # }
   class Storage
-    SUCCESS_STATUS = "success".freeze
-    FAILURE_STATUS = "failure".freeze
-    RETRY_STATUS = "retry".freeze
-    PROCESSING_STATUS = "processing".freeze
     class InvalidPipelineStorage < StandardError; end

-    attr_reader :subject, :field, :
-    attr_accessor :store
+    attr_reader :subject, :field, :store
     alias :to_h :store
     def initialize(subject, field)
       @subject = subject
       @field = field
-
+      @store = subject.send(@field).to_h
+      reset if @store.empty?
+      raise InvalidPipelineStorage, store unless valid_store?
     end

-    def
-
-
-      return if valid_store?
-      raise InvalidPipelineStorage, store
-    end
-
-    def valid_store?
-      (store.keys & %w(status events errors state)).size.eql?(4)
-    end
-
-    # PG JSONB column
-    # {
-    #   status: :errored,
-    #   state: {
-    #     field: "value",
-    #   },
-    #   errors: {
-    #     "<event_id>": {
-    #       error: "RuPost::API::Error",
-    #       error_message: "Timeout error",
-    #       created_at: 2018-01-01T13:22Z
-    #     },
-    #   },
-    #   events: {
-    #     <event_id>: {
-    #       action: Init,
-    #       input: ...,
-    #       created_at: ...,
-    #       updated_at: ...,
-    #       attempts_count: 2,
-    #     },
-    #     <event_id>: {...},
-    #   }
-    # }
-    def clear
-      self.store = subject.send(
-        "#{field}=",
-        "status" => nil,
-        "state" => {},
-        "events" => {},
-        "errors" => {}
-      )
-    end
-
-    def clear!
-      clear
-      subject.update_attributes!(field => store)
+    def reset!
+      reset
+      save!
     end

     def status
@@ -71,21 +48,38 @@ module DirtyPipeline

     def commit!(event)
       store["status"] = event.destination if event.destination
-      require'pry';binding.pry unless event.changes.respond_to?(:to_h)
       store["state"].merge!(event.changes) unless event.changes.to_h.empty?
       store["errors"][event.id] = event.error unless event.error.to_h.empty?
       store["events"][event.id] = event.data unless event.data.to_h.empty?
-
-      subject.save!
+      save!
     end

-    def
-
+    def find_event(event_id)
+      return unless (found_event = store.dig("events", event_id))
+      Event.new(data: found_event, error: store.dig("errors", event_id))
     end

-
-
-
+    private
+
+    def valid_store?
+      (store.keys & %w(status events errors state)).size.eql?(4)
+    end
+
+    def save!
+      subject.send("#{field}=", store)
+      subject.save!
+    end
+
+    def reset
+      @store = subject.send(
+        "#{field}=",
+        {
+          "status" => nil,
+          "state" => {},
+          "events" => {},
+          "errors" => {}
+        }
+      )
     end
   end
 end
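
Note: a sketch of the store hash that Storage now reads from and writes back to the subject's pipeline field, following the structure comment added at the top of the file; all values below are invented for illustration.

    # Illustrative contents only; the shape follows storage.rb's comment block.
    store = {
      "status" => "shipped",                        # last event's destination
      "state"  => { "tracking_number" => "AB123" }, # merged event changes
      "errors" => {
        "uT2tXTi3" => {
          "exception"         => "RuPost::API::Error",
          "exception_message" => "Timeout error",
          "created_at"        => "2018-09-03T13:22:00Z"
        }
      },
      "events" => {
        "uT2tXTi3" => { "transition" => "Ship", "args" => [], "attempts_count" => 2 }
      }
    }

    # Storage#valid_store? accepts it because all four top-level keys are present:
    (store.keys & %w(status events errors state)).size == 4 # => true
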
data/lib/dirty_pipeline/transaction.rb
CHANGED
@@ -1,56 +1,52 @@
 module DirtyPipeline
   class Transaction
-    attr_reader :locker, :storage, :subject, :pipeline, :
-    def initialize(pipeline,
+    attr_reader :locker, :storage, :subject, :pipeline, :event
+    def initialize(pipeline, event)
       @pipeline = pipeline
       @subject = pipeline.subject
       @storage = pipeline.storage
-      @queue = queue
       @event = event
     end

-    def retry
-      event.attempt_retry!
-      pipeline.schedule_cleanup
-
-      with_transaction { |*targs| yield(*targs) }
-    end
-
     def call
-      # return unless queue.event_in_progress?(event)
-
       event.start!
-
+      with_transaction { |*targs| yield(*targs) }
+    end

+    def retry
+      event.attempt_retry!
       with_transaction { |*targs| yield(*targs) }
     end

     private

     def with_transaction
+      pipeline.schedule_cleanup
+
       destination, action, max_attempts_count =
         pipeline.find_transition(event.transition)
                 .values_at(:to, :action, :attempts)

       storage.commit!(event)

-      # status.action_pool.unshift(action)
       subject.transaction(requires_new: true) do
-
-        yield(destination, action, *event.args); nil
-      end
+        with_abort_handling { yield(destination, action, *event.args) }
       end
     rescue => exception
       event.link_exception(exception)
       if max_attempts_count.to_i > event.attempts_count
         event.retry!
         pipeline.schedule_retry
-      else
-        pipeline.schedule_cleanup
       end
       raise
     ensure
       storage.commit!(event)
     end
+
+    def with_abort_handling
+      return unless catch(:abort_transaction) { yield; nil }
+      event.abort! unless event.abort?
+      raise ActiveRecord::Rollback
+    end
   end
 end
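
Note: with_abort_handling connects the throw-based abort signal to ActiveRecord's quiet rollback; ActiveRecord::Rollback raised inside subject.transaction(requires_new: true) rolls the nested transaction back without re-raising. Below is a reduced, plain-Ruby sketch of that control flow, with a return value standing in for the rollback.

    # :rolled_back stands in for `raise ActiveRecord::Rollback`, which the real
    # code uses so the surrounding subject.transaction block is rolled back quietly.
    def with_abort_handling(event)
      return :committed unless catch(:abort_transaction) { yield; nil }
      event["status"] = "aborted" unless event["status"] == "aborted"
      :rolled_back
    end

    event = { "status" => "started" }

    with_abort_handling(event) { "normal transition body" }       # => :committed
    with_abort_handling(event) { throw :abort_transaction, true } # => :rolled_back
    event["status"]                                               # => "aborted"
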
data/lib/dirty_pipeline/transition.rb
CHANGED
@@ -1,11 +1,11 @@
 module DirtyPipeline
   class Transition
     def Abort()
-      throw :
+      throw :fail_operation, :abort
     end

     def Error(error)
-      throw :
+      throw :fail_operation, error
     end

     def Success(changes = nil)
@@ -15,8 +15,8 @@ module DirtyPipeline
     def self.finalize(*args, **kwargs)
       event, pipeline, *args = args
       instance = new(event, *args, **kwargs)
-      pipeline.railway.switch_to(:call)
       return unless instance.respond_to?(:finalize)
+      pipeline.railway.switch_to(:call)
       instance.finalize(pipeline.subject)
     end

@@ -31,8 +31,10 @@ module DirtyPipeline
       event, pipeline, *args = args
       instance = new(event, *args, **kwargs)
       pipeline.railway[:undo] << event if instance.respond_to?(:undo)
-
-
+      if instance.respond_to?(:finalize)
+        pipeline.railway[:finalize] << event
+        pipeline.railway.switch_to(:finalize)
+      end
       new(event, *args, **kwargs).call(pipeline.subject)
     end

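
Note: Abort() and Error() now share a single :fail_operation signal, with :abort as a distinguished cause that Base#Failure branches on. A self-contained sketch of the throw/catch protocol (the run_operation helper here is illustrative, not the gem's API):

    def run_operation(&operation)
      catch(:success) do
        fail_cause = catch(:fail_operation) { operation.call; nil }
        if fail_cause
          return fail_cause == :abort ? "aborted" : "failed: #{fail_cause}"
        end
        nil
      end
    end

    run_operation { throw :success, { "field" => "value" } } # => {"field"=>"value"}
    run_operation { throw :fail_operation, "Timeout error" } # => "failed: Timeout error"
    run_operation { throw :fail_operation, :abort }          # => "aborted"
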
metadata
CHANGED
@@ -1,15 +1,57 @@
 --- !ruby/object:Gem::Specification
 name: dirty_pipeline
 version: !ruby/object:Gem::Version
-  version: 0.5.0
+  version: 0.6.0
 platform: ruby
 authors:
 - Sergey Dolganov
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2018-09-
+date: 2018-09-03 00:00:00.000000000 Z
 dependencies:
+- !ruby/object:Gem::Dependency
+  name: sidekiq
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '5.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '5.0'
+- !ruby/object:Gem::Dependency
+  name: redis
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '4.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '4.0'
+- !ruby/object:Gem::Dependency
+  name: nanoid
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.2.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.2.0
 - !ruby/object:Gem::Dependency
   name: bundler
   requirement: !ruby/object:Gem::Requirement
@@ -52,6 +94,48 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '3.0'
+- !ruby/object:Gem::Dependency
+  name: dotenv
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '2.2'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '2.2'
+- !ruby/object:Gem::Dependency
+  name: timecop
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.9'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.9'
+- !ruby/object:Gem::Dependency
+  name: pry
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.11'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0.11'
 description: Simple state machine designed for non-pure transitions. E.g. for wizard-like
   systems with a lot of external API calls.
 email:
|