async_request_reply 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/lib/async_request_reply/config.rb +81 -0
- data/lib/async_request_reply/methods_chain.rb +59 -0
- data/lib/async_request_reply/repository_adapters/abstract_repository_adapter.rb +17 -0
- data/lib/async_request_reply/repository_adapters/redis_repository_adapter.rb +30 -0
- data/lib/async_request_reply/worker.rb +263 -0
- data/lib/async_request_reply/worker_in_batch.rb +229 -0
- data/lib/async_request_reply/workers_engine/sidekiq.rb +14 -0
- data/lib/async_request_reply.rb +24 -0
- metadata +137 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 82844bb3ba5b9fd8531eaa9d2d4c7423dc75fc05328231f3453312e40c969874
|
4
|
+
data.tar.gz: fcf69875b436ac90400db0fea59d1f014930735cc3aa83595f671156b8802977
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 32b54b50cb07199f46c6ec62dc65975eb672aee8c18810f8378eaa4c588c337904a3469342580c097089ecb3a493bffa690fb0d5eec037a8da511565fbe1d1cc
|
7
|
+
data.tar.gz: 77f45585605802ed482bc32a3a1aa3d681825e7d96d58ebaa7f0cdf85dd2d9cc5ca544f10a0a57c1f1cf13bcd8d7969426a1600b21ef5a0e991055ed62014265
|
@@ -0,0 +1,81 @@
|
|
1
|
+
# frozen_string_literal: true

require 'ostruct'
require 'logger'

module AsyncRequestReply
  # Central configuration singleton for the gem.
  #
  # Holds the repository adapter, the Redis connection URL, the async
  # engine and the logger, plus the MessagePack extension-type factories
  # registered for Worker serialization.
  class Config
    DEFAULTS = {
      repository_adapter: :redis,
      redis_url_conection: 'redis://localhost:6379',
      async_engine: :sidekiq,
      logger: Logger.new(STDOUT)
    }.freeze

    # NOTE(review): class variable kept for backward compatibility — it is
    # shared across the inheritance tree; factories registered here apply to
    # every Config instance.
    @@message_packer_factories = []

    # Underlying OpenStruct holding the raw settings.
    attr_accessor :config

    # Builds a config pre-populated with DEFAULTS.
    def initialize
      @config ||= OpenStruct.new
      config.repository_adapter = DEFAULTS[:repository_adapter]
      config.redis_url_conection = DEFAULTS[:redis_url_conection]
      config.async_engine = DEFAULTS[:async_engine]
      config.logger = DEFAULTS[:logger]
    end

    # Memoized singleton accessor.
    def self.instance
      @instance ||= new
    end

    # Yields self so callers can mutate settings in a block.
    def configure
      yield(self)
    end

    # Resolves the configured repository adapter, mapping the :redis symbol
    # to the bundled Redis adapter class.
    def repository_adapter
      return AsyncRequestReply::RepositoryAdapters::RedisRepositoryAdapter if config.repository_adapter == :redis

      config.repository_adapter
    end

    # Redis connection URL. (Name keeps its historical misspelling for
    # backward compatibility; see #redis_url_connection.)
    def redis_url_conection
      config.redis_url_conection
    end

    # Resolves the configured async engine, mapping the :async and :sidekiq
    # symbols to the bundled engine classes.
    def async_engine
      return AsyncRequestReply::WorkersEngine::Async if config.async_engine == :async
      return AsyncRequestReply::WorkersEngine::Sidekiq if config.async_engine == :sidekiq

      config.async_engine
    end

    def async_engine=(value)
      config.async_engine = value
    end

    def repository_adapter=(value)
      config.repository_adapter = value
    end

    def redis_url_conection=(value)
      config.redis_url_conection = value
    end

    # Correctly spelled aliases for the misspelled historical accessors.
    alias redis_url_connection redis_url_conection
    alias redis_url_connection= redis_url_conection=

    def logger
      config.logger
    end

    # Registered MessagePack extension-type factories.
    def message_packer_factories
      @@message_packer_factories
    end

    # Registers a MessagePack extension type. The block receives a template
    # hash and must return it filled with :first_byte, :klass, :packer and
    # :unpacker. The registered class gets an `as_json` returning self so the
    # instance survives `attributes.as_json` and reaches the custom packer
    # intact (presumably — confirm against Worker#to_msgpack usage).
    def add_message_pack_factory
      factory = yield({first_byte: nil, klass: nil, packer: nil, unpacker: nil})
      factory[:klass].class_eval do
        def as_json
          self
        end
      end
      @@message_packer_factories.push(factory)
    end
  end
end
|
@@ -0,0 +1,59 @@
|
|
1
|
+
# frozen_string_literal: true

module AsyncRequestReply
  # A module providing functionality for chaining method calls on a constant.
  # This class provides a method to execute a series of method calls in
  # sequence on a given constant.
  #
  # == Example:
  #   AsyncRequestReply::MethodsChain.run_methods_chain(1, [[:+, 1], [:*, 2]])
  #   # => 4
  #
  # The methods in `attrs_methods` are called in the order they are provided.
  # If a method requires arguments, the arguments will be passed, otherwise
  # the method is called without arguments.
  class MethodsChain
    class << self
      # Executes a chain of method calls on a given constant.
      #
      # The constant is first resolved (if it is a string, it is looked up
      # with Object.const_get), and then methods from the `attrs_methods`
      # array are invoked on it in order.
      #
      # BUGFIX: previously relied on ActiveSupport's `constantize` and
      # `symbolize_keys`; now uses only core Ruby (`Object.const_get`,
      # `Hash#transform_keys`), removing a hidden dependency.
      #
      # @param constant [Object] The receiver (or its constant name as a
      #   String) on which methods will be called.
      # @param attrs_methods [Array<Array(Symbol, Object)>] Method name /
      #   argument pairs. A Hash argument is passed as keyword arguments; a
      #   sole Proc argument is passed as a block; the argument may be
      #   omitted for zero-arity calls.
      # @return [Object] The result of the last method call in the chain.
      #
      # @example
      #   AsyncRequestReply::MethodsChain.run_methods_chain(1, [[:+, 1], [:*, 2]])
      #   # => 4
      #
      # @example
      #   AsyncRequestReply::MethodsChain.run_methods_chain("Math::PI", [[:*, 2], [:+, 1]])
      #   # => 7.283185307179586
      def run_methods_chain(constant, attrs_methods = [])
        receiver = constant.is_a?(String) ? Object.const_get(constant) : constant

        attrs_methods.inject(receiver) do |chained, (method_name, method_args)|
          # No (truthy) argument: call the method without parameters,
          # preserving the original truthiness check (a `false` argument is
          # also treated as "no argument").
          next chained.send(method_name) unless method_args

          positional = [method_args].flatten.reject { |arg| arg.is_a?(Hash) }
          keyword = ([method_args].flatten.find { |arg| arg.is_a?(Hash) } || {}).transform_keys(&:to_sym)

          # A sole Proc argument is forwarded as a block.
          next chained.send(method_name, &positional[0]) if positional.size == 1 && positional[0].is_a?(Proc)

          chained.send(method_name, *positional, **keyword)
        end
      end
    end
  end
end
|
@@ -0,0 +1,17 @@
|
|
1
|
+
module AsyncRequestReply
  module RepositoryAdapters
    # Interface contract every repository adapter must implement.
    # Concrete adapters (e.g. RedisRepositoryAdapter) override each class
    # method; the base implementations always raise NotImplementedError.
    class AbstractRepositoryAdapter
      # Fetch the serialized payload stored under +uuid+.
      def self.get(uuid)
        raise NotImplementedError
      end

      # Remove the payload stored under +uuid+.
      def self.del(uuid)
        raise NotImplementedError
      end

      # Store +payload+ under +uuid+ with a time-to-live of +ttl+ seconds.
      def self.setex(uuid, ttl, payload)
        raise NotImplementedError
      end
    end
  end
end
|
@@ -0,0 +1,30 @@
|
|
1
|
+
# frozen_string_literal: true
require_relative '../config'
require_relative 'abstract_repository_adapter'
require 'redis-client'

module AsyncRequestReply
  module RepositoryAdapters
    # Repository adapter backed by Redis via the redis-client gem.
    # Implements the AbstractRepositoryAdapter contract using a shared
    # pooled connection built from the configured Redis URL.
    class RedisRepositoryAdapter < AbstractRepositoryAdapter
      class << self
        # Returns the raw string stored under +uuid+ (nil when missing).
        def get(uuid)
          client.call("GET", uuid)
        end

        # Deletes the key +uuid+; returns the number of keys removed.
        def del(uuid)
          client.call("DEL", uuid)
        end

        # Stores +payload+ under +uuid+ with an expiry of +ttl+ seconds and
        # reads the stored value back. Raises when Redis rejects the SET.
        # NOTE(review): the read-back is a second round trip; presumably it
        # exists so callers get the persisted form — confirm before removing.
        def setex(uuid, ttl, payload)
          raise "Redis can`t save key #{uuid}" unless client.call("SET", uuid, payload, ex: ttl)
          get(uuid)
        end

        # Memoized pooled RedisClient.
        # NOTE(review): @@redis is a class variable, shared across any
        # subclasses, and the pool never picks up later config changes to
        # the URL once built.
        def client
          #TODO: ADD CONFIGURATION timeout and size of pool
          @@redis ||= RedisClient.config(url: AsyncRequestReply::Config.instance.redis_url_conection).new_pool(timeout: 0.5, size: 5)
        end
      end
    end
  end
end
|
@@ -0,0 +1,263 @@
|
|
1
|
+
# frozen_string_literal: true

require_relative 'config'
require_relative 'methods_chain'

module AsyncRequestReply
  # A persisted unit of work for the Asynchronous Request-Reply pattern.
  #
  # A Worker stores a receiver (+class_instance+), a chain of methods to run
  # on it (+methods_chain+) and success/failure continuations. It serializes
  # itself with MessagePack into the configured repository adapter and can be
  # performed inline (#perform) or enqueued on the configured async engine
  # (#perform_async).
  class Worker
    # TODO-2023-10-22: add more logging to this class.

    @@config = AsyncRequestReply::Config.instance

    # Lifecycle statuses a worker can assume.
    STATUS = %i[waiting processing done unprocessable_entity internal_server_error].freeze
    ONE_HOUR = 60 * 60
    # TTL applied on save. TODO-2023-10-22: this caps processing at one hour.
    LIVE_TIMEOUT = ONE_HOUR

    # BUGFIX: :redirect_url was listed twice in the original accessor list.
    attr_accessor :status, :uuid, :status_url, :redirect_url,
                  :class_instance, :methods_chain, :success,
                  :failure, :_ttl
    attr_reader :new_record, :errors, :start_time, :end_time

    # @param attrs [Hash] attributes with string or symbol keys.
    def initialize(attrs = {})
      # BUGFIX: the symbolized hash was previously discarded
      # (`attrs.transform_keys(&:to_sym)` without assignment), so
      # string-keyed attributes never reached the symbol lookups below.
      attrs = attrs.transform_keys(&:to_sym)
      @uuid = new_record?(attrs[:uuid]) ? "async_request:#{SecureRandom.uuid}" : attrs[:uuid]

      # INFO: remove the async_request from the repository once processed.
      # TODO-2023-10-22: understand the relation between the number of stored
      # objects and memory usage on the repository host; define a strategy
      # that bounds instance size and lifecycle per use case.
      # `.to_s` guards against a missing status (nil.to_sym would raise).
      destroy(30) if !new_record?(attrs[:uuid]) && attrs[:status].to_s == 'done'

      # Assign attributes, defaults underneath user-supplied values.
      assign_attributes(default_attributes.merge(attrs))
    end

    # @return [Boolean] true when the worker has no validation errors.
    def valid?
      @errors = []
      @errors << "class_instance can't be blank." if class_instance.nil?

      @errors.empty?
    end

    # Serializable attribute hash (string keys, matching the stored form).
    def attributes
      { 'uuid' => uuid,
        'status' => status,
        'success' => success,
        'failure' => failure,
        'methods_chain' => methods_chain,
        'class_instance' => class_instance,
        'redirect_url' => redirect_url,
        'start_time' => start_time,
        'end_time' => end_time }
    end

    # Defaults merged underneath user attributes on initialization.
    # (Keys are now uniformly symbols; the original mixed 'status' string
    # and symbol keys, which could yield duplicate assignments after merge.)
    def default_attributes
      {
        methods_chain: [],
        status: :waiting,
        success: {
          class_instance: 'self',
          methods_chain: []
        },
        failure: {
          class_instance: 'self',
          methods_chain: []
        }
      }
    end

    # True when +p_uuid+ is nil or absent from the repository.
    def new_record?(p_uuid)
      return true if p_uuid.nil?

      @@config.repository_adapter.get(p_uuid).nil?
    end

    # Alias for #uuid.
    def id
      uuid
    end

    # Finds a persisted worker.
    # @return [Worker, nil]
    def self.find(p_uuid)
      resource = _find(p_uuid)
      # BUGFIX: _find returns nil for missing records; the previous
      # `resource.empty?` alone raised NoMethodError on nil.
      return nil if resource.nil? || resource.empty?

      new(resource)
    end

    # Fetches and unpacks the raw stored attributes.
    # @return [Hash, nil]
    def self._find(p_uuid)
      resource = @@config.repository_adapter.get(p_uuid)
      return nil unless resource

      unpack(resource)
    end

    # Seconds spent performing (monotonic clock); nil when the worker never
    # started. BUGFIX: previously raised NoMethodError when @start_time was
    # nil — now consistent with WorkerInBatch#elapsed.
    def elapsed
      return nil unless @start_time

      (@end_time || Process.clock_gettime(Process::CLOCK_MONOTONIC)) - @start_time
    end

    # Assigns +attrs+ and persists; returns nil when invalid.
    def update(attrs)
      assign_attributes(attrs)
      save
    end

    # Re-reads this worker's attributes from the repository.
    # NOTE(review): raises if the record no longer exists (nil attrs).
    def reload!
      assign_attributes(self.class._find(uuid))
    end

    ##
    # Remove request from data store. Pass an integer number of seconds to
    # schedule removal (via TTL) instead of deleting immediately.
    def destroy(seconds_in = 0)
      return @@config.repository_adapter.del(id) if seconds_in.zero?

      self._ttl = seconds_in
      save
    end

    # Persists the worker (with its TTL) and re-assigns the stored form;
    # returns nil when invalid.
    def save
      return nil unless valid?

      attributes = self.class.unpack(@@config.repository_adapter.setex(uuid, (_ttl || LIVE_TIMEOUT), to_msgpack))
      assign_attributes(attributes)
    end

    # Persists and enqueues the worker on the async engine.
    def perform_async
      save
      handle_async_engine.perform_async(id)
    end

    # The engine used by #perform_async.
    def async_engine
      handle_async_engine
    end

    # Overrides the async engine for this instance; returns self for chaining.
    def with_async_engine(engine_class)
      @handle_async_engine = engine_class
      self
    end

    # Serializes the instance with MessagePack: faster and smaller than JSON
    # and a good fit for binary payloads (files).
    # Ref.: https://msgpack.org/
    # Ref.: https://github.com/msgpack/msgpack-ruby#extension-types
    def to_msgpack
      self.class.message_pack_factory.dump(attributes.as_json)
    end

    # Inverse of #to_msgpack.
    def self.unpack(packer)
      message_pack_factory.load(packer)
    end

    # TODO: decouple the message pack factory.
    # Builds a factory with every extension type registered in the config.
    def self.message_pack_factory
      factory = MessagePack::Factory.new

      @@config.message_packer_factories.each do |fac|
        factory.register_type(
          fac[:first_byte],
          fac[:klass],
          packer: fac[:packer],
          unpacker: fac[:unpacker],
          recursive: true
        )
      end

      factory
    end

    # Success continuation with symbolized keys (nil-safe).
    def success
      # TODO-2023-10-22: understand at which point of the object lifecycle
      # this attribute can be nil and fix the root cause.
      @success&.transform_keys(&:to_sym)
    end

    # Failure continuation with symbolized keys (nil-safe).
    def failure
      # TODO-2023-10-22: understand at which point of the object lifecycle
      # this attribute can be nil and fix the root cause.
      @failure&.transform_keys(&:to_sym)
    end

    # Runs the methods chain, then the success (truthy result) or failure
    # (falsy result) continuation, persisting status transitions and timing.
    # Returns the continuation result, or nil on unexpected errors.
    def perform
      @start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      @@config.logger.info("Start perform worker #{uuid}")

      raise "Can't update worker while it's performing" unless update(status: :processing)

      element = MethodsChain.run_methods_chain(class_instance, methods_chain)

      if element
        @@config.logger.info("successful workflow perform worker #{uuid}")

        # 'self' in the continuation means "the chain's result".
        klass_after = success[:class_instance] == 'self' ? element : success[:class_instance]
        methods_after = success[:methods_chain]

        result = MethodsChain.run_methods_chain(klass_after, methods_after)

        @end_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)

        raise "Can't update worker while it's performing" unless update(status: :done)

        @@config.logger.info("Done perform worker #{uuid}")
        result
      else
        @@config.logger.error("failure workflow perform worker #{uuid}")
        klass_reject_after = failure[:class_instance] == 'self' ? element : failure[:class_instance]
        methods_reject_after = failure[:methods_chain]

        result = MethodsChain.run_methods_chain(klass_reject_after, methods_reject_after)

        @end_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)

        raise "Can't update worker while it's performing" unless update(
          status: :unprocessable_entity,
          errors: formated_erros_to_json(result)
        )

        @@config.logger.error("Done perform worker #{uuid} with fails #{formated_erros_to_json(result)}")
        result
      end
    rescue StandardError => e
      @@config.logger.fatal("Fatal perform worker #{uuid} with fails #{formated_erros_to_json(e.message)}")
      @end_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      update(status: :internal_server_error, errors: formated_erros_to_json(e.message))
      nil
    end

    # Normalizes +errors+ (a message string, hash, or list) into an array of
    # { title:, detail: } hashes, dropping nil/empty values.
    # NOTE: the name keeps its historical misspelling for API compatibility.
    def formated_erros_to_json(errors)
      resouce = if errors.respond_to?(:map)
                  errors.map { |title, error| { title: title, detail: error } }
                else
                  [{ title: errors }]
                end

      resouce.map { |error| error.reject { |_k, v| v.nil? || (v.respond_to?(:empty?) && v.empty?) } }
    end

    # Writer used by update(status:, errors:) during perform.
    def errors=(value)
      @errors = value
    end

    private

    # Per-instance engine override or the configured default.
    def handle_async_engine
      @handle_async_engine || @@config.async_engine
    end

    # Mass-assigns via "#{attribute}=" writers.
    def assign_attributes(attrs)
      attrs.each do |attribute, value|
        send("#{attribute}=", value)
      end
    end

    def start_time=(value)
      @start_time = value
    end

    def end_time=(value)
      @end_time = value
    end
  end
end
|
@@ -0,0 +1,229 @@
|
|
1
|
+
# AsyncRequestReply::WorkerInBatch
#
# This class represents a batch of workers processing asynchronous requests.
# It manages worker records, tracks their status, and provides methods to
# manipulate and query the workers in the batch.
#
# Attributes:
# - `worker_ids` [Array<String>] The UUIDs of the workers in this batch.
# - `uuid` [String] The unique identifier for the batch.

module AsyncRequestReply
  class WorkerInBatch
    # @private Raised by .find! when no batch exists for the given uuid.
    class WorkerInBatchNotFound < StandardError
      attr_accessor :uuid

      def initialize(uuid)
        @uuid = uuid
        super
      end

      def message
        "WorkerInBatch not found with id #{@uuid}"
      end
    end

    # Batch bookkeeping: worker UUIDs, per-state id lists, timing and
    # arbitrary metadata. (The redundant trailing `attr_reader :uuid` from
    # the original was removed — attr_accessor already defines the reader.)
    attr_accessor :meta, :worker_ids, :uuid, :processing, :waiting, :successes, :failures, :start_time, :end_time

    # @private
    ONE_HOUR = 3600

    # @private
    LIVE_TIMEOUT = ONE_HOUR

    # @private
    @@config = AsyncRequestReply::Config.instance

    # Initializes a new batch of workers with an optional UUID.
    #
    # If a UUID is not provided or is unknown to the repository, a new UUID
    # is generated.
    #
    # @param uuid [String, nil] The UUID of the batch.
    def initialize(uuid = nil)
      @worker_ids = []
      @meta = {}
      @uuid = new_record?(uuid) ? "async_request_in_batch:#{SecureRandom.uuid}" : uuid

      @waiting = []
      @processing = []
      @failures = []
      @successes = []
    end

    # Assigns workers to the batch: each worker is saved and its UUID stored.
    #
    # @param workers [Array<AsyncRequestReply::Worker>]
    def workers=(workers)
      # `each` instead of `map` — only the side effects matter here.
      workers.each do |worker|
        worker.save
        @worker_ids << worker.uuid
      end
    end

    # Finds a `WorkerInBatch` by UUID, raising when not found.
    #
    # @param p_uuid [String]
    # @raise [WorkerInBatchNotFound]
    # @return [AsyncRequestReply::WorkerInBatch]
    def self.find!(p_uuid)
      resource = find(p_uuid)
      raise(WorkerInBatchNotFound, p_uuid) unless resource

      resource
    end

    # Finds a `WorkerInBatch` by UUID.
    #
    # @param p_uuid [String]
    # @return [AsyncRequestReply::WorkerInBatch, nil]
    def self.find(p_uuid)
      resource = _find(p_uuid)
      return nil unless resource

      instance = new(resource['uuid'])
      # BUGFIX: worker_ids was assigned twice in the original.
      instance.worker_ids = resource['worker_ids']
      instance.start_time = resource['start_time']
      instance.end_time = resource['end_time']
      instance.waiting = resource['waiting']
      instance.processing = resource['processing']
      instance.failures = resource['failures']
      instance.successes = resource['successes']
      instance.meta = resource['meta']
      instance
    end

    # Alias for #uuid.
    #
    # @return [String]
    def id
      uuid
    end

    # Saves the batch to the repository as JSON with a 1-hour expiry.
    #
    # @return [void]
    def save
      # TODO-2024-11-27: Decide serializer strategy (e.g., json, message_packer).
      @@config.repository_adapter.setex(uuid, LIVE_TIMEOUT, as_json.to_json)
    end

    # @return [Integer] total count of workers in the batch.
    def total
      worker_ids.count
    end

    # @return [Array<String>] ids of processed workers (successes + failures).
    def processed
      @successes + @failures
    end

    # Elapsed seconds between start and end (or now, monotonic clock).
    #
    # @return [Float, nil] nil when the batch never started.
    def elapsed
      return nil unless @start_time

      (@end_time || Process.clock_gettime(Process::CLOCK_MONOTONIC)) - @start_time
    end

    # Performs every worker sequentially, persisting each state transition.
    # NOTE(review): ids are popped from the end of the waiting list, so
    # workers run in reverse registration order — confirm this is intended.
    #
    # @return [void]
    def perform
      # TODO: Add concurrency model.
      @start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      save
      @waiting = worker_ids.dup
      @waiting.size.times do
        @processing.push(@waiting.pop)
        save
        worker_id = @processing.last
        worker = AsyncRequestReply::Worker.find(worker_id)
        worker.perform
        worker.reload!
        if %w[unprocessable_entity internal_server_error].include?(worker.status)
          @failures.push(@processing.pop)
        else
          @successes.push(@processing.pop)
        end
        save
      end

      @end_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      save
    end

    # Enqueues the whole batch as a single Worker on the async engine.
    def perform_async
      save
      AsyncRequestReply::Worker.new(class_instance: AsyncRequestReply::WorkerInBatch,
                                    methods_chain: [[:find, id],
                                                    [:perform]]).perform_async
    end

    # JSON-compatible representation of the batch, including counters.
    #
    # @return [Hash]
    def as_json
      {
        uuid: @uuid,
        start_time: @start_time,
        end_time: @end_time,
        worker_ids: @worker_ids,
        waiting: @waiting,
        qtd_waiting: @waiting.count,
        processing: @processing,
        qtd_processing: @processing.count,
        failures: @failures,
        qtd_fail: @failures.count,
        successes: @successes,
        qtd_success: @successes.count,
        meta: @meta,
        qtd_processed: processed.count,
        total: total
      }
    end

    private

    # Retrieves and JSON-parses the stored batch data.
    #
    # @param p_uuid [String]
    # @return [Hash, nil]
    def self._find(p_uuid)
      resource = @@config.repository_adapter.get(p_uuid)
      return nil unless resource

      JSON.parse(resource)
    end

    # True when +p_uuid+ is nil or absent from the repository.
    #
    # @param p_uuid [String, nil]
    # @return [Boolean]
    def new_record?(p_uuid)
      return true if p_uuid.nil?

      @@config.repository_adapter.get(p_uuid).nil?
    end
  end
end
|
@@ -0,0 +1,14 @@
|
|
1
|
+
require 'sidekiq'
module AsyncRequestReply
  module WorkersEngine
    # Sidekiq-backed async engine: a plain Sidekiq worker that loads a
    # persisted AsyncRequestReply::Worker by id and performs it.
    # Jobs are retried up to 4 times by Sidekiq on failure.
    class Sidekiq
      include ::Sidekiq::Worker
      sidekiq_options retry: 4

      # Performs the persisted worker identified by +async_request_id+.
      # NOTE(review): if the record expired, Worker.find can return nil and
      # this raises NoMethodError, triggering a Sidekiq retry — confirm that
      # is the intended behavior.
      def perform(async_request_id)
        worker = ::AsyncRequestReply::Worker.find(async_request_id)
        worker.perform
      end
    end
  end
end
|
@@ -0,0 +1,24 @@
|
|
1
|
+
# frozen_string_literal: true
require 'msgpack'
require 'active_support'
require 'active_support/core_ext'

# Top-level namespace for the Asynchronous Request-Reply pattern
# implementation. Core classes are autoloaded; the bundled repository
# adapter and Sidekiq engine are required eagerly.
module AsyncRequestReply
  autoload :Config, "async_request_reply/config"
  autoload :MethodsChain, "async_request_reply/methods_chain"
  autoload :Worker, "async_request_reply/worker"
  autoload :WorkerInBatch, "async_request_reply/worker_in_batch"
  require "async_request_reply/repository_adapters/redis_repository_adapter"
  require "async_request_reply/workers_engine/sidekiq"

  # Load default configs
  AsyncRequestReply::Config.instance

  class << self
    # @return [AsyncRequestReply::Config] the configuration singleton.
    def config
      AsyncRequestReply::Config.instance
    end

    # Yields the configuration singleton for mutation.
    def configure(&block)
      config.configure(&block)
    end
  end
end
|
metadata
ADDED
@@ -0,0 +1,137 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: async_request_reply
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 1.3.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Luiz Filipe Neves Costa, Rafael Pinheiro
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2025-04-25 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: connection_pool
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '2.4'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '2.4'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: enumerize
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '2.3'
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '2.3'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: msgpack
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - "~>"
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '1.0'
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - "~>"
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '1.0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: redis-client
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - ">="
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
type: :runtime
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - ">="
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: sidekiq
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - "~>"
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '5.0'
|
76
|
+
type: :runtime
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - "~>"
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '5.0'
|
83
|
+
- !ruby/object:Gem::Dependency
|
84
|
+
name: activesupport
|
85
|
+
requirement: !ruby/object:Gem::Requirement
|
86
|
+
requirements:
|
87
|
+
- - ">="
|
88
|
+
- !ruby/object:Gem::Version
|
89
|
+
version: '0'
|
90
|
+
type: :runtime
|
91
|
+
prerelease: false
|
92
|
+
version_requirements: !ruby/object:Gem::Requirement
|
93
|
+
requirements:
|
94
|
+
- - ">="
|
95
|
+
- !ruby/object:Gem::Version
|
96
|
+
version: '0'
|
97
|
+
description: Asynchronous Request-Reply pattern ruby implementation.
|
98
|
+
email:
|
99
|
+
- luizfilipeneves@gmail.com
|
100
|
+
- luiz.neves@prosas.com.br
|
101
|
+
- rafa.pinheiro.pinheiro@gmail.com
|
102
|
+
executables: []
|
103
|
+
extensions: []
|
104
|
+
extra_rdoc_files: []
|
105
|
+
files:
|
106
|
+
- lib/async_request_reply.rb
|
107
|
+
- lib/async_request_reply/config.rb
|
108
|
+
- lib/async_request_reply/methods_chain.rb
|
109
|
+
- lib/async_request_reply/repository_adapters/abstract_repository_adapter.rb
|
110
|
+
- lib/async_request_reply/repository_adapters/redis_repository_adapter.rb
|
111
|
+
- lib/async_request_reply/worker.rb
|
112
|
+
- lib/async_request_reply/worker_in_batch.rb
|
113
|
+
- lib/async_request_reply/workers_engine/sidekiq.rb
|
114
|
+
homepage: https://github.com/prosas/async_request_reply
|
115
|
+
licenses:
|
116
|
+
- MIT
|
117
|
+
metadata: {}
|
118
|
+
post_install_message:
|
119
|
+
rdoc_options: []
|
120
|
+
require_paths:
|
121
|
+
- lib
|
122
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
123
|
+
requirements:
|
124
|
+
- - ">="
|
125
|
+
- !ruby/object:Gem::Version
|
126
|
+
version: 2.7.0
|
127
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
128
|
+
requirements:
|
129
|
+
- - ">="
|
130
|
+
- !ruby/object:Gem::Version
|
131
|
+
version: '0'
|
132
|
+
requirements: []
|
133
|
+
rubygems_version: 3.4.19
|
134
|
+
signing_key:
|
135
|
+
specification_version: 4
|
136
|
+
summary: Asynchronous Request-Reply pattern ruby implementation
|
137
|
+
test_files: []
|