cosmonats 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. checksums.yaml +7 -0
  2. data/LICENSE.txt +169 -0
  3. data/README.md +515 -0
  4. data/bin/cosmo +7 -0
  5. data/lib/cosmo/cli.rb +201 -0
  6. data/lib/cosmo/client.rb +54 -0
  7. data/lib/cosmo/config.rb +101 -0
  8. data/lib/cosmo/defaults.yml +69 -0
  9. data/lib/cosmo/engine.rb +46 -0
  10. data/lib/cosmo/job/data.rb +74 -0
  11. data/lib/cosmo/job/processor.rb +132 -0
  12. data/lib/cosmo/job.rb +67 -0
  13. data/lib/cosmo/logger.rb +66 -0
  14. data/lib/cosmo/processor.rb +56 -0
  15. data/lib/cosmo/publisher.rb +38 -0
  16. data/lib/cosmo/stream/data.rb +21 -0
  17. data/lib/cosmo/stream/message.rb +31 -0
  18. data/lib/cosmo/stream/processor.rb +94 -0
  19. data/lib/cosmo/stream/serializer.rb +19 -0
  20. data/lib/cosmo/stream.rb +76 -0
  21. data/lib/cosmo/utils/hash.rb +66 -0
  22. data/lib/cosmo/utils/json.rb +23 -0
  23. data/lib/cosmo/utils/signal.rb +24 -0
  24. data/lib/cosmo/utils/stopwatch.rb +32 -0
  25. data/lib/cosmo/utils/string.rb +24 -0
  26. data/lib/cosmo/utils/thread_pool.rb +41 -0
  27. data/lib/cosmo/version.rb +5 -0
  28. data/lib/cosmo.rb +39 -0
  29. data/lib/cosmonats.rb +3 -0
  30. data/sig/cosmo/cli.rbs +25 -0
  31. data/sig/cosmo/client.rbs +30 -0
  32. data/sig/cosmo/config.rbs +48 -0
  33. data/sig/cosmo/engine.rbs +21 -0
  34. data/sig/cosmo/job/data.rbs +35 -0
  35. data/sig/cosmo/job/processor.rbs +23 -0
  36. data/sig/cosmo/job.rbs +35 -0
  37. data/sig/cosmo/logger.rbs +39 -0
  38. data/sig/cosmo/message.rbs +38 -0
  39. data/sig/cosmo/processor.rbs +29 -0
  40. data/sig/cosmo/publisher.rbs +21 -0
  41. data/sig/cosmo/stream/data.rbs +7 -0
  42. data/sig/cosmo/stream/processor.rbs +26 -0
  43. data/sig/cosmo/stream/serializer.rbs +13 -0
  44. data/sig/cosmo/stream.rbs +38 -0
  45. data/sig/cosmo/utils/hash.rbs +25 -0
  46. data/sig/cosmo/utils/json.rbs +13 -0
  47. data/sig/cosmo/utils/signal.rbs +15 -0
  48. data/sig/cosmo/utils/stopwatch.rbs +19 -0
  49. data/sig/cosmo/utils/string.rbs +13 -0
  50. data/sig/cosmo/utils/thread_pool.rbs +18 -0
  51. data/sig/cosmo.rbs +20 -0
  52. metadata +125 -0
data/lib/cosmo/job.rb ADDED
@@ -0,0 +1,67 @@
1
# frozen_string_literal: true

require "cosmo/job/data"
require "cosmo/job/processor"

module Cosmo
  # Mixin that turns a class into an async job. Including it adds class-level
  # perform_* helpers that wrap the arguments in Job::Data and publish them via
  # Publisher; workers invoke the instance-level #perform with those arguments.
  module Job
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      # Override job defaults; nil arguments are dropped by #compact.
      # NOTE: `retry` is a reserved word — it is usable here only as a keyword
      # parameter name and via hash value omission, never as a bare local read.
      def options(stream: nil, retry: nil, dead: nil)
        default_options.merge!({ stream:, retry:, dead: }.compact)
      end

      # Enqueue the job (async, the default) or run it inline (async: false).
      # When published, returns whatever Publisher.publish_job returns;
      # when run inline, returns nil.
      def perform(*args, async: true, **options)
        data = Data.new(name, args, default_options.merge(options))
        unless async
          # Round-trip through the serialized payload so the sync path sees
          # exactly what a worker would receive off the wire.
          payload = Utils::Json.parse(data.to_args[1])
          raise ArgumentError, "Cannot parse payload" unless payload

          new.perform(*payload[:args])
          return
        end

        Publisher.publish_job(data)
      end

      # Enqueue immediately with default options.
      def perform_async(*args)
        perform(*args)
      end

      # Enqueue for execution at the given timestamp.
      def perform_at(timestamp, *args)
        perform(*args, at: timestamp)
      end

      # Enqueue for execution after the given interval.
      def perform_in(interval, *args)
        perform(*args, in: interval)
      end

      # Run the job inline on the calling thread (no publish).
      def perform_sync(*args)
        perform(*args, async: false)
      end

      # Per-class options, copied from the superclass chain (or Data::DEFAULTS).
      # NOTE(review): this is a shallow #dup (Stream uses a deep copy) — safe as
      # long as only top-level keys are replaced via merge!; confirm.
      def default_options
        @default_options ||= (superclass.respond_to?(:default_options) ? superclass.default_options : Data::DEFAULTS).dup
      end

      private

      # Memoized shared NATS client.
      def client
        @client ||= Client.instance
      end
    end

    # Job id of the currently executing job.
    # NOTE(review): nothing in this file assigns @jid — presumably the job
    # processor sets it before calling #perform; confirm against Job::Processor.
    attr_reader :jid

    # Subclass hook: the job body. Receives the deserialized arguments.
    def perform(...)
      raise NotImplementedError, "#{self.class}#perform must be implemented"
    end

    def logger
      Logger.instance
    end
  end
end
@@ -0,0 +1,66 @@
1
# frozen_string_literal: true

require "logger"
require "forwardable"
require "time" # Time#iso8601 (used by SimpleFormatter) is defined by the "time" stdlib

module Cosmo
  # Process-wide stdout logger with per-thread key=value context tags.
  module Logger
    # Thread-local key=value pairs appended to every formatted log line.
    module Context
      KEY = :cosmo_context

      # Merge +options+ into the current thread's context. With a block the
      # previous context is restored afterwards; without one the change sticks.
      def self.with(**options)
        prev = current
        Thread.current[KEY] = prev.merge(options)
        yield if block_given?
      ensure
        Thread.current[KEY] = prev if block_given?
      end

      # Remove the given keys from the current thread's context.
      def self.without(*keys)
        Thread.current[KEY] = current.except(*keys)
        nil
      end

      # The current thread's context hash (created on demand).
      def self.current
        Thread.current[KEY] ||= {}
        Thread.current[KEY]
      end
    end

    # Shared helpers for formatters: process and thread identifiers.
    class BaseFormatter < ::Logger::Formatter
      # Compact per-thread id; xor with the pid keeps it short and
      # process-local, base-36 keeps it printable.
      def tid
        (Thread.current.object_id ^ pid).to_s(36)
      end

      def pid
        ::Process.pid
      end
    end

    # "<iso8601 utc> <severity> pid=.. tid=.. key=value..: message"
    class SimpleFormatter < BaseFormatter
      def call(severity, time, _, msg)
        options = Context.current.compact.map { |k, v| "#{k}=#{v}" }.join(" ")
        # `join` never returns nil, so a plain reassignment suffices here
        # (the original `options &&= ...` guarded a condition that can't occur).
        options = " #{options}" unless options.empty?
        "#{time.utc.iso8601(3)} #{severity} pid=#{pid} tid=#{tid}#{options}: #{msg2str(msg)}\n"
      end
    end

    class << self
      extend Forwardable

      # Logger.info / .error / ... forward to the memoized ::Logger instance.
      delegate %i[info error debug warn fatal] => :instance
    end

    # See Context.with.
    def self.with(...)
      Context.with(...)
    end

    # See Context.without.
    def self.without(...)
      Context.without(...)
    end

    # Memoized stdout logger using SimpleFormatter.
    def self.instance
      @instance ||= ::Logger.new($stdout).tap { _1.formatter = SimpleFormatter.new }
    end
  end
end
@@ -0,0 +1,56 @@
1
# frozen_string_literal: true

module Cosmo
  # Abstract base class for consumer loops. Subclasses implement #setup
  # (populating @consumers), #run_loop and #process; this class owns the
  # shared lifecycle: skip running when no consumer was configured, and flip
  # the shared `running` flag before entering the loop.
  class Processor
    # Build a processor, start it, and return the instance.
    def self.run(...)
      processor = new(...)
      processor.run
      processor
    end

    # @param pool    thread pool used to execute fetched work
    # @param running shared boolean flag (responds to #make_true / #true?)
    def initialize(pool, running)
      @pool = pool
      @running = running
      @consumers = {}
    end

    # Set up consumers and enter the loop; a no-op when nothing is configured.
    def run
      setup
      return if @consumers.empty?

      @running.make_true
      run_loop
    end

    private

    # Subclass hook: drive the fetch/process cycle.
    def run_loop
      raise NotImplementedError
    end

    # Subclass hook: populate @consumers.
    def setup
      raise NotImplementedError
    end

    # Subclass hook: handle one fetched batch.
    def process(...)
      raise NotImplementedError
    end

    def running?
      @running.true?
    end

    # Pull up to +batch_size+ messages for +stream_name+ and hand them to the
    # caller's block, or to #process when no block is given.
    def fetch_messages(stream_name, batch_size:, timeout:)
      messages = @consumers[stream_name].fetch(batch_size, timeout:)
      if block_given?
        yield(messages)
      else
        process(stream_name, messages)
      end
    rescue NATS::Timeout
      # a timeout only means no messages were available — keep looping
    end

    def client
      Client.instance
    end

    def stopwatch
      Utils::Stopwatch.new
    end
  end
end
@@ -0,0 +1,38 @@
1
# frozen_string_literal: true

require "forwardable"

module Cosmo
  # Publishes messages and jobs through the shared NATS client.
  class Publisher
    class << self
      extend Forwardable

      # Class-level shortcuts forwarding to the memoized singleton instance.
      delegate %i[publish publish_job publish_batch] => :instance
    end

    # Memoized process-wide instance.
    def self.instance
      @instance ||= new
    end

    def initialize
      @client = Client.instance
    end

    # Publish a single payload to +subject+. The serializer defaults to
    # Stream::Serializer (JSON); remaining options pass through to the client.
    def publish(subject, data, serializer: nil, **options)
      payload = (serializer || Stream::Serializer).serialize(data)
      @client.publish(subject, payload, **options)
    end

    # Publish an already-built job (subject/payload/params come from
    # Job::Data#to_args). Returns the job id. Raises StreamNotFoundError when
    # the target stream does not exist on the server.
    def publish_job(data)
      subject, payload, params = data.to_args
      @client.publish(subject, payload, **params)
      data.jid
    rescue NATS::JetStream::Error::NoStreamResponse
      raise StreamNotFoundError, params[:stream].to_s
    end

    # Publish each element of +batch+ to the same subject.
    def publish_batch(subject, batch, **options)
      batch.each { publish(subject, _1, **options) }
    end
  end
end
@@ -0,0 +1,21 @@
1
# frozen_string_literal: true

require "json"

module Cosmo
  module Stream
    # Holds the default configuration applied to every stream consumer and
    # publisher. "%{name}" placeholders are filled with the underscored class
    # name by Stream::ClassMethods#register.
    class Data
      DEFAULTS = {
        batch_size: 100,            # messages fetched per pull (see Processor#fetch_messages)
        consumer: {
          ack_policy: "explicit",
          max_deliver: 1,
          max_ack_pending: 3,
          ack_wait: 30,             # presumably seconds — TODO confirm against the NATS client
          subjects: ["%{name}.>"]
        },
        publisher: { subject: "%{name}.default", serializer: nil } # nil serializer => Stream::Serializer (JSON)
      }.freeze
    end
  end
end
@@ -0,0 +1,31 @@
1
# frozen_string_literal: true

require "forwardable"

module Cosmo
  module Stream
    # Wraps a raw NATS JetStream message, adding payload deserialization and
    # convenient access to sequence metadata.
    class Message
      extend Forwardable

      # Raw message passthrough: routing info and ack lifecycle calls.
      delegate %i[subject reply header metadata ack nack term in_progress] => :@msg
      # Metadata passthrough: delivery timestamp and counters.
      delegate %i[timestamp num_delivered num_pending] => :metadata

      # @param msg the underlying NATS message
      # @param serializer [#deserialize, nil] defaults to Stream::Serializer (JSON)
      def initialize(msg, serializer: nil)
        @msg = msg
        @serializer = serializer || Serializer
      end

      # Deserialized payload (decoded on every call, not memoized).
      def data
        @serializer.deserialize(@msg.data)
      end

      # Position of this message within the stream.
      def stream_sequence
        metadata.sequence.stream
      end

      # Position of this message within the consumer's delivery sequence.
      def consumer_sequence
        metadata.sequence.consumer
      end
    end
  end
end
@@ -0,0 +1,94 @@
1
# frozen_string_literal: true

module Cosmo
  module Stream
    # Pull-based consumer loop: for every configured stream it fetches message
    # batches through the shared thread pool and hands them to the registered
    # processor instance.
    class Processor < ::Cosmo::Processor
      def initialize(pool, running)
        super
        @configs = {}    # stream name => configuration hash
        @processors = {} # stream name => processor instance
      end

      private

      # Run the fetch loop on a dedicated thread so Processor#run returns.
      def run_loop
        Thread.new { work_loop }
      end

      def setup
        setup_configs
        setup_processors
        setup_consumers
      end

      # Round-robin over all consumers, posting one fetch per stream per pass.
      def work_loop
        # Loop-invariant: read the fetch timeout once, not on every iteration.
        timeout = ENV.fetch("COSMO_STREAMS_FETCH_TIMEOUT", 0.1).to_f
        while running?
          @consumers.each_key do |stream_name|
            break unless running?

            begin
              batch_size = @configs[stream_name][:batch_size]
              @pool.post { fetch_messages(stream_name, batch_size:, timeout:) }
            rescue Concurrent::RejectedExecutionError
              break # pool doesn't accept new jobs, we are shutting down
            end

            break unless running?
          end
        end
      end

      # Wrap a batch in Message objects, log start/done/fail with timing, and
      # hand the batch to the stream's processor.
      def process(stream_name, messages) # rubocop:disable Metrics/AbcSize, Metrics/MethodLength
        # Start the stopwatch first: both rescue clauses read `sw`, so it must
        # be assigned before any statement that can raise (previously a failure
        # before `sw = stopwatch` caused a NoMethodError on nil in the rescue).
        sw = stopwatch
        metadata = messages.last.metadata
        processor = @processors[stream_name]
        serializer = processor.class.default_options.dig(:publisher, :serializer)
        messages = messages.map { Message.new(_1, serializer:) }

        Logger.with(
          seq_stream: metadata.sequence.stream,
          seq_consumer: metadata.sequence.consumer,
          num_pending: metadata.num_pending,
          timestamp: metadata.timestamp
        ) { Logger.info "start" }

        processor.process(messages)
        Logger.with(elapsed: sw.elapsed_seconds) { Logger.info "done" }
      rescue StandardError => e
        # NOTE(review): failures are only visible at debug level — consider
        # Logger.error if processing errors should surface by default.
        Logger.debug e
        Logger.with(elapsed: sw.elapsed_seconds) { Logger.info "fail" }
      rescue Exception # rubocop:disable Lint/RescueException
        Logger.with(elapsed: sw.elapsed_seconds) { Logger.info "fail" }
        raise
      end

      # Build per-stream configs from the file config (consumers.streams) and
      # from classes registered via Cosmo::Stream inclusion; the latter wins
      # on key collisions. File entries with unresolvable classes are dropped.
      def setup_configs # rubocop:disable Metrics/AbcSize
        @configs.merge!(
          Config.dig(:consumers, :streams).to_h do |config|
            klass = Utils::String.safe_constantize(config[:class])
            [config[:stream].to_sym, klass ? config.merge(class: klass) : nil]
          end.compact
        )
        @configs.merge!(
          Config.system[:streams].to_h do |klass|
            [klass.default_options[:stream].to_sym, klass.default_options.merge(class: klass)]
          end
        )
      end

      # Instantiate one processor per configured stream.
      def setup_processors
        @configs.each { |stream_name, config| @processors[stream_name] = config[:class].new }
      end

      # Subscribe one pull consumer per configured stream.
      def setup_consumers
        @configs.each do |stream_name, config|
          subjects = config.dig(:consumer, :subjects)
          deliver_policy = Config.deliver_policy(config[:start_position])
          # Named to avoid shadowing the outer `config` (the original rebound it).
          consumer_config, consumer_name = config.values_at(:consumer, :consumer_name)
          @consumers[stream_name] = client.subscribe(subjects, consumer_name, consumer_config.merge(deliver_policy))
        end
      end
    end
  end
end
@@ -0,0 +1,19 @@
1
# frozen_string_literal: true

require "json"

module Cosmo
  module Stream
    # Default payload (de)serialization strategy: JSON text via Utils::Json.
    module Serializer
      module_function

      # Encode +data+ as a JSON string.
      def serialize(data)
        Utils::Json.dump(data)
      end

      # Decode a JSON payload; keys stay strings (no symbolization).
      def deserialize(payload)
        Utils::Json.parse(payload, symbolize_names: false)
      end
    end
  end
end
@@ -0,0 +1,76 @@
1
# frozen_string_literal: true

require "cosmo/stream/data"
require "cosmo/stream/message"
require "cosmo/stream/processor"
require "cosmo/stream/serializer"

module Cosmo
  # Mixin that turns a class into a stream consumer/publisher. Including it
  # registers the class with Config.system[:streams] and derives stream,
  # consumer and subject names from the class name.
  module Stream
    def self.included(base)
      base.extend(ClassMethods)
      base.register
    end

    module ClassMethods
      # Override selected defaults; nil arguments are dropped by #compact.
      def options(stream: nil, consumer_name: nil, batch_size: nil, start_position: nil, consumer: nil, publisher: nil) # rubocop:disable Metrics/ParameterLists
        default_options.merge!({ stream: stream, consumer_name: consumer_name, batch_size:, start_position:, consumer:, publisher: }.compact)
      end

      # Publish +data+ on this stream; subject and serializer fall back to the
      # class-level publisher defaults.
      def publish(data, subject: nil, **options)
        stream = default_options[:stream]
        subject ||= default_options.dig(:publisher, :subject)
        Publisher.publish(subject, data, stream: stream, serializer: default_options.dig(:publisher, :serializer), **options)
      end

      # Per-class options, deep-copied from the superclass (or Data::DEFAULTS)
      # so mutation never leaks across the inheritance chain.
      def default_options
        @default_options ||= Utils::Hash.dup(superclass.respond_to?(:default_options) ? superclass.default_options : Data::DEFAULTS)
      end

      # Called from .included: records the class in Config.system[:streams] and
      # expands "%{name}" placeholders using the underscored class name.
      def register # rubocop:disable Metrics/AbcSize
        Config.system[:streams] ||= []
        Config.system[:streams] << self

        # settings are inherited, don't try to modify them
        return if default_options != Data::DEFAULTS

        class_name = Utils::String.underscore(name)
        default_options.merge!(stream: class_name,
                               consumer_name: "consumer-#{class_name}",
                               publisher: { subject: "#{class_name}.default" })
        subjects = default_options.dig(:consumer, :subjects)
        subjects&.map! { format(_1, name: class_name) }

        subject = default_options[:publisher][:subject]
        default_options[:publisher][:subject] = format(subject, name: class_name)
      end
    end

    # Process a batch: each message is exposed via #message for the duration
    # of its #process_one call, then cleared (even on error).
    def process(messages)
      messages.each do |message|
        Thread.current[:cosmo_message] = message
        process_one
      ensure
        Thread.current[:cosmo_message] = nil
      end
    end
    alias process_many process
    alias process_batch process

    # Subclass hook: handle the current #message.
    def process_one
      raise NotImplementedError, "#{self.class}#process_one must be implemented"
    end

    def logger
      Logger.instance
    end

    # The message currently being processed on this thread (see #process).
    def message
      Thread.current[:cosmo_message]
    end

    # NOTE(review): unlike the class-level .publish, +subject+ here is a
    # required positional argument — confirm this asymmetry is intended.
    def publish(data, subject, **options)
      self.class.publish(data, subject:, **options)
    end
  end
end
@@ -0,0 +1,66 @@
1
# frozen_string_literal: true

module Cosmo
  module Utils
    # Hash helpers: in-place key symbolization, deep dup/merge/set, and
    # key-path presence checks.
    module Hash
      module_function

      # Recursively convert every hash key in +obj+ to a Symbol, mutating the
      # structure in place (arrays are walked too). Raises ArgumentError when
      # a key cannot be symbolized. Returns +obj+.
      def symbolize_keys!(obj)
        if obj.is_a?(::Hash)
          obj.keys.each do |key|
            raise ArgumentError, "key cannot be converted to symbol" unless key.respond_to?(:to_sym)

            obj[key.to_sym] = symbolize_keys!(obj.delete(key))
          end
          obj
        elsif obj.is_a?(::Array)
          obj.map! { symbolize_keys!(_1) }
        else
          obj
        end
      end

      # Deep copy via a Marshal round-trip.
      def dup(hash)
        Marshal.load(Marshal.dump(hash))
      end

      # True when the nested key path exists — unlike #dig, this distinguishes
      # a present-but-nil value from a missing key.
      def keys?(hash, *keys)
        node = hash
        keys.each do |key|
          return false unless node.is_a?(::Hash) && node.key?(key)

          node = node[key]
        end
        true
      end

      # Deep assignment: set(h, :a, :b, 1) stores h[:a][:b] = 1, creating
      # intermediate hashes as needed.
      def set(hash, *keys, value)
        *path, leaf = keys
        node = path.reduce(hash) { |base, key| base[key] ||= {} }
        node[leaf] = value
      end

      # Recursive merge: nested hashes are merged key-by-key, any other
      # collision is won by hash2. A nil hash2 returns hash1 unchanged.
      def merge(hash1, hash2)
        return hash1 unless hash2

        hash1.merge(hash2) do |_key, left, right|
          left.is_a?(::Hash) && right.is_a?(::Hash) ? merge(left, right) : right
        end
      end
    end
  end
end
@@ -0,0 +1,23 @@
1
# frozen_string_literal: true

require "json"

module Cosmo
  module Utils
    # Thin JSON wrapper that never raises: failures return +default+.
    module Json
      module_function

      # Parse a JSON string. Returns +default+ (nil unless given) when +value+
      # is not a string or not valid JSON. Keys become symbols unless
      # symbolize_names: false is passed; extra options go to ::JSON.parse.
      def parse(value, default: nil, symbolize_names: true, **options)
        ::JSON.parse(value, options.merge(symbolize_names:))
      rescue TypeError, ::JSON::ParserError
        default
      end

      # Serialize +value+ to a JSON string. Returns +default+ when the value
      # cannot be represented — e.g. NaN/Infinity (GeneratorError, previously
      # not rescued and raised through) or excessive nesting.
      def dump(value, default: nil)
        ::JSON.generate(value)
      rescue TypeError, ::JSON::GeneratorError, ::JSON::NestingError
        default
      end
    end
  end
end
@@ -0,0 +1,24 @@
1
# frozen_string_literal: true

module Cosmo
  module Utils
    # Converts OS signals into a blocking queue so a main thread can sleep
    # until one of the trapped signals arrives.
    class Signal
      # Convenience constructor mirroring ::Signal.trap.
      def self.trap(...)
        new(...)
      end

      def initialize(*signals)
        @queue = Queue.new
        signals.each do |signal|
          ::Signal.trap(signal) { push(signal) }
        end
      end

      # Block indefinitely until a signal arrives; returns the signal name.
      def wait
        @queue.pop
      end

      # Enqueue a signal (also usable to wake #wait programmatically).
      def push(signal)
        @queue.push(signal)
      end
    end
  end
end
@@ -0,0 +1,32 @@
1
# frozen_string_literal: true

module Cosmo
  module Utils
    # Stopwatch backed by the monotonic clock, so it is immune to wall-clock
    # adjustments (NTP, DST, manual changes).
    class Stopwatch
      def initialize
        reset
      end

      # @return [Float] milliseconds elapsed since creation or the last #reset
      def elapsed_millis
        (clock_time - @started_at).round(2)
      end

      # @return [Float] seconds elapsed since creation or the last #reset
      def elapsed_seconds
        (elapsed_millis / 1_000).round(2)
      end

      # Restart timing from now.
      def reset
        @started_at = clock_time
      end

      private

      # @return [Float] current monotonic time, in milliseconds
      def clock_time
        Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
      end
    end
  end
end
@@ -0,0 +1,24 @@
1
# frozen_string_literal: true

module Cosmo
  module Utils
    # String helpers: snake-casing class names and safe constant lookup.
    module String
      module_function

      # Convert a CamelCase (possibly namespaced) name to snake_case, turning
      # namespace separators into hyphens: "A::BCName" => "a-bc_name".
      def underscore(value)
        result = value.to_s.gsub("::", "-")
        result = result.gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
        result = result.gsub(/([a-z\d])([A-Z])/, '\1_\2')
        result.downcase
      end

      # Resolve a fully-qualified constant name, returning nil when the
      # constant is not defined instead of raising.
      def safe_constantize(value)
        Object.const_get(value)
      rescue NameError
        nil
      end
    end
  end
end
@@ -0,0 +1,41 @@
1
# frozen_string_literal: true

require "forwardable"

module Cosmo
  module Utils
    # A thread pool that reuses a fixed number of threads operating off a fixed size queue.
    # At any point, at most `num_threads` will be active processing tasks. When all threads are busy new
    # tasks posted to the thread pool are blocked until a thread becomes available.
    # Should a thread crash for any reason the thread will immediately be removed
    # from the pool and replaced.
    class ThreadPool
      extend Forwardable

      # Lifecycle calls pass straight through to the underlying pool.
      delegate %i[shutdown wait_for_termination] => :@pool

      # @param concurrency [Integer] number of worker threads (and max in-flight tasks)
      def initialize(concurrency)
        @mutex = Thread::Mutex.new
        # Counting semaphore: how many more tasks may be posted right now.
        @available = concurrency
        @cond = ConditionVariable.new
        # NOTE: Concurrent::FixedThreadPool comes from the concurrent-ruby gem.
        @pool = Concurrent::FixedThreadPool.new(concurrency)
      end

      # Submit a task, blocking the caller while all workers are busy.
      # The slot is released (and one waiter woken) when the task finishes,
      # even if the task raises.
      def post
        @mutex.synchronize do
          @cond.wait(@mutex) while @available <= 0
          @available -= 1
        end

        @pool.post do
          yield
        ensure
          @mutex.synchronize do
            @available += 1
            @cond.signal
          end
        end
      end
    end
  end
end
@@ -0,0 +1,5 @@
1
# frozen_string_literal: true

module Cosmo
  # Current gem version string.
  VERSION = "0.1.0"
end