sourced 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,131 @@
+ # frozen_string_literal: true
+
+ module Sourced
+   # Projectors react to events
+   # and update views of current state somewhere (a DB, files, etc)
+   class Projector
+     include Evolve
+     include Sync
+     extend Consumer
+
+     class << self
+       def handled_events = handled_events_for_evolve
+     end
+
+     attr_reader :id, :seq, :state
+
+     def initialize(id, backend: Sourced.config.backend, logger: Sourced.config.logger)
+       @id = id
+       @seq = 0
+       @backend = backend
+       @logger = logger
+       @state = init_state(id)
+     end
+
+     def inspect
+       %(<#{self.class} id:#{id} seq:#{seq}>)
+     end
+
+     def handle_events(events)
+       evolve(state, events)
+       save
+       [] # no commands
+     end
+
+     private
+
+     attr_reader :backend, :logger
+
+     def init_state(_id)
+       nil
+     end
+
+     def save
+       backend.transaction do
+         run_sync_blocks(state, nil, [])
+       end
+     end
+
+     # A StateStored projector fetches initial state from
+     # storage somewhere (DB, files, API),
+     # and then, after reacting to events and updating state,
+     # it can save it back to the same or a different storage.
+     # @example
+     #
+     #   class CartListings < Sourced::Projector::StateStored
+     #     # Fetch listing record from DB, or a new one.
+     #     def init_state(id)
+     #       CartListing.find_or_initialize(id)
+     #     end
+     #
+     #     # Evolve listing record from events
+     #     evolve Carts::ItemAdded do |listing, event|
+     #       listing.total += event.payload.price
+     #     end
+     #
+     #     # Sync listing record back to DB
+     #     sync do |listing, _, _|
+     #       listing.save!
+     #     end
+     #   end
+     class StateStored < self
+       class << self
+         def handle_events(events)
+           instance = new(events.first.stream_id)
+           instance.handle_events(events)
+         end
+       end
+     end
+
+     # An EventSourced projector fetches initial state from
+     # past events in the event store,
+     # and then, after reacting to events and updating state,
+     # it can save it to a DB table, a file, etc.
+     # @example
+     #
+     #   class CartListings < Sourced::Projector::EventSourced
+     #     # Initial in-memory state
+     #     def init_state(id)
+     #       { id:, total: 0 }
+     #     end
+     #
+     #     # Evolve listing record from events
+     #     evolve Carts::ItemAdded do |listing, event|
+     #       listing[:total] += event.payload.price
+     #     end
+     #
+     #     # Sync listing record to a file
+     #     sync do |listing, _, _|
+     #       File.write("/listings/#{listing[:id]}.json", JSON.dump(listing))
+     #     end
+     #   end
+     class EventSourced < self
+       class << self
+         def handle_events(events)
+           # The incoming batch already includes the new events,
+           # so we only need to load history up to events.first.seq - 1.
+           instance = load(events.first.stream_id, upto: events.first.seq - 1)
+           instance.handle_events(events)
+         end
+
+         # Load from event history
+         #
+         # @param stream_id [String] the stream id
+         # @return [Sourced::Projector::EventSourced]
+         def load(stream_id, upto: nil)
+           new(stream_id).load(upto:)
+         end
+       end
+
+       # TODO: this is also in Decider. DRY up?
+       def load(after: nil, upto: nil)
+         events = backend.read_event_stream(id, after:, upto:)
+         if events.any?
+           @seq = events.last.seq
+           evolve(state, events)
+         end
+         self
+       end
+     end
+   end
+ end
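
As a quick orientation (not part of the package), the class-level handle_events above is the entry point a worker would call with a batch of events for one stream. A minimal sketch, reusing the CartListings example from the comments and the backend API referenced in this file; the registration step is assumed wiring:

    # Sketch only: CartListings is the example projector from the comments above.
    Sourced::Router.register(CartListings)                 # assumed, so workers can route events to it
    events = Sourced.config.backend.read_event_stream('cart-123') # backend API as used in #load (defaults assumed)
    CartListings.handle_events(events)                      # builds an instance, evolves state, runs sync blocks
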
@@ -0,0 +1,57 @@
+ # frozen_string_literal: true
+
+ require 'rails/generators'
+ require 'rails/generators/active_record'
+
+ module Sourced
+   module Rails
+     class InstallGenerator < ::Rails::Generators::Base
+       include ActiveRecord::Generators::Migration
+
+       source_root File.expand_path('templates', __dir__)
+
+       class_option :prefix, type: :string, default: 'sourced'
+
+       def copy_initializer_file
+         create_file 'config/initializers/sourced.rb' do
+           <<~CONTENT
+             # frozen_string_literal: true
+
+             require 'sourced'
+             require 'sourced/backends/active_record_backend'
+
+             # This table prefix is used to generate the initial database migrations.
+             # If you change the table prefix here,
+             # make sure to migrate your database to the new table names.
+             Sourced::Backends::ActiveRecordBackend.table_prefix = '#{table_prefix}'
+
+             # Configure Sourced to use the ActiveRecord backend
+             Sourced.configure do |config|
+               config.backend = Sourced::Backends::ActiveRecordBackend.new
+               config.logger = Rails.logger
+             end
+           CONTENT
+         end
+       end
+
+       def copy_bin_file
+         copy_file 'bin_sourced', 'bin/sourced'
+         chmod 'bin/sourced', 0o755
+       end
+
+       def create_migration_file
+         migration_template 'create_sourced_tables.rb.erb', File.join(db_migrate_path, 'create_sourced_tables.rb')
+       end
+
+       private
+
+       def migration_version
+         "[#{ActiveRecord::VERSION::STRING.to_f}]"
+       end
+
+       def table_prefix
+         options['prefix']
+       end
+     end
+   end
+ end
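
For context, a hedged sketch of invoking this generator from a Rails app. The generator namespace below is inferred from the class name and may be exposed differently by the gem; the --prefix option maps directly to the class_option above:

    # Roughly equivalent to: bin/rails generate sourced:rails:install --prefix my_app (name assumed)
    require 'rails/generators'
    Rails::Generators.invoke('sourced:rails:install', ['--prefix', 'my_app'])
    # Then run the generated migration, e.g. with bin/rails db:migrate.
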
@@ -0,0 +1,16 @@
+ # frozen_string_literal: true
+
+ module Sourced
+   module Rails
+     class Railtie < ::Rails::Railtie
+       # TODO: review this.
+       # Workers use Async, so this is needed,
+       # but it's not clear it can be safely used with non-Async servers like Puma.
+       # config.active_support.isolation_level = :fiber
+
+       generators do
+         require 'sourced/rails/install_generator'
+       end
+     end
+   end
+ end
@@ -0,0 +1,8 @@
+ #!/usr/bin/env ruby
+
+ require_relative "../config/environment"
+ require "sourced"
+
+ ActiveRecord::Base.logger = nil
+
+ Sourced::Supervisor.start
@@ -0,0 +1,55 @@
+ # frozen_string_literal: true
+
+ class CreateSourcedTables < ActiveRecord::Migration<%= migration_version %>
+   def change
+     # Uncomment for Postgres v12 or earlier to enable gen_random_uuid() support
+     # enable_extension 'pgcrypto'
+
+     if connection.class.name == 'ActiveRecord::ConnectionAdapters::SQLite3Adapter'
+       create_table :<%= table_prefix %>_events, id: false do |t|
+         t.string :id, null: false, index: { unique: true }
+         t.bigint :global_seq, primary_key: true
+         t.bigint :seq
+         t.string :stream_id, null: false, index: true
+         t.string :type, null: false
+         t.datetime :created_at
+         t.string :producer
+         t.string :causation_id, index: true
+         t.string :correlation_id
+         t.text :payload
+       end
+     else
+       create_table :<%= table_prefix %>_events, id: :uuid do |t|
+         t.bigserial :global_seq, index: true
+         t.bigint :seq
+         t.string :stream_id, null: false, index: true
+         t.string :type, null: false
+         t.datetime :created_at
+         t.string :producer
+         t.uuid :causation_id, index: true
+         t.uuid :correlation_id
+         t.jsonb :payload
+       end
+     end
+
+     add_index :<%= table_prefix %>_events, %i[stream_id seq], unique: true
+
+     create_table :<%= table_prefix %>_streams do |t|
+       t.text :stream_id, null: false, index: { unique: true }
+       t.boolean :locked, default: false, null: false
+     end
+
+     create_table :<%= table_prefix %>_commands do |t|
+       t.string :stream_id, null: false
+       if t.class.name == 'ActiveRecord::ConnectionAdapters::SQLite3::TableDefinition'
+         t.text :data, null: false
+         t.datetime :scheduled_at, null: false, default: -> { 'CURRENT_TIMESTAMP' }
+       else
+         t.jsonb :data, null: false
+         t.datetime :scheduled_at, null: false, default: -> { 'NOW()' }
+       end
+     end
+
+     add_foreign_key :<%= table_prefix %>_commands, :<%= table_prefix %>_streams, column: :stream_id, primary_key: :stream_id
+   end
+ end
@@ -0,0 +1,57 @@
+ # frozen_string_literal: true
+
+ module Sourced
+   module React
+     PREFIX = 'reaction'
+
+     def self.included(base)
+       super
+       base.extend ClassMethods
+     end
+
+     def react(events)
+       events.flat_map { |event| __handle_reaction(event) }
+     end
+
+     private
+
+     def __handle_reaction(event)
+       method_name = Sourced.message_method_name(React::PREFIX, event.class.to_s)
+       return [] unless respond_to?(method_name)
+
+       cmds = send(method_name, event)
+       [cmds].flatten.compact.map do |cmd|
+         cmd.with_metadata(producer: self.class.consumer_info.group_id)
+       end
+     end
+
+     module ClassMethods
+       def inherited(subclass)
+         super
+         handled_events_for_react.each do |evt_type|
+           subclass.handled_events_for_react << evt_type
+         end
+       end
+
+       # Override this with extend Sourced::Consumer
+       def consumer_info
+         Sourced::Consumer::ConsumerInfo.new(group_id: name)
+       end
+
+       # These two are the Reactor interface
+       # expected by Worker
+       def handle_events(_events)
+         raise NoMethodError, "implement .handle_events(Array<Event>) in #{self}"
+       end
+
+       def handled_events_for_react
+         @handled_events_for_react ||= []
+       end
+
+       def react(event_type, &block)
+         handled_events_for_react << event_type unless event_type.is_a?(Symbol)
+         define_method(Sourced.message_method_name(React::PREFIX, event_type.to_s), &block) if block_given?
+       end
+     end
+   end
+ end
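
For illustration, a minimal reactor using this module; the event and command classes are hypothetical, but the shape follows the react DSL and the Reactor interface described in the comments above:

    # Sketch only: Carts::ItemAdded and Carts::RecalculateTotals are assumed classes.
    class CartReactor
      include Sourced::React

      react Carts::ItemAdded do |event|
        # Return zero or more commands; they get stamped with this
        # consumer's group_id as the producer.
        Carts::RecalculateTotals.new(stream_id: event.stream_id)
      end
    end

    CartReactor.new.react(events) # events: a batch of events; returns an array of commands
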
@@ -0,0 +1,148 @@
+ # frozen_string_literal: true
+
+ require 'singleton'
+
+ module Sourced
+   class Router
+     include Singleton
+
+     PID = Process.pid
+
+     class << self
+       public :new
+
+       def register(...)
+         instance.register(...)
+       end
+
+       def handle_command(command)
+         instance.handle_command(command)
+       end
+
+       def dispatch_next_command
+         instance.dispatch_next_command
+       end
+
+       def handle_events(events)
+         instance.handle_events(events)
+       end
+
+       def async_reactors
+         instance.async_reactors
+       end
+
+       def handle_and_ack_events_for_reactor(reactor, events)
+         instance.handle_and_ack_events_for_reactor(reactor, events)
+       end
+
+       def handle_next_event_for_reactor(reactor, process_name = nil)
+         instance.handle_next_event_for_reactor(reactor, process_name)
+       end
+     end
+
+     attr_reader :sync_reactors, :async_reactors, :backend, :logger
+
+     def initialize(backend: Sourced.config.backend, logger: Sourced.config.logger)
+       @backend = backend
+       @logger = logger
+       @decider_lookup = {}
+       @sync_reactors = Set.new
+       @async_reactors = Set.new
+     end
+
+     def register(thing)
+       if DeciderInterface === thing
+         thing.handled_commands.each do |cmd_type|
+           @decider_lookup[cmd_type] = thing
+         end
+       end
+
+       return unless ReactorInterface === thing
+
+       if thing.consumer_info.async
+         @async_reactors << thing
+       else
+         @sync_reactors << thing
+       end
+     end
+
+     def handle_command(command)
+       decider = @decider_lookup.fetch(command.class)
+       decider.handle_command(command)
+     end
+
+     def handle_events(events)
+       event_classes = events.map(&:class)
+       reactors = sync_reactors.filter do |r|
+         r.handled_events.intersect?(event_classes)
+       end
+       # TODO:
+       # Reactors can return commands to run next.
+       # I need to think about how best to handle this safely.
+       # It could also potentially lead to infinite recursion!
+       reactors.each do |r|
+         handle_and_ack_events_for_reactor(r, events)
+       end
+     end
+
+     def handle_next_event_for_reactor(reactor, process_name = nil)
+       backend.reserve_next_for_reactor(reactor) do |event|
+         # We're dealing with one event at a time now,
+         # so reactors should return a single command, or nothing.
+         log_event('handling event', reactor, event, process_name)
+         commands = reactor.handle_events([event])
+         if commands.any?
+           # This will run a new decider,
+           # which may be expensive, time out, or raise an exception.
+           # TODO: handle decider errors
+           backend.schedule_commands(commands)
+         end
+
+         event
+       end
+     end
+
+     # When in sync mode, we want both events
+     # and any resulting commands to be processed synchronously,
+     # and in the same transaction as events are appended to the store.
+     # We could handle commands in threads or fibers,
+     # if they belong to different streams than the events,
+     # but we need to make sure to raise exceptions in the main thread
+     # so that the transaction is rolled back.
+     def handle_and_ack_events_for_reactor(reactor, events)
+       backend.ack_on(reactor.consumer_info.group_id, events.last.id) do
+         commands = reactor.handle_events(events)
+         if commands && commands.any?
+           # TODO: commands may or may not belong to the same stream as the events.
+           # If they belong to the same stream,
+           # they need to be dispatched in order to preserve per-stream ordering.
+           # If they belong to different streams, they can be dispatched in parallel
+           # or put in a command bus.
+           # TODO2: we also need to handle exceptions here
+           # TODO3: this is not tested
+           commands.each do |cmd|
+             log_event(' -> produced command', reactor, cmd)
+             handle_command(cmd)
+           end
+         end
+       end
+     end
+
+     def dispatch_next_command
+       backend.next_command do |cmd|
+         # TODO: error handling
+         handle_command(cmd)
+       end
+     end
+
+     private
+
+     def log_event(label, reactor, event, process_name = PID)
+       logger.info "[#{process_name}]: #{reactor.consumer_info.group_id} #{label} #{event_info(event)}"
+     end
+
+     def event_info(event)
+       %([#{event.type}] stream_id:#{event.stream_id} seq:#{event.seq})
+     end
+   end
+ end
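
A rough end-to-end sketch of how the Router is meant to be used, assuming a decider class that handles commands and a projector registered as a sync reactor (all class names below are hypothetical):

    # Sketch only: Carts::Cart (a decider) and CartListings (a projector) are assumed.
    Sourced::Router.register(Carts::Cart)
    Sourced::Router.register(CartListings)

    # Commands are routed to the decider registered for their class...
    Sourced::Router.handle_command(Carts::AddItem.new(stream_id: 'cart-123'))
    # ...and newly appended events are fanned out to matching sync reactors.
    Sourced::Router.handle_events(new_events) # new_events: events just appended to the store
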
@@ -0,0 +1,49 @@
+ # frozen_string_literal: true
+
+ require 'async'
+ require 'console'
+ require 'sourced/worker'
+
+ module Sourced
+   class Supervisor
+     def self.start(...)
+       new(...).start
+     end
+
+     def initialize(logger: Sourced.config.logger, count: 2)
+       @logger = logger
+       @count = count
+       @workers = []
+     end
+
+     def start
+       logger.info("Starting sync supervisor with #{@count} workers")
+       set_signal_handlers
+       @workers = @count.times.map do |i|
+         Worker.new(logger:, name: "worker-#{i}")
+       end
+       Sync do |task|
+         @workers.each do |wrk|
+           task.async do
+             wrk.poll
+           end
+         end
+       end
+     end
+
+     def stop
+       logger.info("Stopping #{@workers.size} workers")
+       @workers.each(&:stop)
+       logger.info('All workers stopped')
+     end
+
+     def set_signal_handlers
+       Signal.trap('INT') { stop }
+       Signal.trap('TERM') { stop }
+     end
+
+     private
+
+     attr_reader :logger
+   end
+ end
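
The supervisor simply fans out N polling workers on an Async reactor. For example, bin/sourced could pass a custom worker count; the count: keyword comes straight from the initializer above:

    # Sketch: start four polling workers instead of the default two.
    Sourced::Supervisor.start(count: 4)
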
@@ -0,0 +1,80 @@
+ # frozen_string_literal: true
+
+ module Sourced
+   module Sync
+     def self.included(base)
+       super
+       base.extend ClassMethods
+     end
+
+     def run_sync_blocks(state, command, events)
+       self.class.sync_blocks.each do |blk|
+         case blk
+         when Proc
+           if blk.arity == 2 # (command, events)
+             instance_exec(command, events, &blk)
+           else # (state, command, events)
+             instance_exec(state, command, events, &blk)
+           end
+         else
+           blk.call(state, command, events)
+         end
+       end
+     end
+
+     CallableInterface = Sourced::Types::Interface[:call]
+
+     class SyncReactor < SimpleDelegator
+       def handle_events(events)
+         Router.handle_and_ack_events_for_reactor(__getobj__, events)
+       end
+
+       def call(_state, _command, events)
+         handle_events(events)
+       end
+     end
+
+     module ClassMethods
+       def inherited(subclass)
+         super
+         sync_blocks.each do |blk|
+           subclass.sync_blocks << blk
+         end
+       end
+
+       def sync_blocks
+         @sync_blocks ||= []
+       end
+
+       def sync(callable = nil, &block)
+         callable ||= block
+         callable = case callable
+         when Proc
+           unless (2..3).include?(callable.arity)
+             raise ArgumentError, 'sync block must accept 2 or 3 arguments'
+           end
+
+           callable
+         when ReactorInterface
+           # Wrap reactors here.
+           # TODO: if the sync reactor runs successfully,
+           # A) we want to ACK processed events for it in the offsets table,
+           #    so that if the reactor is moved to async execution
+           #    it doesn't reprocess the same events again.
+           # B) the reactor's .handle_events may return commands.
+           #    Do we want to dispatch those commands inline?
+           #    Or is this another reason to have a separate async command bus?
+           SyncReactor.new(callable)
+         when CallableInterface
+           callable
+         else
+           raise ArgumentError, 'sync block must be a Proc, Sourced::ReactorInterface or #call interface'
+         end
+
+         sync_blocks << callable
+       end
+     end
+   end
+ end
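
A sketch of the three shapes .sync accepts, based on the case statement above; the surrounding projector and the AuditTrail/SomeOtherReactor classes are hypothetical:

    class CartListings < Sourced::Projector::StateStored
      # 1. A block taking (state, command, events) -- or just (command, events)
      sync do |listing, _command, _events|
        listing.save!
      end

      # 2. Any object responding to #call(state, command, events)
      sync AuditTrail.new

      # 3. Another reactor; it's wrapped in SyncReactor and ACKed inline
      sync SomeOtherReactor
    end
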
@@ -0,0 +1,24 @@
+ # frozen_string_literal: true
+
+ require 'plumb'
+ require 'time'
+ require 'securerandom'
+
+ module Sourced
+   module Types
+     include Plumb::Types
+
+     # A UUID string, or generate a new one
+     AutoUUID = UUID::V4.default { SecureRandom.uuid }
+
+     # Deeply symbolize keys of a hash
+     # Usage:
+     #   SymbolizedHash.parse({ 'a' => { 'b' => 'c' } }) # => { a: { b: 'c' } }
+     SymbolizedHash = Hash[
+       # String keys are converted to symbols
+       (Symbol | String.transform(::Symbol, &:to_sym)),
+       # Hash values are recursively symbolized
+       Any.defer { SymbolizedHash } | Any
+     ]
+   end
+ end
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ module Sourced
+   VERSION = '0.0.1'
+ end