sequent 0.1.10 → 1.0.0

This diff shows the content of publicly available package versions as released to one of the supported registries, and is provided for informational purposes only.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/db/sequent_schema.rb +0 -3
  3. data/lib/sequent/configuration.rb +10 -0
  4. data/lib/sequent/core/aggregate_repository.rb +41 -8
  5. data/lib/sequent/core/aggregate_root.rb +0 -24
  6. data/lib/sequent/core/aggregate_snapshotter.rb +2 -2
  7. data/lib/sequent/core/base_command_handler.rb +2 -6
  8. data/lib/sequent/core/base_event_handler.rb +1 -1
  9. data/lib/sequent/core/command.rb +23 -15
  10. data/lib/sequent/core/command_service.rb +1 -1
  11. data/lib/sequent/core/core.rb +1 -1
  12. data/lib/sequent/core/event.rb +0 -21
  13. data/lib/sequent/core/event_record.rb +11 -1
  14. data/lib/sequent/core/event_store.rb +122 -18
  15. data/lib/sequent/core/ext/ext.rb +20 -0
  16. data/lib/sequent/core/helpers/array_with_type.rb +4 -0
  17. data/lib/sequent/core/helpers/association_validator.rb +22 -7
  18. data/lib/sequent/core/helpers/attribute_support.rb +15 -6
  19. data/lib/sequent/core/helpers/param_support.rb +28 -29
  20. data/lib/sequent/core/helpers/self_applier.rb +4 -3
  21. data/lib/sequent/core/helpers/string_to_value_parsers.rb +10 -3
  22. data/lib/sequent/core/helpers/type_conversion_support.rb +5 -0
  23. data/lib/sequent/core/helpers/uuid_helper.rb +2 -2
  24. data/lib/sequent/core/helpers/value_validators.rb +2 -2
  25. data/lib/sequent/core/random_uuid_generator.rb +9 -0
  26. data/lib/sequent/core/record_sessions/active_record_session.rb +11 -5
  27. data/lib/sequent/core/record_sessions/replay_events_session.rb +138 -108
  28. data/lib/sequent/core/sequent_oj.rb +4 -3
  29. data/lib/sequent/core/stream_record.rb +1 -1
  30. data/lib/sequent/core/transactions/active_record_transaction_provider.rb +1 -1
  31. data/lib/sequent/migrations/migrate_events.rb +22 -15
  32. data/lib/sequent/rake/tasks.rb +102 -0
  33. data/lib/sequent/sequent.rb +4 -0
  34. data/lib/sequent/support.rb +3 -0
  35. data/lib/sequent/support/database.rb +55 -0
  36. data/lib/sequent/support/view_projection.rb +58 -0
  37. data/lib/sequent/support/view_schema.rb +22 -0
  38. data/lib/sequent/test/command_handler_helpers.rb +21 -8
  39. data/lib/sequent/test/event_handler_helpers.rb +7 -3
  40. data/lib/version.rb +1 -1
  41. metadata +54 -9
  42. data/lib/sequent/core/tenant_event_store.rb +0 -24
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 17b7ed3c1ef96f46ab49ba1f980e06d596b50a30
- data.tar.gz: cc6ab65e25cfbe2822a88aaf4308683d4ba18eab
+ metadata.gz: f1c376e79b2afb9c653058f4cb38e7d24bb7d742
+ data.tar.gz: 751c997c10407d7dd073b70b268ed69d16d1b459
  SHA512:
- metadata.gz: 0cc5a1eabb3991255cbbf40fc4bdb50a25bb57dcec04f7a81a624307beb719efe225acfdf541291524b48e699ea91f3dc0886e1ce4965de1b2bfb8155466ea90
- data.tar.gz: 22461d8b930f1fea40c72032bd9115af5e21ca19ef63cd460a7ebe80ddd8cf5e7a61518b3971923937fa14ca0d024d2bb42747c73ad8202881235a8a8fe18766
+ metadata.gz: 042064fb131306a5197e035330dceb3e467c2031f73932777286ec72219545ef7fcc5bbd3b0322177801d994e87adf66735be07e4b03fbf2e725d209b11e8b09
+ data.tar.gz: 777558493270ceeb8e80038e6b2336bfebbdbb103e74c341f676e53599971e8723862b3b6aa424d008f294ff78108a580652f31a90600fa90a275387696ba5f2
data/db/sequent_schema.rb CHANGED
@@ -2,7 +2,6 @@ ActiveRecord::Schema.define do

  create_table "event_records", :force => true do |t|
  t.string "aggregate_id", :null => false
- t.string "organization_id"
  t.integer "sequence_number", :null => false
  t.datetime "created_at", :null => false
  t.string "event_type", :null => false
@@ -12,7 +11,6 @@ ActiveRecord::Schema.define do
  end

  create_table "command_records", :force => true do |t|
- t.string "organization_id"
  t.string "user_id"
  t.string "aggregate_id"
  t.string "command_type", :null => false
@@ -31,7 +29,6 @@ CREATE UNIQUE INDEX unique_event_per_aggregate ON event_records (
  CREATE INDEX snapshot_events ON event_records (aggregate_id, sequence_number DESC) WHERE event_type = 'Sequent::Core::SnapshotEvent'
  }
  add_index "event_records", ["command_record_id"], :name => "index_event_records_on_command_record_id"
- add_index "event_records", ["organization_id"], :name => "index_event_records_on_organization_id"
  add_index "event_records", ["event_type"], :name => "index_event_records_on_event_type"
  add_index "event_records", ["created_at"], :name => "index_event_records_on_created_at"
data/lib/sequent/configuration.rb CHANGED
@@ -19,6 +19,10 @@ module Sequent

  attr_accessor :event_handlers

+ attr_accessor :uuid_generator
+
+ attr_accessor :disable_event_handlers
+
  def self.instance
  @instance ||= new
  end
@@ -27,6 +31,10 @@ module Sequent
  @instance = new
  end

+ def self.restore(configuration)
+ @instance = configuration
+ end
+
  def initialize
  self.command_handlers = []
  self.command_filters = []
@@ -38,6 +46,8 @@ module Sequent
  self.stream_record_class = Sequent::Core::StreamRecord
  self.snapshot_event_class = Sequent::Core::SnapshotEvent
  self.transaction_provider = Sequent::Core::Transactions::NoTransactions.new
+ self.uuid_generator = Sequent::Core::RandomUuidGenerator
+ self.disable_event_handlers = false
  end

  def event_store=(event_store)
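Both new options are plain accessors on the configuration singleton. A minimal sketch of setting them; FixedUuidGenerator is purely illustrative (e.g. for deterministic tests), and the generator interface is an assumption.

# Illustrative only; FixedUuidGenerator is not part of the gem and the
# class-level `uuid` method is an assumed interface.
class FixedUuidGenerator
  def self.uuid
    "11111111-2222-3333-4444-555555555555"
  end
end

config = Sequent::Configuration.instance
config.uuid_generator = FixedUuidGenerator   # default is Sequent::Core::RandomUuidGenerator
config.disable_event_handlers = true         # skip publishing to event handlers, e.g. during bulk imports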
data/lib/sequent/core/aggregate_repository.rb CHANGED
@@ -18,13 +18,13 @@ module Sequent

  attr_reader :event_store

- class NonUniqueAggregateId < Exception
+ class NonUniqueAggregateId < StandardError
  def initialize(existing, new)
  super "Duplicate aggregate #{new} with same key as existing #{existing}"
  end
  end

- class AggregateNotFound < Exception
+ class AggregateNotFound < StandardError
  def initialize(id)
  super "Aggregate with id #{id} not found"
  end
@@ -56,16 +56,49 @@ module Sequent
  # Loads aggregate by given id and class
  # Returns the one in the current Unit Of Work otherwise loads it from history.
  def load_aggregate(aggregate_id, clazz = nil)
- result = aggregates.fetch(aggregate_id) do |_|
- stream, events = @event_store.load_events(aggregate_id)
- raise AggregateNotFound.new(aggregate_id) unless stream
+ load_aggregates([aggregate_id], clazz)[0]
+ end
+
+ ##
+ # Loads multiple aggregates at once.
+ # Returns the ones in the current Unit Of Work otherwise loads it from history.
+ #
+ # Note: This will load all the aggregates in memory, so querying 100s of aggregates
+ # with 100s of events could cause memory issues.
+ #
+ # Returns all aggregates or raises +AggregateNotFound+
+ # If +clazz+ is given and one of the aggregates is not of the correct type
+ # a +TypeError+ is raised.
+ #
+ # +aggregate_ids+ The ids of the aggregates to be loaded
+ # +clazz+ Optional argument that checks if all aggregates are of type +clazz+
+ def load_aggregates(aggregate_ids, clazz = nil)
+ fail ArgumentError.new('aggregate_ids is required') unless aggregate_ids
+ return [] if aggregate_ids.empty?
+
+ _aggregate_ids = aggregate_ids.uniq
+ _aggregates = aggregates.values_at(*_aggregate_ids).compact
+ _query_ids = _aggregate_ids - _aggregates.map(&:id)
+
+ _aggregates += @event_store.load_events_for_aggregates(_query_ids).map do |stream, events|
  aggregate_class = Class.const_get(stream.aggregate_type)
- aggregates[aggregate_id] = aggregate_class.load_from_history(stream, events)
+ aggregate_class.load_from_history(stream, events)
+ end
+
+ if _aggregates.count != _aggregate_ids.count
+ missing_aggregate_ids = _aggregate_ids - _aggregates.map(&:id)
+ raise AggregateNotFound.new(missing_aggregate_ids)
  end

- raise TypeError, "#{result.class} is not a #{clazz}" if result && clazz && !(result.class <= clazz)
+ if clazz
+ _aggregates.each do |aggregate|
+ raise TypeError, "#{aggregate.class} is not a #{clazz}" if !(aggregate.class <= clazz)
+ end
+ end

- result
+ _aggregates.map do |aggregate|
+ aggregates[aggregate.id] = aggregate
+ end
  end

  ##
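Both repository exceptions now derive from StandardError (so a bare rescue catches them), and aggregates can be fetched in one round trip. A short usage sketch; Invoice and the id variables are hypothetical, only load_aggregate and load_aggregates come from the code above.

# Hypothetical calling code, e.g. inside a command handler that holds @repository.
invoices = @repository.load_aggregates([invoice_id_1, invoice_id_2], Invoice)
# raises AggregateNotFound listing the missing ids, or TypeError if an
# aggregate is not an Invoice

invoice = @repository.load_aggregate(invoice_id_1, Invoice)
# single-aggregate load now delegates to load_aggregates([...])[0]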
data/lib/sequent/core/aggregate_root.rb CHANGED
@@ -103,29 +103,5 @@ module Sequent
  @uncommitted_events << event
  end
  end
-
- # You can use this class when running in a multi tenant environment
- # It basically makes sure that the +organization_id+ (the tenant_id for historic reasons)
- # is available for the subclasses
- class TenantAggregateRoot < AggregateRoot
- attr_reader :organization_id
-
- def initialize(id, organization_id)
- super(id)
- @organization_id = organization_id
- end
-
- def load_from_history(stream, events)
- raise "Empty history" if events.empty?
- @organization_id = events.first.organization_id
- super
- end
-
- protected
-
- def build_event(event, params = {})
- super(event, params.merge({organization_id: @organization_id}))
- end
- end
  end
  end
data/lib/sequent/core/aggregate_snapshotter.rb CHANGED
@@ -6,7 +6,7 @@ module Sequent

  class AggregateSnapshotter < BaseCommandHandler

- def handles_message?(message)
+ def self.handles_message?(message)
  message.is_a? SnapshotCommand
  end

@@ -27,7 +27,7 @@ module Sequent
  Sequent.logger.info "Taking snapshot for aggregate #{aggregate}"
  aggregate.take_snapshot!
  rescue => e
- Sequent.logger.warn "Failed to take snapshot for aggregate #{aggregate_id}: #{e}", e.inspect
+ Sequent.logger.error("Failed to take snapshot for aggregate #{aggregate_id}: #{e}, #{e.inspect}")
  end
  end
  end
data/lib/sequent/core/base_command_handler.rb CHANGED
@@ -26,16 +26,12 @@ module Sequent
  @repository = repository
  end

- def handles_message?(command)
- self.class.message_mapping.keys.include? command.class
- end
-
  protected
- def do_with_aggregate(command, clazz, aggregate_id = nil)
+
+ def do_with_aggregate(command, clazz = nil, aggregate_id = nil)
  aggregate = @repository.load_aggregate(aggregate_id.nil? ? command.aggregate_id : aggregate_id, clazz)
  yield aggregate if block_given?
  end
-
  end
  end
  end
data/lib/sequent/core/base_event_handler.rb CHANGED
@@ -44,7 +44,7 @@ module Sequent

  def_delegators :@record_session, :update_record, :create_record, :create_or_update_record, :get_record!, :get_record,
  :delete_all_records, :update_all_records, :do_with_records, :do_with_record, :delete_record,
- :find_records, :last_record
+ :find_records, :last_record, :execute

  end

data/lib/sequent/core/command.rb CHANGED
@@ -25,6 +25,9 @@ module Sequent
  @created_at = DateTime.now
  end

+ def self.inherited(subclass)
+ Commands << subclass
+ end
  end

  module UpdateSequenceNumber
@@ -36,6 +39,26 @@ module Sequent
  end
  end

+ class Commands
+ class << self
+ def commands
+ @commands ||= []
+ end
+
+ def all
+ commands
+ end
+
+ def <<(command)
+ commands << command
+ end
+
+ def find(command_name)
+ commands.find { |c| c.name == command_name }
+ end
+ end
+ end
+
  # Most commonly used command
  # Command can be instantiated just by using:
  #
@@ -53,25 +76,10 @@ module Sequent
  raise ArgumentError, "Missing aggregate_id" if args[:aggregate_id].nil?
  super
  end
-
  end

  class UpdateCommand < Command
  include UpdateSequenceNumber
  end
-
- class TenantCommand < Command
- attrs organization_id: String
-
- def initialize(args = {})
- raise ArgumentError, "Missing organization_id" if args[:organization_id].nil?
- super
- end
- end
-
- class UpdateTenantCommand < TenantCommand
- include UpdateSequenceNumber
- end
-
  end
  end
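Every command subclass now registers itself through the inherited hook, so commands can be enumerated or looked up by class name. A small sketch; MyApp::CreateInvoice is a hypothetical command, and Commands is assumed to resolve under Sequent::Core as the surrounding code suggests.

# Hypothetical command class; registration happens automatically via the inherited hook.
module MyApp
  class CreateInvoice < Sequent::Core::Command
  end
end

Sequent::Core::Commands.all                          # includes MyApp::CreateInvoice
Sequent::Core::Commands.find("MyApp::CreateInvoice") # lookup by full class name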
data/lib/sequent/core/command_service.rb CHANGED
@@ -41,7 +41,7 @@ module Sequent

  raise CommandNotValid.new(command) unless command.valid?
  parsed_command = command.parse_attrs_to_correct_types
- command_handlers.select { |h| h.handles_message?(parsed_command) }.each { |h| h.handle_message parsed_command }
+ command_handlers.select { |h| h.class.handles_message?(parsed_command) }.each { |h| h.handle_message parsed_command }
  repository.commit(parsed_command)
  end
  end
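handles_message? is now looked up on the handler class rather than the instance, which is why AggregateSnapshotter above defines it as self.handles_message?. A custom handler that does not rely on the message-mapping DSL would do the same; SnapshotAllCommand below is hypothetical, only the class-level handles_message? contract comes from the diff.

# Illustrative custom handler.
class MyCustomHandler < Sequent::Core::BaseCommandHandler
  def self.handles_message?(command)
    command.is_a?(SnapshotAllCommand)
  end

  def handle_message(command)
    do_with_aggregate(command) do |aggregate|
      # mutate the aggregate here
    end
  end
end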
data/lib/sequent/core/core.rb CHANGED
@@ -11,8 +11,8 @@ require_relative 'command_service'
  require_relative 'value_object'
  require_relative 'base_event_handler'
  require_relative 'event_store'
- require_relative 'tenant_event_store'
  require_relative 'event_record'
  require_relative 'command_record'
  require_relative 'aggregate_snapshotter'
  require_relative 'workflow'
+ require_relative 'random_uuid_generator'
data/lib/sequent/core/event.rb CHANGED
@@ -42,29 +42,8 @@ module Sequent

  end

- class TenantEvent < Event
-
- attrs organization_id: String
-
- def initialize(args = {})
- super
- raise "Missing organization_id" unless @organization_id
- end
-
- protected
- def payload_variables
- super << :"@organization_id"
- end
-
- end
-
- class CreateEvent < TenantEvent
-
- end
-
  class SnapshotEvent < Event
  attrs data: String
  end
-
  end
  end
data/lib/sequent/core/event_record.rb CHANGED
@@ -16,7 +16,17 @@ module Sequent
  self.organization_id = event.organization_id if event.respond_to?(:organization_id)
  self.event_type = event.class.name
  self.created_at = event.created_at
- self.event_json = Sequent::Core::Oj.dump(event.attributes)
+ self.event_json = self.class.serialize_to_json(event)
+ end
+
+ module ClassMethods
+ def serialize_to_json(event)
+ Sequent::Core::Oj.dump(event)
+ end
+ end
+
+ def self.included(host_class)
+ host_class.extend(ClassMethods)
  end
  end
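Serialization of event_json now goes through a class-level serialize_to_json hook (the module extends the host class via self.included), so it can be overridden on a custom record class. A hedged sketch; MyEventRecord is hypothetical and assumes the record class is swapped in through the configuration shown earlier.

# Illustrative override; the default implementation is Sequent::Core::Oj.dump(event).
class MyEventRecord < Sequent::Core::EventRecord
  def self.serialize_to_json(event)
    Sequent::Core::Oj.dump(event) # customize serialization here if needed
  end
end

Sequent::Configuration.instance.event_record_class = MyEventRecord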
data/lib/sequent/core/event_store.rb CHANGED
@@ -9,6 +9,35 @@ module Sequent
  include ActiveRecord::ConnectionAdapters::Quoting
  extend Forwardable

+ class PublishEventError < RuntimeError
+ attr_reader :event_handler_class, :event
+
+ def initialize(event_handler_class, event)
+ @event_handler_class = event_handler_class
+ @event = event
+ end
+
+ def message
+ "Event Handler: #{@event_handler_class.inspect}\nEvent: #{@event.inspect}\nCause: #{cause.inspect}"
+ end
+ end
+
+ class OptimisticLockingError < RuntimeError
+ end
+
+ class DeserializeEventError < RuntimeError
+ attr_reader :event_hash
+
+ def initialize(event_hash)
+ @event_hash = event_hash
+ end
+
+ def message
+ "Event hash: #{event_hash.inspect}\nCause: #{cause.inspect}"
+ end
+
+ end
+
  attr_accessor :configuration
  def_delegators :@configuration, :stream_record_class, :event_record_class, :snapshot_event_class, :event_handlers

@@ -26,28 +55,37 @@ module Sequent
  #
  def commit_events(command, streams_with_events)
  store_events(command, streams_with_events)
- publish_events(streams_with_events.flat_map {|_, events| events}, event_handlers)
+ publish_events(streams_with_events.flat_map { |_, events| events }, event_handlers)
  end

  ##
  # Returns all events for the aggregate ordered by sequence_number
  #
  def load_events(aggregate_id)
- stream = stream_record_class.where(aggregate_id: aggregate_id).first
- return nil unless stream
+ load_events_for_aggregates([aggregate_id])[0]
+ end
+
+ def load_events_for_aggregates(aggregate_ids)
+ return [] if aggregate_ids.none?
+
+ streams = stream_record_class.where(aggregate_id: aggregate_ids)
+
  events = event_record_class.connection.select_all(%Q{
  SELECT event_type, event_json
  FROM #{quote_table_name event_record_class.table_name}
- WHERE aggregate_id = #{quote aggregate_id}
- AND sequence_number >= COALESCE((SELECT MAX(sequence_number)
+ WHERE aggregate_id in (#{aggregate_ids.map{ |aggregate_id| quote(aggregate_id)}.join(",")})
+ AND sequence_number >= COALESCE((SELECT MAX(sequence_number)
  FROM #{quote_table_name event_record_class.table_name}
  WHERE event_type = #{quote snapshot_event_class.name}
- AND aggregate_id = #{quote aggregate_id}), 0)
- ORDER BY sequence_number ASC, (CASE event_type WHEN #{quote snapshot_event_class.name} THEN 0 ELSE 1 END) ASC
+ AND aggregate_id in (#{aggregate_ids.map{ |aggregate_id| quote(aggregate_id)}.join(",")})), 0)
+ ORDER BY sequence_number ASC, (CASE event_type WHEN #{quote snapshot_event_class.name} THEN 0 ELSE 1 END) ASC
  }).map! do |event_hash|
  deserialize_event(event_hash)
  end
- [stream.event_stream, events]
+
+ events
+ .group_by { |event| event.aggregate_id }
+ .map { |aggregate_id, _events| [streams.find { |stream_record| stream_record.aggregate_id == aggregate_id }.event_stream, _events] }
  end

  def stream_exists?(aggregate_id)
@@ -58,11 +96,47 @@ SELECT event_type, event_json
  # Replays all events in the event store to the registered event_handlers.
  #
  # @param block that returns the events.
+ # <b>DEPRECATED:</b> use <tt>replay_events_from_cursor</tt> instead.
  def replay_events
- events = yield.map {|event_hash| deserialize_event(event_hash)}
+ warn "[DEPRECATION] `replay_events` is deprecated in favor of `replay_events_from_cursor`"
+ events = yield.map { |event_hash| deserialize_event(event_hash) }
  publish_events(events, event_handlers)
  end

+ ##
+ # Replays all events on an `EventRecord` cursor from the given block.
+ #
+ # Prefer this replay method if your db adapter supports cursors.
+ #
+ # @param get_events lambda that returns the events cursor
+ # @param on_progress lambda that gets called on substantial progress
+ def replay_events_from_cursor(block_size: 2000,
+ get_events:,
+ on_progress: PRINT_PROGRESS)
+ progress = 0
+ cursor = get_events.call
+ ids_replayed = []
+ cursor.each_row(block_size: block_size).each do |record|
+ event = deserialize_event(record)
+ publish_events([event], event_handlers)
+ progress += 1
+ ids_replayed << record['id']
+ if progress % block_size == 0
+ on_progress[progress, false, ids_replayed]
+ ids_replayed.clear
+ end
+ end
+ on_progress[progress, true, ids_replayed]
+ end
+
+ PRINT_PROGRESS = lambda do |progress, done, _|
+ if done
+ puts "Done replaying #{progress} events"
+ else
+ puts "Replayed #{progress} events"
+ end
+ end
+
  ##
  # Returns the ids of aggregates that need a new snapshot.
  #
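A usage sketch for the new cursor-based replay. The get_events lambda must return something that responds to each_row (for example an ActiveRecord relation extended by the postgresql_cursor gem) and yields rows containing id, event_type and event_json; the query below is illustrative, not part of the gem.

# Illustrative replay call; the relation/cursor setup is an assumption.
event_store = Sequent::Configuration.instance.event_store

event_store.replay_events_from_cursor(
  block_size: 1000,
  get_events: -> { Sequent::Core::EventRecord.select('id, event_type, event_json').order(:id) },
  on_progress: ->(progress, done, _ids) {
    puts(done ? "replay finished after #{progress} events" : "replayed #{progress} events so far")
  }
)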
@@ -72,7 +146,7 @@ SELECT event_type, event_json
  query = %Q{
  SELECT aggregate_id
  FROM #{stream_table} stream
- WHERE aggregate_id > COALESCE(#{quote last_aggregate_id}, '')
+ WHERE aggregate_id::varchar > COALESCE(#{quote last_aggregate_id}, '')
  AND snapshot_threshold IS NOT NULL
  AND snapshot_threshold <= (
  (SELECT MAX(events.sequence_number) FROM #{event_table} events WHERE events.event_type <> #{quote snapshot_event_class.name} AND stream.aggregate_id = events.aggregate_id) -
@@ -81,7 +155,7 @@ SELECT aggregate_id
  LIMIT #{quote limit}
  FOR UPDATE
  }
- event_record_class.connection.select_all(query).map {|x| x['aggregate_id']}
+ event_record_class.connection.select_all(query).map { |x| x['aggregate_id'] }
  end

  def find_event_stream(aggregate_id)
@@ -95,10 +169,16 @@ SELECT aggregate_id

  private

+ def column_names
+ @column_names ||= event_record_class.column_names.reject { |c| c == 'id' }
+ end
+
  def deserialize_event(event_hash)
  event_type = event_hash.fetch("event_type")
  event_json = Sequent::Core::Oj.strict_load(event_hash.fetch("event_json"))
  resolve_event_type(event_type).deserialize_from_json(event_json)
+ rescue
+ raise DeserializeEventError.new(event_hash)
  end

  def resolve_event_type(event_type)
@@ -106,28 +186,52 @@ SELECT aggregate_id
  end

  def publish_events(events, event_handlers)
- events.each do |event|
- event_handlers.each do |handler|
- handler.handle_message event
+ return if configuration.disable_event_handlers
+ event_handlers.each do |handler|
+ events.each do |event|
+ begin
+ handler.handle_message event
+ rescue
+ raise PublishEventError.new(handler.class, event)
+ end
  end
  end
  end

  def store_events(command, streams_with_events = [])
  command_record = CommandRecord.create!(command: command)
- streams_with_events.each do |event_stream, uncommitted_events|
+ event_records = streams_with_events.flat_map do |event_stream, uncommitted_events|
  unless event_stream.stream_record_id
  stream_record = stream_record_class.new
  stream_record.event_stream = event_stream
  stream_record.save!
  event_stream.stream_record_id = stream_record.id
  end
- uncommitted_events.each do |event|
- event_record_class.create!(command_record: command_record, stream_record_id: event_stream.stream_record_id, event: event)
+ uncommitted_events.map do |event|
+ values = {
+ command_record_id: command_record.id,
+ stream_record_id: event_stream.stream_record_id,
+ aggregate_id: event.aggregate_id,
+ sequence_number: event.sequence_number,
+ event_type: event.class.name,
+ event_json: event_record_class.serialize_to_json(event),
+ created_at: event.created_at
+ }
+ values = values.merge(organization_id: event.organization_id) if event.respond_to?(:organization_id)
+
+ event_record_class.new(values)
  end
  end
+ connection = event_record_class.connection
+ values = event_records
+ .map { |r| "(#{column_names.map { |c| connection.quote(r[c.to_sym]) }.join(',')})" }
+ .join(',')
+ columns = column_names.map { |c| connection.quote_column_name(c) }.join(',')
+ sql = %Q{insert into #{connection.quote_table_name(event_record_class.table_name)} (#{columns}) values #{values}}
+ event_record_class.connection.insert(sql)
+ rescue ActiveRecord::RecordNotUnique
+ fail OptimisticLockingError.new
  end
  end
-
  end
  end
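Uncommitted events are now written with a single multi-row INSERT, and a unique-constraint violation (two sessions writing the same aggregate_id/sequence_number) surfaces as OptimisticLockingError instead of leaking ActiveRecord::RecordNotUnique; handler failures during publishing are wrapped in PublishEventError with the original exception available as cause. A hedged sketch of handling the locking error around commit_events, where the reload-and-retry policy is the caller's own.

# Illustrative conflict handling; commit_events and OptimisticLockingError come from the diff above.
begin
  event_store.commit_events(command, streams_with_events)
rescue Sequent::Core::EventStore::OptimisticLockingError
  # Another session stored an event with the same aggregate_id/sequence_number first.
  # Reload the aggregate, reapply the command, and commit again (policy up to the caller).
end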