sequent 7.1.1 → 8.0.0

Files changed (78)
  1. checksums.yaml +4 -4
  2. data/bin/sequent +6 -107
  3. data/db/sequent_8_migration.sql +120 -0
  4. data/db/sequent_pgsql.sql +416 -0
  5. data/db/sequent_schema.rb +11 -57
  6. data/db/sequent_schema_indexes.sql +37 -0
  7. data/db/sequent_schema_partitions.sql +34 -0
  8. data/db/sequent_schema_tables.sql +74 -0
  9. data/lib/sequent/cli/app.rb +132 -0
  10. data/lib/sequent/cli/sequent_8_migration.rb +180 -0
  11. data/lib/sequent/configuration.rb +11 -8
  12. data/lib/sequent/core/aggregate_repository.rb +2 -2
  13. data/lib/sequent/core/aggregate_root.rb +32 -9
  14. data/lib/sequent/core/aggregate_snapshotter.rb +8 -6
  15. data/lib/sequent/core/command_record.rb +27 -18
  16. data/lib/sequent/core/command_service.rb +2 -2
  17. data/lib/sequent/core/event_publisher.rb +1 -1
  18. data/lib/sequent/core/event_record.rb +37 -17
  19. data/lib/sequent/core/event_store.rb +101 -119
  20. data/lib/sequent/core/helpers/array_with_type.rb +1 -1
  21. data/lib/sequent/core/helpers/association_validator.rb +2 -2
  22. data/lib/sequent/core/helpers/attribute_support.rb +8 -8
  23. data/lib/sequent/core/helpers/equal_support.rb +3 -3
  24. data/lib/sequent/core/helpers/message_matchers/has_attrs.rb +2 -0
  25. data/lib/sequent/core/helpers/message_router.rb +2 -2
  26. data/lib/sequent/core/helpers/param_support.rb +1 -3
  27. data/lib/sequent/core/helpers/pgsql_helpers.rb +32 -0
  28. data/lib/sequent/core/helpers/string_support.rb +1 -1
  29. data/lib/sequent/core/helpers/string_to_value_parsers.rb +1 -1
  30. data/lib/sequent/core/persistors/active_record_persistor.rb +1 -1
  31. data/lib/sequent/core/persistors/replay_optimized_postgres_persistor.rb +3 -4
  32. data/lib/sequent/core/projector.rb +1 -1
  33. data/lib/sequent/core/snapshot_record.rb +44 -0
  34. data/lib/sequent/core/snapshot_store.rb +105 -0
  35. data/lib/sequent/core/stream_record.rb +10 -15
  36. data/lib/sequent/dry_run/read_only_replay_optimized_postgres_persistor.rb +1 -1
  37. data/lib/sequent/dry_run/view_schema.rb +2 -3
  38. data/lib/sequent/generator/project.rb +5 -7
  39. data/lib/sequent/generator/template_aggregate/template_aggregate/commands.rb +2 -0
  40. data/lib/sequent/generator/template_aggregate/template_aggregate/events.rb +2 -0
  41. data/lib/sequent/generator/template_aggregate/template_aggregate/template_aggregate.rb +2 -0
  42. data/lib/sequent/generator/template_aggregate/template_aggregate/template_aggregate_command_handler.rb +2 -0
  43. data/lib/sequent/generator/template_aggregate/template_aggregate.rb +2 -0
  44. data/lib/sequent/generator/template_project/Gemfile +7 -5
  45. data/lib/sequent/generator/template_project/Rakefile +4 -2
  46. data/lib/sequent/generator/template_project/app/projectors/post_projector.rb +2 -0
  47. data/lib/sequent/generator/template_project/app/records/post_record.rb +2 -0
  48. data/lib/sequent/generator/template_project/config/initializers/sequent.rb +3 -8
  49. data/lib/sequent/generator/template_project/db/migrations.rb +3 -3
  50. data/lib/sequent/generator/template_project/lib/post/commands.rb +2 -0
  51. data/lib/sequent/generator/template_project/lib/post/events.rb +2 -0
  52. data/lib/sequent/generator/template_project/lib/post/post.rb +2 -0
  53. data/lib/sequent/generator/template_project/lib/post/post_command_handler.rb +2 -0
  54. data/lib/sequent/generator/template_project/lib/post.rb +2 -0
  55. data/lib/sequent/generator/template_project/my_app.rb +2 -1
  56. data/lib/sequent/generator/template_project/spec/app/projectors/post_projector_spec.rb +2 -0
  57. data/lib/sequent/generator/template_project/spec/lib/post/post_command_handler_spec.rb +9 -2
  58. data/lib/sequent/generator/template_project/spec/spec_helper.rb +4 -7
  59. data/lib/sequent/generator.rb +1 -1
  60. data/lib/sequent/internal/aggregate_type.rb +12 -0
  61. data/lib/sequent/internal/command_type.rb +12 -0
  62. data/lib/sequent/internal/event_type.rb +12 -0
  63. data/lib/sequent/internal/internal.rb +14 -0
  64. data/lib/sequent/internal/partitioned_aggregate.rb +26 -0
  65. data/lib/sequent/internal/partitioned_command.rb +16 -0
  66. data/lib/sequent/internal/partitioned_event.rb +29 -0
  67. data/lib/sequent/migrations/grouper.rb +90 -0
  68. data/lib/sequent/migrations/sequent_schema.rb +2 -1
  69. data/lib/sequent/migrations/view_schema.rb +76 -77
  70. data/lib/sequent/rake/migration_tasks.rb +49 -24
  71. data/lib/sequent/sequent.rb +1 -0
  72. data/lib/sequent/support/database.rb +20 -16
  73. data/lib/sequent/test/time_comparison.rb +1 -1
  74. data/lib/sequent/util/timer.rb +1 -1
  75. data/lib/version.rb +1 -1
  76. metadata +102 -21
  77. data/lib/sequent/generator/template_project/db/sequent_schema.rb +0 -52
  78. data/lib/sequent/generator/template_project/ruby-version +0 -1
data/lib/sequent/core/aggregate_root.rb
@@ -12,8 +12,7 @@ module Sequent
       module ClassMethods
         ##
         # Enable snapshots for this aggregate. The aggregate instance
-        # must define the *load_from_snapshot* and *save_to_snapshot*
-        # methods.
+        # must define the *take_snapshot* methods.
         #
         def enable_snapshots(default_threshold: 20)
           @snapshot_default_threshold = default_threshold
@@ -41,7 +40,8 @@ module Sequent
     include SnapshotConfiguration
     extend ActiveSupport::DescendantsTracker
 
-    attr_reader :id, :uncommitted_events, :sequence_number, :event_stream
+    attr_reader :id, :uncommitted_events, :sequence_number
+    attr_accessor :latest_snapshot_sequence_number
 
     def self.load_from_history(stream, events)
       first, *rest = events
@@ -49,6 +49,7 @@ module Sequent
         # rubocop:disable Security/MarshalLoad
         aggregate_root = Marshal.load(Base64.decode64(first.data))
         # rubocop:enable Security/MarshalLoad
+        aggregate_root.latest_snapshot_sequence_number = first.sequence_number
         rest.each { |x| aggregate_root.apply_event(x) }
       else
         aggregate_root = allocate # allocate without calling new
@@ -61,9 +62,6 @@ module Sequent
       @id = id
       @uncommitted_events = []
       @sequence_number = 1
-      @event_stream = EventStream.new aggregate_type: self.class.name,
-                                      aggregate_id: id,
-                                      snapshot_threshold: self.class.snapshot_default_threshold
     end
 
     def load_from_history(stream, events)
@@ -100,13 +98,38 @@ module Sequent
       "#{self.class.name}: #{@id}"
     end
 
+    def event_stream
+      EventStream.new(
+        aggregate_type: self.class.name,
+        aggregate_id: id,
+        events_partition_key: events_partition_key,
+        snapshot_outdated_at: snapshot_outdated? ? Time.now : nil,
+      )
+    end
+
+    # Provide the partitioning key for storing events. This value
+    # must be a string and will be used by PostgreSQL to store the
+    # events in the right partition.
+    #
+    # The value may change over the lifetime of the aggregate, old
+    # events will be moved to the correct partition after a
+    # change. This can be an expensive database operation.
+    def events_partition_key
+      nil
+    end
+
     def clear_events
       @uncommitted_events = []
     end
 
-    def take_snapshot!
-      snapshot = build_event SnapshotEvent, data: Base64.encode64(Marshal.dump(self))
-      @uncommitted_events << snapshot
+    def snapshot_outdated?
+      snapshot_threshold = self.class.snapshot_default_threshold
+      events_since_latest_snapshot = @sequence_number - (latest_snapshot_sequence_number || 1)
+      snapshot_threshold.present? && events_since_latest_snapshot >= snapshot_threshold
+    end
+
+    def take_snapshot
+      build_event SnapshotEvent, data: Base64.encode64(Marshal.dump(self))
    end
 
     def apply_event(event)
24
24
  @last_aggregate_id,
25
25
  command.limit,
26
26
  )
27
- aggregate_ids.each do |aggregate_id|
28
- take_snapshot!(aggregate_id)
29
- end
27
+ snapshots = aggregate_ids.filter_map { |aggregate_id| take_snapshot(aggregate_id) }
28
+ Sequent.configuration.event_store.store_snapshots(snapshots)
29
+
30
30
  @last_aggregate_id = aggregate_ids.last
31
31
  throw :done if @last_aggregate_id.nil?
32
32
  end
33
33
 
34
34
  on TakeSnapshot do |command|
35
- take_snapshot!(command.aggregate_id)
35
+ snapshot = take_snapshot(command.aggregate_id)
36
+ Sequent.configuration.event_store.store_snapshots([snapshot]) if snapshot
36
37
  end
37
38
 
38
- def take_snapshot!(aggregate_id)
39
+ def take_snapshot(aggregate_id)
39
40
  aggregate = repository.load_aggregate(aggregate_id)
40
41
  Sequent.logger.info "Taking snapshot for aggregate #{aggregate}"
41
- aggregate.take_snapshot!
42
+ aggregate.take_snapshot
42
43
  rescue StandardError => e
43
44
  Sequent.logger.error("Failed to take snapshot for aggregate #{aggregate_id}: #{e}, #{e.inspect}")
45
+ nil
44
46
  end
45
47
  end
46
48
  end
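
Because take_snapshot now rescues failures and returns nil, filter_map drops the failed attempts so only successful snapshots are handed to store_snapshots as a single batch. A tiny self-contained illustration of that pattern (the aggregate ids and the simulated failure are made up):

snapshots = ['agg-1', 'agg-2', 'agg-3'].filter_map do |aggregate_id|
  next nil if aggregate_id == 'agg-2' # pretend snapshotting this aggregate raised and was rescued to nil

  "snapshot-of-#{aggregate_id}"
end
snapshots # => ["snapshot-of-agg-1", "snapshot-of-agg-3"], only these reach store_snapshots
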
data/lib/sequent/core/command_record.rb
@@ -7,7 +7,7 @@ module Sequent
   module Core
     module SerializesCommand
       def command
-        args = Sequent::Core::Oj.strict_load(command_json)
+        args = serialize_json? ? Sequent::Core::Oj.strict_load(command_json) : command_json
         Class.const_get(command_type).deserialize_from_json(args)
       end
 
@@ -16,7 +16,7 @@ module Sequent
         self.aggregate_id = command.aggregate_id if command.respond_to? :aggregate_id
         self.user_id = command.user_id if command.respond_to? :user_id
         self.command_type = command.class.name
-        self.command_json = Sequent::Core::Oj.dump(command.attributes)
+        self.command_json = serialize_json? ? Sequent::Core::Oj.dump(command.attributes) : command.attributes
 
         # optional attributes (here for historic reasons)
         # this should be moved to a configurable CommandSerializer
@@ -30,6 +30,13 @@ module Sequent
 
       private
 
+      def serialize_json?
+        return true unless self.class.respond_to? :columns_hash
+
+        json_column_type = self.class.columns_hash['command_json'].sql_type_metadata.type
+        %i[json jsonb].exclude? json_column_type
+      end
+
       def serialize_attribute?(command, attribute)
         [self, command].all? { |obj| obj.respond_to?(attribute) }
       end
@@ -39,32 +46,34 @@ module Sequent
     class CommandRecord < Sequent::ApplicationRecord
       include SerializesCommand
 
+      self.primary_key = :id
       self.table_name = 'command_records'
 
-      has_many :event_records
+      has_many :child_events,
+               inverse_of: :parent_command,
+               class_name: :EventRecord,
+               foreign_key: :command_record_id
 
       validates_presence_of :command_type, :command_json
 
-      def parent
-        EventRecord
-          .where(aggregate_id: event_aggregate_id, sequence_number: event_sequence_number)
-          .where('event_type != ?', Sequent::Core::SnapshotEvent.name)
-          .first
-      end
+      # A `belongs_to` association fails in weird ways with ActiveRecord 7.1, probably due to the use of composite
+      # primary keys so use an explicit query here and cache the result.
+      def parent_event
+        return nil unless event_aggregate_id && event_sequence_number
 
-      def children
-        event_records
+        @parent_event ||= EventRecord.find_by(aggregate_id: event_aggregate_id, sequence_number: event_sequence_number)
      end
 
-      def origin
-        parent.present? ? find_origin(parent) : self
+      def origin_command
+        parent_event&.parent_command&.origin_command || self
      end
 
-      def find_origin(record)
-        return find_origin(record.parent) if record.parent.present?
-
-        record
-      end
+      # @deprecated
+      alias parent parent_event
+      # @deprecated
+      alias children child_events
+      # @deprecated
+      alias origin origin_command
     end
   end
 end
data/lib/sequent/core/command_service.rb
@@ -67,7 +67,7 @@ module Sequent
       def process_command(command)
         fail ArgumentError, 'command is required' if command.nil?
 
-        Sequent.logger.debug("[CommandService] Processing command #{command.class}")
+        Sequent.logger.debug("[CommandService] Processing command #{command.class}") if Sequent.logger.debug?
 
         filters.each { |filter| filter.execute(command) }
 
@@ -118,7 +118,7 @@ module Sequent
       def initialize(command)
         @command = command
         msg = @command.respond_to?(:aggregate_id) ? " #{@command.aggregate_id}" : ''
-        super "Invalid command #{@command.class}#{msg}, errors: #{@command.validation_errors}"
+        super("Invalid command #{@command.class}#{msg}, errors: #{@command.validation_errors}")
       end
 
       def errors(prefix = nil)
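
Guarding the debug call (here and in the event publisher below) is the usual Ruby logging trick to avoid building the interpolated message string when debug logging is off; the block form achieves the same lazily. A small sketch (the command variable is assumed to be in scope):

logger = Sequent.logger
logger.debug("[CommandService] Processing command #{command.class}") if logger.debug? # guard, as in this change
logger.debug { "[CommandService] Processing command #{command.class}" }               # equivalent block form
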
data/lib/sequent/core/event_publisher.rb
@@ -51,7 +51,7 @@ module Sequent
       def process_event(event)
         fail ArgumentError, 'event is required' if event.nil?
 
-        Sequent.logger.debug("[EventPublisher] Publishing event #{event.class}")
+        Sequent.logger.debug("[EventPublisher] Publishing event #{event.class}") if Sequent.logger.debug?
 
         configuration.event_handlers.each do |handler|
           handler.handle_message event
data/lib/sequent/core/event_record.rb
@@ -46,7 +46,7 @@ module Sequent
 
     module SerializesEvent
       def event
-        payload = Sequent::Core::Oj.strict_load(event_json)
+        payload = serialize_json? ? Sequent::Core::Oj.strict_load(event_json) : event_json
         Class.const_get(event_type).deserialize_from_json(payload)
       end
 
@@ -56,7 +56,7 @@ module Sequent
         self.organization_id = event.organization_id if event.respond_to?(:organization_id)
         self.event_type = event.class.name
         self.created_at = event.created_at
-        self.event_json = self.class.serialize_to_json(event)
+        self.event_json = serialize_json? ? self.class.serialize_to_json(event) : event.attributes
 
         Sequent.configuration.event_record_hooks_class.after_serialization(self, event)
       end
@@ -65,42 +65,62 @@ module Sequent
         def serialize_to_json(event)
           Sequent::Core::Oj.dump(event)
         end
+
+        def serialize_json?
+          return true unless respond_to? :columns_hash
+
+          json_column_type = columns_hash['event_json'].sql_type_metadata.type
+          %i[json jsonb].exclude? json_column_type
+        end
       end
 
       def self.included(host_class)
         host_class.extend(ClassMethods)
       end
+
+      def serialize_json?
+        self.class.serialize_json?
+      end
     end
 
     class EventRecord < Sequent::ApplicationRecord
       include SerializesEvent
 
+      self.primary_key = %i[aggregate_id sequence_number]
       self.table_name = 'event_records'
       self.ignored_columns = %w[xact_id]
 
-      belongs_to :stream_record
-      belongs_to :command_record
+      belongs_to :stream_record, foreign_key: :aggregate_id, primary_key: :aggregate_id
 
-      validates_presence_of :aggregate_id, :sequence_number, :event_type, :event_json, :stream_record, :command_record
-      validates_numericality_of :sequence_number, only_integer: true, greater_than: 0
+      belongs_to :parent_command, class_name: :CommandRecord, foreign_key: :command_record_id
 
-      def parent
-        command_record
+      if Gem.loaded_specs['activerecord'].version < Gem::Version.create('7.2')
+        has_many :child_commands,
+                 class_name: :CommandRecord,
+                 primary_key: %i[aggregate_id sequence_number],
+                 query_constraints: %i[event_aggregate_id event_sequence_number]
+      else
+        has_many :child_commands,
+                 class_name: :CommandRecord,
+                 primary_key: %i[aggregate_id sequence_number],
+                 foreign_key: %i[event_aggregate_id event_sequence_number]
       end
 
-      def children
-        CommandRecord.where(event_aggregate_id: aggregate_id, event_sequence_number: sequence_number)
-      end
+      validates_presence_of :aggregate_id, :sequence_number, :event_type, :event_json, :stream_record, :parent_command
+      validates_numericality_of :sequence_number, only_integer: true, greater_than: 0
 
-      def origin
-        parent.present? ? find_origin(parent) : self
+      def self.find_by_event(event)
+        find_by(aggregate_id: event.aggregate_id, sequence_number: event.sequence_number)
      end
 
-      def find_origin(record)
-        return find_origin(record.parent) if record.parent.present?
-
-        record
+      def origin_command
+        parent_command&.origin_command
      end
+
+      # @deprecated
+      alias parent parent_command
+      alias children child_commands
+      alias origin origin_command
     end
   end
 end
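
With the composite primary key an event record is addressed by aggregate_id plus sequence_number, and the renamed associations keep the old names as deprecated aliases. A usage sketch (assumes a loaded Sequent event instance, here called event):

record = Sequent::Core::EventRecord.find_by_event(event) # looks up by aggregate_id and sequence_number
record&.parent_command                                   # the CommandRecord that produced this event (was `parent`)
record&.child_commands                                   # commands executed in reaction to this event (was `children`)
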
data/lib/sequent/core/event_store.rb
@@ -2,11 +2,16 @@
 
 require 'forwardable'
 require_relative 'event_record'
+require_relative 'helpers/pgsql_helpers'
 require_relative 'sequent_oj'
+require_relative 'snapshot_record'
+require_relative 'snapshot_store'
 
 module Sequent
   module Core
     class EventStore
+      include Helpers::PgsqlHelpers
+      include SnapshotStore
       include ActiveRecord::ConnectionAdapters::Quoting
       extend Forwardable
 
@@ -26,15 +31,6 @@ module Sequent
         end
       end
 
-      ##
-      # Disables event type caching (ie. for in development).
-      #
-      class NoEventTypesCache
-        def fetch_or_store(event_type)
-          yield(event_type)
-        end
-      end
-
       ##
       # Stores the events in the EventStore and publishes the events
       # to the registered event_handlers.
@@ -56,7 +52,7 @@ module Sequent
       end
 
       ##
-      # Returns all events for the AggregateRoot ordered by sequence_number, disregarding snapshot events.
+      # Returns all events for the AggregateRoot ordered by sequence_number, disregarding snapshots.
       #
       # This streaming is done in batches to prevent loading many events in memory all at once. A usecase for ignoring
       # the snapshots is when events of a nested AggregateRoot need to be loaded up until a certain moment in time.
@@ -68,16 +64,20 @@ module Sequent
         stream = find_event_stream(aggregate_id)
         fail ArgumentError, 'no stream found for this aggregate' if stream.blank?
 
-        q = Sequent
-          .configuration
-          .event_record_class
-          .where(aggregate_id: aggregate_id)
-          .where.not(event_type: Sequent.configuration.snapshot_event_class.name)
-          .order(:sequence_number)
-        q = q.where('created_at < ?', load_until) if load_until.present?
         has_events = false
 
-        q.select('event_type, event_json').each_row do |event_hash|
+        # PostgreSQLCursor::Cursor does not support bind parameters, so bind parameters manually instead.
+        sql = ActiveRecord::Base.sanitize_sql_array(
+          [
+            'SELECT * FROM load_events(:aggregate_ids, FALSE, :load_until)',
+            {
+              aggregate_ids: [aggregate_id].to_json,
+              load_until: load_until,
+            },
+          ],
+        )
+
+        PostgreSQLCursor::Cursor.new(sql, {connection: connection}).each_row do |event_hash|
           has_events = true
           event = deserialize_event(event_hash)
           block.call([stream, event])
@@ -85,6 +85,11 @@ module Sequent
         fail ArgumentError, 'no events for this aggregate' unless has_events
       end
 
+      def load_event(aggregate_id, sequence_number)
+        event_hash = query_function(connection, 'load_event', [aggregate_id, sequence_number]).first
+        deserialize_event(event_hash) if event_hash
+      end
+
       ##
       # Returns all events for the aggregate ordered by sequence_number, loading them from the latest snapshot
       # event onwards, if a snapshot is present
@@ -96,40 +101,21 @@ module Sequent
       def load_events_for_aggregates(aggregate_ids)
         return [] if aggregate_ids.none?
 
-        streams = Sequent.configuration.stream_record_class.where(aggregate_id: aggregate_ids)
-
-        query = aggregate_ids.uniq.map { |aggregate_id| aggregate_query(aggregate_id) }.join(' UNION ALL ')
-        events = Sequent.configuration.event_record_class.connection.select_all(query).map do |event_hash|
-          deserialize_event(event_hash)
-        end
-
-        events
-          .group_by(&:aggregate_id)
-          .map do |aggregate_id, es|
+        query_events(aggregate_ids)
+          .group_by { |row| row['aggregate_id'] }
+          .values
+          .map do |rows|
             [
-              streams.find do |stream_record|
-                stream_record.aggregate_id == aggregate_id
-              end.event_stream,
-              es,
+              EventStream.new(
+                aggregate_type: rows.first['aggregate_type'],
+                aggregate_id: rows.first['aggregate_id'],
+                events_partition_key: rows.first['events_partition_key'],
+              ),
+              rows.map { |row| deserialize_event(row) },
             ]
           end
       end
 
-      def aggregate_query(aggregate_id)
-        <<~SQL.chomp
-          (
-            SELECT event_type, event_json
-              FROM #{quote_table_name Sequent.configuration.event_record_class.table_name} AS o
-             WHERE aggregate_id = #{quote(aggregate_id)}
-               AND sequence_number >= COALESCE((SELECT MAX(sequence_number)
-                     FROM #{quote_table_name Sequent.configuration.event_record_class.table_name} AS i
-                     WHERE event_type = #{quote Sequent.configuration.snapshot_event_class.name}
-                       AND i.aggregate_id = #{quote(aggregate_id)}), 0)
-             ORDER BY sequence_number ASC, (CASE event_type WHEN #{quote Sequent.configuration.snapshot_event_class.name} THEN 0 ELSE 1 END) ASC
-          )
-        SQL
-      end
-
       def stream_exists?(aggregate_id)
         Sequent.configuration.stream_record_class.exists?(aggregate_id: aggregate_id)
       end
@@ -137,6 +123,7 @@ module Sequent
       def events_exists?(aggregate_id)
         Sequent.configuration.event_record_class.exists?(aggregate_id: aggregate_id)
       end
+
       ##
       # Replays all events in the event store to the registered event_handlers.
       #
@@ -164,7 +151,7 @@ module Sequent
         event = deserialize_event(record)
         publish_events([event])
         progress += 1
-        ids_replayed << record['id']
+        ids_replayed << record['aggregate_id']
        if progress % block_size == 0
           on_progress[progress, false, ids_replayed]
           ids_replayed.clear
@@ -174,110 +161,105 @@ module Sequent
       end
 
       PRINT_PROGRESS = ->(progress, done, _) do
+        next unless Sequent.logger.debug?
+
         if done
-          Sequent.logger.debug "Done replaying #{progress} events"
+          Sequent.logger.debug("Done replaying #{progress} events")
         else
-          Sequent.logger.debug "Replayed #{progress} events"
+          Sequent.logger.debug("Replayed #{progress} events")
         end
       end
 
-      ##
-      # Returns the ids of aggregates that need a new snapshot.
-      #
-      def aggregates_that_need_snapshots(last_aggregate_id, limit = 10)
-        stream_table = quote_table_name Sequent.configuration.stream_record_class.table_name
-        event_table = quote_table_name Sequent.configuration.event_record_class.table_name
-        query = <<~SQL.chomp
-          SELECT aggregate_id
-            FROM #{stream_table} stream
-           WHERE aggregate_id::varchar > COALESCE(#{quote last_aggregate_id}, '')
-             AND snapshot_threshold IS NOT NULL
-             AND snapshot_threshold <= (
-                   (SELECT MAX(events.sequence_number) FROM #{event_table} events WHERE events.event_type <> #{quote Sequent.configuration.snapshot_event_class.name} AND stream.aggregate_id = events.aggregate_id) -
-                   COALESCE((SELECT MAX(snapshots.sequence_number) FROM #{event_table} snapshots WHERE snapshots.event_type = #{quote Sequent.configuration.snapshot_event_class.name} AND stream.aggregate_id = snapshots.aggregate_id), 0))
-           ORDER BY aggregate_id
-           LIMIT #{quote limit}
-             FOR UPDATE
-        SQL
-        Sequent.configuration.event_record_class.connection.select_all(query).map { |x| x['aggregate_id'] }
-      end
-
       def find_event_stream(aggregate_id)
         record = Sequent.configuration.stream_record_class.where(aggregate_id: aggregate_id).first
         record&.event_stream
       end
 
-      private
+      def permanently_delete_event_stream(aggregate_id)
+        permanently_delete_event_streams([aggregate_id])
+      end
 
-      def quote_table_name(table_name)
-        Sequent.configuration.event_record_class.connection.quote_table_name(table_name)
+      def permanently_delete_event_streams(aggregate_ids)
+        call_procedure(connection, 'permanently_delete_event_streams', [aggregate_ids.to_json])
      end
 
-      def event_types
-        @event_types = if Sequent.configuration.event_store_cache_event_types
-                         ThreadSafe::Cache.new
-                       else
-                         NoEventTypesCache.new
-                       end
+      def permanently_delete_commands_without_events(aggregate_id: nil, organization_id: nil)
+        unless aggregate_id || organization_id
+          fail ArgumentError, 'aggregate_id and/or organization_id must be specified'
+        end
+
+        call_procedure(connection, 'permanently_delete_commands_without_events', [aggregate_id, organization_id])
      end
 
-      def column_names
-        @column_names ||= Sequent
-          .configuration
-          .event_record_class
-          .column_names
-          .reject { |c| c == primary_key_event_records }
+      private
+
+      def connection
+        Sequent.configuration.event_record_class.connection
      end
 
-      def primary_key_event_records
-        @primary_key_event_records ||= Sequent.configuration.event_record_class.primary_key
+      def query_events(aggregate_ids, use_snapshots = true, load_until = nil)
+        query_function(connection, 'load_events', [aggregate_ids.to_json, use_snapshots, load_until])
      end
 
       def deserialize_event(event_hash)
-        event_type = event_hash.fetch('event_type')
-        event_json = Sequent::Core::Oj.strict_load(event_hash.fetch('event_json'))
-        resolve_event_type(event_type).deserialize_from_json(event_json)
+        should_serialize_json = Sequent.configuration.event_record_class.serialize_json?
+        record = Sequent.configuration.event_record_class.new
+        record.event_type = event_hash.fetch('event_type')
+        record.event_json =
+          if should_serialize_json
+            event_hash.fetch('event_json')
+          else
+            # When the column type is JSON or JSONB the event record
+            # class expects the JSON to be deserialized into a hash
+            # already.
+            Sequent::Core::Oj.strict_load(event_hash.fetch('event_json'))
+          end
+        record.event
       rescue StandardError
         raise DeserializeEventError, event_hash
       end
 
-      def resolve_event_type(event_type)
-        event_types.fetch_or_store(event_type) { |k| Class.const_get(k) }
-      end
-
       def publish_events(events)
         Sequent.configuration.event_publisher.publish_events(events)
       end
 
       def store_events(command, streams_with_events = [])
-        command_record = CommandRecord.create!(command: command)
-        event_records = streams_with_events.flat_map do |event_stream, uncommitted_events|
-          unless event_stream.stream_record_id
-            stream_record = Sequent.configuration.stream_record_class.new
-            stream_record.event_stream = event_stream
-            stream_record.save!
-            event_stream.stream_record_id = stream_record.id
-          end
-          uncommitted_events.map do |event|
-            Sequent.configuration.event_record_class.new.tap do |record|
-              record.command_record_id = command_record.id
-              record.stream_record_id = event_stream.stream_record_id
-              record.event = event
-            end
-          end
+        command_record = {
+          created_at: convert_timestamp(command.created_at&.to_time || Time.now),
+          command_type: command.class.name,
+          command_json: command,
+        }
+
+        events = streams_with_events.map do |stream, uncommitted_events|
+          [
+            Sequent::Core::Oj.strict_load(Sequent::Core::Oj.dump(stream)),
+            uncommitted_events.map do |event|
+              {
+                created_at: convert_timestamp(event.created_at.to_time),
+                event_type: event.class.name,
+                event_json: event,
+              }
+            end,
+          ]
         end
-        connection = Sequent.configuration.event_record_class.connection
-        values = event_records
-          .map { |r| "(#{column_names.map { |c| connection.quote(r[c.to_sym]) }.join(',')})" }
-          .join(',')
-        columns = column_names.map { |c| connection.quote_column_name(c) }.join(',')
-        sql = <<~SQL.chomp
-          insert into #{connection.quote_table_name(Sequent.configuration.event_record_class.table_name)} (#{columns}) values #{values}
-        SQL
-        Sequent.configuration.event_record_class.connection.insert(sql, nil, primary_key_event_records)
+        call_procedure(
+          connection,
+          'store_events',
+          [
+            Sequent::Core::Oj.dump(command_record),
+            Sequent::Core::Oj.dump(events),
+          ],
+        )
       rescue ActiveRecord::RecordNotUnique
         raise OptimisticLockingError
       end
+
+      def convert_timestamp(timestamp)
+        # Since ActiveRecord uses `TIMESTAMP WITHOUT TIME ZONE`
+        # we need to manually convert database timestamps to the
+        # ActiveRecord default time zone on serialization.
+        ActiveRecord.default_timezone == :utc ? timestamp.getutc : timestamp.getlocal
+      end
     end
   end
 end
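
Most of the event store plumbing now delegates to PostgreSQL functions and procedures (load_events, load_event, store_events, permanently_delete_event_streams, permanently_delete_commands_without_events), presumably installed by the new data/db/sequent_pgsql.sql. A usage sketch of the new public methods (the aggregate id is a made-up example):

event_store = Sequent.configuration.event_store
aggregate_id = '7e7a0b9a-1f3f-4a27-8b3a-000000000000' # hypothetical

event_store.load_event(aggregate_id, 1)                           # single event via the load_event SQL function
event_store.permanently_delete_event_stream(aggregate_id)         # removes the stream and its events
event_store.permanently_delete_commands_without_events(aggregate_id: aggregate_id)
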
data/lib/sequent/core/helpers/array_with_type.rb
@@ -13,7 +13,7 @@ module Sequent
        end
 
        def deserialize_from_json(value)
-         value.nil? ? nil : value.map { |item| item_type.deserialize_from_json(item) }
+         value&.map { |item| item_type.deserialize_from_json(item) }
        end
 
        def to_s
data/lib/sequent/core/helpers/association_validator.rb
@@ -35,7 +35,7 @@ module Sequent
          value = record.instance_variable_get("@#{association}")
          if value && incorrect_type?(value, record, association)
            record.errors.add(association, "is not of type #{describe_type(record.class.types[association])}")
-          elsif value&.is_a?(Array)
+          elsif value.is_a?(Array)
            item_type = record.class.types.fetch(association).item_type
            record.errors.add(association, 'is invalid') unless validate_all(value, item_type).all?
          elsif value&.invalid?
@@ -47,7 +47,7 @@ module Sequent
        private
 
        def incorrect_type?(value, record, association)
-         return unless record.class.respond_to?(:types)
+         return false unless record.class.respond_to?(:types)
 
          type = record.class.types[association]
          if type.respond_to?(:candidate?)