ruby_event_store 1.3.0 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. checksums.yaml +4 -4
  2. data/.mutant.yml +1 -0
  3. data/Gemfile +2 -4
  4. data/Gemfile.lock +118 -0
  5. data/Makefile +13 -3
  6. data/lib/ruby_event_store.rb +4 -7
  7. data/lib/ruby_event_store/broker.rb +3 -3
  8. data/lib/ruby_event_store/client.rb +47 -23
  9. data/lib/ruby_event_store/composed_dispatcher.rb +2 -2
  10. data/lib/ruby_event_store/constants.rb +1 -0
  11. data/lib/ruby_event_store/errors.rb +0 -1
  12. data/lib/ruby_event_store/event.rb +8 -1
  13. data/lib/ruby_event_store/immediate_async_dispatcher.rb +2 -2
  14. data/lib/ruby_event_store/in_memory_repository.rb +98 -59
  15. data/lib/ruby_event_store/instrumented_dispatcher.rb +2 -2
  16. data/lib/ruby_event_store/mappers/default.rb +28 -6
  17. data/lib/ruby_event_store/mappers/deprecated_wrapper.rb +33 -0
  18. data/lib/ruby_event_store/mappers/encryption_mapper.rb +1 -4
  19. data/lib/ruby_event_store/mappers/instrumented_mapper.rb +8 -4
  20. data/lib/ruby_event_store/mappers/json_mapper.rb +2 -4
  21. data/lib/ruby_event_store/mappers/pipeline.rb +26 -5
  22. data/lib/ruby_event_store/mappers/pipeline_mapper.rb +6 -2
  23. data/lib/ruby_event_store/mappers/transformation/domain_event.rb +16 -8
  24. data/lib/ruby_event_store/mappers/transformation/encryption.rb +20 -12
  25. data/lib/ruby_event_store/mappers/transformation/event_class_remapper.rb +11 -4
  26. data/lib/ruby_event_store/mappers/transformation/serialization.rb +16 -14
  27. data/lib/ruby_event_store/mappers/transformation/stringify_metadata_keys.rb +12 -7
  28. data/lib/ruby_event_store/mappers/transformation/symbolize_metadata_keys.rb +12 -7
  29. data/lib/ruby_event_store/null.rb +13 -0
  30. data/lib/ruby_event_store/projection.rb +2 -13
  31. data/lib/ruby_event_store/record.rb +68 -0
  32. data/lib/ruby_event_store/serialized_record.rb +23 -4
  33. data/lib/ruby_event_store/spec/broker_lint.rb +9 -9
  34. data/lib/ruby_event_store/spec/event_repository_lint.rb +200 -36
  35. data/lib/ruby_event_store/spec/mapper_lint.rb +6 -6
  36. data/lib/ruby_event_store/spec/subscriptions_lint.rb +6 -0
  37. data/lib/ruby_event_store/specification.rb +100 -7
  38. data/lib/ruby_event_store/specification_reader.rb +2 -2
  39. data/lib/ruby_event_store/specification_result.rb +86 -2
  40. data/lib/ruby_event_store/version.rb +1 -1
  41. data/ruby_event_store.gemspec +0 -2
  42. metadata +8 -9
  43. data/lib/ruby_event_store/mappers/protobuf.rb +0 -24
  44. data/lib/ruby_event_store/mappers/transformation/item.rb +0 -56
  45. data/lib/ruby_event_store/mappers/transformation/proto_event.rb +0 -17
  46. data/lib/ruby_event_store/mappers/transformation/protobuf_encoder.rb +0 -30
  47. data/lib/ruby_event_store/mappers/transformation/protobuf_nested_struct_metadata.rb +0 -30
  48. data/lib/ruby_event_store/mappers/transformation/serialized_record.rb +0 -27
data/lib/ruby_event_store/in_memory_repository.rb
@@ -4,19 +4,40 @@ require 'ostruct'
 module RubyEventStore
   class InMemoryRepository
 
-    def initialize
-      @streams = Hash.new
-      @mutex = Mutex.new
-      @global = Array.new
+    def initialize(serializer: NULL)
+      @serializer = serializer
+      @streams = Hash.new { |h, k| h[k] = Array.new }
+      @mutex = Mutex.new
+      @storage = Hash.new
     end
 
-    def append_to_stream(events, stream, expected_version)
-      add_to_stream(events, expected_version, stream, true)
+    def append_to_stream(records, stream, expected_version)
+      serialized_records = Array(records).map{ |record| record.serialize(serializer) }
+
+      with_synchronize(expected_version, stream) do |resolved_version|
+        raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
+
+        serialized_records.each do |serialized_record|
+          raise EventDuplicatedInStream if has_event?(serialized_record.event_id)
+          storage[serialized_record.event_id] = serialized_record
+          streams[stream.name] << serialized_record.event_id
+        end
+      end
+      self
     end
 
     def link_to_stream(event_ids, stream, expected_version)
-      events = Array(event_ids).map {|eid| read_event(eid)}
-      add_to_stream(events, expected_version, stream, nil)
+      serialized_records = Array(event_ids).map { |id| read_event(id) }
+
+      with_synchronize(expected_version, stream) do |resolved_version|
+        raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
+
+        serialized_records.each do |serialized_record|
+          raise EventDuplicatedInStream if has_event_in_stream?(serialized_record.event_id, stream.name)
+          streams[stream.name] << serialized_record.event_id
+        end
+      end
+      self
     end
 
     def delete_stream(stream)
@@ -24,24 +45,34 @@ module RubyEventStore
     end
 
     def has_event?(event_id)
-      global.any?{ |item| item.event_id.eql?(event_id) }
+      storage.has_key?(event_id)
     end
 
     def last_stream_event(stream)
-      stream_of(stream.name).last
+      last_id = event_ids_of_stream(stream).last
+      storage.fetch(last_id).deserialize(serializer) if last_id
     end
 
     def read(spec)
-      events = read_scope(spec)
+      serialized_records = read_scope(spec)
       if spec.batched?
-        batch_reader = ->(offset, limit) { events.drop(offset).take(limit) }
-        BatchEnumerator.new(spec.batch_size, events.size, batch_reader).each
+        batch_reader = ->(offset, limit) do
+          serialized_records
+            .drop(offset)
+            .take(limit)
+            .map{|serialized_record| serialized_record.deserialize(serializer) }
+        end
+        BatchEnumerator.new(spec.batch_size, serialized_records.size, batch_reader).each
       elsif spec.first?
-        events.first
+        serialized_records.first&.deserialize(serializer)
       elsif spec.last?
-        events.last
+        serialized_records.last&.deserialize(serializer)
       else
-        events.each
+        Enumerator.new do |y|
+          serialized_records.each do |serialized_record|
+            y << serialized_record.deserialize(serializer)
+          end
+        end
       end
     end
 
@@ -49,53 +80,73 @@ module RubyEventStore
       read_scope(spec).count
     end
 
-    def update_messages(messages)
-      messages.each do |new_msg|
-        location = global.index{|m| new_msg.event_id.eql?(m.event_id)}
-        raise EventNotFound.new(new_msg.event_id) unless location
-        global[location] = new_msg
-        streams.values.each do |str|
-          location = str.index{|m| new_msg.event_id.eql?(m.event_id)}
-          str[location] = new_msg if location
-        end
+    def update_messages(records)
+      records.each do |record|
+        read_event(record.event_id)
+        serialized_record =
+          Record.new(
+            event_id: record.event_id,
+            event_type: record.event_type,
+            data: record.data,
+            metadata: record.metadata,
+            timestamp: Time.iso8601(storage.fetch(record.event_id).timestamp),
+            valid_at: record.valid_at,
+          ).serialize(serializer)
+        storage[record.event_id] = serialized_record
       end
     end
 
     def streams_of(event_id)
-      streams.select do |_, stream_events|
-        stream_events.any? { |event| event.event_id.eql?(event_id) }
-      end.map { |name, | Stream.new(name) }
+      streams
+        .select { |name,| has_event_in_stream?(event_id, name) }
+        .map { |name,| Stream.new(name) }
    end
 
    private
    def read_scope(spec)
-      events = spec.stream.global? ? global : stream_of(spec.stream.name)
-      events = events.select{|e| spec.with_ids.any?{|x| x.eql?(e.event_id)}} if spec.with_ids?
-      events = events.select{|e| spec.with_types.any?{|x| x.eql?(e.event_type)}} if spec.with_types?
-      events = events.reverse if spec.backward?
-      events = events.drop(index_of(events, spec.start) + 1) if spec.start
-      events = events.take(index_of(events, spec.stop)) if spec.stop
-      events = events[0...spec.limit] if spec.limit?
-      events
+      serialized_records = serialized_records_of_stream(spec.stream)
+      serialized_records = ordered(serialized_records, spec)
+      serialized_records = serialized_records.select{|e| spec.with_ids.any?{|x| x.eql?(e.event_id)}} if spec.with_ids?
+      serialized_records = serialized_records.select{|e| spec.with_types.any?{|x| x.eql?(e.event_type)}} if spec.with_types?
+      serialized_records = serialized_records.reverse if spec.backward?
+      serialized_records = serialized_records.drop(index_of(serialized_records, spec.start) + 1) if spec.start
+      serialized_records = serialized_records.take(index_of(serialized_records, spec.stop)) if spec.stop
+      serialized_records = serialized_records.take(spec.limit) if spec.limit?
+      serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) < spec.older_than } if spec.older_than
+      serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) <= spec.older_than_or_equal } if spec.older_than_or_equal
+      serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) > spec.newer_than } if spec.newer_than
+      serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) >= spec.newer_than_or_equal } if spec.newer_than_or_equal
+      serialized_records
    end
 
    def read_event(event_id)
-      global.find {|e| event_id.eql?(e.event_id)} or raise EventNotFound.new(event_id)
+      storage.fetch(event_id) { raise EventNotFound.new(event_id) }
    end
 
-    def stream_of(name)
-      streams.fetch(name, Array.new)
+    def event_ids_of_stream(stream)
+      streams.fetch(stream.name, Array.new)
    end
 
-    def add_to_stream(events, expected_version, stream, include_global)
-      append_with_synchronize(Array(events), expected_version, stream, include_global)
+    def serialized_records_of_stream(stream)
+      stream.global? ? storage.values : storage.fetch_values(*event_ids_of_stream(stream))
+    end
+
+    def ordered(serialized_records, spec)
+      case spec.time_sort_by
+      when :as_at
+        serialized_records.sort_by(&:timestamp)
+      when :as_of
+        serialized_records.sort_by(&:valid_at)
+      else
+        serialized_records
+      end
    end
 
    def last_stream_version(stream)
-      stream_of(stream.name).size - 1
+      event_ids_of_stream(stream).size - 1
    end
 
-    def append_with_synchronize(events, expected_version, stream, include_global)
+    def with_synchronize(expected_version, stream, &block)
      resolved_version = expected_version.resolve_for(stream, method(:last_stream_version))
 
      # expected_version :auto assumes external lock is used
@@ -108,30 +159,18 @@ module RubyEventStore
      Thread.pass
      mutex.synchronize do
        resolved_version = last_stream_version(stream) if expected_version.any?
-        append(events, resolved_version, stream, include_global)
+        block.call(resolved_version)
      end
    end
 
-    def append(events, resolved_version, stream, include_global)
-      stream_events = stream_of(stream.name)
-      raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
-
-      events.each do |event|
-        raise EventDuplicatedInStream if stream_events.any? {|ev| ev.event_id.eql?(event.event_id)}
-        if include_global
-          raise EventDuplicatedInStream if has_event?(event.event_id)
-          global.push(event)
-        end
-        stream_events.push(event)
-      end
-      streams[stream.name] = stream_events
-      self
+    def has_event_in_stream?(event_id, stream_name)
+      streams.fetch(stream_name, Array.new).any? { |id| id.eql?(event_id) }
    end
 
    def index_of(source, event_id)
      source.index {|item| item.event_id.eql?(event_id)}
    end
 
-    attr_reader :streams, :mutex, :global
+    attr_reader :streams, :mutex, :storage, :serializer
  end
 end
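Usage note (not part of the package contents): a minimal sketch of the 2.0 wiring the change above implies — serialization now lives on the repository rather than on the mapper. The event class, stream name, and choice of YAML are illustrative assumptions.

    require 'ruby_event_store'
    require 'yaml'

    OrderPlaced = Class.new(RubyEventStore::Event) # hypothetical event class

    # The serializer is passed to the repository; its default is
    # RubyEventStore::NULL, which keeps records unserialized in memory.
    event_store = RubyEventStore::Client.new(
      repository: RubyEventStore::InMemoryRepository.new(serializer: YAML)
    )

    event_store.publish(OrderPlaced.new(data: { order_id: 42 }), stream_name: 'Order$42')
    event_store.read.stream('Order$42').to_a # records are deserialized on read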
data/lib/ruby_event_store/instrumented_dispatcher.rb
@@ -7,9 +7,9 @@ module RubyEventStore
       @instrumentation = instrumentation
     end
 
-    def call(subscriber, event, serialized_event)
+    def call(subscriber, event, record)
       instrumentation.instrument("call.dispatcher.rails_event_store", event: event, subscriber: subscriber) do
-        dispatcher.call(subscriber, event, serialized_event)
+        dispatcher.call(subscriber, event, record)
       end
     end
 
data/lib/ruby_event_store/mappers/default.rb
@@ -5,13 +5,35 @@ require 'yaml'
 module RubyEventStore
   module Mappers
     class Default < PipelineMapper
-      def initialize(serializer: YAML, events_class_remapping: {})
+      UNSET = Object.new.freeze
+
+      attr_reader :serializer
+
+      def initialize(serializer: UNSET, events_class_remapping: {})
+        case serializer
+        when UNSET
+          @serializer = YAML
+        else
+          warn <<~EOW
+            Passing serializer: to #{self.class} has been deprecated.
+
+            Pass it directly to the repository and the scheduler. For example:
+
+            Rails.configuration.event_store = RailsEventStore::Client.new(
+              mapper: RubyEventStore::Mappers::Default.new,
+              repository: RailsEventStoreActiveRecord::EventRepository.new(serializer: #{serializer}),
+              dispatcher: RubyEventStore::ComposedDispatcher.new(
+                RailsEventStore::AfterCommitAsyncDispatcher.new(scheduler: RailsEventStore::ActiveJobScheduler.new(serializer: #{serializer}),
+                RubyEventStore::Dispatcher.new
+              )
+            )
+          EOW
+          @serializer = serializer
+        end
+
         super(Pipeline.new(
-          transformations: [
-            Transformation::EventClassRemapper.new(events_class_remapping),
-            Transformation::SymbolizeMetadataKeys.new,
-            Transformation::Serialization.new(serializer: serializer),
-          ]
+          Transformation::EventClassRemapper.new(events_class_remapping),
+          Transformation::SymbolizeMetadataKeys.new,
        ))
      end
    end
data/lib/ruby_event_store/mappers/deprecated_wrapper.rb
@@ -0,0 +1,33 @@
+module RubyEventStore
+  module Mappers
+    class DeprecatedWrapper
+      def initialize(mapper)
+        @mapper = mapper
+      end
+
+      def serializer
+        @mapper.serializer
+      end
+
+      def event_to_record(any)
+        @mapper.event_to_record(any)
+      rescue NoMethodError => e
+        raise unless e.message =~ /undefined method `event_to_record/
+        warn <<~EOW
+          Deprecation: Please rename #{@mapper.class}#event_to_serialized_record to #{@mapper.class}#event_to_record.
+        EOW
+        @mapper.event_to_serialized_record(any)
+      end
+
+      def record_to_event(any)
+        @mapper.record_to_event(any)
+      rescue NoMethodError => e
+        raise unless e.message =~ /undefined method `record_to_event/
+        warn <<~EOW
+          Deprecation: Please rename #{@mapper.class}#serialized_record_to_event to #{@mapper.class}#record_to_event.
+        EOW
+        @mapper.serialized_record_to_event(any)
+      end
+    end
+  end
+end
data/lib/ruby_event_store/mappers/encryption_mapper.rb
@@ -5,10 +5,7 @@ module RubyEventStore
     class EncryptionMapper < PipelineMapper
       def initialize(key_repository, serializer: YAML, forgotten_data: ForgottenData.new)
         super(Pipeline.new(
-          transformations: [
-            Transformation::Encryption.new(key_repository, serializer: serializer, forgotten_data: forgotten_data),
-            Transformation::Serialization.new(serializer: serializer),
-          ]
+          Transformation::Encryption.new(key_repository, serializer: serializer, forgotten_data: forgotten_data),
        ))
      end
    end
data/lib/ruby_event_store/mappers/instrumented_mapper.rb
@@ -8,18 +8,22 @@ module RubyEventStore
       @instrumentation = instrumentation
     end
 
-      def event_to_serialized_record(domain_event)
+      def event_to_record(domain_event)
        instrumentation.instrument("serialize.mapper.rails_event_store", domain_event: domain_event) do
-          mapper.event_to_serialized_record(domain_event)
+          mapper.event_to_record(domain_event)
        end
      end
 
-      def serialized_record_to_event(record)
+      def record_to_event(record)
        instrumentation.instrument("deserialize.mapper.rails_event_store", record: record) do
-          mapper.serialized_record_to_event(record)
+          mapper.record_to_event(record)
        end
      end
 
+      def serializer
+        mapper.serializer
+      end
+
      private
 
      attr_reader :instrumentation, :mapper
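Usage note (not part of the package contents): a sketch of wiring the instrumented mapper, assuming its constructor takes the wrapped mapper and an instrumentation backend, as the attr_reader above suggests. The backend class below is a toy stand-in; in Rails it would typically be ActiveSupport::Notifications.

    # Toy instrumentation backend: anything responding to
    # instrument(name, payload) { ... } and returning the block's value works.
    class StdoutInstrumentation
      def instrument(name, payload)
        puts "#{name}: #{payload.keys.inspect}"
        yield
      end
    end

    instrumented = RubyEventStore::Mappers::InstrumentedMapper.new(
      RubyEventStore::Mappers::Default.new,
      StdoutInstrumentation.new
    )
    # instrumented.event_to_record(event) would emit
    # "serialize.mapper.rails_event_store: [:domain_event]"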
data/lib/ruby_event_store/mappers/json_mapper.rb
@@ -5,10 +5,8 @@ module RubyEventStore
     class JSONMapper < PipelineMapper
       def initialize(events_class_remapping: {})
         super(Pipeline.new(
-          transformations: [
-            Transformation::EventClassRemapper.new(events_class_remapping),
-            Transformation::SymbolizeMetadataKeys.new,
-          ],
+          Transformation::EventClassRemapper.new(events_class_remapping),
+          Transformation::SymbolizeMetadataKeys.new,
        ))
      end
    end
data/lib/ruby_event_store/mappers/pipeline.rb
@@ -3,13 +3,13 @@
 module RubyEventStore
   module Mappers
     class Pipeline
-      def initialize(to_domain_event: Transformation::DomainEvent.new,
-                     to_serialized_record: Transformation::SerializedRecord.new,
-                     transformations: nil)
+      UNSET = Object.new.freeze
+
+      def initialize(*transformations_, transformations: UNSET, to_domain_event: Transformation::DomainEvent.new)
        @transformations = [
          to_domain_event,
-          Array(transformations),
-          to_serialized_record
+          deprecated_transformations(transformations),
+          transformations_,
        ].flatten.freeze
      end
 
@@ -26,6 +26,27 @@ module RubyEventStore
      end
 
      attr_reader :transformations
+
+      private
+
+      def deprecated_transformations(transformations)
+        case transformations
+        when UNSET
+          []
+        else
+          warn <<~EOW
+            Passing transformations via keyword parameter is deprecated.
+            Please use positional arguments from now on.
+
+            Was:
+              RubyEventStore::Mappers::Pipeline.new(transformations: transformations)
+
+            Is now:
+              RubyEventStore::Mappers::Pipeline.new(*transformations)
+          EOW
+          transformations
+        end
+      end
    end
  end
 end
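Usage note (not part of the package contents): a sketch of how a custom pipeline-based mapper adapts to the new signature. The class name and the remapping hash are illustrative; the transformations are real classes from this gem.

    class MyRemappingMapper < RubyEventStore::Mappers::PipelineMapper
      def initialize
        super(RubyEventStore::Mappers::Pipeline.new(
          # Positional transformations replace the deprecated transformations: keyword.
          RubyEventStore::Mappers::Transformation::EventClassRemapper.new('OrderPlaced' => 'Ordering::OrderPlaced'),
          RubyEventStore::Mappers::Transformation::SymbolizeMetadataKeys.new,
        ))
      end
    end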
data/lib/ruby_event_store/mappers/pipeline_mapper.rb
@@ -7,14 +7,18 @@ module RubyEventStore
       @pipeline = pipeline
     end
 
-      def event_to_serialized_record(domain_event)
+      def event_to_record(domain_event)
        pipeline.dump(domain_event)
      end
 
-      def serialized_record_to_event(record)
+      def record_to_event(record)
        pipeline.load(record)
      end
 
+      def serializer
+        NULL
+      end
+
      private
      attr_reader :pipeline
    end
data/lib/ruby_event_store/mappers/transformation/domain_event.rb
@@ -5,19 +5,27 @@ module RubyEventStore
     module Transformation
       class DomainEvent
         def dump(domain_event)
-          Item.new(
+          metadata = domain_event.metadata.to_h
+          timestamp = metadata.delete(:timestamp)
+          valid_at = metadata.delete(:valid_at)
+          Record.new(
            event_id: domain_event.event_id,
-            metadata: domain_event.metadata.to_h,
+            metadata: metadata,
            data: domain_event.data,
-            event_type: domain_event.event_type
+            event_type: domain_event.event_type,
+            timestamp: timestamp,
+            valid_at: valid_at,
          )
        end
 
-        def load(item)
-          Object.const_get(item.event_type).new(
-            event_id: item.event_id,
-            metadata: item.metadata,
-            data: item.data
+        def load(record)
+          Object.const_get(record.event_type).new(
+            event_id: record.event_id,
+            metadata: record.metadata.merge(
+              timestamp: record.timestamp,
+              valid_at: record.valid_at,
+            ),
+            data: record.data,
          )
        end
      end
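Usage note (not part of the package contents): a rough round-trip illustration of the transformation above — timestamp and valid_at leave the metadata hash on dump and are merged back on load. The event class and the fixed timestamps are assumptions.

    InvoiceIssued = Class.new(RubyEventStore::Event) # illustrative event class

    event = InvoiceIssued.new(
      data: { invoice_id: 7 },
      metadata: { timestamp: Time.utc(2021, 1, 1), valid_at: Time.utc(2021, 1, 1) }
    )

    transform = RubyEventStore::Mappers::Transformation::DomainEvent.new

    record = transform.dump(event)
    record.timestamp                  # => 2021-01-01 00:00:00 UTC
    record.metadata.key?(:timestamp)  # => false, moved out of metadata

    restored = transform.load(record)
    restored.metadata[:valid_at]      # => 2021-01-01 00:00:00 UTC, merged back in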