ruby_event_store 1.3.1 → 2.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. checksums.yaml +4 -4
  2. data/Gemfile +2 -4
  3. data/Gemfile.lock +121 -0
  4. data/Makefile +13 -3
  5. data/lib/ruby_event_store/broker.rb +3 -3
  6. data/lib/ruby_event_store/client.rb +47 -23
  7. data/lib/ruby_event_store/composed_dispatcher.rb +2 -2
  8. data/lib/ruby_event_store/constants.rb +1 -0
  9. data/lib/ruby_event_store/errors.rb +0 -1
  10. data/lib/ruby_event_store/event.rb +8 -1
  11. data/lib/ruby_event_store/immediate_async_dispatcher.rb +2 -2
  12. data/lib/ruby_event_store/in_memory_repository.rb +98 -59
  13. data/lib/ruby_event_store/instrumented_dispatcher.rb +2 -2
  14. data/lib/ruby_event_store/mappers/default.rb +28 -6
  15. data/lib/ruby_event_store/mappers/deprecated_wrapper.rb +33 -0
  16. data/lib/ruby_event_store/mappers/encryption_mapper.rb +1 -4
  17. data/lib/ruby_event_store/mappers/instrumented_mapper.rb +8 -4
  18. data/lib/ruby_event_store/mappers/json_mapper.rb +2 -4
  19. data/lib/ruby_event_store/mappers/pipeline.rb +26 -5
  20. data/lib/ruby_event_store/mappers/pipeline_mapper.rb +6 -2
  21. data/lib/ruby_event_store/mappers/transformation/domain_event.rb +16 -8
  22. data/lib/ruby_event_store/mappers/transformation/encryption.rb +20 -12
  23. data/lib/ruby_event_store/mappers/transformation/event_class_remapper.rb +11 -4
  24. data/lib/ruby_event_store/mappers/transformation/serialization.rb +16 -14
  25. data/lib/ruby_event_store/mappers/transformation/stringify_metadata_keys.rb +12 -7
  26. data/lib/ruby_event_store/mappers/transformation/symbolize_metadata_keys.rb +12 -7
  27. data/lib/ruby_event_store/null.rb +13 -0
  28. data/lib/ruby_event_store/projection.rb +2 -13
  29. data/lib/ruby_event_store/record.rb +68 -0
  30. data/lib/ruby_event_store/serialized_record.rb +23 -4
  31. data/lib/ruby_event_store/spec/broker_lint.rb +9 -9
  32. data/lib/ruby_event_store/spec/event_repository_lint.rb +200 -36
  33. data/lib/ruby_event_store/spec/mapper_lint.rb +6 -6
  34. data/lib/ruby_event_store/spec/subscriptions_lint.rb +6 -0
  35. data/lib/ruby_event_store/specification.rb +100 -7
  36. data/lib/ruby_event_store/specification_reader.rb +2 -2
  37. data/lib/ruby_event_store/specification_result.rb +86 -2
  38. data/lib/ruby_event_store/version.rb +1 -1
  39. data/lib/ruby_event_store.rb +4 -7
  40. data/ruby_event_store.gemspec +1 -3
  41. metadata +7 -9
  42. data/lib/ruby_event_store/mappers/protobuf.rb +0 -24
  43. data/lib/ruby_event_store/mappers/transformation/item.rb +0 -56
  44. data/lib/ruby_event_store/mappers/transformation/proto_event.rb +0 -17
  45. data/lib/ruby_event_store/mappers/transformation/protobuf_encoder.rb +0 -30
  46. data/lib/ruby_event_store/mappers/transformation/protobuf_nested_struct_metadata.rb +0 -30
  47. data/lib/ruby_event_store/mappers/transformation/serialized_record.rb +0 -27
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 88193e00bf9ebd58347de1cb3cf58848cc4b09abc603ffef5fb85b2565bf0b74
- data.tar.gz: 744d44d76c00843f897c321919eed85b9597063d44a52328f7616c6682063d90
+ metadata.gz: 285893bc28288225780ac559aa8a3ef11e1b5a769870f5821161273b7087b3bb
+ data.tar.gz: 43112f7f7bc9e0e1cbc38f0a5d3e574575b91e94f208803e533ffe0f4b782b04
  SHA512:
- metadata.gz: ffd5f11e365d901d7bdf7a6c5d78cad674166416dcd44cd8448982dbe23d8a243ea72b3f66e02c4cab134ef87e3f5d0e519e4de0a913bf26390f7867cc5318d8
- data.tar.gz: 3044ebfc2ca93c1b5c5fa40afd250c8aac8edc1c5299444b6094df17d1b5214b19400070761a408f8816dbe507fb52c2063587fdd428f911470310ef26434778
+ metadata.gz: 376292ff1199abb79bc64adfea0335b8c9b898963313a9d136b943eea666319cefeeceb8891c305d7fe51e1985fe151a27713ae14dc33038858f6c4a34bb024d
+ data.tar.gz: 62f53d5723681398e5ad972b2807248912e7e0b610b3bc8790bba5c9a107a1f8da132e279b79ac246dff80efac8b3a7c895f2691496a4bf156d8720bc36727ef
data/Gemfile CHANGED
@@ -3,9 +3,7 @@ git_source(:github) { |repo| "https://github.com/#{repo}.git" }

  gemspec

- eval_gemfile File.expand_path('../support/bundler/Gemfile.shared', __dir__)
+ eval_gemfile '../support/bundler/Gemfile.shared'

- gem 'protobuf_nested_struct'
- gem 'google-protobuf', '~> 3.12.2', '>= 3.12.2'
- gem 'activesupport', '~> 5.0'
+ gem 'activesupport', '6.0.3.4'
  gem 'concurrent-ruby', github: 'ruby-concurrency/concurrent-ruby', ref: 'c4cbc968c55e5b983dae953095761896220c46d1'
data/Gemfile.lock ADDED
@@ -0,0 +1,121 @@
+ GIT
+ remote: https://github.com/ruby-concurrency/concurrent-ruby.git
+ revision: c4cbc968c55e5b983dae953095761896220c46d1
+ ref: c4cbc968c55e5b983dae953095761896220c46d1
+ specs:
+ concurrent-ruby (1.1.7)
+
+ PATH
+ remote: .
+ specs:
+ ruby_event_store (2.0.3)
+ concurrent-ruby (~> 1.0, >= 1.1.6)
+
+ GEM
+ remote: https://oss:7AXfeZdAfCqL1PvHm2nvDJO6Zd9UW8IK@gem.mutant.dev/
+ specs:
+ abstract_type (0.0.7)
+ adamantium (0.2.0)
+ ice_nine (~> 0.11.0)
+ memoizable (~> 0.4.0)
+ anima (0.3.2)
+ abstract_type (~> 0.0.7)
+ adamantium (~> 0.2)
+ equalizer (~> 0.0.11)
+ ast (2.4.1)
+ concord (0.1.6)
+ adamantium (~> 0.2.0)
+ equalizer (~> 0.0.9)
+ diff-lcs (1.4.4)
+ equalizer (0.0.11)
+ i18n (1.8.5)
+ concurrent-ruby (~> 1.0)
+ ice_nine (0.11.2)
+ memoizable (0.4.2)
+ thread_safe (~> 0.3, >= 0.3.1)
+ minitest (5.14.2)
+ mprelude (0.1.0)
+ abstract_type (~> 0.0.7)
+ adamantium (~> 0.2.0)
+ concord (~> 0.1.5)
+ equalizer (~> 0.0.9)
+ ice_nine (~> 0.11.1)
+ procto (~> 0.0.2)
+ mutant-license (0.1.1.2.1627430819213747598431630701693729869473.0)
+ parser (3.0.0.0)
+ ast (~> 2.4.1)
+ procto (0.0.3)
+ rspec-core (3.10.1)
+ rspec-support (~> 3.10.0)
+ rspec-expectations (3.10.1)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.10.0)
+ rspec-mocks (3.10.1)
+ diff-lcs (>= 1.2.0, < 2.0)
+ rspec-support (~> 3.10.0)
+ rspec-support (3.10.1)
+ thread_safe (0.3.6)
+ tzinfo (1.2.9)
+ thread_safe (~> 0.1)
+ unparser (0.5.6)
+ abstract_type (~> 0.0.7)
+ adamantium (~> 0.2.0)
+ anima (~> 0.3.1)
+ concord (~> 0.1.5)
+ diff-lcs (~> 1.3)
+ equalizer (~> 0.0.9)
+ mprelude (~> 0.1.0)
+ parser (>= 3.0.0)
+ procto (~> 0.0.2)
+ variable (0.0.1)
+ equalizer (~> 0.0.11)
+ zeitwerk (2.4.2)
+
+ GEM
+ remote: https://rubygems.org/
+ specs:
+ activesupport (6.0.3.4)
+ concurrent-ruby (~> 1.0, >= 1.0.2)
+ i18n (>= 0.7, < 2)
+ minitest (~> 5.1)
+ tzinfo (~> 1.1)
+ zeitwerk (~> 2.2, >= 2.2.2)
+ mutant (0.10.22)
+ abstract_type (~> 0.0.7)
+ adamantium (~> 0.2.0)
+ anima (~> 0.3.1)
+ ast (~> 2.2)
+ concord (~> 0.1.5)
+ diff-lcs (~> 1.3)
+ equalizer (~> 0.0.9)
+ ice_nine (~> 0.11.1)
+ memoizable (~> 0.4.2)
+ mprelude (~> 0.1.0)
+ parser (~> 3.0.0)
+ procto (~> 0.0.2)
+ unparser (~> 0.5.6)
+ variable (~> 0.0.1)
+ mutant-rspec (0.10.22)
+ mutant (= 0.10.22)
+ rspec-core (>= 3.8.0, < 4.0.0)
+ rake (13.0.3)
+ rspec (3.10.0)
+ rspec-core (~> 3.10.0)
+ rspec-expectations (~> 3.10.0)
+ rspec-mocks (~> 3.10.0)
+
+ PLATFORMS
+ ruby
+
+ DEPENDENCIES
+ activesupport (= 6.0.3.4)
+ concurrent-ruby!
+ mutant (~> 0.10.21)
+ mutant-license!
+ mutant-rspec (~> 0.10.21)
+ rake (>= 10.0)
+ rspec (~> 3.6)
+ ruby_event_store!
+
+ BUNDLED WITH
+ 2.2.27
data/Makefile CHANGED
@@ -1,17 +1,27 @@
  GEM_VERSION = $(shell cat ../RES_VERSION)
  GEM_NAME = ruby_event_store
  REQUIRE = $(GEM_NAME)
- IGNORE = RubyEventStore::InMemoryRepository\#append_with_synchronize \
+ IGNORE = RubyEventStore::InMemoryRepository\#with_synchronize \
+ RubyEventStore::Client\#initialize \
+ RubyEventStore::Client\#default_clock \
+ RubyEventStore::Client\#default_correlation_id_generator \
  RubyEventStore::Client::Within\#add_thread_subscribers \
  RubyEventStore::Client::Within\#add_thread_global_subscribers \
  RubyEventStore::Client::Within\#call \
- RubyEventStore::Client\#default_correlation_id_generator \
  RubyEventStore::Mappers::InMemoryEncryptionKeyRepository\#prepare_encrypt \
  RubyEventStore::Mappers::EncryptionKey\#prepare_encrypt \
  RubyEventStore::Mappers::EncryptionKey\#prepare_decrypt \
  RubyEventStore::Mappers::EncryptionKey\#prepare_auth_data \
  RubyEventStore::Mappers::EncryptionKey\#encrypt_authenticated \
- RubyEventStore::Mappers::EncryptionKey\#ciphertext_from_authenticated
+ RubyEventStore::Mappers::EncryptionKey\#ciphertext_from_authenticated \
+ RubyEventStore::Mappers::Default\#serializer \
+ RubyEventStore::Mappers::JSONMapper\#serializer \
+ RubyEventStore::Mappers::NullMapper\#serializer \
+ RubyEventStore::Mappers::EncryptionMapper\#serializer \
+ RubyEventStore::Mappers::PipelineMapper\#serializer \
+ RubyEventStore::Mappers::DeprecatedWrapper\#serializer \
+ RubyEventStore::Mappers::InstrumentedMapper\#serializer \
+ RubyEventStore::Mappers::Transformation::Serialization*

  SUBJECT ?= RubyEventStore*

data/lib/ruby_event_store/broker.rb CHANGED
@@ -7,10 +7,10 @@ module RubyEventStore
  @dispatcher = dispatcher
  end

- def call(event, serialized_event)
+ def call(event, record)
  subscribers = subscriptions.all_for(event.event_type)
  subscribers.each do |subscriber|
- dispatcher.call(subscriber, event, serialized_event)
+ dispatcher.call(subscriber, event, record)
  end
  end

@@ -35,7 +35,7 @@ module RubyEventStore
  end

  private
- attr_reader :subscriptions, :dispatcher
+ attr_reader :dispatcher, :subscriptions

  def verify_subscription(subscriber)
  raise SubscriberNotExist, "subscriber must be first argument or block" unless subscriber
data/lib/ruby_event_store/client.rb CHANGED
@@ -10,8 +10,11 @@ module RubyEventStore
  dispatcher: Dispatcher.new,
  clock: default_clock,
  correlation_id_generator: default_correlation_id_generator)
+
+
  @repository = repository
- @mapper = mapper
+ @mapper = Mappers::DeprecatedWrapper.new(mapper)
+ @subscriptions = subscriptions
  @broker = Broker.new(subscriptions: subscriptions, dispatcher: dispatcher)
  @clock = clock
  @metadata = Concurrent::ThreadLocalVar.new
@@ -21,20 +24,20 @@ module RubyEventStore

  # Persists events and notifies subscribed handlers about them
  #
- # @param events [Array<Event, Proto>, Event, Proto] event(s)
+ # @param events [Array<Event>, Event] event(s)
  # @param stream_name [String] name of the stream for persisting events.
  # @param expected_version [:any, :auto, :none, Integer] controls optimistic locking strategy. {http://railseventstore.org/docs/expected_version/ Read more}
  # @return [self]
  def publish(events, stream_name: GLOBAL_STREAM, expected_version: :any)
  enriched_events = enrich_events_metadata(events)
- serialized_events = serialize_events(enriched_events)
- append_to_stream_serialized_events(serialized_events, stream_name: stream_name, expected_version: expected_version)
- enriched_events.zip(serialized_events) do |event, serialized_event|
+ records = transform(enriched_events)
+ append_records_to_stream(records, stream_name: stream_name, expected_version: expected_version)
+ enriched_events.zip(records) do |event, record|
  with_metadata(
  correlation_id: event.metadata.fetch(:correlation_id),
  causation_id: event.event_id,
  ) do
- broker.(event, serialized_event)
+ broker.(event, record)
  end
  end
  self
@@ -45,8 +48,11 @@ module RubyEventStore
  # @param (see #publish)
  # @return [self]
  def append(events, stream_name: GLOBAL_STREAM, expected_version: :any)
- serialized_events = serialize_events(enrich_events_metadata(events))
- append_to_stream_serialized_events(serialized_events, stream_name: stream_name, expected_version: expected_version)
+ append_records_to_stream(
+ transform(enrich_events_metadata(events)),
+ stream_name: stream_name,
+ expected_version: expected_version
+ )
  self
  end

@@ -121,6 +127,14 @@ module RubyEventStore
  broker.add_global_subscription(subscriber || proc)
  end

+ # Get list of handlers subscribed to an event
+ #
+ # @param to [Class, String] type of events to get list of sybscribed handlers
+ # @return [Array<Object, Class>]
+ def subscribers_for(event_type)
+ subscriptions.all_for(event_type.to_s)
+ end
+
  # Builder object for collecting temporary handlers (subscribers)
  # which are active only during the invocation of the provided
  # block of code.
@@ -222,9 +236,22 @@ module RubyEventStore
  # Deserialize event which was serialized for async event handlers
  # {http://railseventstore.org/docs/subscribe/#async-handlers Read more}
  #
- # @return [Event, Proto] deserialized event
- def deserialize(event_type:, event_id:, data:, metadata:)
- mapper.serialized_record_to_event(SerializedRecord.new(event_type: event_type, event_id: event_id, data: data, metadata: metadata))
+ # @return [Event] deserialized event
+ def deserialize(serializer:, event_type:, event_id:, data:, metadata:, timestamp: nil, valid_at: nil)
+ extract_timestamp = lambda do |m|
+ (m[:timestamp] || Time.parse(m.fetch('timestamp'))).iso8601
+ end
+
+ mapper.record_to_event(
+ SerializedRecord.new(
+ event_type: event_type,
+ event_id: event_id,
+ data: data,
+ metadata: metadata,
+ timestamp: timestamp || timestamp_ = extract_timestamp[serializer.load(metadata)],
+ valid_at: valid_at || timestamp_,
+ ).deserialize(serializer)
+ )
  end

  # Read additional metadata which will be added for published events
@@ -262,12 +289,10 @@ module RubyEventStore
  # end
  # event_store.overwrite(events)
  #
- # @param events [Array<Event, Proto>, Event, Proto] event(s) to serialize and overwrite again
+ # @param events [Array<Event>, Event] event(s) to serialize and overwrite again
  # @return [self]
  def overwrite(events_or_event)
- events = Array(events_or_event)
- serialized_events = serialize_events(events)
- repository.update_messages(serialized_events)
+ repository.update_messages(transform(Array(events_or_event)))
  self
  end

@@ -280,10 +305,8 @@ module RubyEventStore

  private

- def serialize_events(events)
- events.map do |ev|
- mapper.event_to_serialized_record(ev)
- end
+ def transform(events)
+ events.map { |ev| mapper.event_to_record(ev) }
  end

  def enrich_events_metadata(events)
@@ -295,11 +318,12 @@ module RubyEventStore
  def enrich_event_metadata(event)
  metadata.each { |key, value| event.metadata[key] ||= value }
  event.metadata[:timestamp] ||= clock.call
+ event.metadata[:valid_at] ||= event.metadata.fetch(:timestamp)
  event.metadata[:correlation_id] ||= correlation_id_generator.call
  end

- def append_to_stream_serialized_events(serialized_events, stream_name:, expected_version:)
- repository.append_to_stream(serialized_events, Stream.new(stream_name), ExpectedVersion.new(expected_version))
+ def append_records_to_stream(records, stream_name:, expected_version:)
+ repository.append_to_stream(records, Stream.new(stream_name), ExpectedVersion.new(expected_version))
  end

  protected
@@ -309,13 +333,13 @@ module RubyEventStore
  end

  def default_clock
- ->{ Time.now.utc }
+ ->{ Time.now.utc.round(TIMESTAMP_PRECISION) }
  end

  def default_correlation_id_generator
  ->{ SecureRandom.uuid }
  end

- attr_reader :repository, :mapper, :broker, :clock, :correlation_id_generator
+ attr_reader :repository, :mapper, :subscriptions, :broker, :clock, :correlation_id_generator
  end
  end
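
For orientation, a minimal sketch of the 2.x client surface changed above: records replace serialized events internally, event metadata gains valid_at, subscribers_for is new, and deserialize now requires a serializer:. OrderPlaced and the use of JSON as the serializer are assumptions made for illustration, not part of the diff.

require 'ruby_event_store'
require 'json'

# Hypothetical event class, used only for this sketch.
class OrderPlaced < RubyEventStore::Event; end

client = RubyEventStore::Client.new(repository: RubyEventStore::InMemoryRepository.new)

event = OrderPlaced.new(data: { order_id: 42 })
client.publish(event, stream_name: "Order$42")

# enrich_event_metadata now fills valid_at alongside timestamp.
event.metadata[:valid_at] == event.metadata[:timestamp] # => true

# New in 2.0: list handlers subscribed to a given event type.
client.subscribers_for(OrderPlaced) # => []

# deserialize now takes the serializer used by the async scheduler (JSON assumed here).
client.deserialize(
  serializer: JSON,
  event_type: "OrderPlaced",
  event_id: event.event_id,
  data: JSON.dump(event.data),
  metadata: JSON.dump(event.metadata.to_h),
  timestamp: event.metadata[:timestamp].iso8601,
  valid_at: event.metadata[:valid_at].iso8601
) # => an OrderPlaced instance rebuilt from the serialized attributes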
data/lib/ruby_event_store/composed_dispatcher.rb CHANGED
@@ -6,10 +6,10 @@ module RubyEventStore
  @dispatchers = dispatchers
  end

- def call(subscriber, event, serialized_event)
+ def call(subscriber, event, record)
  @dispatchers.each do |dispatcher|
  if dispatcher.verify(subscriber)
- dispatcher.call(subscriber, event, serialized_event)
+ dispatcher.call(subscriber, event, record)
  break
  end
  end
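
The dispatcher contract follows the same rename: the third argument to call is now a Record rather than a SerializedRecord. A hedged sketch of a custom dispatcher honouring the verify/call protocol (RecordingDispatcher is hypothetical, not part of the gem):

class RecordingDispatcher
  attr_reader :records

  def initialize
    @records = []
  end

  # Invoked by Broker or ComposedDispatcher for every matching subscriber.
  def call(subscriber, event, record)
    @records << record
    subscriber.call(event)
  end

  # ComposedDispatcher#call only delegates to dispatchers whose #verify accepts the subscriber.
  def verify(subscriber)
    subscriber.respond_to?(:call)
  end
end

# Usage sketch: compose it with the default synchronous dispatcher.
# dispatcher = RubyEventStore::ComposedDispatcher.new(RecordingDispatcher.new, RubyEventStore::Dispatcher.new)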
data/lib/ruby_event_store/constants.rb CHANGED
@@ -3,4 +3,5 @@
  module RubyEventStore
  GLOBAL_STREAM = Object.new
  PAGE_SIZE = 100.freeze
+ TIMESTAMP_PRECISION = 6
  end
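
The new TIMESTAMP_PRECISION constant pins timestamps to microseconds; the client's default clock (see client.rb above) rounds with it. A small illustration, assuming only the Ruby standard library:

require 'time'

t = Time.now.utc.round(RubyEventStore::TIMESTAMP_PRECISION)
# Rounded timestamps survive an ISO8601 round trip without sub-microsecond drift.
t == Time.iso8601(t.iso8601(RubyEventStore::TIMESTAMP_PRECISION)) # => true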
data/lib/ruby_event_store/errors.rb CHANGED
@@ -12,7 +12,6 @@ module RubyEventStore
  EventDuplicatedInStream = Class.new(Error)
  ReservedInternalName = Class.new(Error)
  InvalidHandler = Class.new(Error)
- ProtobufEncodingFailed = Class.new(Error)

  class EventNotFound < Error
  attr_reader :event_id
data/lib/ruby_event_store/event.rb CHANGED
@@ -41,6 +41,13 @@ module RubyEventStore
  metadata[:timestamp]
  end

+ # Validity time from metadata
+ #
+ # @return [Time, nil]
+ def valid_at
+ metadata[:valid_at]
+ end
+
  # Two events are equal if:
  # * they are of the same class
  # * have identical event id
@@ -116,7 +123,7 @@ module RubyEventStore
  # on correlation_id and message_id of the provided message.
  # {http://railseventstore.org/docs/correlation_causation/ Find out more}
  #
- # @param other_message [Event, Proto, command] message to correlate with. Most likely an event or a command. Must respond to correlation_id and message_id.
+ # @param other_message [Event, command] message to correlate with. Most likely an event or a command. Must respond to correlation_id and message_id.
  # @return [String] set causation_id
  def correlate_with(other_message)
  self.correlation_id = other_message.correlation_id || other_message.message_id
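
A brief sketch of the new valid_at reader (InvoiceIssued is a hypothetical class): like timestamp, it reads from metadata and stays nil until the client enriches the event or a value is supplied explicitly.

class InvoiceIssued < RubyEventStore::Event; end

event = InvoiceIssued.new(data: { amount: 100 }, metadata: { valid_at: Time.utc(2021, 1, 1) })
event.valid_at  # => 2021-01-01 00:00:00 UTC
event.timestamp # => nil, set later by Client#publish or Client#append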
data/lib/ruby_event_store/immediate_async_dispatcher.rb CHANGED
@@ -6,8 +6,8 @@ module RubyEventStore
  @scheduler = scheduler
  end

- def call(subscriber, _, serialized_event)
- @scheduler.call(subscriber, serialized_event)
+ def call(subscriber, _, record)
+ @scheduler.call(subscriber, record)
  end

  def verify(subscriber)
data/lib/ruby_event_store/in_memory_repository.rb CHANGED
@@ -4,19 +4,40 @@ require 'ostruct'
  module RubyEventStore
  class InMemoryRepository

- def initialize
- @streams = Hash.new
- @mutex = Mutex.new
- @global = Array.new
+ def initialize(serializer: NULL)
+ @serializer = serializer
+ @streams = Hash.new { |h, k| h[k] = Array.new }
+ @mutex = Mutex.new
+ @storage = Hash.new
  end

- def append_to_stream(events, stream, expected_version)
- add_to_stream(events, expected_version, stream, true)
+ def append_to_stream(records, stream, expected_version)
+ serialized_records = Array(records).map{ |record| record.serialize(serializer) }
+
+ with_synchronize(expected_version, stream) do |resolved_version|
+ raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
+
+ serialized_records.each do |serialized_record|
+ raise EventDuplicatedInStream if has_event?(serialized_record.event_id)
+ storage[serialized_record.event_id] = serialized_record
+ streams[stream.name] << serialized_record.event_id
+ end
+ end
+ self
  end

  def link_to_stream(event_ids, stream, expected_version)
- events = Array(event_ids).map {|eid| read_event(eid)}
- add_to_stream(events, expected_version, stream, nil)
+ serialized_records = Array(event_ids).map { |id| read_event(id) }
+
+ with_synchronize(expected_version, stream) do |resolved_version|
+ raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
+
+ serialized_records.each do |serialized_record|
+ raise EventDuplicatedInStream if has_event_in_stream?(serialized_record.event_id, stream.name)
+ streams[stream.name] << serialized_record.event_id
+ end
+ end
+ self
  end

  def delete_stream(stream)
@@ -24,24 +45,34 @@ module RubyEventStore
  end

  def has_event?(event_id)
- global.any?{ |item| item.event_id.eql?(event_id) }
+ storage.has_key?(event_id)
  end

  def last_stream_event(stream)
- stream_of(stream.name).last
+ last_id = event_ids_of_stream(stream).last
+ storage.fetch(last_id).deserialize(serializer) if last_id
  end

  def read(spec)
- events = read_scope(spec)
+ serialized_records = read_scope(spec)
  if spec.batched?
- batch_reader = ->(offset, limit) { events.drop(offset).take(limit) }
- BatchEnumerator.new(spec.batch_size, events.size, batch_reader).each
+ batch_reader = ->(offset, limit) do
+ serialized_records
+ .drop(offset)
+ .take(limit)
+ .map{|serialized_record| serialized_record.deserialize(serializer) }
+ end
+ BatchEnumerator.new(spec.batch_size, serialized_records.size, batch_reader).each
  elsif spec.first?
- events.first
+ serialized_records.first&.deserialize(serializer)
  elsif spec.last?
- events.last
+ serialized_records.last&.deserialize(serializer)
  else
- events.each
+ Enumerator.new do |y|
+ serialized_records.each do |serialized_record|
+ y << serialized_record.deserialize(serializer)
+ end
+ end
  end
  end

@@ -49,53 +80,73 @@ module RubyEventStore
  read_scope(spec).count
  end

- def update_messages(messages)
- messages.each do |new_msg|
- location = global.index{|m| new_msg.event_id.eql?(m.event_id)}
- raise EventNotFound.new(new_msg.event_id) unless location
- global[location] = new_msg
- streams.values.each do |str|
- location = str.index{|m| new_msg.event_id.eql?(m.event_id)}
- str[location] = new_msg if location
- end
+ def update_messages(records)
+ records.each do |record|
+ read_event(record.event_id)
+ serialized_record =
+ Record.new(
+ event_id: record.event_id,
+ event_type: record.event_type,
+ data: record.data,
+ metadata: record.metadata,
+ timestamp: Time.iso8601(storage.fetch(record.event_id).timestamp),
+ valid_at: record.valid_at,
+ ).serialize(serializer)
+ storage[record.event_id] = serialized_record
  end
  end

  def streams_of(event_id)
- streams.select do |_, stream_events|
- stream_events.any? { |event| event.event_id.eql?(event_id) }
- end.map { |name, | Stream.new(name) }
+ streams
+ .select { |name,| has_event_in_stream?(event_id, name) }
+ .map { |name,| Stream.new(name) }
  end

  private
  def read_scope(spec)
- events = spec.stream.global? ? global : stream_of(spec.stream.name)
- events = events.select{|e| spec.with_ids.any?{|x| x.eql?(e.event_id)}} if spec.with_ids?
- events = events.select{|e| spec.with_types.any?{|x| x.eql?(e.event_type)}} if spec.with_types?
- events = events.reverse if spec.backward?
- events = events.drop(index_of(events, spec.start) + 1) if spec.start
- events = events.take(index_of(events, spec.stop)) if spec.stop
- events = events[0...spec.limit] if spec.limit?
- events
+ serialized_records = serialized_records_of_stream(spec.stream)
+ serialized_records = ordered(serialized_records, spec)
+ serialized_records = serialized_records.select{|e| spec.with_ids.any?{|x| x.eql?(e.event_id)}} if spec.with_ids?
+ serialized_records = serialized_records.select{|e| spec.with_types.any?{|x| x.eql?(e.event_type)}} if spec.with_types?
+ serialized_records = serialized_records.reverse if spec.backward?
+ serialized_records = serialized_records.drop(index_of(serialized_records, spec.start) + 1) if spec.start
+ serialized_records = serialized_records.take(index_of(serialized_records, spec.stop)) if spec.stop
+ serialized_records = serialized_records.take(spec.limit) if spec.limit?
+ serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) < spec.older_than } if spec.older_than
+ serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) <= spec.older_than_or_equal } if spec.older_than_or_equal
+ serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) > spec.newer_than } if spec.newer_than
+ serialized_records = serialized_records.select { |sr| Time.iso8601(sr.timestamp) >= spec.newer_than_or_equal } if spec.newer_than_or_equal
+ serialized_records
  end

  def read_event(event_id)
- global.find {|e| event_id.eql?(e.event_id)} or raise EventNotFound.new(event_id)
+ storage.fetch(event_id) { raise EventNotFound.new(event_id) }
  end

- def stream_of(name)
- streams.fetch(name, Array.new)
+ def event_ids_of_stream(stream)
+ streams.fetch(stream.name, Array.new)
  end

- def add_to_stream(events, expected_version, stream, include_global)
- append_with_synchronize(Array(events), expected_version, stream, include_global)
+ def serialized_records_of_stream(stream)
+ stream.global? ? storage.values : storage.fetch_values(*event_ids_of_stream(stream))
+ end
+
+ def ordered(serialized_records, spec)
+ case spec.time_sort_by
+ when :as_at
+ serialized_records.sort_by(&:timestamp)
+ when :as_of
+ serialized_records.sort_by(&:valid_at)
+ else
+ serialized_records
+ end
  end

  def last_stream_version(stream)
- stream_of(stream.name).size - 1
+ event_ids_of_stream(stream).size - 1
  end

- def append_with_synchronize(events, expected_version, stream, include_global)
+ def with_synchronize(expected_version, stream, &block)
  resolved_version = expected_version.resolve_for(stream, method(:last_stream_version))

  # expected_version :auto assumes external lock is used
@@ -108,30 +159,18 @@ module RubyEventStore
  Thread.pass
  mutex.synchronize do
  resolved_version = last_stream_version(stream) if expected_version.any?
- append(events, resolved_version, stream, include_global)
+ block.call(resolved_version)
  end
  end

- def append(events, resolved_version, stream, include_global)
- stream_events = stream_of(stream.name)
- raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
-
- events.each do |event|
- raise EventDuplicatedInStream if stream_events.any? {|ev| ev.event_id.eql?(event.event_id)}
- if include_global
- raise EventDuplicatedInStream if has_event?(event.event_id)
- global.push(event)
- end
- stream_events.push(event)
- end
- streams[stream.name] = stream_events
- self
+ def has_event_in_stream?(event_id, stream_name)
+ streams.fetch(stream_name, Array.new).any? { |id| id.eql?(event_id) }
  end

  def index_of(source, event_id)
  source.index {|item| item.event_id.eql?(event_id)}
  end

- attr_reader :streams, :mutex, :global
+ attr_reader :streams, :mutex, :storage, :serializer
  end
  end
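
A sketch of the new serializer: keyword on InMemoryRepository (the default is RubyEventStore::NULL, a pass-through). JSON here is an assumption; any object responding to dump and load should work. As the hunks above show, events are now held internally as serialized records and deserialized lazily on read, and read_scope additionally honours the older_than/newer_than filters and as_at/as_of ordering coming from the specification.

require 'ruby_event_store'
require 'json'

OrderShipped = Class.new(RubyEventStore::Event) # hypothetical event class

repository = RubyEventStore::InMemoryRepository.new(serializer: JSON)
client = RubyEventStore::Client.new(repository: repository)

client.publish(OrderShipped.new(data: { order_id: 42 }), stream_name: "Order$42")

# Data is stored as a JSON string and parsed again when read back,
# hence the stringified keys below.
client.read.stream("Order$42").to_a.first.data # => {"order_id"=>42}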
data/lib/ruby_event_store/instrumented_dispatcher.rb CHANGED
@@ -7,9 +7,9 @@ module RubyEventStore
  @instrumentation = instrumentation
  end

- def call(subscriber, event, serialized_event)
+ def call(subscriber, event, record)
  instrumentation.instrument("call.dispatcher.rails_event_store", event: event, subscriber: subscriber) do
- dispatcher.call(subscriber, event, serialized_event)
+ dispatcher.call(subscriber, event, record)
  end
  end