synapse-core 0.2.0 → 0.4.0

Files changed (69)
  1. data/lib/synapse.rb +3 -0
  2. data/lib/synapse/command/simple_command_bus.rb +2 -2
  3. data/lib/synapse/common/concurrency/identifier_lock.rb +71 -0
  4. data/lib/synapse/common/concurrency/public_lock.rb +96 -0
  5. data/lib/synapse/event_bus/simple_event_bus.rb +1 -1
  6. data/lib/synapse/event_bus/wiring.rb +0 -4
  7. data/lib/synapse/event_sourcing/member.rb +0 -4
  8. data/lib/synapse/event_sourcing/snapshot/count_trigger.rb +2 -2
  9. data/lib/synapse/event_store.rb +1 -9
  10. data/lib/synapse/partitioning.rb +0 -2
  11. data/lib/synapse/process_manager.rb +12 -0
  12. data/lib/synapse/process_manager/lock_manager.rb +22 -0
  13. data/lib/synapse/process_manager/pessimistic_lock_manager.rb +23 -0
  14. data/lib/synapse/process_manager/process.rb +2 -0
  15. data/lib/synapse/process_manager/process_factory.rb +52 -0
  16. data/lib/synapse/process_manager/process_manager.rb +170 -0
  17. data/lib/synapse/process_manager/process_repository.rb +53 -0
  18. data/lib/synapse/process_manager/repository/in_memory.rb +63 -0
  19. data/lib/synapse/process_manager/resource_injector.rb +12 -0
  20. data/lib/synapse/process_manager/simple_process_manager.rb +48 -0
  21. data/lib/synapse/process_manager/wiring/process.rb +27 -0
  22. data/lib/synapse/process_manager/wiring/process_manager.rb +72 -0
  23. data/lib/synapse/repository.rb +1 -0
  24. data/lib/synapse/repository/locking.rb +1 -1
  25. data/lib/synapse/repository/optimistic_lock_manager.rb +128 -0
  26. data/lib/synapse/repository/pessimistic_lock_manager.rb +4 -37
  27. data/lib/synapse/serialization.rb +1 -1
  28. data/lib/synapse/serialization/{converter/factory.rb → converter_factory.rb} +0 -0
  29. data/lib/synapse/serialization/serializer.rb +5 -3
  30. data/lib/synapse/uow/listener_collection.rb +59 -1
  31. data/lib/synapse/version.rb +1 -1
  32. data/lib/synapse/wiring/message_wiring.rb +7 -3
  33. data/lib/synapse/wiring/wire.rb +7 -2
  34. data/test/common/concurrency/identifier_lock_test.rb +36 -0
  35. data/test/common/concurrency/public_lock_test.rb +83 -0
  36. data/test/partitioning/packing/json_test.rb +2 -1
  37. data/test/process_manager/in_memory_test.rb +57 -0
  38. data/test/process_manager/process_factory_test.rb +31 -0
  39. data/test/process_manager/simple_process_manager_test.rb +130 -0
  40. data/test/process_manager/wiring/fixtures.rb +42 -0
  41. data/test/process_manager/wiring/process_manager_test.rb +73 -0
  42. data/test/process_manager/wiring/process_test.rb +35 -0
  43. data/test/repository/optimistic_test.rb +41 -0
  44. data/test/repository/pessimistic_test.rb +20 -0
  45. data/test/serialization/converter/chain_test.rb +31 -0
  46. data/test/serialization/lazy_object_test.rb +1 -1
  47. data/test/serialization/message/serialization_aware_message_test.rb +4 -2
  48. data/test/serialization/message/serialized_message_builder_test.rb +1 -1
  49. data/test/serialization/message/serialized_message_test.rb +3 -2
  50. data/test/serialization/serializer/marshal_test.rb +1 -1
  51. data/test/serialization/serializer/oj_test.rb +1 -1
  52. data/test/serialization/serializer/ox_test.rb +1 -1
  53. data/test/serialization/serializer_test.rb +1 -1
  54. data/test/test_ext.rb +5 -2
  55. data/test/wiring/wire_registry_test.rb +10 -10
  56. data/test/wiring/wire_test.rb +5 -5
  57. metadata +29 -16
  58. data/lib/synapse/event_store/mongo.rb +0 -8
  59. data/lib/synapse/event_store/mongo/cursor_event_stream.rb +0 -63
  60. data/lib/synapse/event_store/mongo/event_store.rb +0 -86
  61. data/lib/synapse/event_store/mongo/per_commit_strategy.rb +0 -253
  62. data/lib/synapse/event_store/mongo/per_event_strategy.rb +0 -143
  63. data/lib/synapse/event_store/mongo/storage_strategy.rb +0 -113
  64. data/lib/synapse/event_store/mongo/template.rb +0 -73
  65. data/lib/synapse/partitioning/amqp.rb +0 -3
  66. data/lib/synapse/partitioning/amqp/amqp_queue_reader.rb +0 -50
  67. data/lib/synapse/partitioning/amqp/amqp_queue_writer.rb +0 -31
  68. data/lib/synapse/partitioning/amqp/key_resolver.rb +0 -26
  69. data/lib/synapse/serialization/converter/bson.rb +0 -28
data/lib/synapse/event_store/mongo.rb
@@ -1,8 +0,0 @@
- require 'synapse/event_store/mongo/cursor_event_stream'
- require 'synapse/event_store/mongo/event_store'
-
- require 'synapse/event_store/mongo/storage_strategy'
- require 'synapse/event_store/mongo/per_commit_strategy'
- require 'synapse/event_store/mongo/per_event_strategy'
-
- require 'synapse/event_store/mongo/template'
data/lib/synapse/event_store/mongo/cursor_event_stream.rb
@@ -1,63 +0,0 @@
- module Synapse
-   module EventStore
-     module Mongo
-       # TODO Document me
-       class CursorDomainEventStream < Domain::DomainEventStream
-         # @param [StorageStrategy] storage_strategy
-         # @param [Mongo::Cursor] cursor
-         # @param [Array] last_snapshot_commit
-         # @param [Object] aggregate_id
-         # @return [undefined]
-         def initialize(storage_strategy, cursor, last_snapshot_commit, aggregate_id)
-           @storage_strategy = storage_strategy
-           @cursor = cursor
-           @aggregate_id = aggregate_id
-
-           if last_snapshot_commit
-             # Current batch is an enumerator
-             @current_batch = last_snapshot_commit.each
-           else
-             @current_batch = [].each
-           end
-
-           initialize_next_event
-         end
-
-         # @return [Boolean]
-         def end?
-           @next.nil?
-         end
-
-         # @return [DomainEventMessage]
-         def next_event
-           @next.tap do
-             initialize_next_event
-           end
-         end
-
-         # @return [DomainEventMessage]
-         def peek
-           @next
-         end
-
-         private
-
-         # @return [undefined]
-         def initialize_next_event
-           begin
-             @next = @current_batch.next
-           rescue StopIteration
-             if @cursor.has_next?
-               document = @cursor.next
-               @current_batch = @storage_strategy.extract_events(document, @aggregate_id).each
-
-               retry
-             else
-               @next = nil
-             end
-           end
-         end
-       end # CursorDomainEventStream
-     end
-   end
- end
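The removed CursorDomainEventStream drains an enumerator over the current batch of events and, when that raises StopIteration, pulls the next document from the Mongo cursor, extracts a fresh batch, and retries; keeping the look-ahead event in @next is what lets peek and end? work without consuming the cursor. The self-contained sketch below isolates that refill pattern; BatchedStream and FakeCursor are hypothetical illustrations, not classes from synapse-core.

# Illustrative only: mimics the StopIteration/retry refill pattern above.
class BatchedStream
  # batches: any object responding to `has_next?` and `next`, where
  # `next` returns an array (a "document") of items.
  def initialize(batches)
    @batches = batches
    @current = [].each # enumerator over the current batch
    advance
  end

  def end?
    @next.nil?
  end

  def next_event
    @next.tap { advance }
  end

  private

  # Keep one item of look-ahead in @next, refilling from the next batch
  # whenever the current enumerator is exhausted.
  def advance
    @next = @current.next
  rescue StopIteration
    if @batches.has_next?
      @current = @batches.next.each
      retry
    else
      @next = nil
    end
  end
end

# Hypothetical cursor yielding two "documents" of events
class FakeCursor
  def initialize(docs)
    @docs = docs.each
  end

  def has_next?
    @docs.peek
    true
  rescue StopIteration
    false
  end

  def next
    @docs.next
  end
end

stream = BatchedStream.new(FakeCursor.new([[1, 2], [3]]))
puts stream.next_event until stream.end? # => 1, 2, 3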
data/lib/synapse/event_store/mongo/event_store.rb
@@ -1,86 +0,0 @@
- module Synapse
-   module EventStore
-     module Mongo
-       # Implementation of an event store backed by a Mongo database
-       class MongoEventStore < SnapshotEventStore
-         # @param [MongoTemplate] template
-         # @param [StorageStrategy] storage_strategy
-         # @return [undefined]
-         def initialize(template, storage_strategy)
-           @storage_strategy = storage_strategy
-           @template = template
-         end
-
-         # @return [undefined]
-         def ensure_indexes
-           @storage_strategy.ensure_indexes
-         end
-
-         # @raise [EventStoreError] If an error occurs while reading the stream from the store
-         # @param [String] type_identifier Type descriptor of the aggregate to retrieve
-         # @param [Object] aggregate_id
-         # @return [DomainEventStream]
-         def read_events(type_identifier, aggregate_id)
-           first_sequence_number = -1
-
-           last_snapshot_commit = load_last_snapshot type_identifier, aggregate_id
-           if last_snapshot_commit and last_snapshot_commit.size > 0
-             first_sequence_number = last_snapshot_commit[0].sequence_number
-           end
-
-           cursor = @storage_strategy.fetch_events type_identifier, aggregate_id, first_sequence_number
-
-           unless last_snapshot_commit or cursor.has_next?
-             raise StreamNotFoundError.new type_identifier, aggregate_id
-           end
-
-           CursorDomainEventStream.new @storage_strategy, cursor, last_snapshot_commit, aggregate_id
-         end
-
-         # @raise [EventStoreError] If an error occurs while appending the stream to the store
-         # @param [String] type_identifier Type descriptor of the aggregate to append to
-         # @param [DomainEventStream] stream
-         # @return [undefined]
-         def append_events(type_identifier, stream)
-           events = stream.to_a
-           documents = @storage_strategy.create_documents type_identifier, events
-
-           begin
-             @template.event_collection.insert documents
-           rescue Mongo::OperationFailure => ex
-             if ex.error_code == 11000
-               raise Repository::ConcurrencyException,
-                 'Event for this aggregate and sequence number already present'
-             end
-
-             raise ex
-           end
-         end
-
-         # @raise [EventStoreError] If an error occurs while appending the event to the store
-         # @param [String] type_identifier Type descriptor of the aggregate to append to
-         # @param [DomainEventMessage] snapshot_event
-         # @return [undefined]
-         def append_snapshot_event(type_identifier, snapshot_event)
-           documents = @storage_strategy.create_documents type_identifier, [snapshot_event]
-           @template.snapshot_collection.insert documents
-         end
-
-         private
-
-         # @param [String] type_identifier Type descriptor of the aggregate to retrieve
-         # @param [Object] aggregate_id
-         def load_last_snapshot(type_identifier, aggregate_id)
-           cursor = @storage_strategy.fetch_last_snapshot type_identifier, aggregate_id
-
-           unless cursor.has_next?
-             return
-           end
-
-           first = cursor.next_document
-           @storage_strategy.extract_events first, aggregate_id
-         end
-       end # MongoEventStore
-     end # Mongo
-   end # EventStore
- end # Synapse
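Taken together, the removed methods outline MongoEventStore's read/append cycle: append_events inserts strategy-built documents and maps Mongo duplicate-key failures (error code 11000) to a concurrency conflict, while read_events returns a cursor-backed stream seeded from the latest snapshot. Below is a minimal usage sketch, assuming a wired-up template, storage strategy, domain event stream and aggregate identifier (none of that setup appears in this diff, and the 'Order' type identifier is invented):

# Sketch only: `template`, `strategy`, `stream` and `aggregate_id`
# are assumed to have been constructed elsewhere.
event_store = Synapse::EventStore::Mongo::MongoEventStore.new(template, strategy)
event_store.ensure_indexes

begin
  # Append a stream of domain events for the given aggregate type
  event_store.append_events 'Order', stream
rescue Synapse::Repository::ConcurrencyException
  # Mongo reported a duplicate key (error code 11000), meaning another
  # writer already stored this aggregate/sequence-number pair
end

# Reading returns a DomainEventStream that lazily walks the Mongo cursor
events = event_store.read_events 'Order', aggregate_id
until events.end?
  event = events.next_event
  # handle event...
end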
data/lib/synapse/event_store/mongo/per_commit_strategy.rb
@@ -1,253 +0,0 @@
- module Synapse
-   module EventStore
-     module Mongo
-       # Storage strategy that stores all events in a commit operation in a single document
-       #
-       # Since Mongo doesn't support transactions, this can be used as a substitute to guarantee
-       # atomic storage of events. The only downside is that it may be harder to query events
-       # from the event store.
-       #
-       # Performance also seems to be better using this strategy
-       class DocumentPerCommitStrategy < StorageStrategy
-         # @param [String] type_identifier Type identifier for the aggregate
-         # @param [Array] events Domain events to be committed
-         # @return [Array]
-         def create_documents(type_identifier, events)
-           document = CommitDocument.new
-           document.from_events(type_identifier, events, @serializer).to_hash
-         end
-
-         # @param [Hash] hash
-         # @param [Object] aggregate_id
-         # @return [Array]
-         def extract_events(hash, aggregate_id)
-           document = CommitDocument.new
-           document.from_hash(hash).to_events(aggregate_id, @serializer, @upcaster_chain)
-         end
-
-         # Mongo document that represents a commit containing one or more events
-         class CommitDocument
-           # @return [Object]
-           attr_reader :aggregate_id
-
-           # @param [String] type_identifier
-           # @param [Array] events
-           # @param [Serializer] serializer
-           # @return [CommitDocument]
-           def from_events(type_identifier, events, serializer)
-             first_event = events.first
-             last_event = events.last
-
-             @aggregate_type = type_identifier
-             @aggregate_id = first_event.aggregate_id.to_s
-             @first_sequence_number = first_event.sequence_number
-             @last_sequence_number = last_event.sequence_number
-             @first_timestamp = first_event.timestamp
-             @last_timestamp = last_event.timestamp
-
-             @events = Array.new
-             events.each do |event|
-               event_document = EventDocument.new
-               event_document.from_event event, serializer
-
-               @events.push event_document
-             end
-
-             self
-           end
-
-           # @param [Hash] hash
-           # @return [CommitDocument]
-           def from_hash(hash)
-             hash.symbolize_keys!
-
-             @aggregate_id = hash.fetch :aggregate_id
-             @aggregate_type = hash.fetch :aggregate_type
-             @first_sequence_number = hash.fetch :first_sequence_number
-             @last_sequence_number = hash.fetch :last_sequence_number
-             @first_timestamp = hash.fetch :first_timestamp
-             @last_timestamp = hash.fetch :last_timestamp
-
-             @events = Array.new
-
-             event_hashes = hash.fetch :events
-             event_hashes.each do |event_hash|
-               event_document = EventDocument.new
-               event_document.from_hash event_hash
-
-               @events.push event_document
-             end
-
-             self
-           end
-
-           # @return [Hash]
-           def to_hash
-             events = Array.new
-             @events.each do |event|
-               events.push event.to_hash
-             end
-
-             { aggregate_id: @aggregate_id,
-               aggregate_type: @aggregate_type,
-               # Allows us to use the same query to filter events as DocumentPerEvent
-               sequence_number: @first_sequence_number,
-               first_sequence_number: @first_sequence_number,
-               last_sequence_number: @last_sequence_number,
-               # Allows us to use the same query to filter events as DocumentPerEvent
-               timestamp: @first_timestamp,
-               first_timestamp: @first_timestamp,
-               last_timestamp: @last_timestamp,
-               events: events }
-           end
-
-           # @param [Object] aggregate_id The actual aggregate identifier used to query the event store
-           # @param [Serializer] serializer
-           # @param [UpcasterChain] upcaster_chain
-           # @return [Array]
-           def to_events(aggregate_id, serializer, upcaster_chain)
-             events = Array.new
-
-             @events.each do |event_document|
-               event_data = DocumentDomainEventData.new aggregate_id, event_document
-               context = Upcasting::SerializedDomainEventUpcastingContext.new event_data, aggregate_id, serializer
-
-               upcast_objects = upcaster_chain.upcast event_document.payload, context
-               upcast_objects.each do |upcast_object|
-                 upcast_data = Upcasting::UpcastSerializedDomainEventData.new event_data, aggregate_id, upcast_object
-
-                 builder = Serialization::SerializedDomainEventMessageBuilder.new
-
-                 # Prevent duplicate serialization of metadata if it was accessed during upcasting
-                 metadata = context.serialized_metadata
-                 if metadata.deserialized?
-                   builder.metadata = Serialization::DeserializedObject.new metadata.deserialized
-                 end
-
-                 builder.from_data upcast_data, serializer
-
-                 events.push builder.build
-               end
-             end
-
-             events
-           end
-         end # CommitDocument
-
-         # Mongo document that represents a single event as part of a commit document
-         class EventDocument
-           # @return [String]
-           attr_reader :id
-
-           # @return [Time]
-           attr_reader :timestamp
-
-           # @return [Integer]
-           attr_reader :sequence_number
-
-           # @return [SerializedObject]
-           def metadata
-             Serialization::SerializedMetadata.new @metadata, @metadata.class
-           end
-
-           # @return [SerializedObject]
-           def payload
-             Serialization::SerializedObject.new @payload, @payload.class,
-               Serialization::SerializedType.new(@payload_type, @payload_revision)
-           end
-
-           # @param [EventMessage] event
-           # @param [Serializer] serializer
-           # @return [EventDocument]
-           def from_event(event, serializer)
-             serialization_target = String
-             if serializer.can_serialize_to? Hash
-               serialization_target = Hash
-             end
-
-             serialized_metadata = serializer.serialize_metadata event, serialization_target
-             serialized_payload = serializer.serialize_payload event, serialization_target
-
-             @id = event.id
-             @metadata = serialized_metadata.content
-             @payload = serialized_payload.content
-             @payload_type = serialized_payload.type.name
-             @payload_revision = serialized_payload.type.revision
-             @timestamp = event.timestamp
-             @sequence_number = event.sequence_number
-
-             self
-           end
-
-           # @param [Hash] hash
-           # @return [EventDocument]
-           def from_hash(hash)
-             hash.symbolize_keys!
-
-             @id = hash.fetch :id
-             @metadata = hash.fetch :metadata
-             @payload = hash.fetch :payload
-             @payload_type = hash.fetch :payload_type
-             @payload_revision = hash.fetch :payload_revision
-             @timestamp = hash.fetch :timestamp
-             @sequence_number = hash.fetch :sequence_number
-
-             self
-           end
-
-           # @return [Hash]
-           def to_hash
-             { id: @id,
-               metadata: @metadata,
-               payload: @payload,
-               payload_type: @payload_type,
-               payload_revision: @payload_revision,
-               timestamp: @timestamp,
-               sequence_number: @sequence_number }
-           end
-         end # EventDocument
-
-         # Serialized domain event data from an event document
-         class DocumentDomainEventData < Serialization::SerializedDomainEventData
-           # @param [Object] aggregate_id
-           # @param [EventDocument] event_document
-           # @return [undefined]
-           def initialize(aggregate_id, event_document)
-             @aggregate_id = aggregate_id
-             @event_document = event_document
-           end
-
-           # @return [String]
-           def id
-             @event_document.id
-           end
-
-           # @return [SerializedObject]
-           def metadata
-             @event_document.metadata
-           end
-
-           # @return [SerializedObject]
-           def payload
-             @event_document.payload
-           end
-
-           # @return [Time]
-           def timestamp
-             @event_document.timestamp
-           end
-
-           # @return [Object]
-           def aggregate_id
-             @aggregate_id
-           end
-
-           # @return [Integer]
-           def sequence_number
-             @event_document.sequence_number
-           end
-         end # DocumentDomainEventData
-       end # DocumentPerCommitStrategy
-     end # Mongo
-   end # EventStore
- end # Synapse
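DocumentPerCommitStrategy relies on Mongo's atomic single-document writes in place of transactions: every event in a commit is embedded in one document, and the first event's sequence_number and timestamp are copied to the top level so the same queries work against both storage strategies. Based on CommitDocument#to_hash above, a two-event commit would be stored roughly like this (identifiers, timestamps and payloads are invented for illustration):

# Illustrative document shape for a two-event commit (values made up)
{
  aggregate_id: '8a4efc6e',
  aggregate_type: 'Order',
  sequence_number: 0,                 # mirrors first_sequence_number for shared queries
  first_sequence_number: 0,
  last_sequence_number: 1,
  timestamp: Time.utc(2013, 6, 1),    # mirrors first_timestamp for shared queries
  first_timestamp: Time.utc(2013, 6, 1),
  last_timestamp: Time.utc(2013, 6, 1),
  events: [
    { id: 'e1', metadata: {}, payload: { order_id: '8a4efc6e' },
      payload_type: 'OrderCreated', payload_revision: '1',
      timestamp: Time.utc(2013, 6, 1), sequence_number: 0 },
    { id: 'e2', metadata: {}, payload: { sku: 'ABC-123' },
      payload_type: 'ItemAdded', payload_revision: '1',
      timestamp: Time.utc(2013, 6, 1), sequence_number: 1 }
  ]
}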
data/lib/synapse/event_store/mongo/per_event_strategy.rb
@@ -1,143 +0,0 @@
- module Synapse
-   module EventStore
-     module Mongo
-       # Storage strategy that stores each event as its own document
-       class DocumentPerEventStrategy < StorageStrategy
-         # @param [String] type_identifier Type identifier for the aggregate
-         # @param [Array] events Domain events to be committed
-         # @return [Array]
-         def create_documents(type_identifier, events)
-           documents = Array.new
-
-           events.each do |event|
-             document = EventDocument.new
-             document.from_event event, type_identifier, @serializer
-
-             documents.push document.to_hash
-           end
-
-           documents
-         end
-
-         # @param [Hash] hash
-         # @param [Object] aggregate_id
-         # @return [Array]
-         def extract_events(hash, aggregate_id)
-           document = EventDocument.new
-           document.from_hash(hash).to_events(aggregate_id, @serializer, @upcaster_chain)
-         end
-
-         # Mongo document that represents a single domain event
-         class EventDocument < Serialization::SerializedDomainEventData
-           # @return [String]
-           attr_reader :id
-
-           # @return [Time]
-           attr_reader :timestamp
-
-           # @return [Object]
-           attr_reader :aggregate_id
-
-           # @return [Integer]
-           attr_reader :sequence_number
-
-           # @return [SerializedObject]
-           def metadata
-             Serialization::SerializedMetadata.new @metadata, @metadata.class
-           end
-
-           # @return [SerializedObject]
-           def payload
-             Serialization::SerializedObject.new @payload, @payload.class,
-               Serialization::SerializedType.new(@payload_type, @payload_revision)
-           end
-
-           # @param [DomainEventMessage] event
-           # @param [String] type_identifier
-           # @param [Serializer] serializer
-           # @return [EventDocument]
-           def from_event(event, type_identifier, serializer)
-             serialization_target = String
-             if serializer.can_serialize_to? Hash
-               serialization_target = Hash
-             end
-
-             serialized_metadata = serializer.serialize_metadata event, serialization_target
-             serialized_payload = serializer.serialize_payload event, serialization_target
-
-             @id = event.id
-             @metadata = serialized_metadata.content
-             @payload = serialized_payload.content
-             @payload_type = serialized_payload.type.name
-             @payload_revision = serialized_payload.type.revision
-             @timestamp = event.timestamp
-             @aggregate_id = event.aggregate_id
-             @aggregate_type = type_identifier
-             @sequence_number = event.sequence_number
-
-             self
-           end
-
-           # @param [Hash] hash
-           # @return [EventDocument]
-           def from_hash(hash)
-             hash.symbolize_keys!
-
-             @id = hash.fetch :_id
-             @metadata = hash.fetch :metadata
-             @payload = hash.fetch :payload
-             @payload_type = hash.fetch :payload_type
-             @payload_revision = hash.fetch :payload_revision
-             @timestamp = hash.fetch :timestamp
-             @aggregate_id = hash.fetch :aggregate_id
-             @aggregate_type = hash.fetch :aggregate_type
-             @sequence_number = hash.fetch :sequence_number
-
-             self
-           end
-
-           # @return [Hash]
-           def to_hash
-             { _id: @id,
-               metadata: @metadata,
-               payload: @payload,
-               payload_type: @payload_type,
-               payload_revision: @payload_revision,
-               timestamp: @timestamp,
-               aggregate_id: @aggregate_id,
-               aggregate_type: @aggregate_type,
-               sequence_number: @sequence_number }
-           end
-
-           # @param [Object] aggregate_id
-           # @param [Serializer] serializer
-           # @param [UpcasterChain] upcaster_chain
-           # @return [Array]
-           def to_events(aggregate_id, serializer, upcaster_chain)
-             events = Array.new
-
-             context = Upcasting::SerializedDomainEventUpcastingContext.new self, aggregate_id, serializer
-             upcast_objects = upcaster_chain.upcast payload, context
-             upcast_objects.each do |upcast_object|
-               upcast_data = Upcasting::UpcastSerializedDomainEventData.new self, aggregate_id, upcast_object
-
-               builder = Serialization::SerializedDomainEventMessageBuilder.new
-
-               # Prevent duplicate serialization of metadata if it was accessed during upcasting
-               metadata = context.serialized_metadata
-               if metadata.deserialized?
-                 builder.metadata = Serialization::DeserializedObject.new metadata.deserialized
-               end
-
-               builder.from_data upcast_data, serializer
-
-               events.push builder.build
-             end
-
-             events
-           end
-         end # EventDocument
-       end # DocumentPerEventStrategy
-     end # Mongo
-   end # EventStore
- end # Synapse
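By contrast, DocumentPerEventStrategy writes one document per event, using the event identifier as Mongo's _id and repeating the aggregate fields on every document. Following EventDocument#to_hash above, the same two-event commit from the previous example would produce two documents along these lines (again with invented values):

# Illustrative output of create_documents for the same two-event commit:
# one Mongo document per event, keyed by the event id via _id
[
  { _id: 'e1',
    metadata: {},
    payload: { order_id: '8a4efc6e' },
    payload_type: 'OrderCreated',
    payload_revision: '1',
    timestamp: Time.utc(2013, 6, 1),
    aggregate_id: '8a4efc6e',
    aggregate_type: 'Order',
    sequence_number: 0 },
  { _id: 'e2',
    metadata: {},
    payload: { sku: 'ABC-123' },
    payload_type: 'ItemAdded',
    payload_revision: '1',
    timestamp: Time.utc(2013, 6, 1),
    aggregate_id: '8a4efc6e',
    aggregate_type: 'Order',
    sequence_number: 1 }
]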