synapse-mongo 0.1.1

@@ -0,0 +1,24 @@
+ require 'active_support'
+ require 'mongo'
+ require 'synapse'
+
+ require 'synapse/mongo/version'
+
+ module Synapse
+   module Common
+     # Utility classes used by Mongo components
+     module Mongo
+       extend ActiveSupport::Autoload
+
+       autoload :BaseTemplate
+     end
+   end
+
+   module EventStore
+     autoload :Mongo
+   end
+
+   module Serialization
+     autoload :OrderedHashToHashConverter, 'synapse/serialization/converter/bson'
+   end
+ end
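For orientation, a minimal usage sketch (the constant references are taken from the file above): requiring the gem wires up the lazy autoloads, so the Mongo-backed constants resolve on first reference.

    require 'synapse-mongo'

    # First reference triggers the ActiveSupport autoloads declared above
    Synapse::Common::Mongo::BaseTemplate
    Synapse::Serialization::OrderedHashToHashConverter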
@@ -0,0 +1,40 @@
+ module Synapse
+   module Common
+     module Mongo
+       # Represents a mechanism for accessing collections required by a component
+       # @abstract
+       class BaseTemplate
+         # @return [String] Name of the database to use
+         attr_accessor :database_name
+
+         # @return [String] Username to authenticate with (optional)
+         attr_accessor :username
+
+         # @return [String] Password to authenticate with (optional)
+         attr_accessor :password
+
+         # @param [Mongo::MongoClient] client
+         # @return [undefined]
+         def initialize(client)
+           @client = client
+           @database_name = 'synapse'
+         end
+
+         protected
+
+         # @return [Mongo::DB]
+         def database
+           unless @database
+             @database = @client.db @database_name
+
+             if @username and @password
+               @database.authenticate @username, @password
+             end
+           end
+
+           @database
+         end
+       end # BaseTemplate
+     end # Mongo
+   end # Common
+ end
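A usage sketch under stated assumptions: the `TrackingTemplate` subclass and its collection name are hypothetical, and a local mongod is reachable through the 1.x `mongo` driver. A component-specific template subclasses BaseTemplate and exposes the collections it needs; the database handle is fetched lazily and authenticated once if credentials are set.

    # Hypothetical component template; only the accessor is new
    class TrackingTemplate < Synapse::Common::Mongo::BaseTemplate
      def tracking_collection
        database.collection 'tracking'
      end
    end

    template = TrackingTemplate.new Mongo::MongoClient.new
    template.database_name = 'my_app'  # defaults to 'synapse'
    template.username = 'synapse'      # optional; triggers authenticate
    template.password = 'secret'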
@@ -0,0 +1,8 @@
+ require 'synapse/event_store/mongo/cursor_event_stream'
+ require 'synapse/event_store/mongo/event_store'
+
+ require 'synapse/event_store/mongo/storage_strategy'
+ require 'synapse/event_store/mongo/per_commit_strategy'
+ require 'synapse/event_store/mongo/per_event_strategy'
+
+ require 'synapse/event_store/mongo/template'
@@ -0,0 +1,63 @@
+ module Synapse
+   module EventStore
+     module Mongo
+       # Domain event stream that lazily fetches event documents from a Mongo
+       # cursor, extracting events one batch (document) at a time
+       class CursorDomainEventStream < Domain::DomainEventStream
+         # @param [StorageStrategy] storage_strategy
+         # @param [Mongo::Cursor] cursor
+         # @param [Array] last_snapshot_commit
+         # @param [Object] aggregate_id
+         # @return [undefined]
+         def initialize(storage_strategy, cursor, last_snapshot_commit, aggregate_id)
+           @storage_strategy = storage_strategy
+           @cursor = cursor
+           @aggregate_id = aggregate_id
+
+           if last_snapshot_commit
+             # Current batch is an enumerator
+             @current_batch = last_snapshot_commit.each
+           else
+             @current_batch = [].each
+           end
+
+           initialize_next_event
+         end
+
+         # @return [Boolean]
+         def end?
+           @next.nil?
+         end
+
+         # @return [DomainEventMessage]
+         def next_event
+           @next.tap do
+             initialize_next_event
+           end
+         end
+
+         # @return [DomainEventMessage]
+         def peek
+           @next
+         end
+
+         private
+
+         # Advances to the next event, moving on to the next document in the
+         # cursor when the current batch is exhausted
+         # @return [undefined]
+         def initialize_next_event
+           begin
+             @next = @current_batch.next
+           rescue StopIteration
+             if @cursor.has_next?
+               document = @cursor.next
+               @current_batch = @storage_strategy.extract_events(document, @aggregate_id).each
+
+               retry
+             else
+               @next = nil
+             end
+           end
+         end
+       end # CursorDomainEventStream
+     end
+   end
+ end
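A sketch of draining such a stream; the `store` and `aggregate_id` variables are assumed from the event store wiring sketched after the next file.

    stream = store.read_events 'TestAggregate', aggregate_id

    until stream.end?
      event = stream.next_event  # peek returns the same message without advancing
      puts "#{event.sequence_number}: #{event.payload.inspect}"
    end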
@@ -0,0 +1,88 @@
+ module Synapse
+   module EventStore
+     module Mongo
+       # Implementation of an event store backed by a Mongo database
+       class MongoEventStore < SnapshotEventStore
+         # @param [MongoTemplate] template
+         # @param [StorageStrategy] storage_strategy
+         # @return [undefined]
+         def initialize(template, storage_strategy)
+           @storage_strategy = storage_strategy
+           @template = template
+         end
+
+         # @return [undefined]
+         def ensure_indexes
+           @storage_strategy.ensure_indexes
+         end
+
+         # @raise [EventStoreError] If an error occurs while reading the stream from the store
+         # @param [String] type_identifier Type descriptor of the aggregate to retrieve
+         # @param [Object] aggregate_id
+         # @return [DomainEventStream]
+         def read_events(type_identifier, aggregate_id)
+           first_sequence_number = -1
+
+           last_snapshot_commit = load_last_snapshot type_identifier, aggregate_id
+           if last_snapshot_commit and last_snapshot_commit.size > 0
+             first_sequence_number = last_snapshot_commit[0].sequence_number
+           end
+
+           first_sequence_number = first_sequence_number.next
+
+           cursor = @storage_strategy.fetch_events type_identifier, aggregate_id, first_sequence_number
+
+           unless last_snapshot_commit or cursor.has_next?
+             raise StreamNotFoundError.new type_identifier, aggregate_id
+           end
+
+           CursorDomainEventStream.new @storage_strategy, cursor, last_snapshot_commit, aggregate_id
+         end
+
+         # @raise [EventStoreError] If an error occurs while appending the stream to the store
+         # @param [String] type_identifier Type descriptor of the aggregate to append to
+         # @param [DomainEventStream] stream
+         # @return [undefined]
+         def append_events(type_identifier, stream)
+           events = stream.to_a
+           documents = @storage_strategy.create_documents type_identifier, events
+
+           begin
+             @template.event_collection.insert documents
+           rescue ::Mongo::OperationFailure => exception
+             if exception.error_code == 11000
+               raise Repository::ConcurrencyError,
+                 'Event for this aggregate and sequence number already present'
+             end
+
+             raise exception
+           end
+         end
+
+         # @raise [EventStoreError] If an error occurs while appending the event to the store
+         # @param [String] type_identifier Type descriptor of the aggregate to append to
+         # @param [DomainEventMessage] snapshot_event
+         # @return [undefined]
+         def append_snapshot_event(type_identifier, snapshot_event)
+           documents = @storage_strategy.create_documents type_identifier, [snapshot_event]
+           @template.snapshot_collection.insert documents
+         end
+
+         private
+
+         # @param [String] type_identifier Type descriptor of the aggregate to retrieve
+         # @param [Object] aggregate_id
+         # @return [Array, nil] Events from the last snapshot commit, if any
+         def load_last_snapshot(type_identifier, aggregate_id)
+           cursor = @storage_strategy.fetch_last_snapshot type_identifier, aggregate_id
+
+           unless cursor.has_next?
+             return
+           end
+
+           first = cursor.next_document
+           @storage_strategy.extract_events first, aggregate_id
+         end
+       end # MongoEventStore
+     end # Mongo
+   end # EventStore
+ end # Synapse
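A minimal wiring sketch, assuming a local mongod plus the MarshalSerializer and UpcasterChain from synapse-core (the same collaborators the integration test later in this diff uses):

    require 'synapse-mongo'

    client = Mongo::MongoClient.new
    template = Synapse::EventStore::Mongo::Template.new client

    serializer = Synapse::Serialization::MarshalSerializer.new
    upcaster_chain = Synapse::Upcasting::UpcasterChain.new serializer.converter_factory
    strategy = Synapse::EventStore::Mongo::DocumentPerEventStrategy.new template, serializer, upcaster_chain

    store = Synapse::EventStore::Mongo::MongoEventStore.new template, strategy
    store.ensure_indexes  # unique index on aggregate id, type and sequence number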
@@ -0,0 +1,253 @@
+ module Synapse
+   module EventStore
+     module Mongo
+       # Storage strategy that stores all events in a commit operation in a single document
+       #
+       # Since Mongo doesn't support transactions, this can be used as a substitute to guarantee
+       # atomic storage of events. The only downside is that it may be harder to query events
+       # from the event store.
+       #
+       # Performance also seems to be better using this strategy
+       class DocumentPerCommitStrategy < StorageStrategy
+         # @param [String] type_identifier Type identifier for the aggregate
+         # @param [Array] events Domain events to be committed
+         # @return [Hash] A single document representing the entire commit
+         def create_documents(type_identifier, events)
+           document = CommitDocument.new
+           document.from_events(type_identifier, events, @serializer).to_hash
+         end
+
+         # @param [Hash] hash
+         # @param [Object] aggregate_id
+         # @return [Array]
+         def extract_events(hash, aggregate_id)
+           document = CommitDocument.new
+           document.from_hash(hash).to_events(aggregate_id, @serializer, @upcaster_chain)
+         end
+
+         # Mongo document that represents a commit containing one or more events
+         class CommitDocument
+           # @return [Object]
+           attr_reader :aggregate_id
+
+           # @param [String] type_identifier
+           # @param [Array] events
+           # @param [Serializer] serializer
+           # @return [CommitDocument]
+           def from_events(type_identifier, events, serializer)
+             first_event = events.first
+             last_event = events.last
+
+             @aggregate_type = type_identifier
+             @aggregate_id = first_event.aggregate_id.to_s
+             @first_sequence_number = first_event.sequence_number
+             @last_sequence_number = last_event.sequence_number
+             @first_timestamp = first_event.timestamp
+             @last_timestamp = last_event.timestamp
+
+             @events = Array.new
+             events.each do |event|
+               event_document = EventDocument.new
+               event_document.from_event event, serializer
+
+               @events.push event_document
+             end
+
+             self
+           end
+
+           # @param [Hash] hash
+           # @return [CommitDocument]
+           def from_hash(hash)
+             hash.symbolize_keys!
+
+             @aggregate_id = hash.fetch :aggregate_id
+             @aggregate_type = hash.fetch :aggregate_type
+             @first_sequence_number = hash.fetch :first_sequence_number
+             @last_sequence_number = hash.fetch :last_sequence_number
+             @first_timestamp = hash.fetch :first_timestamp
+             @last_timestamp = hash.fetch :last_timestamp
+
+             @events = Array.new
+
+             event_hashes = hash.fetch :events
+             event_hashes.each do |event_hash|
+               event_document = EventDocument.new
+               event_document.from_hash event_hash
+
+               @events.push event_document
+             end
+
+             self
+           end
+
+           # @return [Hash]
+           def to_hash
+             events = Array.new
+             @events.each do |event|
+               events.push event.to_hash
+             end
+
+             { aggregate_id: @aggregate_id,
+               aggregate_type: @aggregate_type,
+               # Allows us to use the same query to filter events as DocumentPerEvent
+               sequence_number: @first_sequence_number,
+               first_sequence_number: @first_sequence_number,
+               last_sequence_number: @last_sequence_number,
+               # Allows us to use the same query to filter events as DocumentPerEvent
+               timestamp: @first_timestamp,
+               first_timestamp: @first_timestamp,
+               last_timestamp: @last_timestamp,
+               events: events }
+           end
+
+           # @param [Object] aggregate_id The actual aggregate identifier used to query the event store
+           # @param [Serializer] serializer
+           # @param [UpcasterChain] upcaster_chain
+           # @return [Array]
+           def to_events(aggregate_id, serializer, upcaster_chain)
+             events = Array.new
+
+             @events.each do |event_document|
+               event_data = DocumentDomainEventData.new aggregate_id, event_document
+               context = Upcasting::SerializedDomainEventUpcastingContext.new event_data, aggregate_id, serializer
+
+               upcast_objects = upcaster_chain.upcast event_document.payload, context
+               upcast_objects.each do |upcast_object|
+                 upcast_data = Upcasting::UpcastSerializedDomainEventData.new event_data, aggregate_id, upcast_object
+
+                 builder = Serialization::SerializedDomainEventMessageBuilder.new
+
+                 # Prevent duplicate serialization of metadata if it was accessed during upcasting
+                 metadata = context.serialized_metadata
+                 if metadata.deserialized?
+                   builder.metadata = Serialization::DeserializedObject.new metadata.deserialized
+                 end
+
+                 builder.from_data upcast_data, serializer
+
+                 events.push builder.build
+               end
+             end
+
+             events
+           end
+         end # CommitDocument
+
+         # Mongo document that represents a single event as part of a commit document
+         class EventDocument
+           # @return [String]
+           attr_reader :id
+
+           # @return [Time]
+           attr_reader :timestamp
+
+           # @return [Integer]
+           attr_reader :sequence_number
+
+           # @return [SerializedObject]
+           def metadata
+             Serialization::SerializedMetadata.new @metadata, @metadata.class
+           end
+
+           # @return [SerializedObject]
+           def payload
+             Serialization::SerializedObject.new @payload, @payload.class,
+               Serialization::SerializedType.new(@payload_type, @payload_revision)
+           end
+
+           # @param [EventMessage] event
+           # @param [Serializer] serializer
+           # @return [EventDocument]
+           def from_event(event, serializer)
+             serialization_target = String
+             if serializer.can_serialize_to? Hash
+               serialization_target = Hash
+             end
+
+             serialized_metadata = serializer.serialize_metadata event, serialization_target
+             serialized_payload = serializer.serialize_payload event, serialization_target
+
+             @id = event.id
+             @metadata = serialized_metadata.content
+             @payload = serialized_payload.content
+             @payload_type = serialized_payload.type.name
+             @payload_revision = serialized_payload.type.revision
+             @timestamp = event.timestamp
+             @sequence_number = event.sequence_number
+
+             self
+           end
+
+           # @param [Hash] hash
+           # @return [EventDocument]
+           def from_hash(hash)
+             hash.symbolize_keys!
+
+             @id = hash.fetch :id
+             @metadata = hash.fetch :metadata
+             @payload = hash.fetch :payload
+             @payload_type = hash.fetch :payload_type
+             @payload_revision = hash.fetch :payload_revision
+             @timestamp = hash.fetch :timestamp
+             @sequence_number = hash.fetch :sequence_number
+
+             self
+           end
+
+           # @return [Hash]
+           def to_hash
+             { id: @id,
+               metadata: @metadata,
+               payload: @payload,
+               payload_type: @payload_type,
+               payload_revision: @payload_revision,
+               timestamp: @timestamp,
+               sequence_number: @sequence_number }
+           end
+         end # EventDocument
+
+         # Serialized domain event data from an event document
+         class DocumentDomainEventData < Serialization::SerializedDomainEventData
+           # @param [Object] aggregate_id
+           # @param [EventDocument] event_document
+           # @return [undefined]
+           def initialize(aggregate_id, event_document)
+             @aggregate_id = aggregate_id
+             @event_document = event_document
+           end
+
+           # @return [String]
+           def id
+             @event_document.id
+           end
+
+           # @return [SerializedObject]
+           def metadata
+             @event_document.metadata
+           end
+
+           # @return [SerializedObject]
+           def payload
+             @event_document.payload
+           end
+
+           # @return [Time]
+           def timestamp
+             @event_document.timestamp
+           end
+
+           # @return [Object]
+           def aggregate_id
+             @aggregate_id
+           end
+
+           # @return [Integer]
+           def sequence_number
+             @event_document.sequence_number
+           end
+         end # DocumentDomainEventData
+       end # DocumentPerCommitStrategy
+     end # Mongo
+   end # EventStore
+ end # Synapse
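For reference, the approximate shape of a commit document produced by this strategy (illustrative values; keys mirror CommitDocument#to_hash above). Duplicating the first sequence number and timestamp under sequence_number and timestamp lets the store run the same queries it uses against DocumentPerEventStrategy documents.

    {
      aggregate_id: '87a52c3e-...',           # stringified aggregate identifier
      aggregate_type: 'TestAggregate',
      sequence_number: 0,                     # copy of first_sequence_number
      first_sequence_number: 0,
      last_sequence_number: 49,
      timestamp: Time.utc(2013, 5, 12),       # copy of first_timestamp
      first_timestamp: Time.utc(2013, 5, 12),
      last_timestamp: Time.utc(2013, 5, 12),
      events: []                              # one EventDocument hash per event
    }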
@@ -0,0 +1,143 @@
+ module Synapse
+   module EventStore
+     module Mongo
+       # Storage strategy that stores each event as its own document
+       class DocumentPerEventStrategy < StorageStrategy
+         # @param [String] type_identifier Type identifier for the aggregate
+         # @param [Array] events Domain events to be committed
+         # @return [Array]
+         def create_documents(type_identifier, events)
+           documents = Array.new
+
+           events.each do |event|
+             document = EventDocument.new
+             document.from_event event, type_identifier, @serializer
+
+             documents.push document.to_hash
+           end
+
+           documents
+         end
+
+         # @param [Hash] hash
+         # @param [Object] aggregate_id
+         # @return [Array]
+         def extract_events(hash, aggregate_id)
+           document = EventDocument.new
+           document.from_hash(hash).to_events(aggregate_id, @serializer, @upcaster_chain)
+         end
+
+         # Mongo document that represents a single domain event
+         class EventDocument < Serialization::SerializedDomainEventData
+           # @return [String]
+           attr_reader :id
+
+           # @return [Time]
+           attr_reader :timestamp
+
+           # @return [Object]
+           attr_reader :aggregate_id
+
+           # @return [Integer]
+           attr_reader :sequence_number
+
+           # @return [SerializedObject]
+           def metadata
+             Serialization::SerializedMetadata.new @metadata, @metadata.class
+           end
+
+           # @return [SerializedObject]
+           def payload
+             Serialization::SerializedObject.new @payload, @payload.class,
+               Serialization::SerializedType.new(@payload_type, @payload_revision)
+           end
+
+           # @param [DomainEventMessage] event
+           # @param [String] type_identifier
+           # @param [Serializer] serializer
+           # @return [EventDocument]
+           def from_event(event, type_identifier, serializer)
+             serialization_target = String
+             if serializer.can_serialize_to? Hash
+               serialization_target = Hash
+             end
+
+             serialized_metadata = serializer.serialize_metadata event, serialization_target
+             serialized_payload = serializer.serialize_payload event, serialization_target
+
+             @id = event.id
+             @metadata = serialized_metadata.content
+             @payload = serialized_payload.content
+             @payload_type = serialized_payload.type.name
+             @payload_revision = serialized_payload.type.revision
+             @timestamp = event.timestamp
+             @aggregate_id = event.aggregate_id
+             @aggregate_type = type_identifier
+             @sequence_number = event.sequence_number
+
+             self
+           end
+
+           # @param [Hash] hash
+           # @return [EventDocument]
+           def from_hash(hash)
+             hash.symbolize_keys!
+
+             @id = hash.fetch :_id
+             @metadata = hash.fetch :metadata
+             @payload = hash.fetch :payload
+             @payload_type = hash.fetch :payload_type
+             @payload_revision = hash.fetch :payload_revision
+             @timestamp = hash.fetch :timestamp
+             @aggregate_id = hash.fetch :aggregate_id
+             @aggregate_type = hash.fetch :aggregate_type
+             @sequence_number = hash.fetch :sequence_number
+
+             self
+           end
+
+           # @return [Hash]
+           def to_hash
+             { _id: @id,
+               metadata: @metadata,
+               payload: @payload,
+               payload_type: @payload_type,
+               payload_revision: @payload_revision,
+               timestamp: @timestamp,
+               aggregate_id: @aggregate_id,
+               aggregate_type: @aggregate_type,
+               sequence_number: @sequence_number }
+           end
+
+           # @param [Object] aggregate_id
+           # @param [Serializer] serializer
+           # @param [UpcasterChain] upcaster_chain
+           # @return [Array]
+           def to_events(aggregate_id, serializer, upcaster_chain)
+             events = Array.new
+
+             context = Upcasting::SerializedDomainEventUpcastingContext.new self, aggregate_id, serializer
+             upcast_objects = upcaster_chain.upcast payload, context
+             upcast_objects.each do |upcast_object|
+               upcast_data = Upcasting::UpcastSerializedDomainEventData.new self, aggregate_id, upcast_object
+
+               builder = Serialization::SerializedDomainEventMessageBuilder.new
+
+               # Prevent duplicate serialization of metadata if it was accessed during upcasting
+               metadata = context.serialized_metadata
+               if metadata.deserialized?
+                 builder.metadata = Serialization::DeserializedObject.new metadata.deserialized
+               end
+
+               builder.from_data upcast_data, serializer
+
+               events.push builder.build
+             end
+
+             events
+           end
+         end # EventDocument
+       end # DocumentPerEventStrategy
+     end # Mongo
+   end # EventStore
+ end # Synapse
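The corresponding per-event document shape for comparison (illustrative values); note the event identifier doubles as the Mongo `_id`:

    {
      _id: 'e6c7d1a0-...',              # event identifier doubles as Mongo _id
      metadata: '...',                  # serialized metadata (String or Hash)
      payload: '...',                   # serialized payload (String or Hash)
      payload_type: 'TestEvent',
      payload_revision: nil,
      timestamp: Time.utc(2013, 5, 12),
      aggregate_id: '87a52c3e-...',
      aggregate_type: 'TestAggregate',
      sequence_number: 0
    }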
@@ -0,0 +1,113 @@
+ module Synapse
+   module EventStore
+     module Mongo
+       # Represents a mechanism used to structure how events are stored in the database
+       # @abstract
+       class StorageStrategy
+         # Aliases of the Mongo constants for ascending and descending
+         ASCENDING = ::Mongo::ASCENDING
+         DESCENDING = ::Mongo::DESCENDING
+
+         # @param [MongoTemplate] template
+         # @param [Serializer] serializer
+         # @param [UpcasterChain] upcaster_chain
+         # @return [undefined]
+         def initialize(template, serializer, upcaster_chain)
+           @template = template
+           @serializer = Serialization::MessageSerializer.new serializer
+           @upcaster_chain = upcaster_chain
+         end
+
+         # Creates documents that will represent the events being committed to the event store
+         #
+         # @abstract
+         # @param [String] type_identifier Type identifier for the aggregate
+         # @param [Array] events Domain events to be committed
+         # @return [Array]
+         def create_documents(type_identifier, events); end
+
+         # Extracts individual event messages from the given document
+         #
+         # The given aggregate identifier is passed so that event messages can have the actual
+         # identifier object instead of the serialized aggregate identifier.
+         #
+         # @abstract
+         # @param [Hash] document
+         # @param [Object] aggregate_id
+         # @return [Array]
+         def extract_events(document, aggregate_id); end
+
+         # Provides a cursor for accessing all events for an aggregate with the given identifier
+         # and type identifier, with a sequence number equal to or greater than the given first
+         # sequence number
+         #
+         # The returned documents should be ordered chronologically, typically by using the
+         # sequence number.
+         #
+         # @param [String] type_identifier
+         # @param [Object] aggregate_id
+         # @param [Integer] first_sequence_number
+         # @return [Mongo::Cursor]
+         def fetch_events(type_identifier, aggregate_id, first_sequence_number)
+           filter = {
+             aggregate_id: aggregate_id,
+             aggregate_type: type_identifier,
+             sequence_number: {
+               '$gte' => first_sequence_number
+             }
+           }
+
+           sort = {
+             sequence_number: ASCENDING
+           }
+
+           @template.event_collection.find(filter).sort(sort)
+         end
+
+         # Finds the document containing the most recent snapshot event for an aggregate with the
+         # given identifier and type identifier
+         #
+         # @param [String] type_identifier
+         # @param [Object] aggregate_id
+         # @return [Mongo::Cursor]
+         def fetch_last_snapshot(type_identifier, aggregate_id)
+           filter = {
+             aggregate_id: aggregate_id,
+             aggregate_type: type_identifier
+           }
+
+           sort = {
+             sequence_number: DESCENDING
+           }
+
+           @template.snapshot_collection.find(filter).sort(sort).limit(1)
+         end
+
+         # Ensures that the correct indexes are in place
+         # @return [undefined]
+         def ensure_indexes
+           options = {
+             name: 'unique_aggregate_index',
+             unique: true
+           }
+
+           spec = {
+             aggregate_id: ASCENDING,
+             aggregate_type: ASCENDING,
+             sequence_number: ASCENDING
+           }
+
+           @template.event_collection.ensure_index spec, options
+
+           spec = {
+             aggregate_id: ASCENDING,
+             aggregate_type: ASCENDING,
+             sequence_number: DESCENDING
+           }
+
+           @template.snapshot_collection.ensure_index spec, options
+         end
+       end # StorageStrategy
+     end # Mongo
+   end # EventStore
+ end # Synapse
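The cursor helpers can also be driven directly; a brief sketch continuing the wiring above, where `strategy` is an instance of a concrete subclass:

    # Hydrate events document-by-document in sequence-number order
    cursor = strategy.fetch_events 'TestAggregate', aggregate_id, 0
    while cursor.has_next?
      strategy.extract_events(cursor.next, aggregate_id).each do |event|
        puts event.sequence_number
      end
    end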
@@ -0,0 +1,33 @@
+ module Synapse
+   module EventStore
+     module Mongo
+       # Template for accessing collections needed by the event store
+       class Template < Common::Mongo::BaseTemplate
+         # @return [String] Name of the collection containing domain events
+         attr_accessor :event_collection_name
+
+         # @return [String] Name of the collection containing snapshot events
+         attr_accessor :snapshot_collection_name
+
+         # @param [Mongo::MongoClient] client
+         # @return [undefined]
+         def initialize(client)
+           super
+
+           @event_collection_name = 'domain_events'
+           @snapshot_collection_name = 'snapshot_events'
+         end
+
+         # @return [Mongo::Collection]
+         def event_collection
+           database.collection @event_collection_name
+         end
+
+         # @return [Mongo::Collection]
+         def snapshot_collection
+           database.collection @snapshot_collection_name
+         end
+       end # Template
+     end # Mongo
+   end # EventStore
+ end # Synapse
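Both collection names and the inherited database name are overridable; a short sketch (the overridden names are illustrative):

    template = Synapse::EventStore::Mongo::Template.new Mongo::MongoClient.new

    template.database_name = 'event_sourcing'  # defaults to 'synapse'
    template.event_collection_name = 'events'  # defaults to 'domain_events'

    template.event_collection     # => Mongo::Collection for 'events'
    template.snapshot_collection  # => Mongo::Collection for 'snapshot_events'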
@@ -0,0 +1,5 @@
+ module Synapse
+   module Mongo
+     VERSION = '0.1.1'
+   end
+ end
@@ -0,0 +1,26 @@
+ module Synapse
+   module Serialization
+     # Converter that converts an ordered hash from BSON into a regular Ruby hash
+     class OrderedHashToHashConverter
+       include Converter
+
+       converts BSON::OrderedHash, Hash
+
+       # @param [BSON::OrderedHash] original
+       # @return [Hash]
+       def convert_content(original)
+         converted = Hash.new
+
+         original.each do |key, value|
+           # Recursively convert nested ordered hashes
+           if value.is_a? BSON::OrderedHash
+             value = convert_content value
+           end
+
+           converted[key] = value
+         end
+
+         converted
+       end
+     end
+   end
+ end
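A usage sketch mirroring the converter test later in this diff; nested ordered hashes are converted recursively:

    converter = Synapse::Serialization::OrderedHashToHashConverter.new

    source = BSON::OrderedHash.new
    source[:foo] = 0
    source[:bar] = BSON::OrderedHash.new
    source[:bar][:baz] = 1

    converter.convert_content source
    # => { foo: 0, bar: { baz: 1 } }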
@@ -0,0 +1,35 @@
+ require 'test_helper'
+
+ module Synapse
+   module Common
+     module Mongo
+       class BaseTemplateTest < Test::Unit::TestCase
+         def test_database
+           client = Object.new
+           database = Object.new
+
+           database_name = 'test_database'
+           username = 'test_username'
+           password = 'test_password'
+
+           mock(client).db(database_name) do
+             database
+           end
+
+           mock(database).authenticate(username, password)
+
+           template = BaseTemplate.new client
+           template.database_name = database_name
+           template.username = username
+           template.password = password
+
+           template.send :database
+           # Additional calls to database should return the same database
+           template.send :database
+         end
+       end
+     end
+   end
+ end
@@ -0,0 +1,89 @@
+ require 'test_helper'
+
+ module Synapse
+   module EventStore
+     module Mongo
+       class MongoEventStoreTest < Test::Unit::TestCase
+         def test_integration
+           client = ::Mongo::MongoClient.new
+           template = Template.new client
+
+           serializer = Serialization::MarshalSerializer.new
+           upcaster_chain = Upcasting::UpcasterChain.new serializer.converter_factory
+
+           [DocumentPerCommitStrategy, DocumentPerEventStrategy].each do |type|
+             strategy = type.new template, serializer, upcaster_chain
+             test_integration_with template, strategy
+           end
+         end
+
+         private
+
+         def test_integration_with(template, strategy)
+           store = MongoEventStore.new template, strategy
+           store.ensure_indexes
+
+           type_identifier = 'TestAggregate'
+
+           metadata = {
+             foo: 0,
+             bar: 1
+           }
+           payload = TestEvent.new 1, 2
+           aggregate_id = SecureRandom.uuid
+
+           x = 0
+
+           # Create two sets of 50 events each
+           # After this, we'll add an artificial snapshot at seq num 49
+           2.times do
+             events = Array.new
+
+             50.times do
+               events.push create_event metadata, payload, aggregate_id, x
+               x = x.next
+             end
+
+             append_stream = Domain::SimpleDomainEventStream.new events
+             store.append_events type_identifier, append_stream
+           end
+
+           read_stream = store.read_events type_identifier, aggregate_id
+           read_array = read_stream.to_a
+
+           assert_equal 100, read_array.count
+
+           snapshot = create_event metadata, payload, aggregate_id, 49
+           store.append_snapshot_event type_identifier, snapshot
+
+           read_stream = store.read_events type_identifier, aggregate_id
+           read_array = read_stream.to_a
+
+           assert_equal 51, read_array.count
+         end
+
+         def create_event(metadata, payload, aggregate_id, sequence_number)
+           Domain::DomainEventMessage.build do |builder|
+             builder.metadata = metadata
+             builder.payload = payload
+             builder.aggregate_id = aggregate_id
+             builder.sequence_number = sequence_number
+           end
+         end
+       end
+
+       class TestEvent
+         attr_accessor :foo
+         attr_accessor :bar
+
+         def initialize(foo, bar)
+           @foo, @bar = foo, bar
+         end
+       end
+     end
+   end
+ end
@@ -0,0 +1,28 @@
+ require 'test_helper'
+
+ module Synapse
+   module Serialization
+     class OrderedHashToHashConverterTest < Test::Unit::TestCase
+       def test_convert
+         converter = OrderedHashToHashConverter.new
+
+         assert_equal BSON::OrderedHash, converter.source_type
+         assert_equal Hash, converter.target_type
+
+         source = BSON::OrderedHash.new
+         source[:foo] = 0
+         source[:bar] = BSON::OrderedHash.new
+         source[:bar][:baz] = 1
+
+         target = {
+           foo: 0,
+           bar: {
+             baz: 1
+           }
+         }
+
+         assert_equal target, converter.convert_content(source)
+       end
+     end
+   end
+ end
@@ -0,0 +1,14 @@
+ require 'simplecov'
+
+ SimpleCov.start do
+   add_filter '/test/'
+ end
+
+ require 'pp'
+ require 'test/unit'
+ require 'rr'
+ require 'synapse-mongo'
+
+ class Test::Unit::TestCase
+   include RR::Adapters::TestUnit
+ end
metadata ADDED
@@ -0,0 +1,92 @@
+ --- !ruby/object:Gem::Specification
+ name: synapse-mongo
+ version: !ruby/object:Gem::Version
+   version: 0.1.1
+   prerelease:
+ platform: ruby
+ authors:
+ - Ian Unruh
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2013-05-12 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: synapse-core
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.2.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 0.2.0
+ - !ruby/object:Gem::Dependency
+   name: mongo
+   requirement: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     none: false
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: MongoDB implementations for the Synapse CQRS framework
+ email: ianunruh@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/synapse-mongo.rb
+ - lib/synapse/mongo/version.rb
+ - lib/synapse/event_store/mongo/cursor_event_stream.rb
+ - lib/synapse/event_store/mongo/event_store.rb
+ - lib/synapse/event_store/mongo/storage_strategy.rb
+ - lib/synapse/event_store/mongo/template.rb
+ - lib/synapse/event_store/mongo/per_commit_strategy.rb
+ - lib/synapse/event_store/mongo/per_event_strategy.rb
+ - lib/synapse/event_store/mongo.rb
+ - lib/synapse/common/mongo/base_template.rb
+ - lib/synapse/serialization/converter/bson.rb
+ - test/test_helper.rb
+ - test/event_store/mongo/event_store_test.rb
+ - test/common/mongo/base_template_test.rb
+ - test/serialization/converter/bson_test.rb
+ homepage: https://github.com/iunruh/synapse-mongo
+ licenses: []
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   none: false
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 1.8.25
+ signing_key:
+ specification_version: 3
+ summary: MongoDB implementations for the Synapse CQRS framework
+ test_files: []
+ has_rdoc: