synapse-core 0.1.2
- data/lib/synapse.rb +351 -0
- data/lib/synapse/command/command_bus.rb +45 -0
- data/lib/synapse/command/command_callback.rb +18 -0
- data/lib/synapse/command/command_filter.rb +17 -0
- data/lib/synapse/command/command_handler.rb +13 -0
- data/lib/synapse/command/dispatch_interceptor.rb +16 -0
- data/lib/synapse/command/duplication.rb +43 -0
- data/lib/synapse/command/errors.rb +27 -0
- data/lib/synapse/command/filters/validation.rb +32 -0
- data/lib/synapse/command/gateway.rb +34 -0
- data/lib/synapse/command/interceptor_chain.rb +31 -0
- data/lib/synapse/command/interceptors/serialization.rb +35 -0
- data/lib/synapse/command/message.rb +19 -0
- data/lib/synapse/command/rollback_policy.rb +22 -0
- data/lib/synapse/command/simple_command_bus.rb +138 -0
- data/lib/synapse/command/wiring.rb +47 -0
- data/lib/synapse/domain/aggregate_root.rb +121 -0
- data/lib/synapse/domain/event_container.rb +127 -0
- data/lib/synapse/domain/message.rb +82 -0
- data/lib/synapse/domain/message_builder.rb +34 -0
- data/lib/synapse/domain/stream.rb +108 -0
- data/lib/synapse/duplication.rb +60 -0
- data/lib/synapse/errors.rb +13 -0
- data/lib/synapse/event_bus/event_bus.rb +40 -0
- data/lib/synapse/event_bus/event_listener.rb +16 -0
- data/lib/synapse/event_bus/event_listener_proxy.rb +12 -0
- data/lib/synapse/event_bus/simple_event_bus.rb +69 -0
- data/lib/synapse/event_bus/wiring.rb +23 -0
- data/lib/synapse/event_sourcing/aggregate_factory.rb +69 -0
- data/lib/synapse/event_sourcing/aggregate_root.rb +104 -0
- data/lib/synapse/event_sourcing/conflict_resolver.rb +80 -0
- data/lib/synapse/event_sourcing/entity.rb +64 -0
- data/lib/synapse/event_sourcing/member.rb +72 -0
- data/lib/synapse/event_sourcing/repository.rb +119 -0
- data/lib/synapse/event_sourcing/snapshot/count_stream.rb +86 -0
- data/lib/synapse/event_sourcing/snapshot/count_trigger.rb +91 -0
- data/lib/synapse/event_sourcing/snapshot/taker.rb +73 -0
- data/lib/synapse/event_sourcing/storage_listener.rb +34 -0
- data/lib/synapse/event_sourcing/stream_decorator.rb +25 -0
- data/lib/synapse/event_store/errors.rb +16 -0
- data/lib/synapse/event_store/event_store.rb +43 -0
- data/lib/synapse/event_store/in_memory.rb +59 -0
- data/lib/synapse/event_store/mongo/cursor_event_stream.rb +63 -0
- data/lib/synapse/event_store/mongo/event_store.rb +86 -0
- data/lib/synapse/event_store/mongo/per_commit_strategy.rb +253 -0
- data/lib/synapse/event_store/mongo/per_event_strategy.rb +143 -0
- data/lib/synapse/event_store/mongo/storage_strategy.rb +113 -0
- data/lib/synapse/event_store/mongo/template.rb +73 -0
- data/lib/synapse/identifier.rb +23 -0
- data/lib/synapse/message.rb +101 -0
- data/lib/synapse/message_builder.rb +38 -0
- data/lib/synapse/process_manager/correlation.rb +32 -0
- data/lib/synapse/process_manager/correlation_resolver.rb +14 -0
- data/lib/synapse/process_manager/correlation_set.rb +58 -0
- data/lib/synapse/process_manager/process.rb +71 -0
- data/lib/synapse/repository/errors.rb +26 -0
- data/lib/synapse/repository/lock_manager.rb +40 -0
- data/lib/synapse/repository/locking.rb +97 -0
- data/lib/synapse/repository/pessimistic_lock_manager.rb +61 -0
- data/lib/synapse/repository/repository.rb +109 -0
- data/lib/synapse/serialization/converter.rb +39 -0
- data/lib/synapse/serialization/converter/chain.rb +45 -0
- data/lib/synapse/serialization/converter/factory.rb +68 -0
- data/lib/synapse/serialization/converter/identity.rb +29 -0
- data/lib/synapse/serialization/converter/json.rb +31 -0
- data/lib/synapse/serialization/converter/ox.rb +31 -0
- data/lib/synapse/serialization/errors.rb +12 -0
- data/lib/synapse/serialization/lazy_object.rb +61 -0
- data/lib/synapse/serialization/message/data.rb +25 -0
- data/lib/synapse/serialization/message/metadata.rb +13 -0
- data/lib/synapse/serialization/message/serialization_aware.rb +17 -0
- data/lib/synapse/serialization/message/serialization_aware_message.rb +66 -0
- data/lib/synapse/serialization/message/serialized_message.rb +201 -0
- data/lib/synapse/serialization/message/serialized_message_builder.rb +64 -0
- data/lib/synapse/serialization/message/serialized_object_cache.rb +50 -0
- data/lib/synapse/serialization/message/serializer.rb +47 -0
- data/lib/synapse/serialization/revision_resolver.rb +30 -0
- data/lib/synapse/serialization/serialized_object.rb +37 -0
- data/lib/synapse/serialization/serialized_type.rb +31 -0
- data/lib/synapse/serialization/serializer.rb +98 -0
- data/lib/synapse/serialization/serializer/marshal.rb +32 -0
- data/lib/synapse/serialization/serializer/oj.rb +34 -0
- data/lib/synapse/serialization/serializer/ox.rb +31 -0
- data/lib/synapse/uow/factory.rb +28 -0
- data/lib/synapse/uow/listener.rb +79 -0
- data/lib/synapse/uow/listener_collection.rb +93 -0
- data/lib/synapse/uow/nesting.rb +262 -0
- data/lib/synapse/uow/provider.rb +71 -0
- data/lib/synapse/uow/storage_listener.rb +14 -0
- data/lib/synapse/uow/transaction_manager.rb +27 -0
- data/lib/synapse/uow/uow.rb +178 -0
- data/lib/synapse/upcasting/chain.rb +78 -0
- data/lib/synapse/upcasting/context.rb +58 -0
- data/lib/synapse/upcasting/data.rb +30 -0
- data/lib/synapse/upcasting/single_upcaster.rb +57 -0
- data/lib/synapse/upcasting/upcaster.rb +55 -0
- data/lib/synapse/version.rb +3 -0
- data/lib/synapse/wiring/message_wiring.rb +41 -0
- data/lib/synapse/wiring/wire.rb +55 -0
- data/lib/synapse/wiring/wire_registry.rb +61 -0
- data/test/command/duplication_test.rb +54 -0
- data/test/command/gateway_test.rb +25 -0
- data/test/command/interceptor_chain_test.rb +26 -0
- data/test/command/serialization_test.rb +37 -0
- data/test/command/simple_command_bus_test.rb +141 -0
- data/test/command/validation_test.rb +42 -0
- data/test/command/wiring_test.rb +73 -0
- data/test/domain/aggregate_root_test.rb +57 -0
- data/test/domain/fixtures.rb +31 -0
- data/test/domain/message_test.rb +61 -0
- data/test/domain/stream_test.rb +35 -0
- data/test/duplication_test.rb +40 -0
- data/test/event_bus/wiring_test.rb +46 -0
- data/test/event_sourcing/aggregate_factory_test.rb +28 -0
- data/test/event_sourcing/aggregate_root_test.rb +76 -0
- data/test/event_sourcing/entity_test.rb +34 -0
- data/test/event_sourcing/fixtures.rb +85 -0
- data/test/event_sourcing/repository_test.rb +102 -0
- data/test/event_sourcing/snapshot/aggregate_taker_test.rb +39 -0
- data/test/event_sourcing/snapshot/deferred_taker_test.rb +19 -0
- data/test/event_sourcing/snapshot/integration_test.rb +65 -0
- data/test/event_sourcing/storage_listener_test.rb +77 -0
- data/test/event_store/in_memory_test.rb +47 -0
- data/test/process_manager/correlation_set_test.rb +49 -0
- data/test/process_manager/correlation_test.rb +24 -0
- data/test/process_manager/process_test.rb +52 -0
- data/test/repository/locking_test.rb +101 -0
- data/test/serialization/converter/factory_test.rb +33 -0
- data/test/serialization/converter/identity_test.rb +17 -0
- data/test/serialization/converter/json_test.rb +31 -0
- data/test/serialization/converter/ox_test.rb +40 -0
- data/test/serialization/fixtures.rb +17 -0
- data/test/serialization/lazy_object_test.rb +32 -0
- data/test/serialization/message/metadata_test.rb +19 -0
- data/test/serialization/message/serialization_aware_message_test.rb +88 -0
- data/test/serialization/message/serialized_message_builder_test.rb +41 -0
- data/test/serialization/message/serialized_message_test.rb +140 -0
- data/test/serialization/message/serializer_test.rb +50 -0
- data/test/serialization/revision_resolver_test.rb +12 -0
- data/test/serialization/serialized_object_test.rb +36 -0
- data/test/serialization/serialized_type_test.rb +27 -0
- data/test/serialization/serializer/marshal_test.rb +22 -0
- data/test/serialization/serializer/oj_test.rb +24 -0
- data/test/serialization/serializer/ox_test.rb +36 -0
- data/test/serialization/serializer_test.rb +20 -0
- data/test/test_helper.rb +19 -0
- data/test/uow/factory_test.rb +23 -0
- data/test/uow/outer_commit_listener_test.rb +50 -0
- data/test/uow/provider_test.rb +70 -0
- data/test/uow/uow_test.rb +337 -0
- data/test/upcasting/chain_test.rb +29 -0
- data/test/upcasting/fixtures.rb +66 -0
- data/test/wiring/wire_registry_test.rb +60 -0
- data/test/wiring/wire_test.rb +51 -0
- metadata +263 -0
data/lib/synapse/event_store/event_store.rb
@@ -0,0 +1,43 @@
module Synapse
  module EventStore
    # Represents a mechanism for reading and appending streams of domain events
    # @abstract
    class EventStore
      # Fetches an event stream for the aggregate identified by the given type identifier and
      # the given aggregate identifier. This stream can be used to rebuild the state of the
      # aggregate.
      #
      # Implementations may omit or replace events (for example, with snapshot events) from the
      # stream for performance purposes.
      #
      # @abstract
      # @raise [EventStoreError] If an error occurs while reading the stream from the store
      # @param [String] type_identifier Type descriptor of the aggregate to retrieve
      # @param [Object] aggregate_id
      # @return [DomainEventStream]
      def read_events(type_identifier, aggregate_id); end

      # Appends the domain events in the given stream to the event store
      #
      # @abstract
      # @raise [EventStoreError] If an error occurs while appending the stream to the store
      # @param [String] type_identifier Type descriptor of the aggregate to append to
      # @param [DomainEventStream] stream
      # @return [undefined]
      def append_events(type_identifier, stream); end
    end

    # Represents an event store with the capability to manage aggregate snapshots
    # @abstract
    class SnapshotEventStore < EventStore
      # Appends the given snapshot event to the event store
      #
      # @abstract
      # @raise [EventStoreError] If an error occurs while appending the event to the store
      # @param [String] type_identifier Type descriptor of the aggregate to append to
      # @param [DomainEventMessage] snapshot_event
      # @return [undefined]
      def append_snapshot_event(type_identifier, snapshot_event); end
    end
  end
end
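
The two abstract classes above only define the contract; a concrete store supplies the behavior. A minimal hedged sketch of a custom implementation (the LoggingEventStore class, its delegate, and the logger are hypothetical and exist only for illustration):

module Synapse
  module EventStore
    # Hypothetical decorator that logs access before delegating to another event store
    class LoggingEventStore < EventStore
      def initialize(delegate, logger)
        @delegate = delegate
        @logger = logger
      end

      def read_events(type_identifier, aggregate_id)
        @logger.info "Reading #{type_identifier} stream for #{aggregate_id}"
        @delegate.read_events type_identifier, aggregate_id
      end

      def append_events(type_identifier, stream)
        @logger.info "Appending #{type_identifier} events"
        @delegate.append_events type_identifier, stream
      end
    end
  end
end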
data/lib/synapse/event_store/in_memory.rb
@@ -0,0 +1,59 @@
module Synapse
  module EventStore
    # Implementation of an event store that stores events in memory; for testing purposes and
    # not thread safe
    class InMemoryEventStore < EventStore
      def initialize
        @streams = Hash.new
      end

      # Clears all streams from this event store
      def clear
        @streams.clear
      end

      # @raise [StreamNotFoundError] If the stream for the given aggregate identifier is empty
      # @param [String] type_identifier Type descriptor of the aggregate to retrieve
      # @param [Object] aggregate_id
      # @return [DomainEventStream]
      def read_events(type_identifier, aggregate_id)
        events = events_for aggregate_id

        if events.empty?
          raise StreamNotFoundError.new type_identifier, aggregate_id
        end

        Domain::SimpleDomainEventStream.new events
      end

      # Appends any events in the given stream to the end of the aggregate's stream
      #
      # @param [String] type_identifier Type descriptor of the aggregate to append to
      # @param [DomainEventStream] stream
      # @return [undefined]
      def append_events(type_identifier, stream)
        if stream.end?
          return
        end

        events = events_for stream.peek.aggregate_id

        until stream.end?
          events.push stream.next_event
        end
      end

      # Creates and/or retrieves an array of events for the given aggregate identifier
      #
      # @param [Object] aggregate_id
      # @return [Array<DomainEventMessage>]
      def events_for(aggregate_id)
        if @streams.has_key? aggregate_id
          return @streams.fetch aggregate_id
        end

        @streams.store aggregate_id, Array.new
      end
    end
  end
end
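
A minimal usage sketch for the in-memory store, assuming `messages` is an array of DomainEventMessage objects built elsewhere (the 'Order' type identifier is illustrative):

store = Synapse::EventStore::InMemoryEventStore.new
store.append_events 'Order', Synapse::Domain::SimpleDomainEventStream.new(messages)

stream = store.read_events 'Order', messages.first.aggregate_id
until stream.end?
  event = stream.next_event
  # apply the event to rebuild aggregate state
end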
data/lib/synapse/event_store/mongo/cursor_event_stream.rb
@@ -0,0 +1,63 @@
module Synapse
  module EventStore
    module Mongo
      # TODO Document me
      class CursorDomainEventStream < Domain::DomainEventStream
        # @param [StorageStrategy] storage_strategy
        # @param [Mongo::Cursor] cursor
        # @param [Array] last_snapshot_commit
        # @param [Object] aggregate_id
        # @return [undefined]
        def initialize(storage_strategy, cursor, last_snapshot_commit, aggregate_id)
          @storage_strategy = storage_strategy
          @cursor = cursor
          @aggregate_id = aggregate_id

          if last_snapshot_commit
            # Current batch is an enumerator
            @current_batch = last_snapshot_commit.each
          else
            @current_batch = [].each
          end

          initialize_next_event
        end

        # @return [Boolean]
        def end?
          @next.nil?
        end

        # @return [DomainEventMessage]
        def next_event
          current = @next
          initialize_next_event
          current
        end

        # @return [DomainEventMessage]
        def peek
          @next
        end

      private

        # @return [undefined]
        def initialize_next_event
          begin
            @next = @current_batch.next
          rescue StopIteration
            if @cursor.has_next?
              document = @cursor.next
              @current_batch = @storage_strategy.extract_events(document, @aggregate_id).each

              retry
            else
              @next = nil
            end
          end
        end
      end # CursorDomainEventStream
    end
  end
end
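
The private initialize_next_event method leans on Enumerator#next raising StopIteration once the current batch is exhausted, at which point the next cursor document is unpacked into a fresh batch. A stripped-down, self-contained sketch of the same pattern in plain Ruby (the BatchedStream class is hypothetical; no Mongo involved):

class BatchedStream
  def initialize(batches)
    @batches = batches.each   # outer enumerator over batches, like cursor documents
    @current = [].each        # enumerator over the current batch, initially empty
  end

  def next_item
    @current.next
  rescue StopIteration
    @current = @batches.next.each   # raises StopIteration again when no batches remain
    retry
  end
end

stream = BatchedStream.new [[1, 2], [3]]
3.times { puts stream.next_item }   # prints 1, 2, 3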
data/lib/synapse/event_store/mongo/event_store.rb
@@ -0,0 +1,86 @@
module Synapse
  module EventStore
    module Mongo
      # Implementation of an event store backed by a Mongo database
      class MongoEventStore < SnapshotEventStore
        # @param [MongoTemplate] template
        # @param [StorageStrategy] storage_strategy
        # @return [undefined]
        def initialize(template, storage_strategy)
          @storage_strategy = storage_strategy
          @template = template
        end

        # @return [undefined]
        def ensure_indexes
          @storage_strategy.ensure_indexes
        end

        # @raise [EventStoreError] If an error occurs while reading the stream from the store
        # @param [String] type_identifier Type descriptor of the aggregate to retrieve
        # @param [Object] aggregate_id
        # @return [DomainEventStream]
        def read_events(type_identifier, aggregate_id)
          first_sequence_number = -1

          last_snapshot_commit = load_last_snapshot type_identifier, aggregate_id
          if last_snapshot_commit and last_snapshot_commit.size > 0
            first_sequence_number = last_snapshot_commit[0].sequence_number
          end

          cursor = @storage_strategy.fetch_events type_identifier, aggregate_id, first_sequence_number

          unless last_snapshot_commit or cursor.has_next?
            raise StreamNotFoundError.new type_identifier, aggregate_id
          end

          CursorDomainEventStream.new @storage_strategy, cursor, last_snapshot_commit, aggregate_id
        end

        # @raise [EventStoreError] If an error occurs while appending the stream to the store
        # @param [String] type_identifier Type descriptor of the aggregate to append to
        # @param [DomainEventStream] stream
        # @return [undefined]
        def append_events(type_identifier, stream)
          events = stream.to_a
          documents = @storage_strategy.create_documents type_identifier, events

          begin
            @template.event_collection.insert documents
          rescue Mongo::OperationFailure => ex
            # Mongo error code 11000 indicates a duplicate key (unique index violation)
            if ex.error_code == 11000
              raise Repository::ConcurrencyException,
                'Event for this aggregate and sequence number already present'
            end

            raise ex
          end
        end

        # @raise [EventStoreError] If an error occurs while appending the event to the store
        # @param [String] type_identifier Type descriptor of the aggregate to append to
        # @param [DomainEventMessage] snapshot_event
        # @return [undefined]
        def append_snapshot_event(type_identifier, snapshot_event)
          documents = @storage_strategy.create_documents type_identifier, [snapshot_event]
          @template.snapshot_collection.insert documents
        end

      private

        # @param [String] type_identifier Type descriptor of the aggregate to retrieve
        # @param [Object] aggregate_id
        def load_last_snapshot(type_identifier, aggregate_id)
          cursor = @storage_strategy.fetch_last_snapshot type_identifier, aggregate_id

          unless cursor.has_next?
            return
          end

          first = cursor.next_document
          @storage_strategy.extract_events first, aggregate_id
        end
      end # MongoEventStore
    end # Mongo
  end # EventStore
end # Synapse
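
A hedged usage sketch of the append path; construction of the template and storage strategy is elided here because their setup isn't shown in this file, and the rescue relies on the ConcurrencyException raised above:

event_store = Synapse::EventStore::Mongo::MongoEventStore.new template, storage_strategy
event_store.ensure_indexes

begin
  event_store.append_events 'Order', stream
rescue Synapse::Repository::ConcurrencyException
  # Another commit already stored an event for this aggregate and sequence number
  # (Mongo duplicate key, error code 11000); reload the aggregate and retry the command
end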
data/lib/synapse/event_store/mongo/per_commit_strategy.rb
@@ -0,0 +1,253 @@
module Synapse
  module EventStore
    module Mongo
      # Storage strategy that stores all events in a commit operation in a single document
      #
      # Since Mongo doesn't support transactions, this can be used as a substitute to guarantee
      # atomic storage of events. The only downside is that it may be harder to query events
      # from the event store.
      #
      # Performance also seems to be better using this strategy
      class DocumentPerCommitStrategy < StorageStrategy
        # @param [String] type_identifier Type identifier for the aggregate
        # @param [Array] events Domain events to be committed
        # @return [Array]
        def create_documents(type_identifier, events)
          document = CommitDocument.new
          document.from_events(type_identifier, events, @serializer).to_hash
        end

        # @param [Hash] hash
        # @param [Object] aggregate_id
        # @return [Array]
        def extract_events(hash, aggregate_id)
          document = CommitDocument.new
          document.from_hash(hash).to_events(aggregate_id, @serializer, @upcaster_chain)
        end

        # Mongo document that represents a commit containing one or more events
        class CommitDocument
          # @return [Object]
          attr_reader :aggregate_id

          # @param [String] type_identifier
          # @param [Array] events
          # @param [Serializer] serializer
          # @return [CommitDocument]
          def from_events(type_identifier, events, serializer)
            first_event = events.first
            last_event = events.last

            @aggregate_type = type_identifier
            @aggregate_id = first_event.aggregate_id.to_s
            @first_sequence_number = first_event.sequence_number
            @last_sequence_number = last_event.sequence_number
            @first_timestamp = first_event.timestamp
            @last_timestamp = last_event.timestamp

            @events = Array.new
            events.each do |event|
              event_document = EventDocument.new
              event_document.from_event event, serializer

              @events.push event_document
            end

            self
          end

          # @param [Hash] hash
          # @return [CommitDocument]
          def from_hash(hash)
            hash.symbolize_keys!

            @aggregate_id = hash.fetch :aggregate_id
            @aggregate_type = hash.fetch :aggregate_type
            @first_sequence_number = hash.fetch :first_sequence_number
            @last_sequence_number = hash.fetch :last_sequence_number
            @first_timestamp = hash.fetch :first_timestamp
            @last_timestamp = hash.fetch :last_timestamp

            @events = Array.new

            event_hashes = hash.fetch :events
            event_hashes.each do |event_hash|
              event_document = EventDocument.new
              event_document.from_hash event_hash

              @events.push event_document
            end

            self
          end

          # @return [Hash]
          def to_hash
            events = Array.new
            @events.each do |event|
              events.push event.to_hash
            end

            { aggregate_id: @aggregate_id,
              aggregate_type: @aggregate_type,
              # Allows us to use the same query to filter events as DocumentPerEvent
              sequence_number: @first_sequence_number,
              first_sequence_number: @first_sequence_number,
              last_sequence_number: @last_sequence_number,
              # Allows us to use the same query to filter events as DocumentPerEvent
              timestamp: @first_timestamp,
              first_timestamp: @first_timestamp,
              last_timestamp: @last_timestamp,
              events: events }
          end

          # @param [Object] aggregate_id The actual aggregate identifier used to query the event store
          # @param [Serializer] serializer
          # @param [UpcasterChain] upcaster_chain
          # @return [Array]
          def to_events(aggregate_id, serializer, upcaster_chain)
            events = Array.new

            @events.each do |event_document|
              event_data = DocumentDomainEventData.new aggregate_id, event_document
              context = Upcasting::SerializedDomainEventUpcastingContext.new event_data, aggregate_id, serializer

              upcast_objects = upcaster_chain.upcast event_document.payload, context
              upcast_objects.each do |upcast_object|
                upcast_data = Upcasting::UpcastSerializedDomainEventData.new event_data, aggregate_id, upcast_object

                builder = Serialization::SerializedDomainEventMessageBuilder.new

                # Prevent duplicate serialization of metadata if it was accessed during upcasting
                metadata = context.serialized_metadata
                if metadata.deserialized?
                  builder.metadata = Serialization::DeserializedObject.new metadata.deserialized
                end

                builder.from_data upcast_data, serializer

                events.push builder.build
              end
            end

            events
          end
        end # CommitDocument

        # Mongo document that represents a single event as part of a commit document
        class EventDocument
          # @return [String]
          attr_reader :id

          # @return [Time]
          attr_reader :timestamp

          # @return [Integer]
          attr_reader :sequence_number

          # @return [SerializedObject]
          def metadata
            Serialization::SerializedMetadata.new @metadata, @metadata.class
          end

          # @return [SerializedObject]
          def payload
            Serialization::SerializedObject.new @payload, @payload.class,
              Serialization::SerializedType.new(@payload_type, @payload_revision)
          end

          # @param [EventMessage] event
          # @param [Serializer] serializer
          # @return [EventDocument]
          def from_event(event, serializer)
            serialization_target = String
            if serializer.can_serialize_to? Hash
              serialization_target = Hash
            end

            serialized_metadata = serializer.serialize_metadata event, serialization_target
            serialized_payload = serializer.serialize_payload event, serialization_target

            @id = event.id
            @metadata = serialized_metadata.content
            @payload = serialized_payload.content
            @payload_type = serialized_payload.type.name
            @payload_revision = serialized_payload.type.revision
            @timestamp = event.timestamp
            @sequence_number = event.sequence_number

            self
          end

          # @param [Hash] hash
          # @return [EventDocument]
          def from_hash(hash)
            hash.symbolize_keys!

            @id = hash.fetch :id
            @metadata = hash.fetch :metadata
            @payload = hash.fetch :payload
            @payload_type = hash.fetch :payload_type
            @payload_revision = hash.fetch :payload_revision
            @timestamp = hash.fetch :timestamp
            @sequence_number = hash.fetch :sequence_number

            self
          end

          # @return [Hash]
          def to_hash
            { id: @id,
              metadata: @metadata,
              payload: @payload,
              payload_type: @payload_type,
              payload_revision: @payload_revision,
              timestamp: @timestamp,
              sequence_number: @sequence_number }
          end
        end # EventDocument

        # Serialized domain event data from an event document
        class DocumentDomainEventData < Serialization::SerializedDomainEventData
          # @param [Object] aggregate_id
          # @param [EventDocument] event_document
          # @return [undefined]
          def initialize(aggregate_id, event_document)
            @aggregate_id = aggregate_id
            @event_document = event_document
          end

          # @return [String]
          def id
            @event_document.id
          end

          # @return [SerializedObject]
          def metadata
            @event_document.metadata
          end

          # @return [SerializedObject]
          def payload
            @event_document.payload
          end

          # @return [Time]
          def timestamp
            @event_document.timestamp
          end

          # @return [Object]
          def aggregate_id
            @aggregate_id
          end

          # @return [Integer]
          def sequence_number
            @event_document.sequence_number
          end
        end # DocumentDomainEventData
      end # DocumentPerCommitStrategy
    end # Mongo
  end # EventStore
end # Synapse
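
For orientation, a hedged example of the commit document shape that CommitDocument#to_hash produces for a two-event commit; identifiers, payload contents, and timestamp formatting are illustrative only (actual values depend on the configured serializer and driver):

{ aggregate_id: '6bd5b6a1',
  aggregate_type: 'Order',
  sequence_number: 0,                  # mirrors first_sequence_number for query compatibility
  first_sequence_number: 0,
  last_sequence_number: 1,
  timestamp: '2013-04-01T12:00:00Z',   # mirrors first_timestamp for query compatibility
  first_timestamp: '2013-04-01T12:00:00Z',
  last_timestamp: '2013-04-01T12:00:05Z',
  events: [
    { id: 'e1', metadata: {}, payload: { order_id: '6bd5b6a1' },
      payload_type: 'OrderCreated', payload_revision: '1',
      timestamp: '2013-04-01T12:00:00Z', sequence_number: 0 },
    { id: 'e2', metadata: {}, payload: { order_id: '6bd5b6a1', sku: 'ABC-1' },
      payload_type: 'ItemAdded', payload_revision: '1',
      timestamp: '2013-04-01T12:00:05Z', sequence_number: 1 }
  ] }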