event_sourcery 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +37 -0
- data/.rspec +3 -0
- data/.travis.yml +8 -0
- data/CHANGELOG.md +82 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +5 -0
- data/LICENSE.txt +21 -0
- data/README.md +399 -0
- data/Rakefile +6 -0
- data/bin/console +6 -0
- data/bin/setup +15 -0
- data/event_sourcery.gemspec +28 -0
- data/lib/event_sourcery.rb +49 -0
- data/lib/event_sourcery/aggregate_root.rb +68 -0
- data/lib/event_sourcery/config.rb +43 -0
- data/lib/event_sourcery/errors.rb +19 -0
- data/lib/event_sourcery/event.rb +49 -0
- data/lib/event_sourcery/event_body_serializer.rb +42 -0
- data/lib/event_sourcery/event_processing/error_handlers/constant_retry.rb +23 -0
- data/lib/event_sourcery/event_processing/error_handlers/error_handler.rb +20 -0
- data/lib/event_sourcery/event_processing/error_handlers/exponential_backoff_retry.rb +40 -0
- data/lib/event_sourcery/event_processing/error_handlers/no_retry.rb +19 -0
- data/lib/event_sourcery/event_processing/esp_process.rb +41 -0
- data/lib/event_sourcery/event_processing/esp_runner.rb +105 -0
- data/lib/event_sourcery/event_processing/event_stream_processor.rb +125 -0
- data/lib/event_sourcery/event_processing/event_stream_processor_registry.rb +29 -0
- data/lib/event_sourcery/event_store/each_by_range.rb +25 -0
- data/lib/event_sourcery/event_store/event_builder.rb +19 -0
- data/lib/event_sourcery/event_store/event_sink.rb +18 -0
- data/lib/event_sourcery/event_store/event_source.rb +21 -0
- data/lib/event_sourcery/event_store/event_type_serializers/class_name.rb +19 -0
- data/lib/event_sourcery/event_store/event_type_serializers/legacy.rb +17 -0
- data/lib/event_sourcery/event_store/event_type_serializers/underscored.rb +68 -0
- data/lib/event_sourcery/event_store/poll_waiter.rb +18 -0
- data/lib/event_sourcery/event_store/signal_handling_subscription_master.rb +22 -0
- data/lib/event_sourcery/event_store/subscription.rb +43 -0
- data/lib/event_sourcery/memory/event_store.rb +76 -0
- data/lib/event_sourcery/memory/tracker.rb +27 -0
- data/lib/event_sourcery/repository.rb +31 -0
- data/lib/event_sourcery/rspec/event_store_shared_examples.rb +352 -0
- data/lib/event_sourcery/version.rb +3 -0
- metadata +158 -0
@@ -0,0 +1,18 @@
|
|
1
|
+
module EventSourcery
  module EventStore
    # Drives a subscription by invoking a block over and over, sleeping
    # between invocations. The loop ends when something (typically the
    # block itself or a subscription master) throws :stop.
    class PollWaiter
      # interval: seconds to sleep between polls (default 0.5).
      def initialize(interval: 0.5)
        @interval = interval
      end

      # Polls forever, calling the given block each iteration.
      # Returns when :stop is thrown from within the block.
      def poll(&block)
        catch(:stop) { poll_forever(&block) }
      end

      private

      # The endless call/sleep cycle; only exits via the :stop throw
      # caught in #poll.
      def poll_forever(&block)
        loop do
          block.call
          sleep @interval
        end
      end
    end
  end
end
|
@@ -0,0 +1,22 @@
|
|
1
|
+
module EventSourcery
  module EventStore
    # Listens for process termination signals (TERM/INT) and records that a
    # shutdown was requested, so long-running subscription loops can exit
    # gracefully at a safe point instead of dying mid-batch.
    class SignalHandlingSubscriptionMaster
      # Signals that should trigger a graceful shutdown.
      SHUTDOWN_SIGNALS = %i(TERM INT).freeze

      def initialize
        @shutdown_requested = false
        setup_graceful_shutdown
      end

      # Throws :stop if a shutdown signal has been received since this
      # object was created; otherwise does nothing.
      def shutdown_if_requested
        return unless @shutdown_requested
        throw :stop
      end

      private

      # Installs signal handlers that merely flip a flag — trap context
      # forbids most work, so the actual shutdown happens later via
      # #shutdown_if_requested.
      def setup_graceful_shutdown
        SHUTDOWN_SIGNALS.each do |sig|
          Signal.trap(sig) { @shutdown_requested = true }
        end
      end
    end
  end
end
|
@@ -0,0 +1,43 @@
|
|
1
|
+
module EventSourcery
  module EventStore
    # Follows an event stream: repeatedly asks the event store for events
    # past the current position and hands each non-empty batch to a
    # callback, advancing the position as it goes.
    class Subscription
      # event_store:          anything responding to #get_next_from(id, event_types:)
      # poll_waiter:          drives the polling loop (see PollWaiter)
      # from_event_id:        first event ID this subscription should see
      # event_types:          optional list of types to filter on (nil = all)
      # on_new_events:        callable invoked with each batch of events
      # subscription_master:  consulted each pass for graceful shutdown
      # events_table_name:    accepted for interface compatibility; unused here
      def initialize(event_store:,
                     poll_waiter:,
                     from_event_id:,
                     event_types: nil,
                     on_new_events:,
                     subscription_master:,
                     events_table_name: :events)
        @event_store = event_store
        @from_event_id = from_event_id
        @poll_waiter = poll_waiter
        @event_types = event_types
        @on_new_events = on_new_events
        @subscription_master = subscription_master
        # Position of the last event handed out; one before the start.
        @current_event_id = from_event_id - 1
      end

      # Blocks the caller, polling for new events until :stop is thrown
      # (e.g. by the subscription master during shutdown).
      def start
        catch(:stop) do
          @poll_waiter.poll { read_events }
        end
      end

      private

      # Drains everything currently available from the store, batch by
      # batch, returning once a fetch comes back empty.
      def read_events
        loop do
          @subscription_master.shutdown_if_requested
          events = next_batch
          return if events.empty?
          EventSourcery.logger.debug { "New events in subscription: #{events.inspect}" }
          @on_new_events.call(events)
          @current_event_id = events.last.id
          EventSourcery.logger.debug { "Position in stream: #{@current_event_id}" }
        end
      end

      # Fetches the events immediately after the current position.
      def next_batch
        @event_store.get_next_from(@current_event_id + 1, event_types: @event_types)
      end
    end
  end
end
|
@@ -0,0 +1,76 @@
|
|
1
|
+
module EventSourcery
  module Memory
    # An in-memory event store implementing the same interface as the
    # persistent stores; intended for tests and experimentation.
    class EventStore
      include EventSourcery::EventStore::EachByRange

      # events:        backing array (shared, not copied)
      # event_builder: builds stored events from raw attributes; defaults
      #                to the globally configured builder.
      def initialize(events = [], event_builder: EventSourcery.config.event_builder)
        @events = events
        @event_builder = event_builder
      end

      # Appends one or more events, all of which must belong to a single
      # aggregate. When expected_version is given, raises ConcurrencyError
      # if the aggregate's current version differs (optimistic locking).
      # Returns true on success.
      def sink(event_or_events, expected_version: nil)
        events = Array(event_or_events)
        ensure_one_aggregate(events)

        if expected_version && version_for(events.first.aggregate_id) != expected_version
          raise ConcurrencyError
        end

        events.each do |event|
          @events << @event_builder.build(
            id: @events.size + 1,
            aggregate_id: event.aggregate_id,
            type: event.type,
            version: next_version(event.aggregate_id),
            body: EventBodySerializer.serialize(event.body),
            created_at: event.created_at || Time.now.utc,
            uuid: event.uuid,
            correlation_id: event.correlation_id,
            causation_id: event.causation_id,
          )
        end

        true
      end

      # Events with ID >= id (optionally filtered by type), up to `limit`.
      def get_next_from(id, event_types: nil, limit: 1000)
        events_of_types(event_types).select { |event| event.id >= id }.first(limit)
      end

      # Highest stored event ID (optionally among the given types); 0 when
      # there are no matching events.
      def latest_event_id(event_types: nil)
        events = events_of_types(event_types)
        events.empty? ? 0 : events.last.id
      end

      # All events for the aggregate; id may be any object convertible to
      # a String via #to_str.
      def get_events_for_aggregate_id(id)
        stringified_id = id.to_str
        @events.select { |event| event.aggregate_id == stringified_id }
      end

      # The version the aggregate's next event should carry.
      def next_version(aggregate_id)
        version_for(aggregate_id) + 1
      end

      # Current aggregate version == number of events stored for it.
      def version_for(aggregate_id)
        get_events_for_aggregate_id(aggregate_id).count
      end

      # Guards the atomic-write invariant: every event in the batch must
      # target the same aggregate. (Note: an empty batch also raises, as
      # [].uniq.one? is false — preserved from the original behavior.)
      def ensure_one_aggregate(events)
        unless events.map(&:aggregate_id).uniq.one?
          raise AtomicWriteToMultipleAggregatesNotSupported
        end
      end

      private

      # Shared type filter used by #get_next_from and #latest_event_id
      # (previously duplicated in both).
      def events_of_types(event_types)
        return @events if event_types.nil?
        @events.select { |event| event_types.include?(event.type) }
      end
    end
  end
end
|
@@ -0,0 +1,27 @@
|
|
1
|
+
module EventSourcery
  module Memory
    # Tracks each processor's position in the event stream, purely in
    # memory. Keys are processor names stringified; untracked processors
    # report position 0 via the hash's default value.
    class Tracker
      def initialize
        @state = Hash.new(0)
      end

      # Registers a processor, (re)setting its position to 0.
      def setup(processor_name)
        @state[processor_name.to_s] = 0
      end

      # Records that the processor has handled the event with this ID.
      def processed_event(processor_name, event_id)
        @state[processor_name.to_s] = event_id
      end

      # Resetting is the same operation as initial setup.
      alias_method :reset_last_processed_event_id, :setup

      # Last event ID recorded for the processor (0 if never tracked).
      def last_processed_event_id(processor_name)
        @state[processor_name.to_s]
      end

      # Names (as strings) of every processor seen so far.
      def tracked_processors
        @state.keys
      end
    end
  end
end
|
@@ -0,0 +1,31 @@
|
|
1
|
+
module EventSourcery
  # Mediates between aggregates and the event store: rehydrates aggregates
  # from their stored events and persists newly recorded changes.
  class Repository
    # Convenience: build a repository and load the aggregate in one call.
    def self.load(aggregate_class, aggregate_id, event_source:, event_sink:)
      new(event_source: event_source, event_sink: event_sink)
        .load(aggregate_class, aggregate_id)
    end

    def initialize(event_source:, event_sink:)
      @event_source = event_source
      @event_sink = event_sink
    end

    # Rehydrates an aggregate by replaying its stored events through
    # aggregate_class.new(id, events).
    def load(aggregate_class, aggregate_id)
      history = event_source.get_events_for_aggregate_id(aggregate_id)
      aggregate_class.new(aggregate_id, history)
    end

    # Persists the aggregate's uncommitted changes (if any), using the
    # version the aggregate had before those changes as the expected
    # version for optimistic concurrency, then clears the change list.
    def save(aggregate)
      new_events = aggregate.changes
      if new_events.any?
        event_sink.sink(new_events, expected_version: aggregate.version - new_events.count)
      end
      aggregate.clear_changes
    end

    private

    attr_reader :event_source, :event_sink
  end
end
|
@@ -0,0 +1,352 @@
|
|
1
|
+
# Shared RSpec examples exercising the event store contract. The including
# spec must provide `event_store` (e.g. via `let(:event_store)`); RSpec
# mocks (`double`) must also be available.
RSpec.shared_examples 'an event store' do
  let(:aggregate_id) { SecureRandom.uuid }

  # Builds an EventSourcery::Event with sensible random defaults; any
  # attribute can be overridden per example.
  def new_event(aggregate_id: SecureRandom.uuid, type: 'test_event', body: {},
                id: nil, version: 1, created_at: nil, uuid: SecureRandom.uuid,
                correlation_id: SecureRandom.uuid, causation_id: SecureRandom.uuid)
    EventSourcery::Event.new(id: id,
                             aggregate_id: aggregate_id,
                             type: type,
                             body: body,
                             version: version,
                             created_at: created_at,
                             uuid: uuid,
                             correlation_id: correlation_id,
                             causation_id: causation_id)
  end

  describe '#sink' do
    it 'assigns auto incrementing event IDs' do
      event_store.sink(new_event)
      event_store.sink(new_event)
      event_store.sink(new_event)
      events = event_store.get_next_from(1)
      expect(events.count).to eq 3
      expect(events.map(&:id)).to eq [1, 2, 3]
    end

    it 'assigns UUIDs' do
      uuid = SecureRandom.uuid
      event_store.sink(new_event(uuid: uuid))
      event = event_store.get_next_from(1).first
      expect(event.uuid).to eq uuid
    end

    it 'returns true' do
      expect(event_store.sink(new_event)).to eq true
    end

    it 'serializes the event body' do
      time = Time.now
      event = new_event(body: { 'time' => time })
      expect(event_store.sink(event)).to eq true
      # Times in the body round-trip as ISO8601 strings.
      expect(event_store.get_next_from(1, limit: 1).first.body).to eq('time' => time.iso8601)
    end

    it 'saves the causation_id' do
      causation_id = SecureRandom.uuid
      event = new_event(causation_id: causation_id)
      event_store.sink(event)
      expect(event_store.get_next_from(1, limit: 1).first.causation_id).to eq(causation_id)
    end

    it 'saves the correlation_id' do
      correlation_id = SecureRandom.uuid
      event = new_event(correlation_id: correlation_id)
      event_store.sink(event)
      expect(event_store.get_next_from(1, limit: 1).first.correlation_id).to eq(correlation_id)
    end

    it 'writes multiple events' do
      event_store.sink([new_event(aggregate_id: aggregate_id, body: {e: 1}),
                        new_event(aggregate_id: aggregate_id, body: {e: 2}),
                        new_event(aggregate_id: aggregate_id, body: {e: 3})])
      events = event_store.get_next_from(1)
      expect(events.count).to eq 3
      expect(events.map(&:id)).to eq [1, 2, 3]
      expect(events.map(&:body)).to eq [{'e' => 1}, {'e' => 2}, {'e' => 3}]
      expect(events.map(&:version)).to eq [1, 2, 3]
    end

    it 'sets the correct aggregates version' do
      event_store.sink([new_event(aggregate_id: aggregate_id, body: {e: 1}),
                        new_event(aggregate_id: aggregate_id, body: {e: 2})])
      # this will throw a unique constraint error if the aggregate version was not set correctly ^
      event_store.sink([new_event(aggregate_id: aggregate_id, body: {e: 1}),
                        new_event(aggregate_id: aggregate_id, body: {e: 2})])
      events = event_store.get_next_from(1)
      expect(events.count).to eq 4
      expect(events.map(&:id)).to eq [1, 2, 3, 4]
    end

    context 'with no existing aggregate stream' do
      it 'saves an event' do
        event = new_event(aggregate_id: aggregate_id,
                          type: :test_event_2,
                          body: { 'my' => 'data' })
        event_store.sink(event)
        events = event_store.get_next_from(1)
        expect(events.count).to eq 1
        expect(events.first.id).to eq 1
        expect(events.first.aggregate_id).to eq aggregate_id
        expect(events.first.type).to eq 'test_event_2'
        expect(events.first.body).to eq({ 'my' => 'data' }) # should we symbolize keys when hydrating events?
      end
    end

    context 'with an existing aggregate stream' do
      before do
        event_store.sink(new_event(aggregate_id: aggregate_id))
      end

      it 'saves an event' do
        event = new_event(aggregate_id: aggregate_id,
                          type: :test_event_2,
                          body: { 'my' => 'data' })
        event_store.sink(event)
        events = event_store.get_next_from(1)
        expect(events.count).to eq 2
        expect(events.last.id).to eq 2
        expect(events.last.aggregate_id).to eq aggregate_id
        expect(events.last.type).to eq :test_event_2.to_s # shouldn't you get back what you put in, a symbol?
        expect(events.last.body).to eq({ 'my' => 'data' }) # should we symbolize keys when hydrating events?
      end
    end

    it 'correctly inserts created at times when inserting multiple events atomically' do
      time = Time.parse('2016-10-14T00:00:00.646191Z')
      event_store.sink([new_event(aggregate_id: aggregate_id, created_at: nil), new_event(aggregate_id: aggregate_id, created_at: time)])
      created_ats = event_store.get_next_from(0).map(&:created_at)
      expect(created_ats.map(&:class)).to eq [Time, Time]
      expect(created_ats.last).to eq time
    end

    it 'raises an error if the events given are for more than one aggregate' do
      expect {
        event_store.sink([new_event(aggregate_id: aggregate_id), new_event(aggregate_id: SecureRandom.uuid)])
      }.to raise_error(EventSourcery::AtomicWriteToMultipleAggregatesNotSupported)
    end
  end

  describe '#get_next_from' do
    it 'gets a subset of events' do
      event_store.sink(new_event(aggregate_id: aggregate_id))
      event_store.sink(new_event(aggregate_id: aggregate_id))
      expect(event_store.get_next_from(1, limit: 1).map(&:id)).to eq [1]
      expect(event_store.get_next_from(2, limit: 1).map(&:id)).to eq [2]
      expect(event_store.get_next_from(1, limit: 2).map(&:id)).to eq [1, 2]
    end

    it 'returns the event as expected' do
      event_store.sink(new_event(aggregate_id: aggregate_id, type: 'item_added', body: { 'my' => 'data' }))
      event = event_store.get_next_from(1, limit: 1).first
      expect(event.aggregate_id).to eq aggregate_id
      expect(event.type).to eq 'item_added'
      expect(event.body).to eq({ 'my' => 'data' })
      expect(event.created_at).to be_instance_of(Time)
    end

    it 'filters by event type' do
      event_store.sink(new_event(aggregate_id: aggregate_id, type: 'user_signed_up'))
      event_store.sink(new_event(aggregate_id: aggregate_id, type: 'item_added'))
      event_store.sink(new_event(aggregate_id: aggregate_id, type: 'item_added'))
      event_store.sink(new_event(aggregate_id: aggregate_id, type: 'item_rejected'))
      event_store.sink(new_event(aggregate_id: aggregate_id, type: 'user_signed_up'))
      events = event_store.get_next_from(1, event_types: ['user_signed_up'])
      expect(events.count).to eq 2
      # Filtering keeps the original stream IDs, not a renumbered sequence.
      expect(events.map(&:id)).to eq [1, 5]
    end
  end

  describe '#latest_event_id' do
    it 'returns the latest event id' do
      event_store.sink(new_event(aggregate_id: aggregate_id))
      event_store.sink(new_event(aggregate_id: aggregate_id))
      expect(event_store.latest_event_id).to eq 2
    end

    context 'with no events' do
      it 'returns 0' do
        expect(event_store.latest_event_id).to eq 0
      end
    end

    context 'with event type filtering' do
      it 'gets the latest event ID for a set of event types' do
        event_store.sink(new_event(aggregate_id: aggregate_id, type: 'type_1'))
        event_store.sink(new_event(aggregate_id: aggregate_id, type: 'type_1'))
        event_store.sink(new_event(aggregate_id: aggregate_id, type: 'type_2'))

        expect(event_store.latest_event_id(event_types: ['type_1'])).to eq 2
        expect(event_store.latest_event_id(event_types: ['type_2'])).to eq 3
        expect(event_store.latest_event_id(event_types: ['type_1', 'type_2'])).to eq 3
      end
    end
  end

  describe '#get_events_for_aggregate_id' do
    RSpec.shared_examples 'gets events for a specific aggregate id' do
      before do
        event_store.sink(new_event(aggregate_id: aggregate_id, type: 'item_added', body: { 'my' => 'body' }))
        event_store.sink(new_event(aggregate_id: aggregate_id))
        event_store.sink(new_event(aggregate_id: SecureRandom.uuid))
      end

      # `uuid` is supplied by the including context below.
      subject(:events) { event_store.get_events_for_aggregate_id(uuid) }

      specify do
        expect(events.map(&:id)).to eq([1, 2])
        expect(events.first.aggregate_id).to eq aggregate_id
        expect(events.first.type).to eq 'item_added'
        expect(events.first.body).to eq({ 'my' => 'body' })
        expect(events.first.created_at).to be_instance_of(Time)
      end
    end

    context 'when aggregate_id is a string' do
      include_examples 'gets events for a specific aggregate id' do
        let(:uuid) { aggregate_id }
      end
    end

    context 'when aggregate_id is convertible to a string' do
      include_examples 'gets events for a specific aggregate id' do
        # Any object responding to #to_str should be accepted.
        let(:uuid) { double(to_str: aggregate_id) }
      end
    end
  end

  describe '#each_by_range' do
    before do
      (1..21).each do |i|
        event_store.sink(new_event(aggregate_id: aggregate_id,
                                   type: 'item_added',
                                   body: {}))
      end
    end

    # Collects yielded events into an array for easy assertions.
    def events_by_range(*args)
      [].tap do |events|
        event_store.each_by_range(*args) do |event|
          events << event
        end
      end
    end

    context "the range doesn't include the latest event ID" do
      it 'returns only the events in the range' do
        events = events_by_range(1, 20)
        expect(events.count).to eq 20
        expect(events.map(&:id)).to eq((1..20).to_a)
      end
    end

    context 'the range includes the latest event ID' do
      it 'returns all the events' do
        events = events_by_range(1, 21)
        expect(events.count).to eq 21
        expect(events.map(&:id)).to eq((1..21).to_a)
      end
    end

    context 'the range exceeds the latest event ID' do
      it 'returns all the events' do
        events = events_by_range(1, 25)
        expect(events.count).to eq 21
        expect(events.map(&:id)).to eq((1..21).to_a)
      end
    end

    context 'the range filters by event type' do
      it 'returns only events of the given type' do
        expect(events_by_range(1, 21, event_types: ['user_signed_up']).count).to eq 0
        expect(events_by_range(1, 21, event_types: ['item_added']).count).to eq 21
      end
    end
  end

  # Helper used by the optimistic concurrency examples below.
  def save_event(expected_version: nil)
    event_store.sink(new_event(aggregate_id: aggregate_id,
                               type: :billing_details_provided,
                               body: { my_event: 'data' }),
                     expected_version: expected_version)
  end

  def add_event
    event_store.sink(new_event(aggregate_id: aggregate_id))
  end

  def last_event
    event_store.get_next_from(0).last
  end

  context 'optimistic concurrency control' do
    context "when the aggregate doesn't exist" do
      context 'and the expected version is correct - 0' do
        it 'saves the event with and sets the aggregate version to version 1' do
          save_event(expected_version: 0)
          expect(last_event.version).to eq 1
        end
      end

      context 'and the expected version is incorrect - 1' do
        it 'raises a ConcurrencyError' do
          expect {
            save_event(expected_version: 1)
          }.to raise_error(EventSourcery::ConcurrencyError)
        end
      end

      context 'with no expected version' do
        it 'saves the event with and sets the aggregate version to version 1' do
          save_event
          expect(last_event.version).to eq 1
        end
      end
    end

    context 'when the aggregate exists' do
      before do
        add_event
      end

      context 'with an incorrect expected version - 0' do
        it 'raises a ConcurrencyError' do
          expect {
            save_event(expected_version: 0)
          }.to raise_error(EventSourcery::ConcurrencyError)
        end
      end

      context 'with a correct expected version - 1' do
        it 'saves the event with and sets the aggregate version to version 2' do
          save_event
          expect(last_event.version).to eq 2
        end
      end

      context 'with no aggregate version' do
        it 'automatically sets the version on the event and aggregate' do
          save_event
          expect(last_event.version).to eq 2
        end
      end
    end

    it 'allows overriding the created_at timestamp for events' do
      time = Time.parse('2016-10-14T00:00:00.646191Z')
      event_store.sink(new_event(aggregate_id: aggregate_id,
                                 type: :billing_details_provided,
                                 body: { my_event: 'data' },
                                 created_at: time))
      expect(last_event.created_at).to eq time
    end

    it "sets a created_at time when one isn't provided in the event" do
      event_store.sink(new_event(aggregate_id: aggregate_id,
                                 type: :billing_details_provided,
                                 body: { my_event: 'data' }))
      expect(last_event.created_at).to be_instance_of(Time)
    end
  end
end
|