ruby_event_store 2.0.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +3 -1
- data/lib/ruby_event_store/client.rb +37 -6
- data/lib/ruby_event_store/errors.rb +1 -0
- data/lib/ruby_event_store/event.rb +5 -2
- data/lib/ruby_event_store/in_memory_repository.rb +67 -15
- data/lib/ruby_event_store/instrumented_dispatcher.rb +12 -2
- data/lib/ruby_event_store/instrumented_repository.rb +17 -11
- data/lib/ruby_event_store/mappers/default.rb +1 -28
- data/lib/ruby_event_store/mappers/encryption_mapper.rb +2 -0
- data/lib/ruby_event_store/mappers/forgotten_data.rb +1 -1
- data/lib/ruby_event_store/mappers/in_memory_encryption_key_repository.rb +1 -1
- data/lib/ruby_event_store/mappers/instrumented_mapper.rb +0 -4
- data/lib/ruby_event_store/mappers/json_mapper.rb +7 -5
- data/lib/ruby_event_store/mappers/pipeline.rb +2 -26
- data/lib/ruby_event_store/mappers/pipeline_mapper.rb +0 -4
- data/lib/ruby_event_store/mappers/transformation/domain_event.rb +13 -3
- data/lib/ruby_event_store/mappers/transformation/upcast.rb +37 -0
- data/lib/ruby_event_store/metadata.rb +3 -3
- data/lib/ruby_event_store/projection.rb +2 -2
- data/lib/ruby_event_store/spec/broker_lint.rb +11 -11
- data/lib/ruby_event_store/spec/event_lint.rb +9 -9
- data/lib/ruby_event_store/spec/event_repository_lint.rb +338 -281
- data/lib/ruby_event_store/spec/subscriptions_lint.rb +41 -33
- data/lib/ruby_event_store/subscriptions.rb +24 -9
- data/lib/ruby_event_store/transform_keys.rb +5 -5
- data/lib/ruby_event_store/version.rb +1 -1
- data/lib/ruby_event_store.rb +43 -44
- metadata +26 -17
- data/.mutant.yml +0 -1
- data/CHANGELOG.md +0 -93
- data/Gemfile +0 -9
- data/Gemfile.lock +0 -118
- data/Makefile +0 -32
- data/lib/ruby_event_store/mappers/deprecated_wrapper.rb +0 -33
- data/lib/ruby_event_store/mappers/transformation/serialization.rb +0 -36
- data/ruby_event_store.gemspec +0 -27
data/lib/ruby_event_store/mappers/json_mapper.rb
@@ -2,12 +2,14 @@
 
 module RubyEventStore
   module Mappers
-    class JSONMapper <
+    class JSONMapper < Default
       def initialize(events_class_remapping: {})
-
-
-
-
+        warn <<~EOW
+          Please replace RubyEventStore::Mappers::JSONMapper with RubyEventStore::Mappers::Default
+
+          They're now identical and the former will be removed in next major release.
+        EOW
+        super
       end
     end
   end
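With JSONMapper reduced to a deprecated alias of the Default mapper, the only migration needed is to stop referencing it. A minimal sketch of what that looks like on the client side (the explicit repository and mapper arguments are just for illustration; Default is also what Client uses when no mapper is given):

    require "ruby_event_store"

    # Deprecated since 2.3.0 and slated for removal -- prints the warning shown above:
    # mapper = RubyEventStore::Mappers::JSONMapper.new

    # Identical behaviour, no warning:
    mapper = RubyEventStore::Mappers::Default.new

    client = RubyEventStore::Client.new(
      repository: RubyEventStore::InMemoryRepository.new,
      mapper: mapper
    )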
data/lib/ruby_event_store/mappers/pipeline.rb
@@ -3,13 +3,10 @@
 module RubyEventStore
   module Mappers
     class Pipeline
-
-
-      def initialize(*transformations_, transformations: UNSET, to_domain_event: Transformation::DomainEvent.new)
+      def initialize(*transformations, to_domain_event: Transformation::DomainEvent.new)
         @transformations = [
           to_domain_event,
-
-          transformations_,
+          transformations,
         ].flatten.freeze
       end
 
@@ -26,27 +23,6 @@ module RubyEventStore
       end
 
       attr_reader :transformations
-
-      private
-
-      def deprecated_transformations(transformations)
-        case transformations
-        when UNSET
-          []
-        else
-          warn <<~EOW
-            Passing transformations via keyword parameter is deprecated.
-            Please use positional arguments from now on.
-
-            Was:
-              RubyEventStore::Mappers::Pipeline.new(transformations: transformations)
-
-            Is now:
-              RubyEventStore::Mappers::Pipeline.new(*transformations)
-          EOW
-          transformations
-        end
-      end
     end
   end
 end
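After this change Pipeline.new takes its transformations as plain positional arguments; the transformations: keyword and its deprecation shim are gone. A short sketch of composing a custom mapper this way (the class-remapping hash is an invented example):

    require "ruby_event_store"

    pipeline = RubyEventStore::Mappers::Pipeline.new(
      RubyEventStore::Mappers::Transformation::EventClassRemapper.new({ "OrderPlaced.v1" => "OrderPlaced" }),
      RubyEventStore::Mappers::Transformation::SymbolizeMetadataKeys.new
    )

    mapper = RubyEventStore::Mappers::PipelineMapper.new(pipeline)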
data/lib/ruby_event_store/mappers/transformation/domain_event.rb
@@ -5,9 +5,9 @@ module RubyEventStore
     module Transformation
       class DomainEvent
         def dump(domain_event)
-          metadata
+          metadata = domain_event.metadata.dup.to_h
           timestamp = metadata.delete(:timestamp)
-          valid_at
+          valid_at = metadata.delete(:valid_at)
           Record.new(
             event_id: domain_event.event_id,
             metadata: metadata,
@@ -21,11 +21,21 @@ module RubyEventStore
         def load(record)
           Object.const_get(record.event_type).new(
             event_id: record.event_id,
+            data: record.data,
             metadata: record.metadata.merge(
               timestamp: record.timestamp,
-              valid_at:
+              valid_at: record.valid_at,
             ),
+          )
+        rescue NameError
+          Event.new(
+            event_id: record.event_id,
             data: record.data,
+            metadata: record.metadata.merge(
+              timestamp: record.timestamp,
+              valid_at: record.valid_at,
+              event_type: record.event_type,
+            ),
           )
         end
       end
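Besides passing data: through on load, the transformation now rescues NameError when the stored event type no longer maps to a constant and returns a generic RubyEventStore::Event instead, keeping the original type in metadata. A rough illustration of that fallback, with made-up record values:

    require "ruby_event_store"
    require "securerandom"

    transformation = RubyEventStore::Mappers::Transformation::DomainEvent.new

    record = RubyEventStore::Record.new(
      event_id:   SecureRandom.uuid,
      event_type: "SomeRemovedEvent",   # no such constant is defined
      data:       { "order_id" => 42 },
      metadata:   {},
      timestamp:  Time.now.utc,
      valid_at:   Time.now.utc
    )

    event = transformation.load(record)
    event.class      # => RubyEventStore::Event (fallback instead of NameError)
    event.event_type # => "SomeRemovedEvent"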
data/lib/ruby_event_store/mappers/transformation/upcast.rb (new file)
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+module RubyEventStore
+  module Mappers
+    module Transformation
+      class Upcast
+        class RecordUpcaster
+          def initialize(upcast_map)
+            @upcast_map = upcast_map
+          end
+
+          def call(record)
+            identity = lambda { |r| r }
+            new_record = @upcast_map.fetch(record.event_type, identity)[record]
+            if new_record.equal?(record)
+              record
+            else
+              call(new_record)
+            end
+          end
+        end
+
+        def initialize(upcast_map)
+          @record_upcaster = RecordUpcaster.new(upcast_map)
+        end
+
+        def dump(record)
+          record
+        end
+
+        def load(record)
+          @record_upcaster.call(record)
+        end
+      end
+    end
+  end
+end
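Upcast is a read-side transformation: it looks the record's event_type up in the supplied map and keeps applying the matching lambda until the record comes back unchanged, so several historical versions can be folded forward in one pass. A hedged usage sketch (the "OrderPlaced.v1" type and the rewrite it performs are invented):

    require "ruby_event_store"

    upcast = RubyEventStore::Mappers::Transformation::Upcast.new(
      "OrderPlaced.v1" => lambda { |record|
        RubyEventStore::Record.new(
          event_id:   record.event_id,
          event_type: "OrderPlaced",
          data:       record.data.merge("currency" => "USD"),
          metadata:   record.metadata,
          timestamp:  record.timestamp,
          valid_at:   record.valid_at
        )
      }
    )

    # Wired into a pipeline-based mapper, so stored v1 records are upgraded on read:
    mapper = RubyEventStore::Mappers::PipelineMapper.new(
      RubyEventStore::Mappers::Pipeline.new(upcast)
    )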
data/lib/ruby_event_store/projection.rb
@@ -72,14 +72,14 @@ module RubyEventStore
    end
 
    def reduce_from_streams(event_store, start, count)
-      raise ArgumentError.new(
+      raise ArgumentError.new("Start must be an array with event ids") unless valid_starting_point?(start)
      streams.zip(start_events(start)).reduce(initial_state) do |state, (stream_name, start_event_id)|
        read_scope(event_store, stream_name, count, start_event_id).reduce(state, &method(:transition))
      end
    end
 
    def reduce_from_all_streams(event_store, start, count)
-      raise ArgumentError.new(
+      raise ArgumentError.new("Start must be valid event id") unless valid_starting_point?(start)
      read_scope(event_store, nil, count, start).reduce(initial_state, &method(:transition))
    end
 
data/lib/ruby_event_store/spec/broker_lint.rb
@@ -1,5 +1,5 @@
 RSpec.shared_examples :broker do |broker_klass|
-  let(:event) { instance_double(::RubyEventStore::Event, event_type:
+  let(:event) { instance_double(::RubyEventStore::Event, event_type: "EventType") }
   let(:record) { instance_double(::RubyEventStore::Record) }
   let(:handler) { HandlerClass.new }
   let(:subscriptions) { ::RubyEventStore::Subscriptions.new }
@@ -7,38 +7,38 @@ RSpec.shared_examples :broker do |broker_klass|
   let(:broker) { broker_klass.new(subscriptions: subscriptions, dispatcher: dispatcher) }
 
   specify "no dispatch when no subscriptions" do
-    expect(subscriptions).to receive(:all_for).with(
+    expect(subscriptions).to receive(:all_for).with("EventType").and_return([])
     expect(dispatcher).not_to receive(:call)
     broker.call(event, record)
   end
 
   specify "calls subscription" do
-    expect(subscriptions).to receive(:all_for).with(
+    expect(subscriptions).to receive(:all_for).with("EventType").and_return([handler])
     expect(dispatcher).to receive(:call).with(handler, event, record)
     broker.call(event, record)
   end
 
   specify "calls subscribed class" do
-    expect(subscriptions).to receive(:all_for).with(
+    expect(subscriptions).to receive(:all_for).with("EventType").and_return([HandlerClass])
     expect(dispatcher).to receive(:call).with(HandlerClass, event, record)
     broker.call(event, record)
   end
 
   specify "calls all subscriptions" do
-    expect(subscriptions).to receive(:all_for).with(
+    expect(subscriptions).to receive(:all_for).with("EventType").and_return([handler, HandlerClass])
     expect(dispatcher).to receive(:call).with(handler, event, record)
     expect(dispatcher).to receive(:call).with(HandlerClass, event, record)
     broker.call(event, record)
   end
 
-  specify
+  specify "raise error when no subscriber" do
     expect { broker.add_subscription(nil, [])}.to raise_error(RubyEventStore::SubscriberNotExist, "subscriber must be first argument or block")
     expect { broker.add_global_subscription(nil)}.to raise_error(RubyEventStore::SubscriberNotExist), "subscriber must be first argument or block"
     expect { broker.add_thread_subscription(nil, []).call}.to raise_error(RubyEventStore::SubscriberNotExist), "subscriber must be first argument or block"
     expect { broker.add_thread_global_subscription(nil).call}.to raise_error(RubyEventStore::SubscriberNotExist), "subscriber must be first argument or block"
   end
 
-  specify
+  specify "raise error when wrong subscriber" do
     allow(dispatcher).to receive(:verify).and_return(false)
     expect do
       broker.add_subscription(HandlerClass, [])
@@ -56,8 +56,8 @@ RSpec.shared_examples :broker do |broker_klass|
 
   specify "verify and add - local subscriptions" do
     expect(dispatcher).to receive(:verify).with(handler).and_return(true)
-    expect(subscriptions).to receive(:add_subscription).with(handler, [
-    broker.add_subscription(handler, [
+    expect(subscriptions).to receive(:add_subscription).with(handler, ["EventType"])
+    broker.add_subscription(handler, ["EventType"])
   end
 
   specify "verify and add - global subscriptions" do
@@ -68,8 +68,8 @@ RSpec.shared_examples :broker do |broker_klass|
 
   specify "verify and add - thread local subscriptions" do
     expect(dispatcher).to receive(:verify).with(handler).and_return(true)
-    expect(subscriptions).to receive(:add_thread_subscription).with(handler, [
-    broker.add_thread_subscription(handler, [
+    expect(subscriptions).to receive(:add_thread_subscription).with(handler, ["EventType"])
+    broker.add_thread_subscription(handler, ["EventType"])
   end
 
   specify "verify and add - thread global subscriptions" do
data/lib/ruby_event_store/spec/event_lint.rb
@@ -1,36 +1,36 @@
 RSpec.shared_examples :event do |event_class, data, metadata|
-  it
+  it "allows initialization" do
     expect {
       event_class.new(event_id: Object.new, data: data || Object.new, metadata: metadata || {})
     }.not_to raise_error
   end
 
-  it
+  it "provides event_id as string" do
     event = event_class.new
     expect(event.event_id).to be_an_instance_of(String)
-    expect(event.event_id).not_to eq
+    expect(event.event_id).not_to eq ""
     expect(event.event_id).not_to eq nil
   end
 
-  it
+  it "provides message_id as string" do
     event = event_class.new
     expect(event.message_id).to be_an_instance_of(String)
   end
 
-  it
+  it "message_id is the same as event_id" do
     event = event_class.new
     expect(event.event_id).to eq event.message_id
   end
 
-  it
+  it "exposes given event_id to string" do
     event = event_class.new(event_id: 1234567890)
-    expect(event.event_id).to eq
+    expect(event.event_id).to eq "1234567890"
   end
 
-  it
+  it "provides event type as string" do
     event = event_class.new
     expect(event.event_type).to be_an_instance_of(String)
-    expect(event.event_type).not_to eq
+    expect(event.event_type).not_to eq ""
     expect(event.event_type).not_to eq nil
   end
 
data/lib/ruby_event_store/spec/event_repository_lint.rb
@@ -5,7 +5,7 @@ module RubyEventStore
      event_id: SecureRandom.uuid,
      data: {},
      metadata: {},
-      event_type:
+      event_type: "SRecordTestEvent",
      timestamp: Time.new.utc,
      valid_at: nil
    )
@@ -26,50 +26,17 @@ module RubyEventStore
  Type2 = Class.new(RubyEventStore::Event)
  # @private
  Type3 = Class.new(RubyEventStore::Event)
-
-  # @private
-  class EventRepositoryHelper
-    def supports_concurrent_auto?
-      true
-    end
-
-    def supports_concurrent_any?
-      true
-    end
-
-    def supports_binary?
-      true
-    end
-
-    def supports_upsert?
-      true
-    end
-
-    def has_connection_pooling?
-      false
-    end
-
-    def connection_pool_size
-    end
-
-    def cleanup_concurrency_test
-    end
-
-    def rescuable_concurrency_test_errors
-      []
-    end
-  end
 end
 
 module RubyEventStore
-  ::RSpec.shared_examples :event_repository do
-    let(:
+  ::RSpec.shared_examples :event_repository do |mk_repository, helper|
+    let(:repository) { mk_repository.call }
    let(:specification) { Specification.new(SpecificationReader.new(repository, Mappers::NullMapper.new)) }
    let(:global_stream) { Stream.new(GLOBAL_STREAM) }
    let(:stream) { Stream.new(SecureRandom.uuid) }
-    let(:stream_flow) { Stream.new(
-    let(:stream_other) { Stream.new(
-    let(:stream_test) { Stream.new(
+    let(:stream_flow) { Stream.new("flow") }
+    let(:stream_other) { Stream.new("other") }
+    let(:stream_test) { Stream.new("test") }
    let(:version_none) { ExpectedVersion.none }
    let(:version_auto) { ExpectedVersion.auto }
    let(:version_any) { ExpectedVersion.any }
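The hard-coded EventRepositoryHelper is gone; the shared examples now receive a repository factory and a helper object from the including spec. A sketch of how an adapter suite might pass them in (the helper class below is an assumption modelled on the capability checks the examples perform, such as supports_concurrent_any? and supports_position_queries?):

    require "ruby_event_store"
    require "ruby_event_store/spec/event_repository_lint"

    # Hypothetical capability object for the adapter under test.
    class InMemoryRepositoryHelper
      def supports_concurrent_auto?;  true;  end
      def supports_concurrent_any?;   true;  end
      def supports_binary?;           true;  end
      def supports_upsert?;           true;  end
      def supports_position_queries?; true;  end
      def has_connection_pooling?;    false; end
      def connection_pool_size;       nil;   end
    end

    RSpec.describe RubyEventStore::InMemoryRepository do
      it_behaves_like :event_repository,
        lambda { RubyEventStore::InMemoryRepository.new },
        InMemoryRepositoryHelper.new
    end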
@@ -78,12 +45,12 @@ module RubyEventStore
    let(:version_2) { ExpectedVersion.new(2) }
    let(:version_3) { ExpectedVersion.new(3) }
 
-    def verify_conncurency_assumptions
+    def verify_conncurency_assumptions(helper)
      return unless helper.has_connection_pooling?
-      expect(helper.connection_pool_size).to eq(5)
+      expect(helper.connection_pool_size).to eq(5), "expected connection pool of size 5, got #{helper.connection_pool_size}"
    end
 
-    def read_events(scope, stream = nil, from: nil, to: nil, count: nil)
+    def read_events(repository, scope, stream = nil, from: nil, to: nil, count: nil)
      scope = scope.stream(stream.name) if stream
      scope = scope.from(from) if from
      scope = scope.to(to) if to
@@ -91,44 +58,44 @@ module RubyEventStore
      repository.read(scope.result).to_a
    end
 
-    def read_events_forward(
-      read_events(specification, stream, from: from, to: to, count: count)
+    def read_events_forward(repository, stream = nil, from: nil, to: nil, count: nil)
+      read_events(repository, specification, stream, from: from, to: to, count: count)
    end
 
-    def read_events_backward(
-      read_events(specification.backward, stream, from: from, to: to, count: count)
+    def read_events_backward(repository, stream = nil, from: nil, to: nil, count: nil)
+      read_events(repository, specification.backward, stream, from: from, to: to, count: count)
    end
 
-    it
+    it "just created is empty" do
      expect(read_events_forward(repository)).to be_empty
    end
 
-    specify
+    specify "append_to_stream returns self" do
      repository
-        .append_to_stream(event = SRecord.new, stream, version_none)
-        .append_to_stream(event = SRecord.new, stream, version_0)
+        .append_to_stream([event = SRecord.new], stream, version_none)
+        .append_to_stream([event = SRecord.new], stream, version_0)
    end
 
-    specify
+    specify "link_to_stream returns self" do
      event0 = SRecord.new
      event1 = SRecord.new
      repository
        .append_to_stream([event0, event1], stream, version_none)
-        .link_to_stream(event0.event_id, stream_flow, version_none)
-        .link_to_stream(event1.event_id, stream_flow, version_0)
+        .link_to_stream([event0.event_id], stream_flow, version_none)
+        .link_to_stream([event1.event_id], stream_flow, version_0)
    end
 
-    specify
-      repository.append_to_stream(event = SRecord.new, stream, version_none)
+    specify "adds an initial event to a new stream" do
+      repository.append_to_stream([event = SRecord.new], stream, version_none)
      expect(read_events_forward(repository).first).to eq(event)
      expect(read_events_forward(repository, stream).first).to eq(event)
      expect(read_events_forward(repository, stream_other)).to be_empty
    end
 
-    specify
+    specify "links an initial event to a new stream" do
      repository
-        .append_to_stream(event = SRecord.new, stream, version_none)
-        .link_to_stream(event.event_id, stream_flow, version_none)
+        .append_to_stream([event = SRecord.new], stream, version_none)
+        .link_to_stream([event.event_id], stream_flow, version_none)
 
      expect(read_events_forward(repository, count: 1).first).to eq(event)
      expect(read_events_forward(repository, stream).first).to eq(event)
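From here on the examples call append_to_stream with an array of records and link_to_stream with an array of event ids, which is the repository contract the lint now enforces. A small standalone sketch of the same calling convention against the in-memory repository (stream names and expected-version choices are arbitrary):

    require "ruby_event_store"
    require "securerandom"

    repository = RubyEventStore::InMemoryRepository.new
    stream     = RubyEventStore::Stream.new("Order$42")

    record = RubyEventStore::Record.new(
      event_id:   SecureRandom.uuid,
      event_type: "OrderPlaced",
      data:       {},
      metadata:   {},
      timestamp:  Time.now.utc,
      valid_at:   Time.now.utc
    )

    # Always an array of records, even for a single append:
    repository.append_to_stream([record], stream, RubyEventStore::ExpectedVersion.none)

    # Linking takes an array of event ids:
    repository.link_to_stream(
      [record.event_id],
      RubyEventStore::Stream.new("all-orders"),
      RubyEventStore::ExpectedVersion.any
    )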
@@ -136,7 +103,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_other)).to be_empty
    end
 
-    specify
+    specify "adds multiple initial events to a new stream" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -145,7 +112,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1])
    end
 
-    specify
+    specify "links multiple initial events to a new stream" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -157,7 +124,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow)).to eq([event0, event1])
    end
 
-    specify
+    specify "correct expected version on second write" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -170,7 +137,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
    end
 
-    specify
+    specify "correct expected version on second link" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -185,7 +152,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow)).to eq([event2, event3, event0, event1])
    end
 
-    specify
+    specify "incorrect expected version on second write" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -201,7 +168,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1])
    end
 
-    specify
+    specify "incorrect expected version on second link" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -221,7 +188,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1])
    end
 
-    specify
+    specify ":none on first and subsequent write" do
      repository.append_to_stream([
        eventA = SRecord.new,
      ], stream, version_none)
@@ -234,7 +201,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([eventA])
    end
 
-    specify
+    specify ":none on first and subsequent link" do
      repository.append_to_stream([
        eventA = SRecord.new,
        eventB = SRecord.new,
@@ -249,7 +216,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow)).to eq([eventA])
    end
 
-    specify
+    specify ":any allows stream with best-effort order and no guarantee" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -262,7 +229,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream).to_set).to eq(Set.new([event0, event1, event2, event3]))
    end
 
-    specify
+    specify ":any allows linking in stream with best-effort order and no guarantee" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -281,7 +248,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow).to_set).to eq(Set.new([event0, event1, event2, event3]))
    end
 
-    specify
+    specify ":auto queries for last position in given stream" do
      repository.append_to_stream([
        eventA = SRecord.new,
        eventB = SRecord.new,
@@ -297,7 +264,7 @@ module RubyEventStore
      ], stream, version_1)
    end
 
-    specify
+    specify ":auto queries for last position in given stream when linking" do
      repository.append_to_stream([
        eventA = SRecord.new,
        eventB = SRecord.new,
@@ -314,7 +281,7 @@ module RubyEventStore
      ], stream, version_1)
    end
 
-    specify
+    specify ":auto starts from 0" do
      repository.append_to_stream([
        event0 = SRecord.new,
      ], stream, version_auto)
@@ -325,7 +292,7 @@ module RubyEventStore
      end.to raise_error(WrongExpectedEventVersion)
    end
 
-    specify
+    specify ":auto linking starts from 0" do
      repository.append_to_stream([
        event0 = SRecord.new,
      ], stream_other, version_auto)
@@ -339,7 +306,7 @@ module RubyEventStore
      end.to raise_error(WrongExpectedEventVersion)
    end
 
-    specify
+    specify ":auto queries for last position and follows in incremental way" do
      # It is expected that there is higher level lock
      # So this query is safe from race conditions
      repository.append_to_stream([
@@ -357,7 +324,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
    end
 
-    specify
+    specify ":auto queries for last position and follows in incremental way when linking" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -377,7 +344,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow)).to eq([event0, event1, event2, event3])
    end
 
-    specify
+    specify ":auto is compatible with manual expectation" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -390,7 +357,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
    end
 
-    specify
+    specify ":auto is compatible with manual expectation when linking" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -405,7 +372,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow)).to eq([event0, event1,])
    end
 
-    specify
+    specify "manual is compatible with auto expectation" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -418,7 +385,7 @@ module RubyEventStore
      expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
    end
 
-    specify
+    specify "manual is compatible with auto expectation when linking" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -433,9 +400,9 @@ module RubyEventStore
      expect(read_events_forward(repository, stream_flow)).to eq([event0, event1])
    end
 
-    specify
+    specify "unlimited concurrency for :any - everything should succeed", timeout: 10, mutant: false do
      skip unless helper.supports_concurrent_any?
-      verify_conncurency_assumptions
+      verify_conncurency_assumptions(helper)
      begin
        concurrency_level = 4
        fail_occurred = false
@@ -466,14 +433,12 @@ module RubyEventStore
          ev.event_id.start_with?("0-")
        end
        expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
-      ensure
-        helper.cleanup_concurrency_test
      end
    end
 
-    specify
+    specify "unlimited concurrency for :any - everything should succeed when linking", timeout: 10, mutant: false do
      skip unless helper.supports_concurrent_any?
-      verify_conncurency_assumptions
+      verify_conncurency_assumptions(helper)
      begin
        concurrency_level = 4
        fail_occurred = false
@@ -494,7 +459,7 @@ module RubyEventStore
          begin
            100.times do |j|
              eid = "0000000#{i}-#{sprintf("%04d", j)}-0000-0000-000000000000"
-              repository.link_to_stream(eid, stream_flow, version_any)
+              repository.link_to_stream([eid], stream_flow, version_any)
            end
          rescue WrongExpectedEventVersion
            fail_occurred = true
@@ -511,14 +476,12 @@ module RubyEventStore
          ev.event_id.start_with?("0-")
        end
        expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
-      ensure
-        helper.cleanup_concurrency_test
      end
    end
 
-    specify
+    specify "limited concurrency for :auto - some operations will fail without outside lock, stream is ordered", mutant: false do
      skip unless helper.supports_concurrent_auto?
-      verify_conncurency_assumptions
+      verify_conncurency_assumptions(helper)
      begin
        concurrency_level = 4
 
@@ -535,7 +498,7 @@ module RubyEventStore
              SRecord.new(event_id: eid),
            ], stream, version_auto)
            sleep(rand(concurrency_level) / 1000.0)
-          rescue WrongExpectedEventVersion
+          rescue WrongExpectedEventVersion
            fail_occurred +=1
          end
        end
@@ -551,15 +514,15 @@ module RubyEventStore
          ev.event_id.start_with?("0-")
        end
        expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
-
-
-
+
+        positions = repository.read(specification.stream(stream.name).result).map { |r| repository.position_in_stream(r.event_id, stream) }
+        expect(positions).to eq((0...positions.size).to_a)
      end
    end
 
-    specify
+    specify "limited concurrency for :auto - some operations will fail without outside lock, stream is ordered", mutant: false do
      skip unless helper.supports_concurrent_auto?
-      verify_conncurency_assumptions
+      verify_conncurency_assumptions(helper)
      begin
        concurrency_level = 4
 
@@ -581,9 +544,9 @@ module RubyEventStore
          100.times do |j|
            begin
              eid = "0000000#{i}-#{sprintf("%04d", j)}-0000-0000-000000000000"
-              repository.link_to_stream(eid, stream, version_auto)
+              repository.link_to_stream([eid], stream, version_auto)
              sleep(rand(concurrency_level) / 1000.0)
-              rescue WrongExpectedEventVersion
+              rescue WrongExpectedEventVersion
              fail_occurred +=1
            end
          end
@@ -599,51 +562,51 @@ module RubyEventStore
          ev.event_id.start_with?("0-")
        end
        expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
-
-
-
+
+        positions = repository.read(specification.stream(stream.name).result).map { |r| repository.position_in_stream(r.event_id, stream) }
+        expect(positions).to eq((0...positions.size).to_a)
      end
    end
 
-    it
+    it "appended event is stored in given stream" do
      expected_event = SRecord.new
-      repository.append_to_stream(expected_event, stream, version_any)
+      repository.append_to_stream([expected_event], stream, version_any)
      expect(read_events_forward(repository, count: 1).first).to eq(expected_event)
      expect(read_events_forward(repository, stream).first).to eq(expected_event)
      expect(read_events_forward(repository, stream_other)).to be_empty
    end
 
-    it
+    it "data attributes are retrieved" do
      event = SRecord.new(data: { "order_id" => 3 })
-      repository.append_to_stream(event, stream, version_any)
+      repository.append_to_stream([event], stream, version_any)
      retrieved_event = read_events_forward(repository, count: 1).first
      expect(retrieved_event.data).to eq({ "order_id" => 3 })
    end
 
-    it
+    it "metadata attributes are retrieved" do
      event = SRecord.new(metadata: { "request_id" => 3 })
-      repository.append_to_stream(event, stream, version_any)
+      repository.append_to_stream([event], stream, version_any)
      retrieved_event = read_events_forward(repository, count: 1).first
      expect(retrieved_event.metadata).to eq({ "request_id" => 3 })
    end
 
-    it
+    it "data and metadata attributes are retrieved when linking" do
      event = SRecord.new(
        data: { "order_id" => 3 },
        metadata: { "request_id" => 4},
      )
      repository
-        .append_to_stream(event, stream, version_any)
-        .link_to_stream(event.event_id, stream_flow, version_any)
+        .append_to_stream([event], stream, version_any)
+        .link_to_stream([event.event_id], stream_flow, version_any)
      retrieved_event = read_events_forward(repository, stream_flow).first
      expect(retrieved_event.metadata).to eq({ "request_id" => 4 })
      expect(retrieved_event.data).to eq({ "order_id" => 3 })
      expect(event).to eq(retrieved_event)
    end
 
-    it
-      repository.append_to_stream(e1 = SRecord.new, stream, version_none)
-      repository.append_to_stream(e2 = SRecord.new, stream_other, version_none)
+    it "does not have deleted streams" do
+      repository.append_to_stream([e1 = SRecord.new], stream, version_none)
+      repository.append_to_stream([e2 = SRecord.new], stream_other, version_none)
 
      repository.delete_stream(stream)
      expect(read_events_forward(repository, stream)).to be_empty
@@ -651,41 +614,108 @@ module RubyEventStore
      expect(read_events_forward(repository, count: 10)).to eq([e1,e2])
    end
 
-    it
+    it "does not have deleted streams with linked events" do
      repository
-        .append_to_stream(e1 = SRecord.new, stream, version_none)
-        .link_to_stream(e1.event_id, stream_flow, version_none)
+        .append_to_stream([e1 = SRecord.new], stream, version_none)
+        .link_to_stream([e1.event_id], stream_flow, version_none)
 
      repository.delete_stream(stream_flow)
      expect(read_events_forward(repository, stream_flow)).to be_empty
      expect(read_events_forward(repository, count: 10)).to eq([e1])
    end
 
-    it
-      just_an_id =
-      repository.append_to_stream(SRecord.new(event_id: just_an_id), stream, version_none)
+    it "has or has not domain event" do
+      just_an_id = "d5c134c2-db65-4e87-b6ea-d196f8f1a292"
+      repository.append_to_stream([SRecord.new(event_id: just_an_id)], stream, version_none)
 
      expect(repository.has_event?(just_an_id)).to be_truthy
      expect(repository.has_event?(just_an_id.clone)).to be_truthy
-      expect(repository.has_event?(
+      expect(repository.has_event?("any other id")).to be false
 
      repository.delete_stream(stream)
      expect(repository.has_event?(just_an_id)).to be_truthy
      expect(repository.has_event?(just_an_id.clone)).to be_truthy
    end
 
-    it
-
-      repository.append_to_stream(
+    it "#position_in_stream happy path" do
+      skip unless helper.supports_position_queries?
+      repository.append_to_stream([
+        event0 = SRecord.new,
+        event1 = SRecord.new
+      ], stream, version_auto)
+
+      expect(repository.position_in_stream(event0.event_id, stream)).to eq(0)
+      expect(repository.position_in_stream(event1.event_id, stream)).to eq(1)
+    end
+
+    it "#position_in_stream happy path with linking" do
+      skip unless helper.supports_position_queries?
+      repository.append_to_stream([
+        event0 = SRecord.new,
+        event1 = SRecord.new
+      ], stream, version_auto)
+      repository.link_to_stream([
+        event1.event_id,
+        event0.event_id,
+      ], stream_other, version_auto)
+
+      expect(repository.position_in_stream(event0.event_id, stream)).to eq(0)
+      expect(repository.position_in_stream(event1.event_id, stream)).to eq(1)
+      expect(repository.position_in_stream(event1.event_id, stream_other)).to eq(0)
+      expect(repository.position_in_stream(event0.event_id, stream_other)).to eq(1)
+    end
+
+    it "#position_in_stream when event is not in the stream" do
+      skip unless helper.supports_position_queries?
+      just_an_id = "d5c134c2-db65-4e87-b6ea-d196f8f1a292"
+
+      expect do
+        repository.position_in_stream(just_an_id, stream)
+      end.to raise_error(EventNotFoundInStream)
+    end
+
+    it "#position_in_stream when event is published without position" do
+      skip unless helper.supports_position_queries?
+      repository.append_to_stream([event0 = SRecord.new], stream, version_any)
+
+      expect(repository.position_in_stream(event0.event_id, stream)).to eq(nil)
+    end
+
+    it "#global_position happy path" do
+      skip unless helper.supports_position_queries?
+      repository.append_to_stream([
+        event0 = SRecord.new,
+        event1 = SRecord.new
+      ], stream, version_any)
+
+      expect(repository.global_position(event0.event_id)).to eq(0)
+      expect(repository.global_position(event1.event_id)).to eq(1)
+    end
+
+    it "#global_position for not existing event" do
+      skip unless helper.supports_position_queries?
+      just_an_id = "d5c134c2-db65-4e87-b6ea-d196f8f1a292"
+
+      expect do
+        repository.global_position(just_an_id)
+      end.to raise_error do |err|
+        expect(err).to be_a(EventNotFound)
+        expect(err.event_id).to eq(just_an_id)
+      end
+    end
+
+    it "knows last event in stream" do
+      repository.append_to_stream([a =SRecord.new(event_id: "00000000-0000-0000-0000-000000000001")], stream, version_none)
+      repository.append_to_stream([b = SRecord.new(event_id: "00000000-0000-0000-0000-000000000002")], stream, version_0)
 
      expect(repository.last_stream_event(stream)).to eq(b)
      expect(repository.last_stream_event(stream_other)).to be_nil
    end
 
-    it
+    it "knows last event in stream when linked" do
      repository.append_to_stream([
-        e0 = SRecord.new(event_id:
-        e1 = SRecord.new(event_id:
+          e0 = SRecord.new(event_id: "00000000-0000-0000-0000-000000000001"),
+          e1 = SRecord.new(event_id: "00000000-0000-0000-0000-000000000002"),
        ],
        stream,
        version_none
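These new examples cover the position-query API added to repositories: position_in_stream(event_id, stream) returns the 0-based position of an event appended with a concrete expected version (nil when it was published with :any), and global_position(event_id) returns its 0-based position in the global log, raising EventNotFound for unknown ids. A brief sketch against a repository that supports position queries (the in-memory repository is used here as the example; identifiers are arbitrary):

    require "ruby_event_store"
    require "securerandom"

    repository = RubyEventStore::InMemoryRepository.new
    stream     = RubyEventStore::Stream.new("Order$7")

    build_record = lambda do
      RubyEventStore::Record.new(
        event_id:   SecureRandom.uuid,
        event_type: "OrderPlaced",
        data:       {},
        metadata:   {},
        timestamp:  Time.now.utc,
        valid_at:   Time.now.utc
      )
    end

    first, second = build_record.call, build_record.call
    repository.append_to_stream([first, second], stream, RubyEventStore::ExpectedVersion.auto)

    repository.position_in_stream(first.event_id, stream)  # => 0
    repository.position_in_stream(second.event_id, stream) # => 1
    repository.global_position(first.event_id)             # => 0
    # repository.global_position("unknown-id")  # would raise RubyEventStore::EventNotFound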
@@ -693,7 +723,7 @@ module RubyEventStore
      expect(repository.last_stream_event(stream_flow)).to eq(e0)
    end
 
-    it
+    it "reads batch of events from stream forward & backward" do
      events = %w[
        96c920b1-cdd0-40f4-907c-861b9fff7d02
        56404f79-0ba0-4aa0-8524-dc3436368ca0
@@ -706,11 +736,11 @@ module RubyEventStore
        ab60114c-011d-4d58-ab31-7ba65d99975e
        868cac42-3d19-4b39-84e8-cd32d65c2445
      ].map { |id| SRecord.new(event_id: id) }
-      repository.append_to_stream(SRecord.new, stream_other, version_none)
+      repository.append_to_stream([SRecord.new], stream_other, version_none)
      events.each.with_index do |event, index|
-        repository.append_to_stream(event, stream, ExpectedVersion.new(index - 1))
+        repository.append_to_stream([event], stream, ExpectedVersion.new(index - 1))
      end
-      repository.append_to_stream(SRecord.new, stream_other, version_0)
+      repository.append_to_stream([SRecord.new], stream_other, version_0)
 
      expect(read_events_forward(repository, stream, count: 3)).to eq(events.first(3))
      expect(read_events_forward(repository, stream, count: 100)).to eq(events)
@@ -728,7 +758,7 @@ module RubyEventStore
      expect(read_events_backward(repository, stream, to: events[4].event_id, count: 100)).to eq(events.last(5).reverse)
    end
 
-    it
+    it "reads batch of linked events from stream forward & backward" do
      events = %w[
        96c920b1-cdd0-40f4-907c-861b9fff7d02
        56404f79-0ba0-4aa0-8524-dc3436368ca0
@@ -741,13 +771,13 @@ module RubyEventStore
        ab60114c-011d-4d58-ab31-7ba65d99975e
        868cac42-3d19-4b39-84e8-cd32d65c2445
      ].map { |id| SRecord.new(event_id: id) }
-      repository.append_to_stream(SRecord.new, stream_other, version_none)
+      repository.append_to_stream([SRecord.new], stream_other, version_none)
      events.each.with_index do |event, index|
        repository
-          .append_to_stream(event, stream, ExpectedVersion.new(index - 1))
-          .link_to_stream(event.event_id, stream_flow, ExpectedVersion.new(index - 1))
+          .append_to_stream([event], stream, ExpectedVersion.new(index - 1))
+          .link_to_stream([event.event_id], stream_flow, ExpectedVersion.new(index - 1))
      end
-      repository.append_to_stream(SRecord.new, stream_other, version_0)
+      repository.append_to_stream([SRecord.new], stream_other, version_0)
 
      expect(read_events_forward(repository, stream_flow, count: 3)).to eq(events.first(3))
      expect(read_events_forward(repository, stream_flow, count: 100)).to eq(events)
@@ -764,39 +794,39 @@ module RubyEventStore
      expect(read_events_backward(repository, stream_flow, to: events[4].event_id, count: 100)).to eq(events[5..9].reverse)
    end
 
-    it
+    it "reads all stream events forward & backward" do
      s1 = stream
      s2 = stream_other
      repository
-        .append_to_stream(a = SRecord.new(event_id:
-        .append_to_stream(b = SRecord.new(event_id:
-        .append_to_stream(c = SRecord.new(event_id:
-        .append_to_stream(d = SRecord.new(event_id:
-        .append_to_stream(e = SRecord.new(event_id:
+        .append_to_stream([a = SRecord.new(event_id: "7010d298-ab69-4bb1-9251-f3466b5d1282")], s1, version_none)
+        .append_to_stream([b = SRecord.new(event_id: "34f88aca-aaba-4ca0-9256-8017b47528c5")], s2, version_none)
+        .append_to_stream([c = SRecord.new(event_id: "8e61c864-ceae-4684-8726-97c34eb8fc4f")], s1, version_0)
+        .append_to_stream([d = SRecord.new(event_id: "30963ed9-6349-450b-ac9b-8ea50115b3bd")], s2, version_0)
+        .append_to_stream([e = SRecord.new(event_id: "5bdc58b7-e8a7-4621-afd6-ccb828d72457")], s2, version_1)
 
      expect(read_events_forward(repository, s1)).to eq [a,c]
      expect(read_events_backward(repository, s1)).to eq [c,a]
    end
 
-    it
+    it "reads all stream linked events forward & backward" do
      s1, fs1, fs2 = stream, stream_flow, stream_other
      repository
-        .append_to_stream(a = SRecord.new(event_id:
-        .append_to_stream(b = SRecord.new(event_id:
-        .append_to_stream(c = SRecord.new(event_id:
-        .append_to_stream(d = SRecord.new(event_id:
-        .append_to_stream(e = SRecord.new(event_id:
-        .link_to_stream(
-        .link_to_stream(
-        .link_to_stream(
-        .link_to_stream(
-        .link_to_stream(
+        .append_to_stream([a = SRecord.new(event_id: "7010d298-ab69-4bb1-9251-f3466b5d1282")], s1, version_none)
+        .append_to_stream([b = SRecord.new(event_id: "34f88aca-aaba-4ca0-9256-8017b47528c5")], s1, version_0)
+        .append_to_stream([c = SRecord.new(event_id: "8e61c864-ceae-4684-8726-97c34eb8fc4f")], s1, version_1)
+        .append_to_stream([d = SRecord.new(event_id: "30963ed9-6349-450b-ac9b-8ea50115b3bd")], s1, version_2)
+        .append_to_stream([e = SRecord.new(event_id: "5bdc58b7-e8a7-4621-afd6-ccb828d72457")], s1, version_3)
+        .link_to_stream(["7010d298-ab69-4bb1-9251-f3466b5d1282"], fs1, version_none)
+        .link_to_stream(["34f88aca-aaba-4ca0-9256-8017b47528c5"], fs2, version_none)
+        .link_to_stream(["8e61c864-ceae-4684-8726-97c34eb8fc4f"], fs1, version_0)
+        .link_to_stream(["30963ed9-6349-450b-ac9b-8ea50115b3bd"], fs2, version_0)
+        .link_to_stream(["5bdc58b7-e8a7-4621-afd6-ccb828d72457"], fs2, version_1)
 
      expect(read_events_forward(repository, fs1)).to eq [a,c]
      expect(read_events_backward(repository, fs1)).to eq [c,a]
    end
 
-    it
+    it "reads batch of events from all streams forward & backward" do
      events = %w[
        96c920b1-cdd0-40f4-907c-861b9fff7d02
        56404f79-0ba0-4aa0-8524-dc3436368ca0
@@ -810,7 +840,7 @@ module RubyEventStore
        868cac42-3d19-4b39-84e8-cd32d65c2445
      ].map { |id| SRecord.new(event_id: id) }
      events.each do |ev|
-        repository.append_to_stream(ev, Stream.new(SecureRandom.uuid), version_none)
+        repository.append_to_stream([ev], Stream.new(SecureRandom.uuid), version_none)
      end
 
      expect(read_events_forward(repository, count: 3)).to eq(events.first(3))
@@ -828,7 +858,7 @@ module RubyEventStore
      expect(read_events_backward(repository, to: events[4].event_id, count: 100)).to eq(events.last(5).reverse)
    end
 
-    it
+    it "linked events do not affect reading from all streams - no duplicates" do
      events = %w[
        96c920b1-cdd0-40f4-907c-861b9fff7d02
        56404f79-0ba0-4aa0-8524-dc3436368ca0
@@ -843,8 +873,8 @@ module RubyEventStore
      ].map { |id| SRecord.new(event_id: id) }
      events.each do |ev|
        repository
-          .append_to_stream(ev, Stream.new(SecureRandom.uuid), version_none)
-          .link_to_stream(ev.event_id, Stream.new(SecureRandom.uuid), version_none)
+          .append_to_stream([ev], Stream.new(SecureRandom.uuid), version_none)
+          .link_to_stream([ev.event_id], Stream.new(SecureRandom.uuid), version_none)
      end
 
      expect(read_events_forward(repository, count: 3)).to eq(events.first(3))
@@ -862,13 +892,13 @@ module RubyEventStore
      expect(read_events_backward(repository, to: events[4].event_id, count: 100)).to eq(events.last(5).reverse)
    end
 
-    it
+    it "reads events different uuid object but same content" do
      events = %w[
        96c920b1-cdd0-40f4-907c-861b9fff7d02
        56404f79-0ba0-4aa0-8524-dc3436368ca0
      ].map{|id| SRecord.new(event_id: id) }
-      repository.append_to_stream(events.first, stream, version_none)
-      repository.append_to_stream(events.last, stream, version_0)
+      repository.append_to_stream([events.first], stream, version_none)
+      repository.append_to_stream([events.last], stream, version_0)
 
      expect(read_events_forward(repository, from: "96c920b1-cdd0-40f4-907c-861b9fff7d02")).to eq([events.last])
      expect(read_events_backward(repository, from: "56404f79-0ba0-4aa0-8524-dc3436368ca0")).to eq([events.first])
@@ -881,84 +911,82 @@ module RubyEventStore
      expect(read_events_backward(repository, stream, to: "96c920b1-cdd0-40f4-907c-861b9fff7d02", count: 1)).to eq([events.last])
    end
 
-    it
+    it "does not allow same event twice in a stream" do
      repository.append_to_stream(
-        SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+        [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
        stream,
        version_none
      )
      expect do
        repository.append_to_stream(
-          SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+          [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
          stream,
          version_0
        )
      end.to raise_error(EventDuplicatedInStream)
    end
 
-    it
+    it "does not allow same event twice" do
      repository.append_to_stream(
-        SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+        [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
        stream,
        version_none
      )
      expect do
        repository.append_to_stream(
-          SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+          [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
          stream_other,
          version_none
        )
      end.to raise_error(EventDuplicatedInStream)
    end
 
-    it
-      repository.append_to_stream(
-
-
+    it "does not allow linking same event twice in a stream" do
+      repository.append_to_stream(
+        [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
+        stream,
        version_none
-      ).link_to_stream("a1b49edb-7636-416f-874a-88f94b859bef", stream_flow, version_none)
+      ).link_to_stream(["a1b49edb-7636-416f-874a-88f94b859bef"], stream_flow, version_none)
      expect do
-        repository.link_to_stream("a1b49edb-7636-416f-874a-88f94b859bef", stream_flow, version_0)
+        repository.link_to_stream(["a1b49edb-7636-416f-874a-88f94b859bef"], stream_flow, version_0)
      end.to raise_error(EventDuplicatedInStream)
    end
 
-    it
+    it "allows appending to GLOBAL_STREAM explicitly" do
      event = SRecord.new(event_id: "df8b2ba3-4e2c-4888-8d14-4364855fa80e")
-      repository.append_to_stream(event, global_stream, version_any)
+      repository.append_to_stream([event], global_stream, version_any)
 
      expect(read_events_forward(repository, count: 10)).to eq([event])
    end
 
    specify "events not persisted if append failed" do
-      repository.append_to_stream([
-        SRecord.new,
-      ], stream, version_none)
+      repository.append_to_stream([SRecord.new], stream, version_none)
 
      expect do
-        repository.append_to_stream(
-          SRecord.new(
-
-
-
+        repository.append_to_stream(
+          [SRecord.new(event_id: "9bedf448-e4d0-41a3-a8cd-f94aec7aa763")],
+          stream,
+          version_none
+        )
      end.to raise_error(WrongExpectedEventVersion)
-      expect(repository.has_event?(
+      expect(repository.has_event?("9bedf448-e4d0-41a3-a8cd-f94aec7aa763")).to be false
    end
 
-    specify
+    specify "linking non-existent event" do
      expect do
-        repository.link_to_stream(
+        repository.link_to_stream(["72922e65-1b32-4e97-8023-03ae81dd3a27"], stream_flow, version_none)
      end.to raise_error do |err|
        expect(err).to be_a(EventNotFound)
-        expect(err.event_id).to eq(
-        expect(err.message).to eq(
+        expect(err.event_id).to eq("72922e65-1b32-4e97-8023-03ae81dd3a27")
+        expect(err.message).to eq("Event not found: 72922e65-1b32-4e97-8023-03ae81dd3a27")
      end
    end
 
-    specify
+    specify "read returns enumerator" do
      expect(repository.read(specification.result)).to be_kind_of(Enumerator)
    end
 
-    specify
+    specify "can store arbitrary binary data" do
      skip unless helper.supports_binary?
      binary = "\xB0"
      expect(binary.valid_encoding?).to eq(false)
@@ -966,7 +994,7 @@ module RubyEventStore
      expect(binary.valid_encoding?).to eq(true)
 
      repository.append_to_stream(
-        event = SRecord.new(data: binary, metadata: binary),
+        [event = SRecord.new(data: binary, metadata: binary)],
        stream,
        version_none
      )
@@ -974,6 +1002,8 @@ module RubyEventStore
 
    specify do
      expect(repository.read(specification.in_batches.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_at.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_of.result)).to be_kind_of(Enumerator)
      events = Array.new(10) { SRecord.new }
      repository.append_to_stream(
        events,
@@ -981,6 +1011,9 @@ module RubyEventStore
        ExpectedVersion.none
      )
      expect(repository.read(specification.in_batches.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_at.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_of.result)).to be_kind_of(Enumerator)
+
    end
 
    specify do
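The new assertions cover the bi-temporal read specifications: as_at orders by the time an event was appended, as_of by its valid_at time, and both keep returning Enumerators from in_batches. A sketch of the client-side reading these back (the event class, stream name and the explicit valid_at metadata are illustrative assumptions):

    require "ruby_event_store"

    client = RubyEventStore::Client.new(repository: RubyEventStore::InMemoryRepository.new)

    class PriceChanged < RubyEventStore::Event
    end

    client.publish(PriceChanged.new(data: { price: 10 }), stream_name: "product-1")
    client.publish(
      PriceChanged.new(data: { price: 8 }, metadata: { valid_at: Time.utc(2020, 1, 1) }),
      stream_name: "product-1"
    )

    client.read.stream("product-1").as_at.to_a  # ordered by append (timestamp) time
    client.read.stream("product-1").as_of.to_a  # ordered by validity (valid_at) time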
@@ -1177,50 +1210,50 @@ module RubyEventStore
    end
 
    specify do
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      stream_a = Stream.new(
-      stream_b = Stream.new(
-      stream_c = Stream.new(
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea")
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7")
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e")
+      stream_a = Stream.new("Stream A")
+      stream_b = Stream.new("Stream B")
+      stream_c = Stream.new("Stream C")
      repository.append_to_stream([event_1, event_2], stream_a, version_any)
      repository.append_to_stream([event_3], stream_b, version_any)
-      repository.link_to_stream(event_1.event_id, stream_c, version_none)
+      repository.link_to_stream([event_1.event_id], stream_c, version_none)
 
-      expect(repository.streams_of(
-      expect(repository.streams_of(
-      expect(repository.streams_of(
-      expect(repository.streams_of(
+      expect(repository.streams_of("8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea")).to eq [stream_a, stream_c]
+      expect(repository.streams_of("8cee1139-4f96-483a-a175-2b947283c3c7")).to eq [stream_a]
+      expect(repository.streams_of("d345f86d-b903-4d78-803f-38990c078d9e")).to eq [stream_b]
+      expect(repository.streams_of("d10c8fe9-2163-418d-ba47-88c9a1f9391b")).to eq []
    end
 
    specify do
-      e1 = SRecord.new(event_id:
-      e2 = SRecord.new(event_id:
-      e3 = SRecord.new(event_id:
-      stream = Stream.new(
+      e1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea")
+      e2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7")
+      e3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e")
+      stream = Stream.new("Stream A")
      repository.append_to_stream([e1, e2, e3], stream, version_any)
 
      expect(repository.read(specification.with_id([
-
+        "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea"
      ]).read_first.result)).to eq(e1)
      expect(repository.read(specification.with_id([
-
+        "d345f86d-b903-4d78-803f-38990c078d9e"
      ]).read_first.result)).to eq(e3)
      expect(repository.read(specification.with_id([
-
+        "c31b327c-0da1-4178-a3cd-d2f6bb5d0688"
      ]).read_first.result)).to eq(nil)
      expect(repository.read(specification.with_id([
-
-
+        "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea",
+        "d345f86d-b903-4d78-803f-38990c078d9e"
      ]).in_batches.result).to_a[0]).to eq([e1,e3])
-      expect(repository.read(specification.stream(
-
+      expect(repository.read(specification.stream("Stream A").with_id([
+        "8cee1139-4f96-483a-a175-2b947283c3c7"
      ]).read_first.result)).to eq(e2)
-      expect(repository.read(specification.stream(
-
+      expect(repository.read(specification.stream("Stream B").with_id([
+        "8cee1139-4f96-483a-a175-2b947283c3c7"
      ]).read_first.result)).to eq(nil)
-      expect(repository.read(specification.stream(
-
+      expect(repository.read(specification.stream("Stream B").with_id([
+        "c31b327c-0da1-4178-a3cd-d2f6bb5d0688"
      ]).read_first.result)).to eq(nil)
      expect(repository.read(specification.with_id([]).result).to_a).to eq([])
    end
@@ -1229,7 +1262,7 @@ module RubyEventStore
|
|
1229
1262
|
e1 = SRecord.new(event_type: Type1.to_s)
|
1230
1263
|
e2 = SRecord.new(event_type: Type2.to_s)
|
1231
1264
|
e3 = SRecord.new(event_type: Type1.to_s)
|
1232
|
-
stream = Stream.new(
|
1265
|
+
stream = Stream.new("Stream A")
|
1233
1266
|
repository.append_to_stream([e1, e2, e3], stream, version_any)
|
1234
1267
|
|
1235
1268
|
expect(repository.read(specification.of_type([Type1]).result).to_a).to eq([e1,e3])
|
@@ -1239,8 +1272,8 @@ module RubyEventStore
|
|
1239
1272
|
end
|
1240
1273
|
|
1241
1274
|
specify do
|
1242
|
-
stream = Stream.new(
|
1243
|
-
dummy = Stream.new(
|
1275
|
+
stream = Stream.new("Stream A")
|
1276
|
+
dummy = Stream.new("Dummy")
|
1244
1277
|
|
1245
1278
|
expect(repository.count(specification.result)).to eq(0)
|
1246
1279
|
(1..3).each do
|
@@ -1258,8 +1291,8 @@ module RubyEventStore
       expect(repository.count(specification.with_id([not_existing_uuid]).result)).to eq(0)
 
       expect(repository.count(specification.stream(stream.name).result)).to eq(3)
-      expect(repository.count(specification.stream(
-      expect(repository.count(specification.stream(
+      expect(repository.count(specification.stream("Dummy").result)).to eq(1)
+      expect(repository.count(specification.stream("not-existing-stream").result)).to eq(0)
 
       repository.append_to_stream([SRecord.new(event_type: Type1.to_s)], dummy, version_any)
       expect(repository.count(specification.from(event_id).result)).to eq(1)
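The `count` expectations above have a client-level counterpart as well. A small sketch under the same assumptions as before (made-up event class and stream names, in-memory repository):

    require "ruby_event_store"

    client = RubyEventStore::Client.new(repository: RubyEventStore::InMemoryRepository.new)
    OrderPlaced = Class.new(RubyEventStore::Event)

    3.times { client.append(OrderPlaced.new, stream_name: "Order$42") }
    client.append(OrderPlaced.new, stream_name: "Dummy")

    client.read.count                          # => 4, every event in the store
    client.read.stream("Order$42").count       # => 3, one stream only
    client.read.stream("missing").count        # => 0
    client.read.of_type([OrderPlaced]).count   # => 4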
@@ -1279,100 +1312,100 @@ module RubyEventStore
       expect(repository.count(specification.stream(stream.name).of_type([Type3]).result)).to eq(0)
     end
 
-    specify
+    specify "timestamp precision" do
       time = Time.utc(2020, 9, 11, 12, 26, 0, 123456)
-      repository.append_to_stream(SRecord.new(timestamp: time), stream, version_none)
+      repository.append_to_stream([SRecord.new(timestamp: time)], stream, version_none)
       event = read_events_forward(repository, count: 1).first
 
       expect(event.timestamp).to eq(time)
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records older than specified date in stream" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
-      expect(repository.read(specification.stream(
+      expect(repository.read(specification.stream("whatever").older_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records older than or equal to specified date in stream" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
-      expect(repository.read(specification.stream(
+      expect(repository.read(specification.stream("whatever").older_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1, event_2])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records newer than specified date in stream" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
-      expect(repository.read(specification.stream(
+      expect(repository.read(specification.stream("whatever").newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_3])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records newer than or equal to specified date in stream" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
-      expect(repository.read(specification.stream(
+      expect(repository.read(specification.stream("whatever").newer_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_2, event_3])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records older than specified date" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
       expect(repository.read(specification.older_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records older than or equal to specified date" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
       expect(repository.read(specification.older_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1, event_2])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records newer than specified date" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
       expect(repository.read(specification.newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_3])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records newer than or equal to specified date" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
       expect(repository.read(specification.newer_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_2, event_3])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records from disjoint periods" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
       expect(repository.read(specification.older_than(Time.utc(2020, 1, 2)).newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([])
     end
 
-    specify
-      event_1 = SRecord.new(event_id:
-      event_2 = SRecord.new(event_id:
-      event_3 = SRecord.new(event_id:
-      repository.append_to_stream([event_1, event_2, event_3], Stream.new(
+    specify "fetching records within time range" do
+      event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
 
       expect(repository.read(specification.between(Time.utc(2020, 1, 1)...Time.utc(2020, 1, 3)).result).to_a).to eq([event_1, event_2])
     end
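The block of named specs above fixes the behaviour of the time-window filters: `older_than`, `older_than_or_equal`, `newer_than`, `newer_than_or_equal` and `between`. A sketch of the same surface through the client reader; the event class and cutoff are illustrative only, and unlike the lint (which sets record timestamps explicitly) a plain append stamps events with the current time:

    require "ruby_event_store"

    client = RubyEventStore::Client.new(repository: RubyEventStore::InMemoryRepository.new)
    OrderPlaced = Class.new(RubyEventStore::Event)
    client.append(OrderPlaced.new, stream_name: "whatever")

    cutoff = Time.utc(2020, 1, 2)

    client.read.older_than(cutoff).to_a                      # events strictly before the cutoff
    client.read.older_than_or_equal(cutoff).to_a             # before or at the cutoff
    client.read.newer_than(cutoff).to_a                      # strictly after the cutoff
    client.read.stream("whatever").newer_than_or_equal(cutoff).to_a
    client.read.between(cutoff...Time.utc(2020, 1, 3)).to_a  # exclusive range end, as in the spec above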
@@ -1386,11 +1419,35 @@ module RubyEventStore
         Stream.new("Dummy"),
         ExpectedVersion.any
       )
-      expect(repository.read(specification.result)
-      expect(repository.read(specification.as_at.result)
-      expect(repository.read(specification.as_at.backward.result)
-      expect(repository.read(specification.as_of.result)
-      expect(repository.read(specification.as_of.backward.result)
+      expect(repository.read(specification.result)).to eq_ids([e1, e2, e3])
+      expect(repository.read(specification.as_at.result)).to eq_ids([e1, e3, e2])
+      expect(repository.read(specification.as_at.backward.result)).to eq_ids([e2, e3, e1])
+      expect(repository.read(specification.as_of.result)).to eq_ids([e3, e2, e1])
+      expect(repository.read(specification.as_of.backward.result)).to eq_ids([e1, e2, e3])
+    end
+
+    specify "time order is respected with batches" do
+      repository.append_to_stream([
+        SRecord.new(event_id: e1 = SecureRandom.uuid, timestamp: Time.new(2020,1,1), valid_at: Time.new(2020,1,9)),
+        SRecord.new(event_id: e2 = SecureRandom.uuid, timestamp: Time.new(2020,1,3), valid_at: Time.new(2020,1,6)),
+        SRecord.new(event_id: e3 = SecureRandom.uuid, timestamp: Time.new(2020,1,2), valid_at: Time.new(2020,1,3)),
+      ],
+        Stream.new("Dummy"),
+        ExpectedVersion.any
+      )
+      expect(repository.read(specification.in_batches.result).to_a.flatten).to eq_ids([e1, e2, e3])
+      expect(repository.read(specification.in_batches.as_at.result).to_a.flatten).to eq_ids([e1, e3, e2])
+      expect(repository.read(specification.in_batches.as_at.backward.result).to_a.flatten).to eq_ids([e2, e3, e1])
+      expect(repository.read(specification.in_batches.as_of.result).to_a.flatten).to eq_ids([e3, e2, e1])
+      expect(repository.read(specification.in_batches.as_of.backward.result).to_a.flatten).to eq_ids([e1, e2, e3])
+    end
+  end
+
+  ::RSpec::Matchers.define :eq_ids do |expected_ids|
+    match do |enum|
+      @actual = enum.map(&:event_id)
+      expected_ids == @actual
     end
+    diffable
   end
 end
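The last hunk pins down the two bi-temporal orderings and extracts an `eq_ids` matcher for them. With the sample records above, `as_at` returns events ordered by `timestamp` (append time: e1, e3, e2), while `as_of` orders by `valid_at` (validity time: e3, e2, e1); both are reversible with `backward` and preserved by `in_batches`. A stand-alone illustration of what the new matcher asserts; `FakeRecord` is a made-up stand-in for `SRecord`, not part of the gem:

    require "rspec/autorun"

    ::RSpec::Matchers.define :eq_ids do |expected_ids|
      match do |enum|
        @actual = enum.map(&:event_id)
        expected_ids == @actual
      end
      diffable
    end

    FakeRecord = Struct.new(:event_id)

    RSpec.describe "eq_ids" do
      specify "compares event ids only, in order" do
        records = [FakeRecord.new("a"), FakeRecord.new("b")]
        expect(records).to eq_ids(%w[a b])
        expect(records).not_to eq_ids(%w[b a])
      end
    end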