ruby_event_store 1.3.0 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +3 -1
- data/lib/ruby_event_store.rb +43 -47
- data/lib/ruby_event_store/broker.rb +3 -3
- data/lib/ruby_event_store/client.rb +51 -23
- data/lib/ruby_event_store/composed_dispatcher.rb +2 -2
- data/lib/ruby_event_store/constants.rb +1 -0
- data/lib/ruby_event_store/errors.rb +0 -1
- data/lib/ruby_event_store/event.rb +8 -1
- data/lib/ruby_event_store/immediate_async_dispatcher.rb +2 -2
- data/lib/ruby_event_store/in_memory_repository.rb +98 -59
- data/lib/ruby_event_store/instrumented_dispatcher.rb +2 -2
- data/lib/ruby_event_store/instrumented_repository.rb +3 -3
- data/lib/ruby_event_store/mappers/default.rb +3 -8
- data/lib/ruby_event_store/mappers/encryption_mapper.rb +3 -4
- data/lib/ruby_event_store/mappers/instrumented_mapper.rb +4 -4
- data/lib/ruby_event_store/mappers/json_mapper.rb +7 -7
- data/lib/ruby_event_store/mappers/pipeline.rb +2 -5
- data/lib/ruby_event_store/mappers/pipeline_mapper.rb +2 -2
- data/lib/ruby_event_store/mappers/transformation/domain_event.rb +16 -8
- data/lib/ruby_event_store/mappers/transformation/encryption.rb +20 -12
- data/lib/ruby_event_store/mappers/transformation/event_class_remapper.rb +11 -4
- data/lib/ruby_event_store/mappers/transformation/stringify_metadata_keys.rb +12 -7
- data/lib/ruby_event_store/mappers/transformation/symbolize_metadata_keys.rb +12 -7
- data/lib/ruby_event_store/mappers/transformation/upcast.rb +37 -0
- data/lib/ruby_event_store/null.rb +13 -0
- data/lib/ruby_event_store/projection.rb +2 -13
- data/lib/ruby_event_store/record.rb +68 -0
- data/lib/ruby_event_store/serialized_record.rb +23 -4
- data/lib/ruby_event_store/spec/broker_lint.rb +9 -9
- data/lib/ruby_event_store/spec/event_repository_lint.rb +288 -105
- data/lib/ruby_event_store/spec/mapper_lint.rb +6 -6
- data/lib/ruby_event_store/spec/subscriptions_lint.rb +14 -0
- data/lib/ruby_event_store/specification.rb +100 -7
- data/lib/ruby_event_store/specification_reader.rb +2 -2
- data/lib/ruby_event_store/specification_result.rb +86 -2
- data/lib/ruby_event_store/subscriptions.rb +23 -8
- data/lib/ruby_event_store/transform_keys.rb +5 -5
- data/lib/ruby_event_store/version.rb +1 -1
- metadata +15 -21
- data/CHANGELOG.md +0 -93
- data/Gemfile +0 -11
- data/Makefile +0 -22
- data/lib/ruby_event_store/mappers/protobuf.rb +0 -24
- data/lib/ruby_event_store/mappers/transformation/item.rb +0 -56
- data/lib/ruby_event_store/mappers/transformation/proto_event.rb +0 -17
- data/lib/ruby_event_store/mappers/transformation/protobuf_encoder.rb +0 -30
- data/lib/ruby_event_store/mappers/transformation/protobuf_nested_struct_metadata.rb +0 -30
- data/lib/ruby_event_store/mappers/transformation/serialization.rb +0 -34
- data/lib/ruby_event_store/mappers/transformation/serialized_record.rb +0 -27
- data/ruby_event_store.gemspec +0 -29
--- 1.3.0: data/lib/ruby_event_store/serialized_record.rb
+++ 2.2.0: data/lib/ruby_event_store/serialized_record.rb
@@ -3,18 +3,20 @@
 module RubyEventStore
   class SerializedRecord
     StringsRequired = Class.new(StandardError)
-    def initialize(event_id:, data:, metadata:, event_type:)
+    def initialize(event_id:, data:, metadata:, event_type:, timestamp:, valid_at:)
       raise StringsRequired unless [event_id, event_type].all? { |v| v.instance_of?(String) }
       @event_id = event_id
       @data = data
       @metadata = metadata
       @event_type = event_type
+      @timestamp = timestamp
+      @valid_at = valid_at
       freeze
     end

-    attr_reader :event_id, :data, :metadata, :event_type
+    attr_reader :event_id, :data, :metadata, :event_type, :timestamp, :valid_at

-    BIG_VALUE =
+    BIG_VALUE = 0b110011100100000010010010110011101011110101010101001100111110111
     def hash
       [
         self.class,
@@ -22,6 +24,8 @@ module RubyEventStore
         data,
         metadata,
         event_type,
+        timestamp,
+        valid_at,
       ].hash ^ BIG_VALUE
     end

@@ -30,7 +34,9 @@ module RubyEventStore
       other.event_id.eql?(event_id) &&
         other.data.eql?(data) &&
         other.metadata.eql?(metadata) &&
-        other.event_type.eql?(event_type)
+        other.event_type.eql?(event_type) &&
+        other.timestamp.eql?(timestamp) &&
+        other.valid_at.eql?(valid_at)
     end

     def to_h
@@ -39,9 +45,22 @@ module RubyEventStore
         data: data,
         metadata: metadata,
         event_type: event_type,
+        timestamp: timestamp,
+        valid_at: valid_at,
       }
     end

+    def deserialize(serializer)
+      Record.new(
+        event_id: event_id,
+        event_type: event_type,
+        data: serializer.load(data),
+        metadata: serializer.load(metadata),
+        timestamp: Time.iso8601(timestamp),
+        valid_at: Time.iso8601(valid_at),
+      )
+    end
+
     alias_method :eql?, :==
   end
 end
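The new `deserialize(serializer)` method rebuilds a `Record` (see `record.rb +68` above) from the stored strings and parses the two new ISO-8601 time fields. A minimal round-trip sketch, assuming Ruby's built-in `JSON` module as the serializer (any object responding to `dump`/`load` works); the event type and values are illustrative only:

```ruby
require "json"
require "time"
require "securerandom"
require "ruby_event_store"

# Build a SerializedRecord with the new timestamp/valid_at fields
# (stored as ISO-8601 strings) and turn it back into a Record.
serialized = RubyEventStore::SerializedRecord.new(
  event_id:   SecureRandom.uuid,
  event_type: "OrderPlaced",
  data:       JSON.dump({ "order_id" => 3 }),
  metadata:   JSON.dump({}),
  timestamp:  "2020-09-11T12:26:00.123456Z",
  valid_at:   "2020-09-11T12:26:00.123456Z"
)

record = serialized.deserialize(JSON)
record.data      # => {"order_id"=>3}
record.timestamp # => 2020-09-11 12:26:00.123456 UTC (a Time instance)
```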
--- 1.3.0: data/lib/ruby_event_store/spec/broker_lint.rb
+++ 2.2.0: data/lib/ruby_event_store/spec/broker_lint.rb
@@ -1,6 +1,6 @@
 RSpec.shared_examples :broker do |broker_klass|
   let(:event) { instance_double(::RubyEventStore::Event, event_type: 'EventType') }
-  let(:
+  let(:record) { instance_double(::RubyEventStore::Record) }
   let(:handler) { HandlerClass.new }
   let(:subscriptions) { ::RubyEventStore::Subscriptions.new }
   let(:dispatcher) { ::RubyEventStore::Dispatcher.new }
@@ -9,26 +9,26 @@ RSpec.shared_examples :broker do |broker_klass|
   specify "no dispatch when no subscriptions" do
     expect(subscriptions).to receive(:all_for).with('EventType').and_return([])
     expect(dispatcher).not_to receive(:call)
-    broker.call(event,
+    broker.call(event, record)
   end

   specify "calls subscription" do
     expect(subscriptions).to receive(:all_for).with('EventType').and_return([handler])
-    expect(dispatcher).to receive(:call).with(handler, event,
-    broker.call(event,
+    expect(dispatcher).to receive(:call).with(handler, event, record)
+    broker.call(event, record)
   end

   specify "calls subscribed class" do
     expect(subscriptions).to receive(:all_for).with('EventType').and_return([HandlerClass])
-    expect(dispatcher).to receive(:call).with(HandlerClass, event,
-    broker.call(event,
+    expect(dispatcher).to receive(:call).with(HandlerClass, event, record)
+    broker.call(event, record)
   end

   specify "calls all subscriptions" do
     expect(subscriptions).to receive(:all_for).with('EventType').and_return([handler, HandlerClass])
-    expect(dispatcher).to receive(:call).with(handler, event,
-    expect(dispatcher).to receive(:call).with(HandlerClass, event,
-    broker.call(event,
+    expect(dispatcher).to receive(:call).with(handler, event, record)
+    expect(dispatcher).to receive(:call).with(HandlerClass, event, record)
+    broker.call(event, record)
   end

   specify 'raise error when no subscriber' do
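The broker lint now hands the dispatcher a `RubyEventStore::Record` (rather than a `SerializedRecord`) as the third argument. A hypothetical custom dispatcher satisfying the contract exercised above could look like this; it is a sketch, not part of the gem:

```ruby
# Hypothetical dispatcher: the broker passes the subscriber, the
# deserialized event, and the Record the event was built from.
class LoggingDispatcher
  def call(subscriber, event, record)
    puts "dispatching #{record.event_type} (#{record.event_id})"
    subscriber = subscriber.new if Class === subscriber
    subscriber.call(event)
  end

  # The other half of the dispatcher duck type checked when subscribing.
  def verify(subscriber)
    subscriber.respond_to?(:call) || Class === subscriber
  end
end
```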
--- 1.3.0: data/lib/ruby_event_store/spec/event_repository_lint.rb
+++ 2.2.0: data/lib/ruby_event_store/spec/event_repository_lint.rb
@@ -3,15 +3,19 @@ module RubyEventStore
   class SRecord
     def self.new(
       event_id: SecureRandom.uuid,
-      data:
-      metadata:
-      event_type: 'SRecordTestEvent'
+      data: {},
+      metadata: {},
+      event_type: 'SRecordTestEvent',
+      timestamp: Time.new.utc,
+      valid_at: nil
     )
-
+      Record.new(
         event_id: event_id,
         data: data,
         metadata: metadata,
         event_type: event_type,
+        timestamp: timestamp.round(TIMESTAMP_PRECISION),
+        valid_at: (valid_at || timestamp).round(TIMESTAMP_PRECISION),
       )
     end
   end
@@ -22,13 +26,45 @@ module RubyEventStore
   Type2 = Class.new(RubyEventStore::Event)
   # @private
   Type3 = Class.new(RubyEventStore::Event)
+
+  # @private
+  class EventRepositoryHelper
+    def supports_concurrent_auto?
+      true
+    end
+
+    def supports_concurrent_any?
+      true
+    end
+
+    def supports_binary?
+      true
+    end
+
+    def supports_upsert?
+      true
+    end
+
+    def has_connection_pooling?
+      false
+    end
+
+    def connection_pool_size
+    end
+
+    def cleanup_concurrency_test
+    end
+
+    def rescuable_concurrency_test_errors
+      []
+    end
+  end
 end

 module RubyEventStore
-  RSpec.shared_examples :event_repository do
-    let(:
-    let(:
-    let(:specification) { Specification.new(SpecificationReader.new(repository, mapper)) }
+  ::RSpec.shared_examples :event_repository do
+    let(:helper) { EventRepositoryHelper.new }
+    let(:specification) { Specification.new(SpecificationReader.new(repository, Mappers::NullMapper.new)) }
     let(:global_stream) { Stream.new(GLOBAL_STREAM) }
     let(:stream) { Stream.new(SecureRandom.uuid) }
     let(:stream_flow) { Stream.new('flow') }
@@ -42,6 +78,11 @@ module RubyEventStore
     let(:version_2) { ExpectedVersion.new(2) }
     let(:version_3) { ExpectedVersion.new(3) }

+    def verify_conncurency_assumptions
+      return unless helper.has_connection_pooling?
+      expect(helper.connection_pool_size).to eq(5)
+    end
+
     def read_events(scope, stream = nil, from: nil, to: nil, count: nil)
       scope = scope.stream(stream.name) if stream
       scope = scope.from(from) if from
@@ -64,8 +105,8 @@ module RubyEventStore

     specify 'append_to_stream returns self' do
       repository
-        .append_to_stream(event = SRecord.new, stream, version_none)
-        .append_to_stream(event = SRecord.new, stream, version_0)
+        .append_to_stream([event = SRecord.new], stream, version_none)
+        .append_to_stream([event = SRecord.new], stream, version_0)
     end

     specify 'link_to_stream returns self' do
@@ -73,12 +114,12 @@ module RubyEventStore
       event1 = SRecord.new
       repository
         .append_to_stream([event0, event1], stream, version_none)
-        .link_to_stream(event0.event_id, stream_flow, version_none)
-        .link_to_stream(event1.event_id, stream_flow, version_0)
+        .link_to_stream([event0.event_id], stream_flow, version_none)
+        .link_to_stream([event1.event_id], stream_flow, version_0)
     end

     specify 'adds an initial event to a new stream' do
-      repository.append_to_stream(event = SRecord.new, stream, version_none)
+      repository.append_to_stream([event = SRecord.new], stream, version_none)
       expect(read_events_forward(repository).first).to eq(event)
       expect(read_events_forward(repository, stream).first).to eq(event)
       expect(read_events_forward(repository, stream_other)).to be_empty
@@ -86,8 +127,8 @@ module RubyEventStore

     specify 'links an initial event to a new stream' do
       repository
-        .append_to_stream(event = SRecord.new, stream, version_none)
-        .link_to_stream(event.event_id, stream_flow, version_none)
+        .append_to_stream([event = SRecord.new], stream, version_none)
+        .link_to_stream([event.event_id], stream_flow, version_none)

       expect(read_events_forward(repository, count: 1).first).to eq(event)
       expect(read_events_forward(repository, stream).first).to eq(event)
@@ -393,7 +434,7 @@ module RubyEventStore
     end

     specify 'unlimited concurrency for :any - everything should succeed', timeout: 10, mutant: false do
-      skip unless
+      skip unless helper.supports_concurrent_any?
       verify_conncurency_assumptions
       begin
         concurrency_level = 4
@@ -426,12 +467,12 @@ module RubyEventStore
         end
         expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
       ensure
-        cleanup_concurrency_test
+        helper.cleanup_concurrency_test
       end
     end

     specify 'unlimited concurrency for :any - everything should succeed when linking', timeout: 10, mutant: false do
-      skip unless
+      skip unless helper.supports_concurrent_any?
       verify_conncurency_assumptions
       begin
         concurrency_level = 4
@@ -453,7 +494,7 @@ module RubyEventStore
       begin
         100.times do |j|
           eid = "0000000#{i}-#{sprintf("%04d", j)}-0000-0000-000000000000"
-          repository.link_to_stream(eid, stream_flow, version_any)
+          repository.link_to_stream([eid], stream_flow, version_any)
         end
       rescue WrongExpectedEventVersion
         fail_occurred = true
@@ -471,12 +512,12 @@ module RubyEventStore
         end
         expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
       ensure
-        cleanup_concurrency_test
+        helper.cleanup_concurrency_test
       end
     end

     specify 'limited concurrency for :auto - some operations will fail without outside lock, stream is ordered', mutant: false do
-      skip unless
+      skip unless helper.supports_concurrent_auto?
       verify_conncurency_assumptions
       begin
         concurrency_level = 4
@@ -494,7 +535,7 @@ module RubyEventStore
             SRecord.new(event_id: eid),
           ], stream, version_auto)
           sleep(rand(concurrency_level) / 1000.0)
-        rescue WrongExpectedEventVersion, *rescuable_concurrency_test_errors
+        rescue WrongExpectedEventVersion, *helper.rescuable_concurrency_test_errors
           fail_occurred +=1
         end
       end
@@ -510,14 +551,14 @@ module RubyEventStore
           ev.event_id.start_with?("0-")
         end
         expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
-        additional_limited_concurrency_for_auto_check
+        additional_limited_concurrency_for_auto_check if defined? additional_limited_concurrency_for_auto_check
       ensure
-        cleanup_concurrency_test
+        helper.cleanup_concurrency_test
       end
     end

     specify 'limited concurrency for :auto - some operations will fail without outside lock, stream is ordered', mutant: false do
-      skip unless
+      skip unless helper.supports_concurrent_auto?
       verify_conncurency_assumptions
       begin
         concurrency_level = 4
@@ -540,9 +581,9 @@ module RubyEventStore
         100.times do |j|
           begin
             eid = "0000000#{i}-#{sprintf("%04d", j)}-0000-0000-000000000000"
-            repository.link_to_stream(eid, stream, version_auto)
+            repository.link_to_stream([eid], stream, version_auto)
             sleep(rand(concurrency_level) / 1000.0)
-          rescue WrongExpectedEventVersion, *rescuable_concurrency_test_errors
+          rescue WrongExpectedEventVersion, *helper.rescuable_concurrency_test_errors
             fail_occurred +=1
           end
         end
@@ -558,51 +599,51 @@ module RubyEventStore
           ev.event_id.start_with?("0-")
         end
         expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
-        additional_limited_concurrency_for_auto_check
+        additional_limited_concurrency_for_auto_check if defined? additional_limited_concurrency_for_auto_check
       ensure
-        cleanup_concurrency_test
+        helper.cleanup_concurrency_test
      end
    end

     it 'appended event is stored in given stream' do
       expected_event = SRecord.new
-      repository.append_to_stream(expected_event, stream, version_any)
+      repository.append_to_stream([expected_event], stream, version_any)
       expect(read_events_forward(repository, count: 1).first).to eq(expected_event)
       expect(read_events_forward(repository, stream).first).to eq(expected_event)
       expect(read_events_forward(repository, stream_other)).to be_empty
     end

     it 'data attributes are retrieved' do
-      event = SRecord.new(data:
-      repository.append_to_stream(event, stream, version_any)
+      event = SRecord.new(data: { "order_id" => 3 })
+      repository.append_to_stream([event], stream, version_any)
       retrieved_event = read_events_forward(repository, count: 1).first
-      expect(retrieved_event.data).to eq(
+      expect(retrieved_event.data).to eq({ "order_id" => 3 })
     end

     it 'metadata attributes are retrieved' do
-      event = SRecord.new(metadata:
-      repository.append_to_stream(event, stream, version_any)
+      event = SRecord.new(metadata: { "request_id" => 3 })
+      repository.append_to_stream([event], stream, version_any)
       retrieved_event = read_events_forward(repository, count: 1).first
-      expect(retrieved_event.metadata).to eq(
+      expect(retrieved_event.metadata).to eq({ "request_id" => 3 })
     end

     it 'data and metadata attributes are retrieved when linking' do
       event = SRecord.new(
-        data:
-        metadata:
+        data: { "order_id" => 3 },
+        metadata: { "request_id" => 4},
       )
       repository
-        .append_to_stream(event, stream, version_any)
-        .link_to_stream(event.event_id, stream_flow, version_any)
+        .append_to_stream([event], stream, version_any)
+        .link_to_stream([event.event_id], stream_flow, version_any)
       retrieved_event = read_events_forward(repository, stream_flow).first
-      expect(retrieved_event.metadata).to eq(
-      expect(retrieved_event.data).to eq(
+      expect(retrieved_event.metadata).to eq({ "request_id" => 4 })
+      expect(retrieved_event.data).to eq({ "order_id" => 3 })
       expect(event).to eq(retrieved_event)
     end

     it 'does not have deleted streams' do
-      repository.append_to_stream(e1 = SRecord.new, stream, version_none)
-      repository.append_to_stream(e2 = SRecord.new, stream_other, version_none)
+      repository.append_to_stream([e1 = SRecord.new], stream, version_none)
+      repository.append_to_stream([e2 = SRecord.new], stream_other, version_none)

       repository.delete_stream(stream)
       expect(read_events_forward(repository, stream)).to be_empty
@@ -612,8 +653,8 @@ module RubyEventStore

     it 'does not have deleted streams with linked events' do
       repository
-        .append_to_stream(e1 = SRecord.new, stream, version_none)
-        .link_to_stream(e1.event_id, stream_flow, version_none)
+        .append_to_stream([e1 = SRecord.new], stream, version_none)
+        .link_to_stream([e1.event_id], stream_flow, version_none)

       repository.delete_stream(stream_flow)
       expect(read_events_forward(repository, stream_flow)).to be_empty
@@ -622,7 +663,7 @@ module RubyEventStore

     it 'has or has not domain event' do
       just_an_id = 'd5c134c2-db65-4e87-b6ea-d196f8f1a292'
-      repository.append_to_stream(SRecord.new(event_id: just_an_id), stream, version_none)
+      repository.append_to_stream([SRecord.new(event_id: just_an_id)], stream, version_none)

       expect(repository.has_event?(just_an_id)).to be_truthy
       expect(repository.has_event?(just_an_id.clone)).to be_truthy
@@ -634,8 +675,8 @@ module RubyEventStore
     end

     it 'knows last event in stream' do
-      repository.append_to_stream(a =SRecord.new(event_id: '00000000-0000-0000-0000-000000000001'), stream, version_none)
-      repository.append_to_stream(b = SRecord.new(event_id: '00000000-0000-0000-0000-000000000002'), stream, version_0)
+      repository.append_to_stream([a =SRecord.new(event_id: '00000000-0000-0000-0000-000000000001')], stream, version_none)
+      repository.append_to_stream([b = SRecord.new(event_id: '00000000-0000-0000-0000-000000000002')], stream, version_0)

       expect(repository.last_stream_event(stream)).to eq(b)
       expect(repository.last_stream_event(stream_other)).to be_nil
@@ -665,11 +706,11 @@ module RubyEventStore
         ab60114c-011d-4d58-ab31-7ba65d99975e
         868cac42-3d19-4b39-84e8-cd32d65c2445
       ].map { |id| SRecord.new(event_id: id) }
-      repository.append_to_stream(SRecord.new, stream_other, version_none)
+      repository.append_to_stream([SRecord.new], stream_other, version_none)
       events.each.with_index do |event, index|
-        repository.append_to_stream(event, stream, ExpectedVersion.new(index - 1))
+        repository.append_to_stream([event], stream, ExpectedVersion.new(index - 1))
       end
-      repository.append_to_stream(SRecord.new, stream_other, version_0)
+      repository.append_to_stream([SRecord.new], stream_other, version_0)

       expect(read_events_forward(repository, stream, count: 3)).to eq(events.first(3))
       expect(read_events_forward(repository, stream, count: 100)).to eq(events)
@@ -700,13 +741,13 @@ module RubyEventStore
         ab60114c-011d-4d58-ab31-7ba65d99975e
         868cac42-3d19-4b39-84e8-cd32d65c2445
       ].map { |id| SRecord.new(event_id: id) }
-      repository.append_to_stream(SRecord.new, stream_other, version_none)
+      repository.append_to_stream([SRecord.new], stream_other, version_none)
       events.each.with_index do |event, index|
         repository
-          .append_to_stream(event, stream, ExpectedVersion.new(index - 1))
-          .link_to_stream(event.event_id, stream_flow, ExpectedVersion.new(index - 1))
+          .append_to_stream([event], stream, ExpectedVersion.new(index - 1))
+          .link_to_stream([event.event_id], stream_flow, ExpectedVersion.new(index - 1))
       end
-      repository.append_to_stream(SRecord.new, stream_other, version_0)
+      repository.append_to_stream([SRecord.new], stream_other, version_0)

       expect(read_events_forward(repository, stream_flow, count: 3)).to eq(events.first(3))
       expect(read_events_forward(repository, stream_flow, count: 100)).to eq(events)
@@ -727,11 +768,11 @@ module RubyEventStore
       s1 = stream
       s2 = stream_other
       repository
-        .append_to_stream(a = SRecord.new(event_id: '7010d298-ab69-4bb1-9251-f3466b5d1282'), s1, version_none)
-        .append_to_stream(b = SRecord.new(event_id: '34f88aca-aaba-4ca0-9256-8017b47528c5'), s2, version_none)
-        .append_to_stream(c = SRecord.new(event_id: '8e61c864-ceae-4684-8726-97c34eb8fc4f'), s1, version_0)
-        .append_to_stream(d = SRecord.new(event_id: '30963ed9-6349-450b-ac9b-8ea50115b3bd'), s2, version_0)
-        .append_to_stream(e = SRecord.new(event_id: '5bdc58b7-e8a7-4621-afd6-ccb828d72457'), s2, version_1)
+        .append_to_stream([a = SRecord.new(event_id: '7010d298-ab69-4bb1-9251-f3466b5d1282')], s1, version_none)
+        .append_to_stream([b = SRecord.new(event_id: '34f88aca-aaba-4ca0-9256-8017b47528c5')], s2, version_none)
+        .append_to_stream([c = SRecord.new(event_id: '8e61c864-ceae-4684-8726-97c34eb8fc4f')], s1, version_0)
+        .append_to_stream([d = SRecord.new(event_id: '30963ed9-6349-450b-ac9b-8ea50115b3bd')], s2, version_0)
+        .append_to_stream([e = SRecord.new(event_id: '5bdc58b7-e8a7-4621-afd6-ccb828d72457')], s2, version_1)

       expect(read_events_forward(repository, s1)).to eq [a,c]
       expect(read_events_backward(repository, s1)).to eq [c,a]
@@ -740,16 +781,16 @@ module RubyEventStore
     it 'reads all stream linked events forward & backward' do
       s1, fs1, fs2 = stream, stream_flow, stream_other
       repository
-        .append_to_stream(a = SRecord.new(event_id: '7010d298-ab69-4bb1-9251-f3466b5d1282'), s1, version_none)
-        .append_to_stream(b = SRecord.new(event_id: '34f88aca-aaba-4ca0-9256-8017b47528c5'), s1, version_0)
-        .append_to_stream(c = SRecord.new(event_id: '8e61c864-ceae-4684-8726-97c34eb8fc4f'), s1, version_1)
-        .append_to_stream(d = SRecord.new(event_id: '30963ed9-6349-450b-ac9b-8ea50115b3bd'), s1, version_2)
-        .append_to_stream(e = SRecord.new(event_id: '5bdc58b7-e8a7-4621-afd6-ccb828d72457'), s1, version_3)
-        .link_to_stream('7010d298-ab69-4bb1-9251-f3466b5d1282', fs1, version_none)
-        .link_to_stream('34f88aca-aaba-4ca0-9256-8017b47528c5', fs2, version_none)
-        .link_to_stream('8e61c864-ceae-4684-8726-97c34eb8fc4f', fs1, version_0)
-        .link_to_stream('30963ed9-6349-450b-ac9b-8ea50115b3bd', fs2, version_0)
-        .link_to_stream('5bdc58b7-e8a7-4621-afd6-ccb828d72457', fs2, version_1)
+        .append_to_stream([a = SRecord.new(event_id: '7010d298-ab69-4bb1-9251-f3466b5d1282')], s1, version_none)
+        .append_to_stream([b = SRecord.new(event_id: '34f88aca-aaba-4ca0-9256-8017b47528c5')], s1, version_0)
+        .append_to_stream([c = SRecord.new(event_id: '8e61c864-ceae-4684-8726-97c34eb8fc4f')], s1, version_1)
+        .append_to_stream([d = SRecord.new(event_id: '30963ed9-6349-450b-ac9b-8ea50115b3bd')], s1, version_2)
+        .append_to_stream([e = SRecord.new(event_id: '5bdc58b7-e8a7-4621-afd6-ccb828d72457')], s1, version_3)
+        .link_to_stream(['7010d298-ab69-4bb1-9251-f3466b5d1282'], fs1, version_none)
+        .link_to_stream(['34f88aca-aaba-4ca0-9256-8017b47528c5'], fs2, version_none)
+        .link_to_stream(['8e61c864-ceae-4684-8726-97c34eb8fc4f'], fs1, version_0)
+        .link_to_stream(['30963ed9-6349-450b-ac9b-8ea50115b3bd'], fs2, version_0)
+        .link_to_stream(['5bdc58b7-e8a7-4621-afd6-ccb828d72457'], fs2, version_1)

       expect(read_events_forward(repository, fs1)).to eq [a,c]
       expect(read_events_backward(repository, fs1)).to eq [c,a]
@@ -769,7 +810,7 @@ module RubyEventStore
         868cac42-3d19-4b39-84e8-cd32d65c2445
       ].map { |id| SRecord.new(event_id: id) }
       events.each do |ev|
-        repository.append_to_stream(ev, Stream.new(SecureRandom.uuid), version_none)
+        repository.append_to_stream([ev], Stream.new(SecureRandom.uuid), version_none)
       end

       expect(read_events_forward(repository, count: 3)).to eq(events.first(3))
@@ -802,8 +843,8 @@ module RubyEventStore
       ].map { |id| SRecord.new(event_id: id) }
       events.each do |ev|
         repository
-          .append_to_stream(ev, Stream.new(SecureRandom.uuid), version_none)
-          .link_to_stream(ev.event_id, Stream.new(SecureRandom.uuid), version_none)
+          .append_to_stream([ev], Stream.new(SecureRandom.uuid), version_none)
+          .link_to_stream([ev.event_id], Stream.new(SecureRandom.uuid), version_none)
       end

       expect(read_events_forward(repository, count: 3)).to eq(events.first(3))
@@ -826,8 +867,8 @@ module RubyEventStore
         96c920b1-cdd0-40f4-907c-861b9fff7d02
         56404f79-0ba0-4aa0-8524-dc3436368ca0
       ].map{|id| SRecord.new(event_id: id) }
-      repository.append_to_stream(events.first, stream, version_none)
-      repository.append_to_stream(events.last, stream, version_0)
+      repository.append_to_stream([events.first], stream, version_none)
+      repository.append_to_stream([events.last], stream, version_0)

       expect(read_events_forward(repository, from: "96c920b1-cdd0-40f4-907c-861b9fff7d02")).to eq([events.last])
       expect(read_events_backward(repository, from: "56404f79-0ba0-4aa0-8524-dc3436368ca0")).to eq([events.first])
@@ -842,13 +883,13 @@ module RubyEventStore

     it 'does not allow same event twice in a stream' do
       repository.append_to_stream(
-        SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+        [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
         stream,
         version_none
       )
       expect do
         repository.append_to_stream(
-          SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+          [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
           stream,
           version_0
         )
@@ -857,13 +898,13 @@ module RubyEventStore

     it 'does not allow same event twice' do
       repository.append_to_stream(
-        SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+        [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
         stream,
         version_none
       )
       expect do
         repository.append_to_stream(
-          SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef"),
+          [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
           stream_other,
           version_none
         )
@@ -871,41 +912,39 @@ module RubyEventStore
     end

     it 'does not allow linking same event twice in a stream' do
-      repository.append_to_stream(
-
-
+      repository.append_to_stream(
+        [SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
+        stream,
         version_none
-      ).link_to_stream("a1b49edb-7636-416f-874a-88f94b859bef", stream_flow, version_none)
+      ).link_to_stream(["a1b49edb-7636-416f-874a-88f94b859bef"], stream_flow, version_none)
       expect do
-        repository.link_to_stream("a1b49edb-7636-416f-874a-88f94b859bef", stream_flow, version_0)
+        repository.link_to_stream(["a1b49edb-7636-416f-874a-88f94b859bef"], stream_flow, version_0)
       end.to raise_error(EventDuplicatedInStream)
     end

     it 'allows appending to GLOBAL_STREAM explicitly' do
       event = SRecord.new(event_id: "df8b2ba3-4e2c-4888-8d14-4364855fa80e")
-      repository.append_to_stream(event, global_stream, version_any)
+      repository.append_to_stream([event], global_stream, version_any)

       expect(read_events_forward(repository, count: 10)).to eq([event])
     end

     specify "events not persisted if append failed" do
-      repository.append_to_stream([
-        SRecord.new,
-      ], stream, version_none)
+      repository.append_to_stream([SRecord.new], stream, version_none)

       expect do
-        repository.append_to_stream(
-          SRecord.new(
-
-
-
+        repository.append_to_stream(
+          [SRecord.new(event_id: '9bedf448-e4d0-41a3-a8cd-f94aec7aa763')],
+          stream,
+          version_none
+        )
       end.to raise_error(WrongExpectedEventVersion)
       expect(repository.has_event?('9bedf448-e4d0-41a3-a8cd-f94aec7aa763')).to be_falsey
     end

     specify 'linking non-existent event' do
       expect do
-        repository.link_to_stream('72922e65-1b32-4e97-8023-03ae81dd3a27', stream_flow, version_none)
+        repository.link_to_stream(['72922e65-1b32-4e97-8023-03ae81dd3a27'], stream_flow, version_none)
       end.to raise_error do |err|
         expect(err).to be_a(EventNotFound)
         expect(err.event_id).to eq('72922e65-1b32-4e97-8023-03ae81dd3a27')
@@ -918,14 +957,14 @@ module RubyEventStore
     end

     specify 'can store arbitrary binary data' do
-      skip unless
+      skip unless helper.supports_binary?
       binary = "\xB0"
       expect(binary.valid_encoding?).to eq(false)
       binary.force_encoding("binary")
       expect(binary.valid_encoding?).to eq(true)

       repository.append_to_stream(
-        event = SRecord.new(data: binary, metadata: binary),
+        [event = SRecord.new(data: binary, metadata: binary)],
         stream,
         version_none
       )
@@ -933,6 +972,8 @@ module RubyEventStore

     specify do
       expect(repository.read(specification.in_batches.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_at.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_of.result)).to be_kind_of(Enumerator)
       events = Array.new(10) { SRecord.new }
       repository.append_to_stream(
         events,
@@ -940,6 +981,9 @@ module RubyEventStore
         ExpectedVersion.none
       )
       expect(repository.read(specification.in_batches.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_at.result)).to be_kind_of(Enumerator)
+      expect(repository.read(specification.in_batches.as_of.result)).to be_kind_of(Enumerator)
+
     end

     specify do
@@ -1091,7 +1135,7 @@ module RubyEventStore

     context "#update_messages" do
       specify "changes events" do
-        skip unless
+        skip unless helper.supports_upsert?
         events = Array.new(5) { SRecord.new }
         repository.append_to_stream(
           events[0..2],
@@ -1104,19 +1148,20 @@ module RubyEventStore
           ExpectedVersion.any
         )
         repository.update_messages([
-          a = SRecord.new(event_id: events[0].event_id.clone, data: events[0].data,
-          b = SRecord.new(event_id: events[1].event_id.dup,
-          c = SRecord.new(event_id: events[2].event_id,
-          d = SRecord.new(event_id: events[3].event_id.clone, data: events[3].data,
-          e = SRecord.new(event_id: events[4].event_id.dup,
+          a = SRecord.new(event_id: events[0].event_id.clone, data: events[0].data, metadata: events[0].metadata, event_type: events[0].event_type, timestamp: events[0].timestamp),
+          b = SRecord.new(event_id: events[1].event_id.dup, data: { "test" => 1 }, metadata: events[1].metadata, event_type: events[1].event_type, timestamp: events[1].timestamp),
+          c = SRecord.new(event_id: events[2].event_id, data: events[2].data, metadata: { "test" => 2 }, event_type: events[2].event_type, timestamp: events[2].timestamp),
+          d = SRecord.new(event_id: events[3].event_id.clone, data: events[3].data, metadata: events[3].metadata, event_type: "event_type3", timestamp: events[3].timestamp),
+          e = SRecord.new(event_id: events[4].event_id.dup, data: { "test" => 4 }, metadata: { "test" => 42 }, event_type: "event_type4", timestamp: events[4].timestamp),
         ])
+
         expect(repository.read(specification.result).to_a).to eq([a,b,c,d,e])
         expect(repository.read(specification.stream("whatever").result).to_a).to eq([a,b,c])
         expect(repository.read(specification.stream("elo").result).to_a).to eq([d,e])
       end

       specify "cannot change unexisting event" do
-        skip unless
+        skip unless helper.supports_upsert?
         e = SRecord.new
         expect{ repository.update_messages([e]) }.to raise_error do |err|
           expect(err).to be_a(EventNotFound)
@@ -1124,6 +1169,14 @@ module RubyEventStore
           expect(err.message).to eq("Event not found: #{e.event_id}")
         end
       end
+
+      specify "does not change timestamp" do
+        r = SRecord.new(timestamp: Time.utc(2020, 1, 1))
+        repository.append_to_stream([r], Stream.new("whatever"), ExpectedVersion.any)
+        repository.update_messages([SRecord.new(event_id: r.event_id, timestamp: Time.utc(2020, 1, 20))])
+
+        expect(repository.read(specification.result).first.timestamp).to eq(Time.utc(2020, 1, 1))
+      end
     end

     specify do
@@ -1135,7 +1188,7 @@ module RubyEventStore
       stream_c = Stream.new('Stream C')
       repository.append_to_stream([event_1, event_2], stream_a, version_any)
       repository.append_to_stream([event_3], stream_b, version_any)
-      repository.link_to_stream(event_1.event_id, stream_c, version_none)
+      repository.link_to_stream([event_1.event_id], stream_c, version_none)

       expect(repository.streams_of('8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea')).to eq [stream_a, stream_c]
       expect(repository.streams_of('8cee1139-4f96-483a-a175-2b947283c3c7')).to eq [stream_a]
@@ -1228,5 +1281,135 @@ module RubyEventStore
       expect(repository.count(specification.stream("Dummy").of_type([Type3]).result)).to eq(2)
       expect(repository.count(specification.stream(stream.name).of_type([Type3]).result)).to eq(0)
     end
+
+    specify 'timestamp precision' do
+      time = Time.utc(2020, 9, 11, 12, 26, 0, 123456)
+      repository.append_to_stream([SRecord.new(timestamp: time)], stream, version_none)
+      event = read_events_forward(repository, count: 1).first
+
+      expect(event.timestamp).to eq(time)
+    end
+
+    specify 'fetching records older than specified date in stream' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.stream('whatever').older_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1])
+    end
+
+    specify 'fetching records older than or equal to specified date in stream' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.stream('whatever').older_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1, event_2])
+    end
+
+    specify 'fetching records newer than specified date in stream' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.stream('whatever').newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_3])
+    end
+
+    specify 'fetching records newer than or equal to specified date in stream' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.stream('whatever').newer_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_2, event_3])
+    end
+
+    specify 'fetching records older than specified date' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.older_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1])
+    end
+
+    specify 'fetching records older than or equal to specified date' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.older_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1, event_2])
+    end
+
+    specify 'fetching records newer than specified date' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_3])
+    end
+
+    specify 'fetching records newer than or equal to specified date' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.newer_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_2, event_3])
+    end
+
+    specify 'fetching records from disjoint periods' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.older_than(Time.utc(2020, 1, 2)).newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([])
+    end
+
+    specify 'fetching records within time range' do
+      event_1 = SRecord.new(event_id: '8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea', timestamp: Time.utc(2020, 1, 1))
+      event_2 = SRecord.new(event_id: '8cee1139-4f96-483a-a175-2b947283c3c7', timestamp: Time.utc(2020, 1, 2))
+      event_3 = SRecord.new(event_id: 'd345f86d-b903-4d78-803f-38990c078d9e', timestamp: Time.utc(2020, 1, 3))
+      repository.append_to_stream([event_1, event_2, event_3], Stream.new('whatever'), version_any)
+
+      expect(repository.read(specification.between(Time.utc(2020, 1, 1)...Time.utc(2020, 1, 3)).result).to_a).to eq([event_1, event_2])
+    end
+
+    specify "time order is respected" do
+      repository.append_to_stream([
+          SRecord.new(event_id: e1 = SecureRandom.uuid, timestamp: Time.new(2020,1,1), valid_at: Time.new(2020,1,9)),
+          SRecord.new(event_id: e2 = SecureRandom.uuid, timestamp: Time.new(2020,1,3), valid_at: Time.new(2020,1,6)),
+          SRecord.new(event_id: e3 = SecureRandom.uuid, timestamp: Time.new(2020,1,2), valid_at: Time.new(2020,1,3)),
+        ],
+        Stream.new("Dummy"),
+        ExpectedVersion.any
+      )
+      expect(repository.read(specification.result).map(&:event_id)).to eq [e1, e2, e3]
+      expect(repository.read(specification.as_at.result).map(&:event_id)).to eq [e1, e3, e2]
+      expect(repository.read(specification.as_at.backward.result).map(&:event_id)).to eq [e2, e3, e1]
+      expect(repository.read(specification.as_of.result).map(&:event_id)).to eq [e3, e2, e1]
+      expect(repository.read(specification.as_of.backward.result).map(&:event_id)).to eq [e1, e2, e3]
+    end
+
+    specify "time order is respected with batches" do
+      repository.append_to_stream([
+          SRecord.new(event_id: e1 = SecureRandom.uuid, timestamp: Time.new(2020,1,1), valid_at: Time.new(2020,1,9)),
+          SRecord.new(event_id: e2 = SecureRandom.uuid, timestamp: Time.new(2020,1,3), valid_at: Time.new(2020,1,6)),
+          SRecord.new(event_id: e3 = SecureRandom.uuid, timestamp: Time.new(2020,1,2), valid_at: Time.new(2020,1,3)),
+        ],
+        Stream.new("Dummy"),
+        ExpectedVersion.any
+      )
+      expect(repository.read(specification.in_batches.result).to_a.flatten.map(&:event_id)).to eq [e1, e2, e3]
+      expect(repository.read(specification.in_batches.as_at.result).to_a.flatten.map(&:event_id)).to eq [e1, e3, e2]
+      expect(repository.read(specification.in_batches.as_at.backward.result).to_a.flatten.map(&:event_id)).to eq [e2, e3, e1]
+      expect(repository.read(specification.in_batches.as_of.result).to_a.flatten.map(&:event_id)).to eq [e3, e2, e1]
+      expect(repository.read(specification.in_batches.as_of.backward.result).to_a.flatten.map(&:event_id)).to eq [e1, e2, e3]
+    end
   end
 end
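Together with the new `timestamp`/`valid_at` columns, the lint above exercises the bi-temporal read API added to `Specification` (`as_at`/`as_of` ordering, `older_than`, `newer_than`, `between` and their `_or_equal` variants). A rough sketch of how the same filters read from the client side, assuming the 2.x `Client` API with the default in-memory repository; the event class and stream name are illustrative:

```ruby
require "ruby_event_store"

OrderPlaced = Class.new(RubyEventStore::Event)

client = RubyEventStore::Client.new(repository: RubyEventStore::InMemoryRepository.new)
client.publish(OrderPlaced.new(data: { order_id: 1 }), stream_name: "Order$1")

# Time-based filters mirrored from the repository lint:
client.read.newer_than(Time.utc(2020, 1, 2)).to_a
client.read.between(Time.utc(2020, 1, 1)...Time.utc(2021, 1, 1)).to_a

# Bi-temporal ordering: as_at sorts by append time (timestamp),
# as_of by business validity time (valid_at).
client.read.as_at.to_a
client.read.as_of.to_a
```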