ruby_event_store 2.2.0 → 2.3.0
- checksums.yaml +4 -4
- data/lib/ruby_event_store/client.rb +29 -2
- data/lib/ruby_event_store/errors.rb +1 -0
- data/lib/ruby_event_store/event.rb +5 -2
- data/lib/ruby_event_store/in_memory_repository.rb +64 -12
- data/lib/ruby_event_store/instrumented_dispatcher.rb +12 -2
- data/lib/ruby_event_store/instrumented_repository.rb +14 -8
- data/lib/ruby_event_store/mappers/encryption_mapper.rb +1 -1
- data/lib/ruby_event_store/mappers/forgotten_data.rb +1 -1
- data/lib/ruby_event_store/mappers/in_memory_encryption_key_repository.rb +1 -1
- data/lib/ruby_event_store/mappers/transformation/domain_event.rb +11 -1
- data/lib/ruby_event_store/metadata.rb +3 -3
- data/lib/ruby_event_store/projection.rb +2 -2
- data/lib/ruby_event_store/spec/broker_lint.rb +11 -11
- data/lib/ruby_event_store/spec/event_lint.rb +9 -9
- data/lib/ruby_event_store/spec/event_repository_lint.rb +273 -235
- data/lib/ruby_event_store/spec/subscriptions_lint.rb +35 -35
- data/lib/ruby_event_store/subscriptions.rb +1 -1
- data/lib/ruby_event_store/version.rb +1 -1
- data/lib/ruby_event_store.rb +43 -43
- metadata +16 -2
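
The headline change in the diff below is in event_repository_lint.rb: the shared examples no longer define their own EventRepositoryHelper and now receive a repository factory and a capability helper as block arguments. A minimal sketch, assuming a hypothetical LintHelper stand-in, of how a repository implementation might run the updated lint (real adapters supply their own helper answering these queries):

    # Hypothetical consumer spec, not part of the gem; shown only to illustrate
    # the new `|mk_repository, helper|` signature of the shared examples.
    require "ruby_event_store"
    require "ruby_event_store/spec/event_repository_lint"

    class LintHelper
      # Capability queries the lint asks the helper for.
      def supports_concurrent_auto?
        true
      end

      def supports_concurrent_any?
        true
      end

      def supports_binary?
        true
      end

      def supports_upsert?
        true
      end

      def supports_position_queries?
        true
      end

      def has_connection_pooling?
        false
      end

      def connection_pool_size
      end
    end

    RSpec.describe RubyEventStore::InMemoryRepository do
      it_behaves_like :event_repository,
                      -> { RubyEventStore::InMemoryRepository.new },
                      LintHelper.new
    end
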
@@ -1,36 +1,36 @@
 RSpec.shared_examples :event do |event_class, data, metadata|
-  it
+  it "allows initialization" do
     expect {
       event_class.new(event_id: Object.new, data: data || Object.new, metadata: metadata || {})
     }.not_to raise_error
   end
 
-  it
+  it "provides event_id as string" do
     event = event_class.new
     expect(event.event_id).to be_an_instance_of(String)
-    expect(event.event_id).not_to eq
+    expect(event.event_id).not_to eq ""
     expect(event.event_id).not_to eq nil
   end
 
-  it
+  it "provides message_id as string" do
     event = event_class.new
     expect(event.message_id).to be_an_instance_of(String)
   end
 
-  it
+  it "message_id is the same as event_id" do
     event = event_class.new
     expect(event.event_id).to eq event.message_id
   end
 
-  it
+  it "exposes given event_id to string" do
     event = event_class.new(event_id: 1234567890)
-    expect(event.event_id).to eq
+    expect(event.event_id).to eq "1234567890"
   end
 
-  it
+  it "provides event type as string" do
     event = event_class.new
     expect(event.event_type).to be_an_instance_of(String)
-    expect(event.event_type).not_to eq
+    expect(event.event_type).not_to eq ""
     expect(event.event_type).not_to eq nil
   end
 
@@ -5,7 +5,7 @@ module RubyEventStore
       event_id: SecureRandom.uuid,
       data: {},
       metadata: {},
-      event_type:
+      event_type: "SRecordTestEvent",
       timestamp: Time.new.utc,
       valid_at: nil
     )
@@ -26,50 +26,17 @@ module RubyEventStore
   Type2 = Class.new(RubyEventStore::Event)
   # @private
   Type3 = Class.new(RubyEventStore::Event)
-
-  # @private
-  class EventRepositoryHelper
-    def supports_concurrent_auto?
-      true
-    end
-
-    def supports_concurrent_any?
-      true
-    end
-
-    def supports_binary?
-      true
-    end
-
-    def supports_upsert?
-      true
-    end
-
-    def has_connection_pooling?
-      false
-    end
-
-    def connection_pool_size
-    end
-
-    def cleanup_concurrency_test
-    end
-
-    def rescuable_concurrency_test_errors
-      []
-    end
-  end
 end
 
 module RubyEventStore
-  ::RSpec.shared_examples :event_repository do
-    let(:
+  ::RSpec.shared_examples :event_repository do |mk_repository, helper|
+    let(:repository) { mk_repository.call }
     let(:specification) { Specification.new(SpecificationReader.new(repository, Mappers::NullMapper.new)) }
     let(:global_stream) { Stream.new(GLOBAL_STREAM) }
     let(:stream) { Stream.new(SecureRandom.uuid) }
-    let(:stream_flow) { Stream.new(
-    let(:stream_other) { Stream.new(
-    let(:stream_test) { Stream.new(
+    let(:stream_flow) { Stream.new("flow") }
+    let(:stream_other) { Stream.new("other") }
+    let(:stream_test) { Stream.new("test") }
     let(:version_none) { ExpectedVersion.none }
     let(:version_auto) { ExpectedVersion.auto }
     let(:version_any) { ExpectedVersion.any }
@@ -78,12 +45,12 @@ module RubyEventStore
     let(:version_2) { ExpectedVersion.new(2) }
     let(:version_3) { ExpectedVersion.new(3) }
 
-    def verify_conncurency_assumptions
+    def verify_conncurency_assumptions(helper)
       return unless helper.has_connection_pooling?
-      expect(helper.connection_pool_size).to eq(5)
+      expect(helper.connection_pool_size).to eq(5), "expected connection pool of size 5, got #{helper.connection_pool_size}"
     end
 
-    def read_events(scope, stream = nil, from: nil, to: nil, count: nil)
+    def read_events(repository, scope, stream = nil, from: nil, to: nil, count: nil)
       scope = scope.stream(stream.name) if stream
       scope = scope.from(from) if from
       scope = scope.to(to) if to
@@ -91,25 +58,25 @@ module RubyEventStore
       repository.read(scope.result).to_a
     end
 
-    def read_events_forward(
-      read_events(specification, stream, from: from, to: to, count: count)
+    def read_events_forward(repository, stream = nil, from: nil, to: nil, count: nil)
+      read_events(repository, specification, stream, from: from, to: to, count: count)
     end
 
-    def read_events_backward(
-      read_events(specification.backward, stream, from: from, to: to, count: count)
+    def read_events_backward(repository, stream = nil, from: nil, to: nil, count: nil)
+      read_events(repository, specification.backward, stream, from: from, to: to, count: count)
     end
 
-    it
+    it "just created is empty" do
      expect(read_events_forward(repository)).to be_empty
     end
 
-    specify
+    specify "append_to_stream returns self" do
       repository
         .append_to_stream([event = SRecord.new], stream, version_none)
         .append_to_stream([event = SRecord.new], stream, version_0)
     end
 
-    specify
+    specify "link_to_stream returns self" do
       event0 = SRecord.new
       event1 = SRecord.new
       repository
@@ -118,14 +85,14 @@ module RubyEventStore
         .link_to_stream([event1.event_id], stream_flow, version_0)
     end
 
-    specify
+    specify "adds an initial event to a new stream" do
       repository.append_to_stream([event = SRecord.new], stream, version_none)
       expect(read_events_forward(repository).first).to eq(event)
       expect(read_events_forward(repository, stream).first).to eq(event)
       expect(read_events_forward(repository, stream_other)).to be_empty
     end
 
-    specify
+    specify "links an initial event to a new stream" do
       repository
         .append_to_stream([event = SRecord.new], stream, version_none)
         .link_to_stream([event.event_id], stream_flow, version_none)
@@ -136,7 +103,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_other)).to be_empty
     end
 
-    specify
+    specify "adds multiple initial events to a new stream" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -145,7 +112,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1])
     end
 
-    specify
+    specify "links multiple initial events to a new stream" do
      repository.append_to_stream([
        event0 = SRecord.new,
        event1 = SRecord.new,
@@ -157,7 +124,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow)).to eq([event0, event1])
     end
 
-    specify
+    specify "correct expected version on second write" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -170,7 +137,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
     end
 
-    specify
+    specify "correct expected version on second link" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -185,7 +152,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow)).to eq([event2, event3, event0, event1])
     end
 
-    specify
+    specify "incorrect expected version on second write" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -201,7 +168,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1])
     end
 
-    specify
+    specify "incorrect expected version on second link" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -221,7 +188,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1])
     end
 
-    specify
+    specify ":none on first and subsequent write" do
       repository.append_to_stream([
         eventA = SRecord.new,
       ], stream, version_none)
@@ -234,7 +201,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([eventA])
     end
 
-    specify
+    specify ":none on first and subsequent link" do
       repository.append_to_stream([
         eventA = SRecord.new,
         eventB = SRecord.new,
@@ -249,7 +216,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow)).to eq([eventA])
     end
 
-    specify
+    specify ":any allows stream with best-effort order and no guarantee" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -262,7 +229,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream).to_set).to eq(Set.new([event0, event1, event2, event3]))
     end
 
-    specify
+    specify ":any allows linking in stream with best-effort order and no guarantee" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -281,7 +248,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow).to_set).to eq(Set.new([event0, event1, event2, event3]))
     end
 
-    specify
+    specify ":auto queries for last position in given stream" do
       repository.append_to_stream([
         eventA = SRecord.new,
         eventB = SRecord.new,
@@ -297,7 +264,7 @@ module RubyEventStore
       ], stream, version_1)
     end
 
-    specify
+    specify ":auto queries for last position in given stream when linking" do
       repository.append_to_stream([
         eventA = SRecord.new,
         eventB = SRecord.new,
@@ -314,7 +281,7 @@ module RubyEventStore
       ], stream, version_1)
     end
 
-    specify
+    specify ":auto starts from 0" do
       repository.append_to_stream([
         event0 = SRecord.new,
       ], stream, version_auto)
@@ -325,7 +292,7 @@ module RubyEventStore
       end.to raise_error(WrongExpectedEventVersion)
     end
 
-    specify
+    specify ":auto linking starts from 0" do
       repository.append_to_stream([
         event0 = SRecord.new,
       ], stream_other, version_auto)
@@ -339,7 +306,7 @@ module RubyEventStore
       end.to raise_error(WrongExpectedEventVersion)
     end
 
-    specify
+    specify ":auto queries for last position and follows in incremental way" do
       # It is expected that there is higher level lock
       # So this query is safe from race conditions
       repository.append_to_stream([
@@ -357,7 +324,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
     end
 
-    specify
+    specify ":auto queries for last position and follows in incremental way when linking" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -377,7 +344,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow)).to eq([event0, event1, event2, event3])
     end
 
-    specify
+    specify ":auto is compatible with manual expectation" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -390,7 +357,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
     end
 
-    specify
+    specify ":auto is compatible with manual expectation when linking" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -405,7 +372,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow)).to eq([event0, event1,])
     end
 
-    specify
+    specify "manual is compatible with auto expectation" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -418,7 +385,7 @@ module RubyEventStore
       expect(read_events_forward(repository, stream)).to eq([event0, event1, event2, event3])
     end
 
-    specify
+    specify "manual is compatible with auto expectation when linking" do
       repository.append_to_stream([
         event0 = SRecord.new,
         event1 = SRecord.new,
@@ -433,9 +400,9 @@ module RubyEventStore
       expect(read_events_forward(repository, stream_flow)).to eq([event0, event1])
     end
 
-    specify
+    specify "unlimited concurrency for :any - everything should succeed", timeout: 10, mutant: false do
       skip unless helper.supports_concurrent_any?
-      verify_conncurency_assumptions
+      verify_conncurency_assumptions(helper)
       begin
         concurrency_level = 4
         fail_occurred = false
@@ -466,14 +433,12 @@ module RubyEventStore
|
|
466
433
|
ev.event_id.start_with?("0-")
|
467
434
|
end
|
468
435
|
expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
|
469
|
-
ensure
|
470
|
-
helper.cleanup_concurrency_test
|
471
436
|
end
|
472
437
|
end
|
473
438
|
|
474
|
-
specify
|
439
|
+
specify "unlimited concurrency for :any - everything should succeed when linking", timeout: 10, mutant: false do
|
475
440
|
skip unless helper.supports_concurrent_any?
|
476
|
-
verify_conncurency_assumptions
|
441
|
+
verify_conncurency_assumptions(helper)
|
477
442
|
begin
|
478
443
|
concurrency_level = 4
|
479
444
|
fail_occurred = false
|
@@ -511,14 +476,12 @@ module RubyEventStore
|
|
511
476
|
ev.event_id.start_with?("0-")
|
512
477
|
end
|
513
478
|
expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
|
514
|
-
ensure
|
515
|
-
helper.cleanup_concurrency_test
|
516
479
|
end
|
517
480
|
end
|
518
481
|
|
519
|
-
specify
|
482
|
+
specify "limited concurrency for :auto - some operations will fail without outside lock, stream is ordered", mutant: false do
|
520
483
|
skip unless helper.supports_concurrent_auto?
|
521
|
-
verify_conncurency_assumptions
|
484
|
+
verify_conncurency_assumptions(helper)
|
522
485
|
begin
|
523
486
|
concurrency_level = 4
|
524
487
|
|
@@ -535,7 +498,7 @@ module RubyEventStore
|
|
535
498
|
SRecord.new(event_id: eid),
|
536
499
|
], stream, version_auto)
|
537
500
|
sleep(rand(concurrency_level) / 1000.0)
|
538
|
-
rescue WrongExpectedEventVersion
|
501
|
+
rescue WrongExpectedEventVersion
|
539
502
|
fail_occurred +=1
|
540
503
|
end
|
541
504
|
end
|
@@ -551,15 +514,15 @@ module RubyEventStore
|
|
551
514
|
ev.event_id.start_with?("0-")
|
552
515
|
end
|
553
516
|
expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
|
554
|
-
|
555
|
-
|
556
|
-
|
517
|
+
|
518
|
+
positions = repository.read(specification.stream(stream.name).result).map { |r| repository.position_in_stream(r.event_id, stream) }
|
519
|
+
expect(positions).to eq((0...positions.size).to_a)
|
557
520
|
end
|
558
521
|
end
|
559
522
|
|
560
|
-
specify
|
523
|
+
specify "limited concurrency for :auto - some operations will fail without outside lock, stream is ordered", mutant: false do
|
561
524
|
skip unless helper.supports_concurrent_auto?
|
562
|
-
verify_conncurency_assumptions
|
525
|
+
verify_conncurency_assumptions(helper)
|
563
526
|
begin
|
564
527
|
concurrency_level = 4
|
565
528
|
|
@@ -583,7 +546,7 @@ module RubyEventStore
|
|
583
546
|
eid = "0000000#{i}-#{sprintf("%04d", j)}-0000-0000-000000000000"
|
584
547
|
repository.link_to_stream([eid], stream, version_auto)
|
585
548
|
sleep(rand(concurrency_level) / 1000.0)
|
586
|
-
rescue WrongExpectedEventVersion
|
549
|
+
rescue WrongExpectedEventVersion
|
587
550
|
fail_occurred +=1
|
588
551
|
end
|
589
552
|
end
|
@@ -599,13 +562,13 @@ module RubyEventStore
|
|
599
562
|
ev.event_id.start_with?("0-")
|
600
563
|
end
|
601
564
|
expect(events0).to eq(events0.sort_by{|ev| ev.event_id })
|
602
|
-
|
603
|
-
|
604
|
-
|
565
|
+
|
566
|
+
positions = repository.read(specification.stream(stream.name).result).map { |r| repository.position_in_stream(r.event_id, stream) }
|
567
|
+
expect(positions).to eq((0...positions.size).to_a)
|
605
568
|
end
|
606
569
|
end
|
607
570
|
|
608
|
-
it
|
571
|
+
it "appended event is stored in given stream" do
|
609
572
|
expected_event = SRecord.new
|
610
573
|
repository.append_to_stream([expected_event], stream, version_any)
|
611
574
|
expect(read_events_forward(repository, count: 1).first).to eq(expected_event)
|
@@ -613,21 +576,21 @@ module RubyEventStore
|
|
613
576
|
expect(read_events_forward(repository, stream_other)).to be_empty
|
614
577
|
end
|
615
578
|
|
616
|
-
it
|
579
|
+
it "data attributes are retrieved" do
|
617
580
|
event = SRecord.new(data: { "order_id" => 3 })
|
618
581
|
repository.append_to_stream([event], stream, version_any)
|
619
582
|
retrieved_event = read_events_forward(repository, count: 1).first
|
620
583
|
expect(retrieved_event.data).to eq({ "order_id" => 3 })
|
621
584
|
end
|
622
585
|
|
623
|
-
it
|
586
|
+
it "metadata attributes are retrieved" do
|
624
587
|
event = SRecord.new(metadata: { "request_id" => 3 })
|
625
588
|
repository.append_to_stream([event], stream, version_any)
|
626
589
|
retrieved_event = read_events_forward(repository, count: 1).first
|
627
590
|
expect(retrieved_event.metadata).to eq({ "request_id" => 3 })
|
628
591
|
end
|
629
592
|
|
630
|
-
it
|
593
|
+
it "data and metadata attributes are retrieved when linking" do
|
631
594
|
event = SRecord.new(
|
632
595
|
data: { "order_id" => 3 },
|
633
596
|
metadata: { "request_id" => 4},
|
@@ -641,7 +604,7 @@ module RubyEventStore
|
|
641
604
|
expect(event).to eq(retrieved_event)
|
642
605
|
end
|
643
606
|
|
644
|
-
it
|
607
|
+
it "does not have deleted streams" do
|
645
608
|
repository.append_to_stream([e1 = SRecord.new], stream, version_none)
|
646
609
|
repository.append_to_stream([e2 = SRecord.new], stream_other, version_none)
|
647
610
|
|
@@ -651,7 +614,7 @@ module RubyEventStore
|
|
651
614
|
expect(read_events_forward(repository, count: 10)).to eq([e1,e2])
|
652
615
|
end
|
653
616
|
|
654
|
-
it
|
617
|
+
it "does not have deleted streams with linked events" do
|
655
618
|
repository
|
656
619
|
.append_to_stream([e1 = SRecord.new], stream, version_none)
|
657
620
|
.link_to_stream([e1.event_id], stream_flow, version_none)
|
@@ -661,31 +624,98 @@ module RubyEventStore
|
|
661
624
|
expect(read_events_forward(repository, count: 10)).to eq([e1])
|
662
625
|
end
|
663
626
|
|
664
|
-
it
|
665
|
-
just_an_id =
|
627
|
+
it "has or has not domain event" do
|
628
|
+
just_an_id = "d5c134c2-db65-4e87-b6ea-d196f8f1a292"
|
666
629
|
repository.append_to_stream([SRecord.new(event_id: just_an_id)], stream, version_none)
|
667
630
|
|
668
631
|
expect(repository.has_event?(just_an_id)).to be_truthy
|
669
632
|
expect(repository.has_event?(just_an_id.clone)).to be_truthy
|
670
|
-
expect(repository.has_event?(
|
633
|
+
expect(repository.has_event?("any other id")).to be false
|
671
634
|
|
672
635
|
repository.delete_stream(stream)
|
673
636
|
expect(repository.has_event?(just_an_id)).to be_truthy
|
674
637
|
expect(repository.has_event?(just_an_id.clone)).to be_truthy
|
675
638
|
end
|
676
639
|
|
677
|
-
it
|
678
|
-
|
679
|
-
repository.append_to_stream([
|
640
|
+
it "#position_in_stream happy path" do
|
641
|
+
skip unless helper.supports_position_queries?
|
642
|
+
repository.append_to_stream([
|
643
|
+
event0 = SRecord.new,
|
644
|
+
event1 = SRecord.new
|
645
|
+
], stream, version_auto)
|
646
|
+
|
647
|
+
expect(repository.position_in_stream(event0.event_id, stream)).to eq(0)
|
648
|
+
expect(repository.position_in_stream(event1.event_id, stream)).to eq(1)
|
649
|
+
end
|
650
|
+
|
651
|
+
it "#position_in_stream happy path with linking" do
|
652
|
+
skip unless helper.supports_position_queries?
|
653
|
+
repository.append_to_stream([
|
654
|
+
event0 = SRecord.new,
|
655
|
+
event1 = SRecord.new
|
656
|
+
], stream, version_auto)
|
657
|
+
repository.link_to_stream([
|
658
|
+
event1.event_id,
|
659
|
+
event0.event_id,
|
660
|
+
], stream_other, version_auto)
|
661
|
+
|
662
|
+
expect(repository.position_in_stream(event0.event_id, stream)).to eq(0)
|
663
|
+
expect(repository.position_in_stream(event1.event_id, stream)).to eq(1)
|
664
|
+
expect(repository.position_in_stream(event1.event_id, stream_other)).to eq(0)
|
665
|
+
expect(repository.position_in_stream(event0.event_id, stream_other)).to eq(1)
|
666
|
+
end
|
667
|
+
|
668
|
+
it "#position_in_stream when event is not in the stream" do
|
669
|
+
skip unless helper.supports_position_queries?
|
670
|
+
just_an_id = "d5c134c2-db65-4e87-b6ea-d196f8f1a292"
|
671
|
+
|
672
|
+
expect do
|
673
|
+
repository.position_in_stream(just_an_id, stream)
|
674
|
+
end.to raise_error(EventNotFoundInStream)
|
675
|
+
end
|
676
|
+
|
677
|
+
it "#position_in_stream when event is published without position" do
|
678
|
+
skip unless helper.supports_position_queries?
|
679
|
+
repository.append_to_stream([event0 = SRecord.new], stream, version_any)
|
680
|
+
|
681
|
+
expect(repository.position_in_stream(event0.event_id, stream)).to eq(nil)
|
682
|
+
end
|
683
|
+
|
684
|
+
it "#global_position happy path" do
|
685
|
+
skip unless helper.supports_position_queries?
|
686
|
+
repository.append_to_stream([
|
687
|
+
event0 = SRecord.new,
|
688
|
+
event1 = SRecord.new
|
689
|
+
], stream, version_any)
|
690
|
+
|
691
|
+
expect(repository.global_position(event0.event_id)).to eq(0)
|
692
|
+
expect(repository.global_position(event1.event_id)).to eq(1)
|
693
|
+
end
|
694
|
+
|
695
|
+
it "#global_position for not existing event" do
|
696
|
+
skip unless helper.supports_position_queries?
|
697
|
+
just_an_id = "d5c134c2-db65-4e87-b6ea-d196f8f1a292"
|
698
|
+
|
699
|
+
expect do
|
700
|
+
repository.global_position(just_an_id)
|
701
|
+
end.to raise_error do |err|
|
702
|
+
expect(err).to be_a(EventNotFound)
|
703
|
+
expect(err.event_id).to eq(just_an_id)
|
704
|
+
end
|
705
|
+
end
|
706
|
+
|
707
|
+
it "knows last event in stream" do
|
708
|
+
repository.append_to_stream([a =SRecord.new(event_id: "00000000-0000-0000-0000-000000000001")], stream, version_none)
|
709
|
+
repository.append_to_stream([b = SRecord.new(event_id: "00000000-0000-0000-0000-000000000002")], stream, version_0)
|
680
710
|
|
681
711
|
expect(repository.last_stream_event(stream)).to eq(b)
|
682
712
|
expect(repository.last_stream_event(stream_other)).to be_nil
|
683
713
|
end
|
684
714
|
|
685
|
-
it
|
715
|
+
it "knows last event in stream when linked" do
|
686
716
|
repository.append_to_stream([
|
687
|
-
e0 = SRecord.new(event_id:
|
688
|
-
e1 = SRecord.new(event_id:
|
717
|
+
e0 = SRecord.new(event_id: "00000000-0000-0000-0000-000000000001"),
|
718
|
+
e1 = SRecord.new(event_id: "00000000-0000-0000-0000-000000000002"),
|
689
719
|
],
|
690
720
|
stream,
|
691
721
|
version_none
|
@@ -693,7 +723,7 @@ module RubyEventStore
|
|
693
723
|
expect(repository.last_stream_event(stream_flow)).to eq(e0)
|
694
724
|
end
|
695
725
|
|
696
|
-
it
|
726
|
+
it "reads batch of events from stream forward & backward" do
|
697
727
|
events = %w[
|
698
728
|
96c920b1-cdd0-40f4-907c-861b9fff7d02
|
699
729
|
56404f79-0ba0-4aa0-8524-dc3436368ca0
|
@@ -728,7 +758,7 @@ module RubyEventStore
|
|
728
758
|
expect(read_events_backward(repository, stream, to: events[4].event_id, count: 100)).to eq(events.last(5).reverse)
|
729
759
|
end
|
730
760
|
|
731
|
-
it
|
761
|
+
it "reads batch of linked events from stream forward & backward" do
|
732
762
|
events = %w[
|
733
763
|
96c920b1-cdd0-40f4-907c-861b9fff7d02
|
734
764
|
56404f79-0ba0-4aa0-8524-dc3436368ca0
|
@@ -764,39 +794,39 @@ module RubyEventStore
|
|
764
794
|
expect(read_events_backward(repository, stream_flow, to: events[4].event_id, count: 100)).to eq(events[5..9].reverse)
|
765
795
|
end
|
766
796
|
|
767
|
-
it
|
797
|
+
it "reads all stream events forward & backward" do
|
768
798
|
s1 = stream
|
769
799
|
s2 = stream_other
|
770
800
|
repository
|
771
|
-
.append_to_stream([a = SRecord.new(event_id:
|
772
|
-
.append_to_stream([b = SRecord.new(event_id:
|
773
|
-
.append_to_stream([c = SRecord.new(event_id:
|
774
|
-
.append_to_stream([d = SRecord.new(event_id:
|
775
|
-
.append_to_stream([e = SRecord.new(event_id:
|
801
|
+
.append_to_stream([a = SRecord.new(event_id: "7010d298-ab69-4bb1-9251-f3466b5d1282")], s1, version_none)
|
802
|
+
.append_to_stream([b = SRecord.new(event_id: "34f88aca-aaba-4ca0-9256-8017b47528c5")], s2, version_none)
|
803
|
+
.append_to_stream([c = SRecord.new(event_id: "8e61c864-ceae-4684-8726-97c34eb8fc4f")], s1, version_0)
|
804
|
+
.append_to_stream([d = SRecord.new(event_id: "30963ed9-6349-450b-ac9b-8ea50115b3bd")], s2, version_0)
|
805
|
+
.append_to_stream([e = SRecord.new(event_id: "5bdc58b7-e8a7-4621-afd6-ccb828d72457")], s2, version_1)
|
776
806
|
|
777
807
|
expect(read_events_forward(repository, s1)).to eq [a,c]
|
778
808
|
expect(read_events_backward(repository, s1)).to eq [c,a]
|
779
809
|
end
|
780
810
|
|
781
|
-
it
|
811
|
+
it "reads all stream linked events forward & backward" do
|
782
812
|
s1, fs1, fs2 = stream, stream_flow, stream_other
|
783
813
|
repository
|
784
|
-
.append_to_stream([a = SRecord.new(event_id:
|
785
|
-
.append_to_stream([b = SRecord.new(event_id:
|
786
|
-
.append_to_stream([c = SRecord.new(event_id:
|
787
|
-
.append_to_stream([d = SRecord.new(event_id:
|
788
|
-
.append_to_stream([e = SRecord.new(event_id:
|
789
|
-
.link_to_stream([
|
790
|
-
.link_to_stream([
|
791
|
-
.link_to_stream([
|
792
|
-
.link_to_stream([
|
793
|
-
.link_to_stream([
|
814
|
+
.append_to_stream([a = SRecord.new(event_id: "7010d298-ab69-4bb1-9251-f3466b5d1282")], s1, version_none)
|
815
|
+
.append_to_stream([b = SRecord.new(event_id: "34f88aca-aaba-4ca0-9256-8017b47528c5")], s1, version_0)
|
816
|
+
.append_to_stream([c = SRecord.new(event_id: "8e61c864-ceae-4684-8726-97c34eb8fc4f")], s1, version_1)
|
817
|
+
.append_to_stream([d = SRecord.new(event_id: "30963ed9-6349-450b-ac9b-8ea50115b3bd")], s1, version_2)
|
818
|
+
.append_to_stream([e = SRecord.new(event_id: "5bdc58b7-e8a7-4621-afd6-ccb828d72457")], s1, version_3)
|
819
|
+
.link_to_stream(["7010d298-ab69-4bb1-9251-f3466b5d1282"], fs1, version_none)
|
820
|
+
.link_to_stream(["34f88aca-aaba-4ca0-9256-8017b47528c5"], fs2, version_none)
|
821
|
+
.link_to_stream(["8e61c864-ceae-4684-8726-97c34eb8fc4f"], fs1, version_0)
|
822
|
+
.link_to_stream(["30963ed9-6349-450b-ac9b-8ea50115b3bd"], fs2, version_0)
|
823
|
+
.link_to_stream(["5bdc58b7-e8a7-4621-afd6-ccb828d72457"], fs2, version_1)
|
794
824
|
|
795
825
|
expect(read_events_forward(repository, fs1)).to eq [a,c]
|
796
826
|
expect(read_events_backward(repository, fs1)).to eq [c,a]
|
797
827
|
end
|
798
828
|
|
799
|
-
it
|
829
|
+
it "reads batch of events from all streams forward & backward" do
|
800
830
|
events = %w[
|
801
831
|
96c920b1-cdd0-40f4-907c-861b9fff7d02
|
802
832
|
56404f79-0ba0-4aa0-8524-dc3436368ca0
|
@@ -828,7 +858,7 @@ module RubyEventStore
|
|
828
858
|
expect(read_events_backward(repository, to: events[4].event_id, count: 100)).to eq(events.last(5).reverse)
|
829
859
|
end
|
830
860
|
|
831
|
-
it
|
861
|
+
it "linked events do not affect reading from all streams - no duplicates" do
|
832
862
|
events = %w[
|
833
863
|
96c920b1-cdd0-40f4-907c-861b9fff7d02
|
834
864
|
56404f79-0ba0-4aa0-8524-dc3436368ca0
|
@@ -862,7 +892,7 @@ module RubyEventStore
|
|
862
892
|
expect(read_events_backward(repository, to: events[4].event_id, count: 100)).to eq(events.last(5).reverse)
|
863
893
|
end
|
864
894
|
|
865
|
-
it
|
895
|
+
it "reads events different uuid object but same content" do
|
866
896
|
events = %w[
|
867
897
|
96c920b1-cdd0-40f4-907c-861b9fff7d02
|
868
898
|
56404f79-0ba0-4aa0-8524-dc3436368ca0
|
@@ -881,7 +911,7 @@ module RubyEventStore
|
|
881
911
|
expect(read_events_backward(repository, stream, to: "96c920b1-cdd0-40f4-907c-861b9fff7d02", count: 1)).to eq([events.last])
|
882
912
|
end
|
883
913
|
|
884
|
-
it
|
914
|
+
it "does not allow same event twice in a stream" do
|
885
915
|
repository.append_to_stream(
|
886
916
|
[SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
|
887
917
|
stream,
|
@@ -896,7 +926,7 @@ module RubyEventStore
|
|
896
926
|
end.to raise_error(EventDuplicatedInStream)
|
897
927
|
end
|
898
928
|
|
899
|
-
it
|
929
|
+
it "does not allow same event twice" do
|
900
930
|
repository.append_to_stream(
|
901
931
|
[SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
|
902
932
|
stream,
|
@@ -911,7 +941,7 @@ module RubyEventStore
|
|
911
941
|
end.to raise_error(EventDuplicatedInStream)
|
912
942
|
end
|
913
943
|
|
914
|
-
it
|
944
|
+
it "does not allow linking same event twice in a stream" do
|
915
945
|
repository.append_to_stream(
|
916
946
|
[SRecord.new(event_id: "a1b49edb-7636-416f-874a-88f94b859bef")],
|
917
947
|
stream,
|
@@ -922,7 +952,7 @@ module RubyEventStore
|
|
922
952
|
end.to raise_error(EventDuplicatedInStream)
|
923
953
|
end
|
924
954
|
|
925
|
-
it
|
955
|
+
it "allows appending to GLOBAL_STREAM explicitly" do
|
926
956
|
event = SRecord.new(event_id: "df8b2ba3-4e2c-4888-8d14-4364855fa80e")
|
927
957
|
repository.append_to_stream([event], global_stream, version_any)
|
928
958
|
|
@@ -934,29 +964,29 @@ module RubyEventStore
|
|
934
964
|
|
935
965
|
expect do
|
936
966
|
repository.append_to_stream(
|
937
|
-
[SRecord.new(event_id:
|
967
|
+
[SRecord.new(event_id: "9bedf448-e4d0-41a3-a8cd-f94aec7aa763")],
|
938
968
|
stream,
|
939
969
|
version_none
|
940
970
|
)
|
941
971
|
end.to raise_error(WrongExpectedEventVersion)
|
942
|
-
expect(repository.has_event?(
|
972
|
+
expect(repository.has_event?("9bedf448-e4d0-41a3-a8cd-f94aec7aa763")).to be false
|
943
973
|
end
|
944
974
|
|
945
|
-
specify
|
975
|
+
specify "linking non-existent event" do
|
946
976
|
expect do
|
947
|
-
repository.link_to_stream([
|
977
|
+
repository.link_to_stream(["72922e65-1b32-4e97-8023-03ae81dd3a27"], stream_flow, version_none)
|
948
978
|
end.to raise_error do |err|
|
949
979
|
expect(err).to be_a(EventNotFound)
|
950
|
-
expect(err.event_id).to eq(
|
951
|
-
expect(err.message).to eq(
|
980
|
+
expect(err.event_id).to eq("72922e65-1b32-4e97-8023-03ae81dd3a27")
|
981
|
+
expect(err.message).to eq("Event not found: 72922e65-1b32-4e97-8023-03ae81dd3a27")
|
952
982
|
end
|
953
983
|
end
|
954
984
|
|
955
|
-
specify
|
985
|
+
specify "read returns enumerator" do
|
956
986
|
expect(repository.read(specification.result)).to be_kind_of(Enumerator)
|
957
987
|
end
|
958
988
|
|
959
|
-
specify
|
989
|
+
specify "can store arbitrary binary data" do
|
960
990
|
skip unless helper.supports_binary?
|
961
991
|
binary = "\xB0"
|
962
992
|
expect(binary.valid_encoding?).to eq(false)
|
@@ -1180,50 +1210,50 @@ module RubyEventStore
|
|
1180
1210
|
end
|
1181
1211
|
|
1182
1212
|
specify do
|
1183
|
-
event_1 = SRecord.new(event_id:
|
1184
|
-
event_2 = SRecord.new(event_id:
|
1185
|
-
event_3 = SRecord.new(event_id:
|
1186
|
-
stream_a = Stream.new(
|
1187
|
-
stream_b = Stream.new(
|
1188
|
-
stream_c = Stream.new(
|
1213
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea")
|
1214
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7")
|
1215
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e")
|
1216
|
+
stream_a = Stream.new("Stream A")
|
1217
|
+
stream_b = Stream.new("Stream B")
|
1218
|
+
stream_c = Stream.new("Stream C")
|
1189
1219
|
repository.append_to_stream([event_1, event_2], stream_a, version_any)
|
1190
1220
|
repository.append_to_stream([event_3], stream_b, version_any)
|
1191
1221
|
repository.link_to_stream([event_1.event_id], stream_c, version_none)
|
1192
1222
|
|
1193
|
-
expect(repository.streams_of(
|
1194
|
-
expect(repository.streams_of(
|
1195
|
-
expect(repository.streams_of(
|
1196
|
-
expect(repository.streams_of(
|
1223
|
+
expect(repository.streams_of("8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea")).to eq [stream_a, stream_c]
|
1224
|
+
expect(repository.streams_of("8cee1139-4f96-483a-a175-2b947283c3c7")).to eq [stream_a]
|
1225
|
+
expect(repository.streams_of("d345f86d-b903-4d78-803f-38990c078d9e")).to eq [stream_b]
|
1226
|
+
expect(repository.streams_of("d10c8fe9-2163-418d-ba47-88c9a1f9391b")).to eq []
|
1197
1227
|
end
|
1198
1228
|
|
1199
1229
|
specify do
|
1200
|
-
e1 = SRecord.new(event_id:
|
1201
|
-
e2 = SRecord.new(event_id:
|
1202
|
-
e3 = SRecord.new(event_id:
|
1203
|
-
stream = Stream.new(
|
1230
|
+
e1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea")
|
1231
|
+
e2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7")
|
1232
|
+
e3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e")
|
1233
|
+
stream = Stream.new("Stream A")
|
1204
1234
|
repository.append_to_stream([e1, e2, e3], stream, version_any)
|
1205
1235
|
|
1206
1236
|
expect(repository.read(specification.with_id([
|
1207
|
-
|
1237
|
+
"8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea"
|
1208
1238
|
]).read_first.result)).to eq(e1)
|
1209
1239
|
expect(repository.read(specification.with_id([
|
1210
|
-
|
1240
|
+
"d345f86d-b903-4d78-803f-38990c078d9e"
|
1211
1241
|
]).read_first.result)).to eq(e3)
|
1212
1242
|
expect(repository.read(specification.with_id([
|
1213
|
-
|
1243
|
+
"c31b327c-0da1-4178-a3cd-d2f6bb5d0688"
|
1214
1244
|
]).read_first.result)).to eq(nil)
|
1215
1245
|
expect(repository.read(specification.with_id([
|
1216
|
-
|
1217
|
-
|
1246
|
+
"8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea",
|
1247
|
+
"d345f86d-b903-4d78-803f-38990c078d9e"
|
1218
1248
|
]).in_batches.result).to_a[0]).to eq([e1,e3])
|
1219
|
-
expect(repository.read(specification.stream(
|
1220
|
-
|
1249
|
+
expect(repository.read(specification.stream("Stream A").with_id([
|
1250
|
+
"8cee1139-4f96-483a-a175-2b947283c3c7"
|
1221
1251
|
]).read_first.result)).to eq(e2)
|
1222
|
-
expect(repository.read(specification.stream(
|
1223
|
-
|
1252
|
+
expect(repository.read(specification.stream("Stream B").with_id([
|
1253
|
+
"8cee1139-4f96-483a-a175-2b947283c3c7"
|
1224
1254
|
]).read_first.result)).to eq(nil)
|
1225
|
-
expect(repository.read(specification.stream(
|
1226
|
-
|
1255
|
+
expect(repository.read(specification.stream("Stream B").with_id([
|
1256
|
+
"c31b327c-0da1-4178-a3cd-d2f6bb5d0688"
|
1227
1257
|
]).read_first.result)).to eq(nil)
|
1228
1258
|
expect(repository.read(specification.with_id([]).result).to_a).to eq([])
|
1229
1259
|
end
|
@@ -1232,7 +1262,7 @@ module RubyEventStore
|
|
1232
1262
|
e1 = SRecord.new(event_type: Type1.to_s)
|
1233
1263
|
e2 = SRecord.new(event_type: Type2.to_s)
|
1234
1264
|
e3 = SRecord.new(event_type: Type1.to_s)
|
1235
|
-
stream = Stream.new(
|
1265
|
+
stream = Stream.new("Stream A")
|
1236
1266
|
repository.append_to_stream([e1, e2, e3], stream, version_any)
|
1237
1267
|
|
1238
1268
|
expect(repository.read(specification.of_type([Type1]).result).to_a).to eq([e1,e3])
|
@@ -1242,8 +1272,8 @@ module RubyEventStore
|
|
1242
1272
|
end
|
1243
1273
|
|
1244
1274
|
specify do
|
1245
|
-
stream = Stream.new(
|
1246
|
-
dummy = Stream.new(
|
1275
|
+
stream = Stream.new("Stream A")
|
1276
|
+
dummy = Stream.new("Dummy")
|
1247
1277
|
|
1248
1278
|
expect(repository.count(specification.result)).to eq(0)
|
1249
1279
|
(1..3).each do
|
@@ -1261,8 +1291,8 @@ module RubyEventStore
|
|
1261
1291
|
expect(repository.count(specification.with_id([not_existing_uuid]).result)).to eq(0)
|
1262
1292
|
|
1263
1293
|
expect(repository.count(specification.stream(stream.name).result)).to eq(3)
|
1264
|
-
expect(repository.count(specification.stream(
|
1265
|
-
expect(repository.count(specification.stream(
|
1294
|
+
expect(repository.count(specification.stream("Dummy").result)).to eq(1)
|
1295
|
+
expect(repository.count(specification.stream("not-existing-stream").result)).to eq(0)
|
1266
1296
|
|
1267
1297
|
repository.append_to_stream([SRecord.new(event_type: Type1.to_s)], dummy, version_any)
|
1268
1298
|
expect(repository.count(specification.from(event_id).result)).to eq(1)
|
@@ -1282,7 +1312,7 @@ module RubyEventStore
|
|
1282
1312
|
expect(repository.count(specification.stream(stream.name).of_type([Type3]).result)).to eq(0)
|
1283
1313
|
end
|
1284
1314
|
|
1285
|
-
specify
|
1315
|
+
specify "timestamp precision" do
|
1286
1316
|
time = Time.utc(2020, 9, 11, 12, 26, 0, 123456)
|
1287
1317
|
repository.append_to_stream([SRecord.new(timestamp: time)], stream, version_none)
|
1288
1318
|
event = read_events_forward(repository, count: 1).first
|
@@ -1290,92 +1320,92 @@ module RubyEventStore
|
|
1290
1320
|
expect(event.timestamp).to eq(time)
|
1291
1321
|
end
|
1292
1322
|
|
1293
|
-
specify
|
1294
|
-
event_1 = SRecord.new(event_id:
|
1295
|
-
event_2 = SRecord.new(event_id:
|
1296
|
-
event_3 = SRecord.new(event_id:
|
1297
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1323
|
+
specify "fetching records older than specified date in stream" do
|
1324
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1325
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1326
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1327
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1298
1328
|
|
1299
|
-
expect(repository.read(specification.stream(
|
1329
|
+
expect(repository.read(specification.stream("whatever").older_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1])
|
1300
1330
|
end
|
1301
1331
|
|
1302
|
-
specify
|
1303
|
-
event_1 = SRecord.new(event_id:
|
1304
|
-
event_2 = SRecord.new(event_id:
|
1305
|
-
event_3 = SRecord.new(event_id:
|
1306
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1332
|
+
specify "fetching records older than or equal to specified date in stream" do
|
1333
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1334
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1335
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1336
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1307
1337
|
|
1308
|
-
expect(repository.read(specification.stream(
|
1338
|
+
expect(repository.read(specification.stream("whatever").older_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1, event_2])
|
1309
1339
|
end
|
1310
1340
|
|
1311
|
-
specify
|
1312
|
-
event_1 = SRecord.new(event_id:
|
1313
|
-
event_2 = SRecord.new(event_id:
|
1314
|
-
event_3 = SRecord.new(event_id:
|
1315
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1341
|
+
specify "fetching records newer than specified date in stream" do
|
1342
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1343
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1344
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1345
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1316
1346
|
|
1317
|
-
expect(repository.read(specification.stream(
|
1347
|
+
expect(repository.read(specification.stream("whatever").newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_3])
|
1318
1348
|
end
|
1319
1349
|
|
1320
|
-
specify
|
1321
|
-
event_1 = SRecord.new(event_id:
|
1322
|
-
event_2 = SRecord.new(event_id:
|
1323
|
-
event_3 = SRecord.new(event_id:
|
1324
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1350
|
+
specify "fetching records newer than or equal to specified date in stream" do
|
1351
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1352
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1353
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1354
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1325
1355
|
|
1326
|
-
expect(repository.read(specification.stream(
|
1356
|
+
expect(repository.read(specification.stream("whatever").newer_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_2, event_3])
|
1327
1357
|
end
|
1328
1358
|
|
1329
|
-
specify
|
1330
|
-
event_1 = SRecord.new(event_id:
|
1331
|
-
event_2 = SRecord.new(event_id:
|
1332
|
-
event_3 = SRecord.new(event_id:
|
1333
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1359
|
+
specify "fetching records older than specified date" do
|
1360
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1361
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1362
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1363
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1334
1364
|
|
1335
1365
|
expect(repository.read(specification.older_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1])
|
1336
1366
|
end
|
1337
1367
|
|
1338
|
-
specify
|
1339
|
-
event_1 = SRecord.new(event_id:
|
1340
|
-
event_2 = SRecord.new(event_id:
|
1341
|
-
event_3 = SRecord.new(event_id:
|
1342
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1368
|
+
specify "fetching records older than or equal to specified date" do
|
1369
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1370
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1371
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1372
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1343
1373
|
|
1344
1374
|
expect(repository.read(specification.older_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_1, event_2])
|
1345
1375
|
end
|
1346
1376
|
|
1347
|
-
specify
|
1348
|
-
event_1 = SRecord.new(event_id:
|
1349
|
-
event_2 = SRecord.new(event_id:
|
1350
|
-
event_3 = SRecord.new(event_id:
|
1351
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1377
|
+
specify "fetching records newer than specified date" do
|
1378
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1379
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1380
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1381
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1352
1382
|
|
1353
1383
|
expect(repository.read(specification.newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([event_3])
|
1354
1384
|
end
|
1355
1385
|
|
1356
|
-
specify
|
1357
|
-
event_1 = SRecord.new(event_id:
|
1358
|
-
event_2 = SRecord.new(event_id:
|
1359
|
-
event_3 = SRecord.new(event_id:
|
1360
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1386
|
+
specify "fetching records newer than or equal to specified date" do
|
1387
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1388
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1389
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1390
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1361
1391
|
|
1362
1392
|
expect(repository.read(specification.newer_than_or_equal(Time.utc(2020, 1, 2)).result).to_a).to eq([event_2, event_3])
|
1363
1393
|
end
|
1364
1394
|
|
1365
|
-
specify
|
1366
|
-
event_1 = SRecord.new(event_id:
|
1367
|
-
event_2 = SRecord.new(event_id:
|
1368
|
-
event_3 = SRecord.new(event_id:
|
1369
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1395
|
+
specify "fetching records from disjoint periods" do
|
1396
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1397
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1398
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1399
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1370
1400
|
|
1371
1401
|
expect(repository.read(specification.older_than(Time.utc(2020, 1, 2)).newer_than(Time.utc(2020, 1, 2)).result).to_a).to eq([])
|
1372
1402
|
end
|
1373
1403
|
|
1374
|
-
specify
|
1375
|
-
event_1 = SRecord.new(event_id:
|
1376
|
-
event_2 = SRecord.new(event_id:
|
1377
|
-
event_3 = SRecord.new(event_id:
|
1378
|
-
repository.append_to_stream([event_1, event_2, event_3], Stream.new(
|
1404
|
+
specify "fetching records within time range" do
|
1405
|
+
event_1 = SRecord.new(event_id: "8a6f053e-3ce2-4c82-a55b-4d02c66ae6ea", timestamp: Time.utc(2020, 1, 1))
|
1406
|
+
event_2 = SRecord.new(event_id: "8cee1139-4f96-483a-a175-2b947283c3c7", timestamp: Time.utc(2020, 1, 2))
|
1407
|
+
event_3 = SRecord.new(event_id: "d345f86d-b903-4d78-803f-38990c078d9e", timestamp: Time.utc(2020, 1, 3))
|
1408
|
+
repository.append_to_stream([event_1, event_2, event_3], Stream.new("whatever"), version_any)
|
1379
1409
|
|
1380
1410
|
expect(repository.read(specification.between(Time.utc(2020, 1, 1)...Time.utc(2020, 1, 3)).result).to_a).to eq([event_1, event_2])
|
1381
1411
|
end
|
@@ -1389,11 +1419,11 @@ module RubyEventStore
         Stream.new("Dummy"),
         ExpectedVersion.any
       )
-      expect(repository.read(specification.result)
-      expect(repository.read(specification.as_at.result)
-      expect(repository.read(specification.as_at.backward.result)
-      expect(repository.read(specification.as_of.result)
-      expect(repository.read(specification.as_of.backward.result)
+      expect(repository.read(specification.result)).to eq_ids([e1, e2, e3])
+      expect(repository.read(specification.as_at.result)).to eq_ids([e1, e3, e2])
+      expect(repository.read(specification.as_at.backward.result)).to eq_ids([e2, e3, e1])
+      expect(repository.read(specification.as_of.result)).to eq_ids([e3, e2, e1])
+      expect(repository.read(specification.as_of.backward.result)).to eq_ids([e1, e2, e3])
     end
 
     specify "time order is respected with batches" do
@@ -1405,11 +1435,19 @@ module RubyEventStore
         Stream.new("Dummy"),
         ExpectedVersion.any
       )
-      expect(repository.read(specification.in_batches.result).to_a.flatten
-      expect(repository.read(specification.in_batches.as_at.result).to_a.flatten
-      expect(repository.read(specification.in_batches.as_at.backward.result).to_a.flatten
-      expect(repository.read(specification.in_batches.as_of.result).to_a.flatten
-      expect(repository.read(specification.in_batches.as_of.backward.result).to_a.flatten
+      expect(repository.read(specification.in_batches.result).to_a.flatten).to eq_ids([e1, e2, e3])
+      expect(repository.read(specification.in_batches.as_at.result).to_a.flatten).to eq_ids([e1, e3, e2])
+      expect(repository.read(specification.in_batches.as_at.backward.result).to_a.flatten).to eq_ids([e2, e3, e1])
+      expect(repository.read(specification.in_batches.as_of.result).to_a.flatten).to eq_ids([e3, e2, e1])
+      expect(repository.read(specification.in_batches.as_of.backward.result).to_a.flatten).to eq_ids([e1, e2, e3])
+    end
+  end
+
+  ::RSpec::Matchers.define :eq_ids do |expected_ids|
+    match do |enum|
+      @actual = enum.map(&:event_id)
+      expected_ids == @actual
     end
+    diffable
   end
 end