nexia_event_store 0.5.1 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 84e933fac343a7fcf78be09656f655c380b4aff5
- data.tar.gz: c773e4bda103b1a081cfef4deefc95de06e86833
+ metadata.gz: ec1af445ea9cc5bb3a16d7cd0e916687cb3cbbc1
+ data.tar.gz: dfed378ea13287db955caa1f66b2c0afba232637
  SHA512:
- metadata.gz: c7fe28929dbf33069a69d13c1def033533d79f4ed2163dd8a47b1fe1d1d1b38c0869895d5d4da7d4afc658b3509ecfc0b7f4557388b8f00876539fc86e55fcae
- data.tar.gz: 3f604af91ed93a41538047947a733915479e43a86cd8a403bee9887cbb347fc3b0160603e6ff2edf4b5e8e2ad6128a528789578f1f18eaa6d0f895f8f9c5121b
+ metadata.gz: d80dddc66f087bc75faacdcf0268d3967389bf9823251a304ab132e585872491f025a047b9e333101dc771884b5b5dbd64b90a28f3669e0cfdc3f8d8f78d24df
+ data.tar.gz: b8f90a7e2eda9b87ce7232cd12ee09afae749611d5d807938b3d31c4aad5834b5a65406ebeadc562fb0c6f7e3f684e95017bc9f7f942d6f5850d50c08b4032e1
@@ -135,7 +135,7 @@ module EventStore
  def self.create_db
  connect_db
  table = "#{schema}__schema_info".to_sym
- @db.run "CREATE SCHEMA #{EventStore.schema};" unless @db.table_exists?(table)
+ @db.run("CREATE SCHEMA IF NOT EXISTS #{schema}")
  Sequel::Migrator.run(@db, File.expand_path(File.join('..','..','db', self.migrations_dir), __FILE__), table: table)
  end
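Note on the hunk above: the old code guarded `CREATE SCHEMA` with a table-existence check, while the new code relies on `CREATE SCHEMA IF NOT EXISTS`, which is idempotent, so schema setup can be re-run safely before migrations. A minimal sketch of the idea in isolation, assuming a Sequel connection and a hypothetical database URL and schema name (neither taken from this gem):

    require 'sequel'

    # Hypothetical connection URL and schema name, for illustration only.
    db = Sequel.connect('postgres://localhost/event_store_dev')
    schema = 'events'

    # Safe to run repeatedly: the second execution is a no-op instead of
    # raising "schema already exists", which is what the old
    # `unless @db.table_exists?(table)` guard was approximating.
    2.times { db.run("CREATE SCHEMA IF NOT EXISTS #{schema}") }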
 
@@ -19,6 +19,7 @@ module EventStore
  :events_from,
  :event_stream_between,
  :event_table,
+ :last_event_before,
  :delete_events!

  def snapshot_exists?
@@ -48,6 +48,10 @@ module EventStore
  translate_events(aggregate.events_from(version_number, max))
  end

+ def last_event_before(start_time, fully_qualified_names = [])
+ translate_events(aggregate.last_event_before(start_time, fully_qualified_names))
+ end
+
  def event_stream_between(start_time, end_time, fully_qualified_names = [])
  translate_events(aggregate.event_stream_between(start_time, end_time, fully_qualified_names))
  end
@@ -38,6 +38,24 @@ module EventStore
  end
  end

+ def last_event_before(start_time, fully_qualified_names = [])
+ names = fully_qualified_names.map { |n| "\'#{n}\'" }.join(',')
+ last_event_before_query = <<-EOSQL
+ select * from
+ (select *, last_value(occurred_at)
+ over(partition by fully_qualified_name order by occurred_at
+ ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) as last
+ from #{@event_table}
+ where aggregate_id = \'#{@id}\'
+ and fully_qualified_name in (#{names})
+ and occurred_at < \'#{start_time}\') as subquery
+ where occurred_at = last
+ EOSQL
+
+ query = EventStore.db[last_event_before_query]
+ query.all.map {|e| e[:serialized_event] = EventStore.unescape_bytea(e[:serialized_event]); e}
+ end
+
  def event_stream_between(start_time, end_time, fully_qualified_names = [])
  query = events.where(occurred_at: start_time..end_time)
  query = query.where(fully_qualified_name: fully_qualified_names) if fully_qualified_names && fully_qualified_names.any?
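Taken together, the client and aggregate hunks above add `last_event_before` end to end: the client delegates to the aggregate and translates the raw rows into `EventStore::SerializedEvent` objects, and the aggregate runs a `last_value(occurred_at)` window function partitioned by `fully_qualified_name` to keep, per name, the row whose `occurred_at` equals the latest occurrence strictly before the cutoff. A usage sketch mirroring the new spec further down; the aggregate id, times, and payload strings are placeholders:

    require 'securerandom'
    require 'time'

    aggregate_id = SecureRandom.uuid
    client = EventStore::Client.new(aggregate_id, :device)
    t = Time.parse("2001-01-01 00:00:00 UTC")

    # EventStore::Event.new(aggregate_id, occurred_at, fully_qualified_name, sub_key, serialized_event)
    client.append([
      EventStore::Event.new(aggregate_id, t.utc,       "fqn2", "sub", "placeholder_payload"),
      EventStore::Event.new(aggregate_id, (t + 1).utc, "fqn1", "sub", "placeholder_payload"),
      EventStore::Event.new(aggregate_id, (t + 2).utc, "fqn1", "sub", "placeholder_payload")
    ])

    # For each requested name, the most recent event strictly before the cutoff:
    # here the "fqn1" event appended at t + 1 and the "fqn2" event appended at t,
    # returned as translated SerializedEvent objects.
    client.last_event_before(t + 2, %w(fqn1 fqn2))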
@@ -1,3 +1,3 @@
  module EventStore
- VERSION = '0.5.1'
+ VERSION = '0.5.2'
  end
@@ -8,382 +8,396 @@ AGGREGATE_ID_THREE = SecureRandom.uuid
  describe EventStore::Client do
  subject(:es_client) { EventStore::Client }

- before do
- client_1 = es_client.new(AGGREGATE_ID_ONE, :device)
- client_2 = es_client.new(AGGREGATE_ID_TWO, :device)
-
- events_by_aggregate_id = {AGGREGATE_ID_ONE => [], AGGREGATE_ID_TWO => []}
- @event_time = Time.parse("2001-01-01 00:00:00 UTC")
- ([AGGREGATE_ID_ONE]*5 + [AGGREGATE_ID_TWO]*5).shuffle.each_with_index do |aggregate_id, version|
- events_by_aggregate_id[aggregate_id.to_s] << EventStore::Event.new(aggregate_id.to_s, @event_time, "zone_1_event", "1", serialized_binary_event_data)
- events_by_aggregate_id[aggregate_id.to_s] << EventStore::Event.new(aggregate_id.to_s, @event_time, "zone_2_event", "2", serialized_binary_event_data)
- events_by_aggregate_id[aggregate_id.to_s] << EventStore::Event.new(aggregate_id.to_s, @event_time, "system_event", EventStore::NO_SUB_KEY, serialized_binary_event_data)
+ context "with random events" do
+ before do
+ client_1 = es_client.new(AGGREGATE_ID_ONE, :device)
+ client_2 = es_client.new(AGGREGATE_ID_TWO, :device)
+
+ events_by_aggregate_id = {AGGREGATE_ID_ONE => [], AGGREGATE_ID_TWO => []}
+ @event_time = Time.parse("2001-01-01 00:00:00 UTC")
+ ([AGGREGATE_ID_ONE]*5 + [AGGREGATE_ID_TWO]*5).shuffle.each_with_index do |aggregate_id, version|
+ events_by_aggregate_id[aggregate_id.to_s] << EventStore::Event.new(aggregate_id.to_s, @event_time, "zone_1_event", "1", serialized_binary_event_data)
+ events_by_aggregate_id[aggregate_id.to_s] << EventStore::Event.new(aggregate_id.to_s, @event_time, "zone_2_event", "2", serialized_binary_event_data)
+ events_by_aggregate_id[aggregate_id.to_s] << EventStore::Event.new(aggregate_id.to_s, @event_time, "system_event", EventStore::NO_SUB_KEY, serialized_binary_event_data)
+ end
+ client_1.append events_by_aggregate_id[AGGREGATE_ID_ONE]
+ client_2.append events_by_aggregate_id[AGGREGATE_ID_TWO]
  end
- client_1.append events_by_aggregate_id[AGGREGATE_ID_ONE]
- client_2.append events_by_aggregate_id[AGGREGATE_ID_TWO]
- end

- it "counts the number of aggregates or clients" do
- expect(es_client.count).to eql(2)
- end
-
- it "returns a partial list of aggregates" do
- offset = 0
- limit = 1
- expect(es_client.ids(offset, limit)).to eq([[AGGREGATE_ID_ONE, AGGREGATE_ID_TWO].sort.first])
- end
+ it "counts the number of aggregates or clients" do
+ expect(es_client.count).to eql(2)
+ end

- describe "#exists?" do
- let(:fake_aggregate) { double("Aggregate") }
+ it "returns a partial list of aggregates" do
+ offset = 0
+ limit = 1
+ expect(es_client.ids(offset, limit)).to eq([[AGGREGATE_ID_ONE, AGGREGATE_ID_TWO].sort.first])
+ end

- subject(:client) { es_client.new(AGGREGATE_ID_ONE, :device) }
+ describe "#exists?" do
+ let(:fake_aggregate) { double("Aggregate") }

- before(:each) { expect(client).to receive(:aggregate).and_return(fake_aggregate) }
+ subject(:client) { es_client.new(AGGREGATE_ID_ONE, :device) }

- it "checks if the snapshot exists" do
- expect(fake_aggregate).to receive(:snapshot_exists?).and_return(true)
- expect(client.exists?).to eq(true)
- end
- end
+ before(:each) { expect(client).to receive(:aggregate).and_return(fake_aggregate) }

- describe '#raw_event_stream' do
- it "should be an array of hashes that represent database records, not EventStore::SerializedEvent objects" do
- raw_stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
- raw_event = raw_stream.first
- expect(raw_event.class).to eq(Hash)
- expect(raw_event.keys).to eq([:id, :version, :aggregate_id, :fully_qualified_name, :occurred_at, :serialized_event, :sub_key])
+ it "checks if the snapshot exists" do
+ expect(fake_aggregate).to receive(:snapshot_exists?).and_return(true)
+ expect(client.exists?).to eq(true)
+ end
  end

- it 'should be empty for aggregates without events' do
- stream = es_client.new(100, :device).raw_event_stream
- expect(stream.empty?).to be_truthy
- end
+ describe '#raw_event_stream' do
+ it "should be an array of hashes that represent database records, not EventStore::SerializedEvent objects" do
+ raw_stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
+ raw_event = raw_stream.first
+ expect(raw_event.class).to eq(Hash)
+ expect(raw_event.keys).to eq([:id, :version, :aggregate_id, :fully_qualified_name, :occurred_at, :serialized_event, :sub_key])
+ end

- it 'should only have events for a single aggregate' do
- stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
- stream.each { |event| expect(event[:aggregate_id]).to eq(AGGREGATE_ID_ONE) }
- end
+ it 'should be empty for aggregates without events' do
+ stream = es_client.new(100, :device).raw_event_stream
+ expect(stream.empty?).to be_truthy
+ end

- it 'should have all events for that aggregate' do
- stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
- expect(stream.count).to eq(15)
- end
- end
+ it 'should only have events for a single aggregate' do
+ stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
+ stream.each { |event| expect(event[:aggregate_id]).to eq(AGGREGATE_ID_ONE) }
+ end

- describe '#event_stream' do
- it "should be an array of EventStore::SerializedEvent objects" do
- stream = es_client.new(AGGREGATE_ID_ONE, :device).event_stream
- expect(stream.class).to eq(Array)
- event = stream.first
- expect(event.class).to eq(EventStore::SerializedEvent)
+ it 'should have all events for that aggregate' do
+ stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
+ expect(stream.count).to eq(15)
+ end
  end

- it 'should be empty for aggregates without events' do
- stream = es_client.new(100, :device).raw_event_stream
- expect(stream.empty?).to be_truthy
- end
+ describe '#event_stream' do
+ it "should be an array of EventStore::SerializedEvent objects" do
+ stream = es_client.new(AGGREGATE_ID_ONE, :device).event_stream
+ expect(stream.class).to eq(Array)
+ event = stream.first
+ expect(event.class).to eq(EventStore::SerializedEvent)
+ end

- it 'should only have events for a single aggregate' do
- raw_stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
- stream = es_client.new(AGGREGATE_ID_ONE, :device).event_stream
- expect(stream.map(&:fully_qualified_name)).to eq(raw_stream.inject([]){|m, event| m << event[:fully_qualified_name]; m})
- end
+ it 'should be empty for aggregates without events' do
+ stream = es_client.new(100, :device).raw_event_stream
+ expect(stream.empty?).to be_truthy
+ end

- it 'should have all events for that aggregate' do
- stream = es_client.new(AGGREGATE_ID_ONE, :device).event_stream
- expect(stream.count).to eq(15)
- end
+ it 'should only have events for a single aggregate' do
+ raw_stream = es_client.new(AGGREGATE_ID_ONE, :device).raw_event_stream
+ stream = es_client.new(AGGREGATE_ID_ONE, :device).event_stream
+ expect(stream.map(&:fully_qualified_name)).to eq(raw_stream.inject([]){|m, event| m << event[:fully_qualified_name]; m})
+ end

- context "when the serialized event is terminated prematurely with a null byte" do
- it "does not truncate the serialized event when there is a binary zero value is at the end" do
- serialized_event = serialized_event_data_terminated_by_null
- client = es_client.new("any_device", :device)
- event = EventStore::Event.new("any_device", @event_time, 'other_event_name', "nozone", serialized_event)
- client.append([event])
- expect(client.event_stream.last[:serialized_event]).to eql(serialized_event)
+ it 'should have all events for that aggregate' do
+ stream = es_client.new(AGGREGATE_ID_ONE, :device).event_stream
+ expect(stream.count).to eq(15)
  end

- it "conversion of byte array to and from hex should be lossless" do
- client = es_client.new("any_device", :device)
- serialized_event = serialized_event_data_terminated_by_null
- event = EventStore::Event.new("any_device", @event_time, 'terminated_by_null_event', "zone_number", serialized_event)
- client.append([event])
- hex_from_db = EventStore.db.from(EventStore.fully_qualified_table).where(fully_qualified_name: 'terminated_by_null_event').first[:serialized_event]
- expect(hex_from_db).to eql(EventStore.escape_bytea(serialized_event))
+ context "when the serialized event is terminated prematurely with a null byte" do
+ it "does not truncate the serialized event when there is a binary zero value is at the end" do
+ serialized_event = serialized_event_data_terminated_by_null
+ client = es_client.new("any_device", :device)
+ event = EventStore::Event.new("any_device", @event_time, 'other_event_name', "nozone", serialized_event)
+ client.append([event])
+ expect(client.event_stream.last[:serialized_event]).to eql(serialized_event)
+ end
+
+ it "conversion of byte array to and from hex should be lossless" do
+ client = es_client.new("any_device", :device)
+ serialized_event = serialized_event_data_terminated_by_null
+ event = EventStore::Event.new("any_device", @event_time, 'terminated_by_null_event', "zone_number", serialized_event)
+ client.append([event])
+ hex_from_db = EventStore.db.from(EventStore.fully_qualified_table).where(fully_qualified_name: 'terminated_by_null_event').first[:serialized_event]
+ expect(hex_from_db).to eql(EventStore.escape_bytea(serialized_event))
+ end
  end
  end
- end


- describe '#raw_event_streams_from_version' do
- subject { es_client.new(AGGREGATE_ID_ONE, :device) }
+ describe '#raw_event_streams_from_version' do
+ subject { es_client.new(AGGREGATE_ID_ONE, :device) }

- it 'should return all the raw events in the stream starting from a certain version' do
- minimum_event_version = 2
- raw_stream = subject.raw_event_stream_from(minimum_event_version)
- event_versions = raw_stream.inject([]){|m, event| m << event[:version]; m}
- expect(event_versions.min).to be >= minimum_event_version
- end
+ it 'should return all the raw events in the stream starting from a certain version' do
+ minimum_event_version = 2
+ raw_stream = subject.raw_event_stream_from(minimum_event_version)
+ event_versions = raw_stream.inject([]){|m, event| m << event[:version]; m}
+ expect(event_versions.min).to be >= minimum_event_version
+ end

- it 'should return no more than the maximum number of events specified above the ' do
- max_number_of_events = 5
- minimum_event_version = 2
- raw_stream = subject.raw_event_stream_from(minimum_event_version, max_number_of_events)
- expect(raw_stream.count).to eq(max_number_of_events)
- end
+ it 'should return no more than the maximum number of events specified above the ' do
+ max_number_of_events = 5
+ minimum_event_version = 2
+ raw_stream = subject.raw_event_stream_from(minimum_event_version, max_number_of_events)
+ expect(raw_stream.count).to eq(max_number_of_events)
+ end

- it 'should be empty for version above the current highest version number' do
- raw_stream = subject.raw_event_stream_from(subject.version + 1)
- expect(raw_stream).to be_empty
+ it 'should be empty for version above the current highest version number' do
+ raw_stream = subject.raw_event_stream_from(subject.version + 1)
+ expect(raw_stream).to be_empty
+ end
  end
- end

- describe 'event_stream_from_version' do
- subject { es_client.new(AGGREGATE_ID_ONE, :device) }
+ describe 'event_stream_from_version' do
+ subject { es_client.new(AGGREGATE_ID_ONE, :device) }

- it 'should return all the raw events in the stream starting from a certain version' do
- minimum_event_version = 2
- raw_stream = subject.raw_event_stream_from(minimum_event_version)
- event_versions = raw_stream.inject([]){|m, event| m << event[:version]; m}
- expect(event_versions.min).to be >= minimum_event_version
- end
+ it 'should return all the raw events in the stream starting from a certain version' do
+ minimum_event_version = 2
+ raw_stream = subject.raw_event_stream_from(minimum_event_version)
+ event_versions = raw_stream.inject([]){|m, event| m << event[:version]; m}
+ expect(event_versions.min).to be >= minimum_event_version
+ end

- it 'should return no more than the maximum number of events specified above the ' do
- max_number_of_events = 5
- minimum_event_version = 2
- raw_stream = subject.raw_event_stream_from(minimum_event_version, max_number_of_events)
- expect(raw_stream.count).to eq(max_number_of_events)
- end
+ it 'should return no more than the maximum number of events specified above the ' do
+ max_number_of_events = 5
+ minimum_event_version = 2
+ raw_stream = subject.raw_event_stream_from(minimum_event_version, max_number_of_events)
+ expect(raw_stream.count).to eq(max_number_of_events)
+ end

- it 'should be empty for version above the current highest version number' do
- raw_stream = subject.raw_event_stream_from(subject.version + 1)
- expect(raw_stream).to eq([])
+ it 'should be empty for version above the current highest version number' do
+ raw_stream = subject.raw_event_stream_from(subject.version + 1)
+ expect(raw_stream).to eq([])
+ end
  end
- end

- describe '#event_stream_between' do
- subject {es_client.new(AGGREGATE_ID_ONE, :device)}
+ describe '#event_stream_between' do
+ subject {es_client.new(AGGREGATE_ID_ONE, :device)}

- before do
- @oldest_event_time = @event_time + 1
- @middle_event_time = @event_time + 2
- @newest_event_time = @event_time + 3
-
- @outside_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@event_time).utc, "middle_event", "zone", "#{1002.to_s(2)}_foo")
- @event = EventStore::Event.new(AGGREGATE_ID_ONE, (@oldest_event_time).utc, "oldest_event", "zone", "#{1002.to_s(2)}_foo")
- @new_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@middle_event_time).utc, "middle_event", "zone", "#{1002.to_s(2)}_foo")
- @newest_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@newest_event_time).utc, "newest_event_type", "zone", "#{1002.to_s(2)}_foo")
- subject.append([@event, @new_event, @newest_event])
- end
+ before do
+ @oldest_event_time = @event_time + 1
+ @middle_event_time = @event_time + 2
+ @newest_event_time = @event_time + 3
+
+ @outside_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@event_time).utc, "middle_event", "zone", "#{1002.to_s(2)}_foo")
+ @event = EventStore::Event.new(AGGREGATE_ID_ONE, (@oldest_event_time).utc, "oldest_event", "zone", "#{1002.to_s(2)}_foo")
+ @new_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@middle_event_time).utc, "middle_event", "zone", "#{1002.to_s(2)}_foo")
+ @newest_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@newest_event_time).utc, "newest_event_type", "zone", "#{1002.to_s(2)}_foo")
+ subject.append([@event, @new_event, @newest_event])
+ end

- it "returns all events between a start and an end time" do
- start_time = @oldest_event_time
- end_time = @newest_event_time
- expect(subject.event_stream_between(start_time, end_time).length).to eq(3)
- end
+ it "returns all events between a start and an end time" do
+ start_time = @oldest_event_time
+ end_time = @newest_event_time
+ expect(subject.event_stream_between(start_time, end_time).length).to eq(3)
+ end

- it "returns an empty array if start time is before end time" do
- start_time = @newest_event_time
- end_time = @oldest_event_time
- expect(subject.event_stream_between(start_time, end_time).length).to eq(0)
- end
+ it "returns an empty array if start time is before end time" do
+ start_time = @newest_event_time
+ end_time = @oldest_event_time
+ expect(subject.event_stream_between(start_time, end_time).length).to eq(0)
+ end

- it "returns all the events at a given time if the start time is the same as the end time" do
- start_time = @oldest_event_time
- end_time = @oldest_event_time
- expect(subject.event_stream_between(start_time, end_time).length).to eq(1)
- end
+ it "returns all the events at a given time if the start time is the same as the end time" do
+ start_time = @oldest_event_time
+ end_time = @oldest_event_time
+ expect(subject.event_stream_between(start_time, end_time).length).to eq(1)
+ end

- it "returns unencodes the serialized_event fields out of the database encoding" do
- expect(EventStore).to receive(:unescape_bytea).once
- start_time = @oldest_event_time
- end_time = @oldest_event_time
- expect(subject.event_stream_between(start_time, end_time).length).to eq(1)
- end
+ it "returns unencodes the serialized_event fields out of the database encoding" do
+ expect(EventStore).to receive(:unescape_bytea).once
+ start_time = @oldest_event_time
+ end_time = @oldest_event_time
+ expect(subject.event_stream_between(start_time, end_time).length).to eq(1)
+ end

- it "returns the raw events translated into SerializedEvents" do
- expect(subject).to receive(:translate_events).once.and_call_original
- start_time = @oldest_event_time
- end_time = @oldest_event_time
- expect(subject.event_stream_between(start_time, end_time).length).to eq(1)
- end
+ it "returns the raw events translated into SerializedEvents" do
+ expect(subject).to receive(:translate_events).once.and_call_original
+ start_time = @oldest_event_time
+ end_time = @oldest_event_time
+ expect(subject.event_stream_between(start_time, end_time).length).to eq(1)
+ end

- it "returns types requested within the time range" do
- start_time = @oldest_event_time
- end_time = @newest_event_time
- fully_qualified_name = 'middle_event'
- expect(subject.event_stream_between(start_time, end_time, [fully_qualified_name]).length).to eq(1)
- end
+ it "returns types requested within the time range" do
+ start_time = @oldest_event_time
+ end_time = @newest_event_time
+ fully_qualified_name = 'middle_event'
+ expect(subject.event_stream_between(start_time, end_time, [fully_qualified_name]).length).to eq(1)
+ end

- it "returns types requested within the time range for more than one type" do
- start_time = @oldest_event_time
- end_time = @newest_event_time
- fully_qualified_names = ['middle_event', 'newest_event_type']
- expect(subject.event_stream_between(start_time, end_time, fully_qualified_names).length).to eq(2)
- end
+ it "returns types requested within the time range for more than one type" do
+ start_time = @oldest_event_time
+ end_time = @newest_event_time
+ fully_qualified_names = ['middle_event', 'newest_event_type']
+ expect(subject.event_stream_between(start_time, end_time, fully_qualified_names).length).to eq(2)
+ end

- it "returns an empty array if there are no events of the requested types in the time range" do
- start_time = @oldest_event_time
- end_time = @newest_event_time
- fully_qualified_names = ['random_strings']
- expect(subject.event_stream_between(start_time, end_time, fully_qualified_names).length).to eq(0)
- end
+ it "returns an empty array if there are no events of the requested types in the time range" do
+ start_time = @oldest_event_time
+ end_time = @newest_event_time
+ fully_qualified_names = ['random_strings']
+ expect(subject.event_stream_between(start_time, end_time, fully_qualified_names).length).to eq(0)
+ end

- it "returns only events of types that exist within the time range" do
- start_time = @oldest_event_time
- end_time = @newest_event_time
- fully_qualified_names = ['middle_event', 'event_name']
- expect(subject.event_stream_between(start_time, end_time, fully_qualified_names).length).to eq(1)
+ it "returns only events of types that exist within the time range" do
+ start_time = @oldest_event_time
+ end_time = @newest_event_time
+ fully_qualified_names = ['middle_event', 'event_name']
+ expect(subject.event_stream_between(start_time, end_time, fully_qualified_names).length).to eq(1)
+ end
  end
- end

- describe '#peek' do
- let(:client) {es_client.new(AGGREGATE_ID_ONE, :device)}
- subject { client.peek }
+ describe '#peek' do
+ let(:client) { es_client.new(AGGREGATE_ID_ONE, :device) }

- it 'should return the last event in the event stream' do
- last_event = EventStore.db.from(client.event_table).where(aggregate_id: AGGREGATE_ID_ONE).order(:version).last
- expect(subject).to eq(EventStore::SerializedEvent.new(last_event[:fully_qualified_name], EventStore.unescape_bytea(last_event[:serialized_event]), last_event[:version], @event_time))
+ it 'should return the last event in the event stream' do
+ last_event = EventStore.db.from(client.event_table).where(aggregate_id: AGGREGATE_ID_ONE).order(:version).last
+ peek = client.peek
+ expect(peek.fully_qualified_name).to eq(last_event[:fully_qualified_name])
+ expect(peek.version).to eq(last_event[:version])
+ end
  end
  end

- describe '#append' do
- before do
- @client = EventStore::Client.new(AGGREGATE_ID_ONE, :device)
- @event = @client.peek
- @old_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@event_time - 2000).utc, "old", "zone", "#{1000.to_s(2)}_foo")
- @new_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@event_time - 1000).utc, "new", "zone", "#{1001.to_s(2)}_foo")
- @really_new_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@event_time + 100).utc, "really_new", "zone", "#{1002.to_s(2)}_foo")
- @duplicate_event = EventStore::Event.new(AGGREGATE_ID_ONE, (@event_time).utc, 'duplicate', "zone", "#{12.to_s(2)}_foo")
- end
-
- describe "when expected version number is greater than the last version" do
- describe 'and there are no prior events of type' do
- before do
- @client.append([@old_event])
- end
-
- it 'should append a single event of a new type without raising an error' do
- initial_count = @client.count
- events = [@new_event]
- @client.append(events)
- expect(@client.count).to eq(initial_count + events.length)
- end
-
- it 'should append multiple events of a new type without raising and error' do
- initial_count = @client.count
- events = [@new_event, @new_event]
- @client.append(events)
- expect(@client.count).to eq(initial_count + events.length)
- end
-
- it "should increment the version number by the number of events added" do
- events = [@new_event, @really_new_event]
- initial_version = @client.version
- @client.append(events)
- expect(@client.version).to eq(initial_version + events.length)
- end
-
- it "should set the snapshot version number to match that of the last event in the aggregate's event stream" do
- events = [@new_event, @really_new_event]
- initial_stream_version = @client.raw_event_stream.last[:version]
- expect(@client.snapshot.version).to eq(initial_stream_version)
- @client.append(events)
- updated_stream_version = @client.raw_event_stream.last[:version]
- expect(@client.snapshot.version).to eq(updated_stream_version)
- end
-
- it "should write-through-cache the event in a snapshot without duplicating events" do
- @client.destroy!
- @client.append([@old_event, @new_event, @really_new_event])
- expect(@client.snapshot.to_a).to eq(@client.event_stream)
+ context "with prescribed events" do
+ let(:event_time) { Time.now }
+
+ describe '#append' do
+ let(:client) { EventStore::Client.new(AGGREGATE_ID_ONE, :device) }
+ let(:old_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (event_time - 2000).utc, "old", "zone", "#{1000.to_s(2)}_foo") }
+ let(:new_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (event_time - 1000).utc, "new", "zone", "#{1001.to_s(2)}_foo") }
+ let(:really_new_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (event_time + 100).utc, "really_new", "zone", "#{1002.to_s(2)}_foo") }
+ let(:duplicate_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (event_time).utc, "duplicate", "zone", "#{12.to_s(2)}_foo") }
+
+ describe "when expected version number is greater than the last version" do
+ describe 'and there are no prior events of type' do
+ before(:each) do
+ client.append([old_event])
+ end
+
+ it 'should append a single event of a new type without raising an error' do
+ initial_count = client.count
+ events = [new_event]
+ client.append(events)
+ expect(client.count).to eq(initial_count + events.length)
+ end
+
+ it 'should append multiple events of a new type without raising and error' do
+ initial_count = client.count
+ events = [new_event, new_event]
+ client.append(events)
+ expect(client.count).to eq(initial_count + events.length)
+ end
+
+ it "should increment the version number by the number of events added" do
+ events = [new_event, really_new_event]
+ expect{client.append(events)}.to change(client, :version).by(events.length)
+ end
+
+ it "sets the snapshot version number to match that of the last event in the aggregate's event stream" do
+ expect(client.snapshot.version).to eq(client.raw_event_stream.last[:version])
+
+ client.append([new_event, really_new_event])
+ expect(client.snapshot.version).to eq(client.raw_event_stream.last[:version])
+ end
+
+ it "should write-through-cache the event in a snapshot without duplicating events" do
+ client.destroy!
+ client.append([old_event, new_event, new_event])
+ expected = []
+ expected << client.event_stream.first
+ expected << client.event_stream.last
+ expect(client.snapshot.to_a).to eq(expected)
+ end
+
+ it "should raise a meaningful exception when a nil event given to it to append" do
+ expect {client.append([nil])}.to raise_exception(ArgumentError)
+ end
  end

- it "should raise a meaningful exception when a nil event given to it to append" do
- expect {@client.append([nil])}.to raise_exception(ArgumentError)
+ describe 'with prior events of same type' do
+ before(:each) do
+ client.append([old_event])
+ end
+
+ xit 'should raise a ConcurrencyError if the the event version is less than current version' do
+ client.append([duplicate_event])
+ reset_current_version_for(client)
+ expect { client.append([duplicate_event]) }.to raise_error(EventStore::ConcurrencyError)
+ end
+
+ it 'should not raise an error when two events of the same type are appended' do
+ client.append([duplicate_event])
+ client.append([duplicate_event]) #will fail automatically if it throws an error, no need for assertions (which now print warning for some reason)
+ end
+
+ it "should write-through-cache the event in a snapshot without duplicating events" do
+ client.destroy!
+ client.append([old_event, new_event, new_event])
+ expected = []
+ expected << client.event_stream.first
+ expected << client.event_stream.last
+ expect(client.snapshot.to_a).to eq(expected)
+ end
+
+ it "sets the snapshot version number to match that of the last event in the aggregate's event stream" do
+ expect(client.snapshot.version).to eq(client.raw_event_stream.last[:version])
+
+ client.append([old_event, old_event])
+ expect(client.snapshot.version).to eq(client.raw_event_stream.last[:version])
+ end
  end
  end

- describe 'with prior events of same type' do
- xit 'should raise a ConcurrencyError if the the event version is less than current version' do
- @client.append([@duplicate_event])
- reset_current_version_for(@client)
- expect { @client.append([@duplicate_event]) }.to raise_error(EventStore::ConcurrencyError)
+ describe 'transactional' do
+ before do
+ @bad_event = new_event.dup
+ @bad_event.fully_qualified_name = nil
  end

- it 'should not raise an error when two events of the same type are appended' do
- @client.append([@duplicate_event])
- @client.append([@duplicate_event]) #will fail automatically if it throws an error, no need for assertions (which now print warning for some reason)
+ it 'should revert all append events if one fails' do
+ starting_count = client.count
+ expect { client.append([new_event, @bad_event]) }.to raise_error(EventStore::AttributeMissingError)
+ expect(client.count).to eq(starting_count)
  end

- it "should write-through-cache the event in a snapshot without duplicating events" do
- @client.destroy!
- @client.append([@old_event, @new_event, @new_event])
- expected = []
- expected << @client.event_stream.first
- expected << @client.event_stream.last
- expect(@client.snapshot.to_a).to eq(expected)
+ it 'does not yield to the block if it fails' do
+ x = 0
+ expect { client.append([@bad_event]) { x += 1 } }.to raise_error(EventStore::AttributeMissingError)
+ expect(x).to eq(0)
  end

- #TODO if we let the db assign version# then this can't be true anymore
- # the current snapshot version will be the last version number inserted
- # if you give me duplicate events, I'm gonna append them and the last one in
- # is the one that will be in the snapshot
- xit "should increment the version number by the number of unique events added" do
- events = [@old_event, @old_event, @old_event]
- initial_version = @client.version
- @client.append(events)
- byebug
- expect(@client.version).to eq(initial_version + events.uniq.length)
+ it 'yield to the block after event creation' do
+ x = 0
+ client.append([]) { x += 1 }
+ expect(x).to eq(1)
  end

- it "should set the snapshot version number to match that of the last event in the aggregate's event stream" do
- events = [@old_event, @old_event]
- initial_stream_version = @client.raw_event_stream.last[:version]
- expect(@client.snapshot.version).to eq(initial_stream_version)
- @client.append(events)
- updated_stream_version = @client.raw_event_stream.last[:version]
- expect(@client.snapshot.version).to eq(updated_stream_version)
+ it 'should pass the raw event_data to the block' do
+ client.append([new_event]) do |raw_event_data|
+ expect(raw_event_data).to eq([new_event])
+ end
  end
  end
- end

- describe 'transactional' do
- before do
- @bad_event = @new_event.dup
- @bad_event.fully_qualified_name = nil
+ def reset_current_version_for(client)
+ aggregate = client.instance_variable_get("@aggregate")
+ EventStore.redis.hset(aggregate.snapshot_version_table, :current_version, 1000)
  end
+ end

- it 'should revert all append events if one fails' do
- starting_count = @client.count
- expect { @client.append([@new_event, @bad_event]) }.to raise_error(EventStore::AttributeMissingError)
- expect(@client.count).to eq(starting_count)
- end
+ describe '#last_event_before' do
+ let(:oldest_event_time) { event_time + 1 }
+ let(:middle_event_time) { event_time + 2 }
+ let(:newest_event_time) { event_time + 3 }
+ let(:other_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (event_time).utc, "fqn2", "other", "#{1002.to_s(2)}_foo") }
+ let(:event) { EventStore::Event.new(AGGREGATE_ID_ONE, (oldest_event_time).utc, "fqn1", "event", "#{1002.to_s(2)}_foo") }
+ let(:new_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (middle_event_time).utc, "fqn1", "new", "#{1002.to_s(2)}_foo") }
+ let(:newest_event) { EventStore::Event.new(AGGREGATE_ID_ONE, (newest_event_time).utc, "fqn1", "newest", "#{1002.to_s(2)}_foo") }
+ let(:fqns) { %W(fqn1 fqn2) }

- it 'does not yield to the block if it fails' do
- x = 0
- expect { @client.append([@bad_event]) { x += 1 } }.to raise_error(EventStore::AttributeMissingError)
- expect(x).to eq(0)
- end
+ subject(:client) { es_client.new(AGGREGATE_ID_ONE, :device) }

- it 'yield to the block after event creation' do
- x = 0
- @client.append([]) { x += 1 }
- expect(x).to eq(1)
+ before do
+ client.append([other_event, event, new_event, newest_event])
  end

- it 'should pass the raw event_data to the block' do
- @client.append([@new_event]) do |raw_event_data|
- expect(raw_event_data).to eq([@new_event])
- end
+ it "returns the latest event before the given time" do
+ last_events = client.last_event_before(newest_event_time, fqns)
+ expect(last_events.map{ |e| e.occurred_at.to_i}).to eq([new_event[:occurred_at].to_i, other_event[:occurred_at].to_i])
  end
  end
-
- def reset_current_version_for(client)
- aggregate = client.instance_variable_get("@aggregate")
- EventStore.redis.hset(aggregate.snapshot_version_table, :current_version, 1000)
- end
-
  end

  def serialized_event_data_terminated_by_null
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: nexia_event_store
  version: !ruby/object:Gem::Version
- version: 0.5.1
+ version: 0.5.2
  platform: ruby
  authors:
  - Paul Saieg, John Colvin
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-01-26 00:00:00.000000000 Z
+ date: 2015-03-12 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -273,7 +273,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.4.3
+ rubygems_version: 2.4.2
  signing_key:
  specification_version: 4
  summary: Ruby implementation of an EventSource (A+ES) for the Nexia Ecosystem