ruby_event_store-sequel 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +7 -0
- data/lib/ruby_event_store/generators/templates/1_create_ruby_event_store_tables.rb +40 -0
- data/lib/ruby_event_store/generators/templates/mysql/1_create_ruby_event_store_tables.rb +44 -0
- data/lib/ruby_event_store/generators/templates/postgres/1_create_ruby_event_store_tables.rb +44 -0
- data/lib/ruby_event_store/sequel/event_repository.rb +386 -0
- data/lib/ruby_event_store/sequel/index_violation_detector.rb +40 -0
- data/lib/ruby_event_store/sequel/version.rb +7 -0
- data/lib/ruby_event_store/sequel.rb +7 -0
- metadata +88 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: ea243993dea1905cfb81d7a15462ae7e46afdd072d81048bef2fa480cede1dda
  data.tar.gz: b736b5c5b52a6e404a48d9faf5a348e69482ece6044402d21999b9a70d2f9252
SHA512:
  metadata.gz: a6765b5af6ee1bdf081285203ddfa6010e2ef510199b076add7fadbc8ec64713318af0cf822d270a6953cc5562a80082a453f8b86b2537cf2e0e3127f1837dfd
  data.tar.gz: 40cd9e30fa7bb9390171efb3f2f53a39b62d087469e5cd842f004b1eb71cd3f497c0cc982975c33d1ecc4a0d2feb2ebebc75999aa14097614042b350d7ff4279
data/README.md
ADDED
@@ -0,0 +1,7 @@
# RubyEventStore Sequel Event Repository

A [sequel](https://sequel.jeremyevans.net) based implementation of events repository for [Ruby Event Store](https://github.com/RailsEventStore/rails_event_store). It is an alternative to the ActiveRecord `EventRepository` implementation used in `rails_event_store` gem.

[Read the docs to get started.](http://railseventstore.org/docs/repository/)
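For orientation, here is a minimal sketch (not part of the packaged files) of how this repository could be wired into a RubyEventStore client. The database URL and the `JSON` serializer are illustrative assumptions; the constructor keywords `sequel:` and `serializer:` come from `event_repository.rb` below, and in RubyEventStore 2.x a serializer is simply an object responding to `dump` and `load`.

```ruby
# Sketch only: assumes DATABASE_URL (or a local SQLite file) and a JSON serializer.
require "sequel"
require "ruby_event_store"
require "ruby_event_store/sequel"

db = Sequel.connect(ENV.fetch("DATABASE_URL", "sqlite://db.sqlite3"))

client =
  RubyEventStore::Client.new(
    repository: RubyEventStore::Sequel::EventRepository.new(sequel: db, serializer: JSON),
  )
```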
data/lib/ruby_event_store/generators/templates/1_create_ruby_event_store_tables.rb
ADDED
@@ -0,0 +1,40 @@
# frozen_string_literal: true

::Sequel.migration do
  up do
    ENV["DATA_TYPE"] ||= "text"
    data_type = ENV["DATA_TYPE"].to_sym
    data_types = %i[text]
    raise ArgumentError, "DATA_TYPE must be: #{data_types.join(", ")}" unless data_types.include?(data_type)

    create_table :event_store_events_in_streams do
      primary_key :id, type: :Bignum, null: false

      column :stream, String, null: false
      column :position, Integer
      column :event_id, String, size: 36, null: false
      column :created_at, Time, null: false, index: "index_event_store_events_in_streams_on_created_at"

      index %i[stream position], unique: true, name: "index_event_store_events_in_streams_on_stream_and_position"
      index %i[stream event_id], unique: true, name: "index_event_store_events_in_streams_on_stream_and_event_id"
    end

    create_table :event_store_events do
      primary_key :id, type: :Bignum, null: false

      column :event_id, String, size: 36, null: false
      column :event_type, String, null: false
      column :metadata, String, text: true
      column :data, String, text: true, null: false
      column :created_at, Time, null: false, index: "index_event_store_events_on_created_at"
      column :valid_at, Time, index: "index_event_store_events_on_valid_at"

      index :event_id, unique: true, name: "index_event_store_events_on_event_id"
    end
  end

  down do
    drop_table :event_store_events
    drop_table :event_store_events_in_streams
  end
end
data/lib/ruby_event_store/generators/templates/mysql/1_create_ruby_event_store_tables.rb
ADDED
@@ -0,0 +1,44 @@
# frozen_string_literal: true

::Sequel.migration do
  up do
    ENV["DATA_TYPE"] ||= "text"
    data_type = ENV["DATA_TYPE"].to_sym
    data_types = %i[text]
    raise ArgumentError, "DATA_TYPE must be: #{data_types.join(", ")}" unless data_types.include?(data_type)

    create_table :event_store_events_in_streams do
      primary_key :id, type: :Bignum, null: false

      column :stream, String, null: false
      column :position, Integer
      column :event_id, String, size: 36, null: false
      column :created_at,
             Time,
             null: false,
             type: "DATETIME(6)",
             index: "index_event_store_events_in_streams_on_created_at"

      index %i[stream position], unique: true, name: "index_event_store_events_in_streams_on_stream_and_position"
      index %i[stream event_id], unique: true, name: "index_event_store_events_in_streams_on_stream_and_event_id"
    end

    create_table :event_store_events do
      primary_key :id, type: :Bignum, null: false

      column :event_id, String, size: 36, null: false
      column :event_type, String, null: false
      column :metadata, String, text: true
      column :data, String, text: true, null: false
      column :created_at, Time, null: false, type: "DATETIME(6)", index: "index_event_store_events_on_created_at"
      column :valid_at, Time, type: "DATETIME(6)", index: "index_event_store_events_on_valid_at"

      index :event_id, unique: true, name: "index_event_store_events_on_event_id"
    end
  end

  down do
    drop_table :event_store_events
    drop_table :event_store_events_in_streams
  end
end
data/lib/ruby_event_store/generators/templates/postgres/1_create_ruby_event_store_tables.rb
ADDED
@@ -0,0 +1,44 @@
# frozen_string_literal: true

::Sequel.migration do
  up do
    ENV["DATA_TYPE"] ||= "text"
    data_type = ENV["DATA_TYPE"].to_sym
    data_types = %i[text json jsonb]
    raise ArgumentError, "DATA_TYPE must be: #{data_types.join(", ")}" unless data_types.include?(data_type)

    create_table :event_store_events_in_streams do
      primary_key :id, type: :Bignum, null: false

      column :stream, String, null: false
      column :position, Integer
      column :event_id, String, null: false
      column :created_at,
             Time,
             null: false,
             type: "TIMESTAMP",
             index: "index_event_store_events_in_streams_on_created_at"

      index %i[stream position], unique: true, name: "index_event_store_events_in_streams_on_stream_and_position"
      index %i[stream event_id], unique: true, name: "index_event_store_events_in_streams_on_stream_and_event_id"
    end

    create_table :event_store_events do
      primary_key :id, type: :Bignum, null: false

      column :event_id, String, null: false
      column :event_type, String, null: false
      column :metadata, data_type
      column :data, data_type, null: false
      column :created_at, Time, null: false, type: "TIMESTAMP", index: "index_event_store_events_on_created_at"
      column :valid_at, Time, type: "TIMESTAMP", index: "index_event_store_events_on_valid_at"

      index :event_id, unique: true, name: "index_event_store_events_on_event_id"
    end
  end

  down do
    drop_table :event_store_events
    drop_table :event_store_events_in_streams
  end
end
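The three templates above are plain Sequel migrations, so they can be applied with Sequel's built-in migrator. A minimal sketch, assuming the chosen template has been copied into a local db/migrations directory and DATABASE_URL points at the target database (for the postgres template, DATA_TYPE may be set to "json" or "jsonb" before running):

```ruby
# Sketch only: paths and environment variables are assumptions for illustration.
require "sequel"

Sequel.extension :migration

db = Sequel.connect(ENV.fetch("DATABASE_URL"))
# Runs every numbered migration file found in db/migrations, including the
# copied 1_create_ruby_event_store_tables.rb template.
Sequel::Migrator.run(db, "db/migrations")
```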
data/lib/ruby_event_store/sequel/event_repository.rb
ADDED
@@ -0,0 +1,386 @@
# frozen_string_literal: true

module RubyEventStore
  module Sequel
    class EventRepository
      UPSERT_COLUMNS = %i[event_type data metadata valid_at].freeze

      def initialize(sequel:, serializer:)
        @serializer = serializer
        @index_violation_detector = IndexViolationDetector.new("event_store_events", "event_store_events_in_streams")
        @db = sequel
        @db.timezone = :utc
      end

      attr_reader :index_violation_detector

      def append_to_stream(records, stream, expected_version)
        resolved_version = resolved_version(expected_version, stream)

        @db.transaction do
          records.map.with_index do |record, index|
            serialized_record = record.serialize(@serializer)

            @db[:event_store_events].insert(
              event_id: serialized_record.event_id,
              event_type: serialized_record.event_type,
              data: serialized_record.data,
              metadata: serialized_record.metadata,
              created_at: record.timestamp,
              valid_at: optimize_timestamp(record.valid_at, record.timestamp),
            )
            unless stream.global?
              @db[:event_store_events_in_streams].insert(
                event_id: serialized_record.event_id,
                stream: stream.name,
                created_at: Time.now.utc,
                position: resolved_version ? resolved_version + index + 1 : nil,
              )
            end
          end
        end
        self
      rescue ::Sequel::UniqueConstraintViolation => ex
        raise EventDuplicatedInStream if index_violation_detector.detect(ex.message)
        raise WrongExpectedEventVersion
      end

      def link_to_stream(event_ids, stream, expected_version)
        (
          event_ids -
            @db[:event_store_events]
              .select(::Sequel[:event_store_events][:event_id])
              .where(::Sequel[:event_store_events][:event_id] => event_ids)
              .map { |e| e[:event_id] }
        ).each { |id| raise EventNotFound.new(id) }

        resolved_version = resolved_version(expected_version, stream)

        @db.transaction do
          event_ids.map.with_index do |event_id, index|
            @db[:event_store_events_in_streams].insert(
              event_id: event_id,
              stream: stream.name,
              created_at: Time.now.utc,
              position: resolved_version ? resolved_version + index + 1 : nil,
            )
          end
        end
        self
      rescue ::Sequel::UniqueConstraintViolation => ex
        raise EventDuplicatedInStream if index_violation_detector.detect(ex.message)
        raise WrongExpectedEventVersion
      end

      def position_in_stream(event_id, stream)
        record =
          @db[:event_store_events_in_streams]
            .select(::Sequel[:event_store_events_in_streams][:position])
            .where(
              ::Sequel[:event_store_events_in_streams][:event_id] => event_id,
              ::Sequel[:event_store_events_in_streams][:stream] => stream.name,
            )
            .first
        raise EventNotFoundInStream.new if record.nil?
        record[:position]
      end

      def global_position(event_id)
        record =
          @db[:event_store_events]
            .select(::Sequel[:event_store_events][:id])
            .where(::Sequel[:event_store_events][:event_id] => event_id)
            .first
        raise EventNotFound.new(event_id) if record.nil?
        record[:id] - 1
      end

      def event_in_stream?(event_id, stream)
        @db[:event_store_events_in_streams].where(event_id: event_id, stream: stream.name).any?
      end

      def delete_stream(stream)
        @db[:event_store_events_in_streams].where(stream: stream.name).delete
      end

      def has_event?(event_id)
        @db[:event_store_events].where(event_id: event_id).any?
      end

      def last_stream_event(stream)
        row = @db[:event_store_events_in_streams].where(stream: stream.name).order(:position, :id).last
        return row if row.nil?
        event = @db[:event_store_events].where(event_id: row[:event_id]).first
        SerializedRecord.new(
          event_id: event[:event_id],
          event_type: event[:event_type],
          data: event[:data],
          metadata: event[:metadata],
          timestamp: event[:created_at].iso8601(TIMESTAMP_PRECISION),
          valid_at: (event[:valid_at] || event[:created_at]).iso8601(TIMESTAMP_PRECISION),
        ).deserialize(@serializer)
      end

      def read(specification)
        if specification.batched?
          stream = read_(specification)
          batch_reader = ->(offset, limit) { stream.offset(offset).limit(limit).map(&method(:record)) }
          RubyEventStore::BatchEnumerator.new(specification.batch_size, specification.limit, batch_reader).each
        elsif specification.first?
          record_ = read_(specification).first
          record(record_) if record_
        elsif specification.last?
          record_ = read_(specification).last
          record(record_) if record_
        else
          read_(specification).map { |h| record(h) }.each
        end
      end

      def count(specification)
        read_(specification).count
      end

      def update_messages(records)
        hashes = records.map { |record| upsert_hash(record, record.serialize(@serializer)) }
        for_update = records.map(&:event_id)
        @db.transaction do
          existing =
            @db[:event_store_events]
              .where(event_id: for_update)
              .select(:event_id, :id, :created_at, :valid_at)
              .reduce({}) do |acc, record|
                acc.merge(record[:event_id] => [record[:id], record[:created_at], record[:valid_at]])
              end

          (for_update - existing.keys).each { |id| raise EventNotFound.new(id) }
          hashes.each do |h|
            h[:id] = existing.fetch(h.fetch(:event_id)).at(0)
            h[:created_at] = existing.fetch(h.fetch(:event_id)).at(1)
            h[:valid_at] = existing.fetch(h.fetch(:event_id)).at(2)
          end

          if supports_on_duplicate_key_update?
            commit_on_duplicate_key_update(hashes)
          else
            commit_insert_conflict_update(hashes)
          end
        end
      end

      def streams_of(event_id)
        @db[:event_store_events_in_streams].where(event_id: event_id).map { |h| Stream.new(h[:stream]) }
      end

      private

      def optimize_timestamp(valid_at, created_at)
        valid_at unless valid_at.eql?(created_at)
      end

      def record(h)
        SerializedRecord.new(
          event_id: h[:event_id],
          event_type: h[:event_type],
          data: h[:data],
          metadata: h[:metadata],
          timestamp: h[:created_at].iso8601(TIMESTAMP_PRECISION),
          valid_at: (h[:valid_at].nil? ? h[:created_at] : h[:valid_at]).iso8601(TIMESTAMP_PRECISION),
        ).deserialize(@serializer)
      end

      def read_(specification)
        specification.stream.global? ? read_from_global_stream(specification) : read_from_specific_stream(specification)
      end

      def resolved_version(expected_version, stream)
        expected_version.resolve_for(
          stream,
          ->(stream) do
            @db[:event_store_events_in_streams]
              .select(:position)
              .where(stream: stream.name)
              .order(:position)
              .last
              &.fetch(:position)
          end,
        )
      end

      def read_from_specific_stream(specification)
        dataset =
          @db[:event_store_events]
            .join(:event_store_events_in_streams, event_id: :event_id)
            .select(
              ::Sequel[:event_store_events][:event_id],
              :event_type,
              :data,
              :metadata,
              ::Sequel[:event_store_events][:created_at],
              :valid_at,
            )
            .where(stream: specification.stream.name)
            .order(::Sequel[:event_store_events_in_streams][:id])

        dataset = dataset.where(event_type: specification.with_types) if specification.with_types?
        dataset =
          dataset.where(::Sequel[:event_store_events][:event_id] => specification.with_ids) if specification.with_ids?

        if specification.start
          condition = "event_store_events_in_streams.id #{specification.forward? ? ">" : "<"} ?"
          dataset =
            dataset.where(
              ::Sequel.lit(condition, find_event_id_in_stream(specification.start, specification.stream.name)),
            )
        end

        if specification.stop
          condition = "event_store_events_in_streams.id #{specification.forward? ? "<" : ">"} ?"
          dataset =
            dataset.where(
              ::Sequel.lit(condition, find_event_id_in_stream(specification.stop, specification.stream.name)),
            )
        end

        if specification.older_than
          dataset = dataset.where(::Sequel.lit("#{time_comparison_field(specification)} < ?", specification.older_than))
        end

        if specification.older_than_or_equal
          dataset =
            dataset.where(
              ::Sequel.lit("#{time_comparison_field(specification)} <= ?", specification.older_than_or_equal),
            )
        end

        if specification.newer_than
          dataset = dataset.where(::Sequel.lit("#{time_comparison_field(specification)} > ?", specification.newer_than))
        end

        if specification.newer_than_or_equal
          dataset =
            dataset.where(
              ::Sequel.lit("#{time_comparison_field(specification)} >= ?", specification.newer_than_or_equal),
            )
        end

        dataset = dataset.order(::Sequel[:event_store_events][:created_at]) if specification.time_sort_by_as_at?
        dataset = dataset.order(::Sequel.lit(coalesced_date)) if specification.time_sort_by_as_of?
        dataset = dataset.limit(specification.limit) if specification.limit?
        dataset = dataset.order(::Sequel[:event_store_events_in_streams][:id]).reverse if specification.backward?

        dataset
      end

      def find_event_id_in_stream(specification_event_id, specification_stream_name)
        event =
          @db[:event_store_events_in_streams]
            .select(:id)
            .where(event_id: specification_event_id, stream: specification_stream_name)
            .first
        raise EventNotFound.new(specification_event_id) unless event

        event[:id]
      end

      def find_event_id_globally(specification_event_id)
        event = @db[:event_store_events].select(:id).where(event_id: specification_event_id).first
        raise EventNotFound.new(specification_event_id) unless event

        event[:id]
      end

      def read_from_global_stream(specification)
        dataset =
          @db[:event_store_events].select(
            ::Sequel[:event_store_events][:event_id],
            ::Sequel[:event_store_events][:event_type],
            ::Sequel[:event_store_events][:data],
            ::Sequel[:event_store_events][:metadata],
            ::Sequel[:event_store_events][:created_at],
            ::Sequel[:event_store_events][:valid_at],
          ).order(:id)

        dataset = dataset.where(event_type: specification.with_types) if specification.with_types?
        dataset = dataset.where(event_id: specification.with_ids) if specification.with_ids?

        if specification.start
          id = find_event_id_globally(specification.start)
          condition = "event_store_events.id #{specification.forward? ? ">" : "<"} ?"

          dataset = dataset.where(::Sequel.lit(condition, id))
        end

        if specification.stop
          id = find_event_id_globally(specification.stop)
          condition = "event_store_events.id #{specification.forward? ? "<" : ">"} ?"

          dataset = dataset.where(::Sequel.lit(condition, id))
        end

        if specification.older_than
          dataset = dataset.where(::Sequel.lit("#{time_comparison_field(specification)} < ?", specification.older_than))
        end

        if specification.older_than_or_equal
          dataset =
            dataset.where(
              ::Sequel.lit("#{time_comparison_field(specification)} <= ?", specification.older_than_or_equal),
            )
        end

        if specification.newer_than
          dataset = dataset.where(::Sequel.lit("#{time_comparison_field(specification)} > ?", specification.newer_than))
        end

        if specification.newer_than_or_equal
          dataset =
            dataset.where(
              ::Sequel.lit("#{time_comparison_field(specification)} >= ?", specification.newer_than_or_equal),
            )
        end

        dataset = dataset.order(::Sequel[:event_store_events][:created_at]) if specification.time_sort_by_as_at?
        dataset = dataset.order(::Sequel.lit(coalesced_date)) if specification.time_sort_by_as_of?
        dataset = dataset.limit(specification.limit) if specification.limit?
        dataset = dataset.order(::Sequel[:event_store_events][:id]) unless specification.time_sort_by
        dataset = dataset.reverse if specification.backward?

        dataset
      end

      def coalesced_date
        "COALESCE(event_store_events.valid_at, event_store_events.created_at)"
      end

      def time_comparison_field(specification)
        specification.time_sort_by_as_of? ? coalesced_date : "event_store_events.created_at"
      end

      def upsert_hash(record, serialized_record)
        {
          event_id: serialized_record.event_id,
          data: serialized_record.data,
          metadata: serialized_record.metadata,
          event_type: serialized_record.event_type,
          valid_at: optimize_timestamp(record.valid_at, record.timestamp),
        }
      end

      def supports_on_duplicate_key_update?
        @db.adapter_scheme =~ /mysql/
      end

      def commit_on_duplicate_key_update(hashes)
        @db[:event_store_events].on_duplicate_key_update(*UPSERT_COLUMNS).multi_insert(hashes)
      end

      def commit_insert_conflict_update(hashes)
        @db[:event_store_events].insert_conflict(
          target: :event_id,
          update: UPSERT_COLUMNS.each_with_object({}) { |column, memo| memo[column] = ::Sequel[:excluded][column] },
        ).multi_insert(hashes)
      end
    end
  end
end
data/lib/ruby_event_store/sequel/index_violation_detector.rb
ADDED
@@ -0,0 +1,40 @@
# frozen_string_literal: true

module RubyEventStore
  module Sequel
    class IndexViolationDetector
      def initialize(event_store_events, event_store_events_in_streams)
        @postgres_pkey_error = "Key (event_id)".freeze
        @postgres_index_error = "Key (stream, event_id)".freeze
        @mysql5_pkey_error = "for key 'index_#{event_store_events}_on_event_id'".freeze
        @mysql8_pkey_error = "for key '#{event_store_events}.index_#{event_store_events}_on_event_id'".freeze
        @mysql5_index_error = "for key 'index_#{event_store_events_in_streams}_on_stream_and_event_id'".freeze
        @mysql8_index_error =
          "for key '#{event_store_events_in_streams}.index_#{event_store_events_in_streams}_on_stream_and_event_id'".freeze
        @sqlite3_pkey_error = "constraint failed: #{event_store_events}.event_id".freeze
        @sqlite3_index_error =
          "constraint failed: #{event_store_events_in_streams}.stream, #{event_store_events_in_streams}.event_id".freeze
      end

      def detect(message)
        detect_postgres(message) || detect_mysql(message) || detect_sqlite(message)
      end

      private

      def detect_postgres(message)
        message.include?(@postgres_pkey_error) || message.include?(@postgres_index_error)
      end

      def detect_mysql(message)
        message.include?(@mysql5_pkey_error) || message.include?(@mysql8_pkey_error) ||
          message.include?(@mysql5_index_error) || message.include?(@mysql8_index_error)
      end

      def detect_sqlite(message)
        message.include?(@sqlite3_pkey_error) || message.include?(@sqlite3_index_error)
      end
    end
    private_constant(:IndexViolationDetector)
  end
end
metadata
ADDED
@@ -0,0 +1,88 @@
--- !ruby/object:Gem::Specification
name: ruby_event_store-sequel
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Arkency
bindir: bin
cert_chain: []
date: 1980-01-02 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: sequel
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.11'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '5.11'
- !ruby/object:Gem::Dependency
  name: ruby_event_store
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 2.0.0
    - - "<"
      - !ruby/object:Gem::Version
        version: 3.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: 2.0.0
    - - "<"
      - !ruby/object:Gem::Version
        version: 3.0.0
description: Implementation of events repository based on Sequel for Ruby Event Store
email:
- dev@arkency.com
executables: []
extensions: []
extra_rdoc_files:
- README.md
files:
- README.md
- lib/ruby_event_store/generators/templates/1_create_ruby_event_store_tables.rb
- lib/ruby_event_store/generators/templates/mysql/1_create_ruby_event_store_tables.rb
- lib/ruby_event_store/generators/templates/postgres/1_create_ruby_event_store_tables.rb
- lib/ruby_event_store/sequel.rb
- lib/ruby_event_store/sequel/event_repository.rb
- lib/ruby_event_store/sequel/index_violation_detector.rb
- lib/ruby_event_store/sequel/version.rb
homepage: https://railseventstore.org
licenses:
- MIT
metadata:
  homepage_uri: https://railseventstore.org
  changelog_uri: https://github.com/RailsEventStore/rails_event_store/blob/master/contrib/ruby_event_store-sequel/CHANGELOG.md
  source_code_uri: https://github.com/RailsEventStore/rails_event_store
  bug_tracker_uri: https://github.com/RailsEventStore/rails_event_store/issues
  rubygems_mfa_required: 'true'
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '2.7'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.6.8
specification_version: 4
summary: Sequel-based event repository for Ruby Event Store
test_files: []