ruby_event_store-rom 0.35.0 → 0.36.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -0
  3. data/.rubocop_todo.yml +84 -0
  4. data/Gemfile +4 -4
  5. data/Makefile +3 -0
  6. data/Rakefile +3 -3
  7. data/db/migrate/20180327044629_create_ruby_event_store_tables.rb +17 -8
  8. data/lib/ruby_event_store/rom.rb +19 -16
  9. data/lib/ruby_event_store/rom/adapters/memory/changesets/create_events.rb +17 -0
  10. data/lib/ruby_event_store/rom/adapters/memory/changesets/create_stream_entries.rb +17 -0
  11. data/lib/ruby_event_store/rom/adapters/memory/changesets/update_events.rb +16 -0
  12. data/lib/ruby_event_store/rom/adapters/memory/relations/events.rb +14 -5
  13. data/lib/ruby_event_store/rom/adapters/memory/relations/stream_entries.rb +8 -4
  14. data/lib/ruby_event_store/rom/adapters/memory/unit_of_work.rb +6 -21
  15. data/lib/ruby_event_store/rom/adapters/sql/changesets/create_events.rb +13 -0
  16. data/lib/ruby_event_store/rom/adapters/sql/changesets/update_events.rb +39 -0
  17. data/lib/ruby_event_store/rom/adapters/sql/index_violation_detector.rb +15 -16
  18. data/lib/ruby_event_store/rom/adapters/sql/relations/events.rb +13 -1
  19. data/lib/ruby_event_store/rom/adapters/sql/relations/stream_entries.rb +8 -4
  20. data/lib/ruby_event_store/rom/adapters/sql/tasks/migration_tasks.rake +16 -3
  21. data/lib/ruby_event_store/rom/changesets/create_events.rb +29 -0
  22. data/lib/ruby_event_store/rom/changesets/create_stream_entries.rb +21 -0
  23. data/lib/ruby_event_store/rom/changesets/update_events.rb +29 -0
  24. data/lib/ruby_event_store/rom/event_repository.rb +16 -6
  25. data/lib/ruby_event_store/rom/mappers/event_to_serialized_record.rb +1 -1
  26. data/lib/ruby_event_store/rom/mappers/stream_entry_to_serialized_record.rb +1 -1
  27. data/lib/ruby_event_store/rom/memory.rb +15 -3
  28. data/lib/ruby_event_store/rom/repositories/events.rb +18 -30
  29. data/lib/ruby_event_store/rom/repositories/stream_entries.rb +17 -18
  30. data/lib/ruby_event_store/rom/sql.rb +62 -12
  31. data/lib/ruby_event_store/rom/types.rb +13 -0
  32. data/lib/ruby_event_store/rom/unit_of_work.rb +1 -1
  33. data/lib/ruby_event_store/rom/version.rb +1 -1
  34. data/lib/ruby_event_store/spec/rom/event_repository_lint.rb +55 -90
  35. data/lib/ruby_event_store/spec/rom/relations/events_lint.rb +12 -12
  36. data/lib/ruby_event_store/spec/rom/relations/stream_entries_lint.rb +44 -44
  37. data/lib/ruby_event_store/spec/rom/spec_helper_lint.rb +1 -1
  38. data/lib/ruby_event_store/spec/rom/unit_of_work_lint.rb +1 -1
  39. data/ruby_event_store-rom.gemspec +12 -13
  40. metadata +40 -31
  41. data/lib/ruby_event_store/rom/adapters/sql/unit_of_work.rb +0 -37
data/lib/ruby_event_store/rom/adapters/sql/changesets/create_events.rb
@@ -0,0 +1,13 @@
+ module RubyEventStore
+   module ROM
+     module SQL
+       module Changesets
+         class CreateEvents < ROM::Changesets::CreateEvents
+           def commit
+             relation.multi_insert(to_a)
+           end
+         end
+       end
+     end
+   end
+ end
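
For context, commit hands every mapped tuple to Sequel in a single statement via multi_insert. A minimal stand-alone sketch of what that amounts to at the Sequel level (the in-memory database, IDs and payloads are invented for illustration; the column names follow this gem's event_store_events table):

  require 'securerandom'
  require 'sequel'

  DB = Sequel.sqlite # throwaway in-memory database, only for this sketch
  DB.create_table(:event_store_events) do
    String :id, primary_key: true
    String :event_type
    String :data
    String :metadata
    Time   :created_at
  end

  # One INSERT carrying both rows, instead of one INSERT per event.
  DB[:event_store_events].multi_insert([
    { id: SecureRandom.uuid, event_type: 'OrderPlaced', data: '{}', metadata: '{}', created_at: Time.now },
    { id: SecureRandom.uuid, event_type: 'OrderPaid',   data: '{}', metadata: '{}', created_at: Time.now }
  ])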
data/lib/ruby_event_store/rom/adapters/sql/changesets/update_events.rb
@@ -0,0 +1,39 @@
+ module RubyEventStore
+   module ROM
+     module SQL
+       module Changesets
+         class UpdateEvents < ::ROM::Changeset::Create
+           include ROM::Changesets::UpdateEvents::Defaults
+
+           UPSERT_COLUMNS = %i[event_type data metadata created_at]
+
+           def commit
+             if SQL.supports_on_duplicate_key_update?(relation.dataset.db)
+               commit_on_duplicate_key_update
+             elsif SQL.supports_insert_conflict_update?(relation.dataset.db)
+               commit_insert_conflict_update
+             else
+               raise "Database doesn't support upserts: #{relation.dataset.db.adapter_scheme}"
+             end
+           end
+
+           private
+
+           def commit_on_duplicate_key_update
+             relation.dataset.on_duplicate_key_update(*UPSERT_COLUMNS).multi_insert(to_a)
+           end
+
+           def commit_insert_conflict_update
+             relation.dataset.insert_conflict(
+               # constraint: 'index_name',
+               target: :id,
+               update: UPSERT_COLUMNS.each_with_object({}) do |column, memo|
+                 memo[column] = Sequel[:excluded][column]
+               end
+             ).multi_insert(to_a)
+           end
+         end
+       end
+     end
+   end
+ end
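
The two commit paths are Sequel's portable spellings of an upsert: ON DUPLICATE KEY UPDATE on MySQL, INSERT ... ON CONFLICT (id) DO UPDATE on PostgreSQL and newer SQLite. The capability checks, supports_on_duplicate_key_update? and supports_insert_conflict_update?, are helpers added to lib/ruby_event_store/rom/sql.rb in this release (that file appears in the list above but its hunk is not excerpted here). One plausible shape for them, based on Sequel's public capability flags, offered purely as a hedged sketch:

  module RubyEventStore
    module ROM
      module SQL
        # Hypothetical implementations -- the real ones live in sql.rb and may differ.
        def self.supports_on_duplicate_key_update?(db)
          db.adapter_scheme.to_s.start_with?('mysql')  # e.g. :mysql2
        end

        def self.supports_insert_conflict_update?(db)
          db.dataset.supports_insert_conflict?         # PostgreSQL, newer SQLite
        end
      end
    end
  end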
data/lib/ruby_event_store/rom/adapters/sql/index_violation_detector.rb
@@ -2,23 +2,22 @@ module RubyEventStore
    module ROM
      module SQL
        class IndexViolationDetector
-
-         MYSQL_PKEY_ERROR = "for key 'PRIMARY'"
-         POSTGRES_PKEY_ERROR = "event_store_events_pkey"
-         SQLITE3_PKEY_ERROR = "event_store_events.id"
-
-         MYSQL_INDEX_ERROR = "for key 'index_event_store_events_in_streams_on_stream_and_event_id'"
-         POSTGRES_INDEX_ERROR = "Key (stream, event_id)"
-         SQLITE3_INDEX_ERROR = "event_store_events_in_streams.stream, event_store_events_in_streams.event_id"
-
+         MYSQL_PKEY_ERROR = "for key 'PRIMARY'".freeze
+         POSTGRES_PKEY_ERROR = 'event_store_events_pkey'.freeze
+         SQLITE3_PKEY_ERROR = 'event_store_events.id'.freeze
+
+         MYSQL_INDEX_ERROR = "for key 'index_event_store_events_in_streams_on_stream_and_event_id'".freeze
+         POSTGRES_INDEX_ERROR = 'Key (stream, event_id)'.freeze
+         SQLITE3_INDEX_ERROR = 'event_store_events_in_streams.stream, event_store_events_in_streams.event_id'.freeze
+
          def detect(message)
-           message.include?(MYSQL_PKEY_ERROR) ||
-           message.include?(POSTGRES_PKEY_ERROR) ||
-           message.include?(SQLITE3_PKEY_ERROR) ||
-
-           message.include?(MYSQL_INDEX_ERROR) ||
-           message.include?(POSTGRES_INDEX_ERROR) ||
-           message.include?(SQLITE3_INDEX_ERROR)
+           message.include?(MYSQL_PKEY_ERROR) ||
+           message.include?(POSTGRES_PKEY_ERROR) ||
+           message.include?(SQLITE3_PKEY_ERROR) ||
+
+           message.include?(MYSQL_INDEX_ERROR) ||
+           message.include?(POSTGRES_INDEX_ERROR) ||
+           message.include?(SQLITE3_INDEX_ERROR)
          end
        end
      end
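
detect does nothing more than substring matching on the adapter's error message, so it can be exercised directly. A small illustration (the sample text is a typical, shortened PostgreSQL duplicate-key message):

  detector = RubyEventStore::ROM::SQL::IndexViolationDetector.new

  message = 'PG::UniqueViolation: ERROR:  duplicate key value violates ' \
            'unique constraint "event_store_events_pkey"'

  detector.detect(message)                # => true, matches POSTGRES_PKEY_ERROR
  detector.detect('some unrelated error') # => false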
data/lib/ruby_event_store/rom/adapters/sql/relations/events.rb
@@ -4,7 +4,19 @@ module RubyEventStore
        module Relations
          class Events < ::ROM::Relation[:sql]
            schema(:event_store_events, as: :events, infer: true) do
-             attribute :created_at, ::ROM::Types::Strict::Time.default { Time.now }
+             attribute :data, RubyEventStore::ROM::Types::SerializedRecordSerializer,
+                       read: RubyEventStore::ROM::Types::SerializedRecordDeserializer
+             attribute :metadata, RubyEventStore::ROM::Types::SerializedRecordSerializer,
+                       read: RubyEventStore::ROM::Types::SerializedRecordDeserializer
+             attribute :created_at, RubyEventStore::ROM::Types::DateTime
+           end
+
+           def create_changeset(tuples)
+             events.changeset(Changesets::CreateEvents, tuples)
+           end
+
+           def update_changeset(tuples)
+             events.changeset(Changesets::UpdateEvents, tuples)
            end
          end
        end
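
With the changeset classes attached to the relation, building an append is a matter of asking the relation for a changeset and committing it. A hedged sketch, assuming a configured ROM container in container (names and values are placeholders):

  tuples = [{
    event_id:   'b2d506fd-409d-4ec7-b02f-c6d2295c7edd',
    event_type: 'OrderPlaced',
    data:       '{}',
    metadata:   '{}'
  }]

  changeset = container.relations[:events].create_changeset(tuples)
  changeset.commit # one multi_insert; created_at is filled in by the Defaults mapping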
data/lib/ruby_event_store/rom/adapters/sql/relations/stream_entries.rb
@@ -4,17 +4,21 @@ module RubyEventStore
        module Relations
          class StreamEntries < ::ROM::Relation[:sql]
            schema(:event_store_events_in_streams, as: :stream_entries, infer: true) do
-             attribute :created_at, ::ROM::Types::Strict::Time.default { Time.now }
+             attribute :created_at, RubyEventStore::ROM::Types::DateTime

              associations do
                belongs_to :events, as: :event, foreign_key: :event_id
              end
            end

-           alias_method :take, :limit
+           alias take limit

            SERIALIZED_GLOBAL_STREAM_NAME = 'all'.freeze

+           def create_changeset(tuples)
+             changeset(ROM::Changesets::CreateStreamEntries, tuples)
+           end
+
            def by_stream(stream)
              where(stream: normalize_stream_name(stream))
            end
@@ -36,8 +40,8 @@ module RubyEventStore
            end

            DIRECTION_MAP = {
-             forward: [:asc, :>],
-             backward: [:desc, :<]
+             forward: %i[asc >],
+             backward: %i[desc <]
            }.freeze

            def ordered(direction, stream, offset_entry_id = nil)
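
%i[asc >] is simply the symbol-array literal spelling of [:asc, :>]; the lookup behaviour of DIRECTION_MAP is unchanged:

  DIRECTION_MAP = { forward: %i[asc >], backward: %i[desc <] }.freeze

  order, comparison = DIRECTION_MAP[:backward]
  order      # => :desc
  comparison # => :<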
data/lib/ruby_event_store/rom/adapters/sql/tasks/migration_tasks.rake
@@ -1,6 +1,6 @@
  require 'ruby_event_store/rom/sql'

- MIGRATIONS_PATH = 'db/migrate'
+ MIGRATIONS_PATH = 'db/migrate'.freeze

  desc 'Setup ROM EventRespository environment'
  task 'db:setup' do
@@ -10,11 +10,24 @@ end

  desc 'Copy RubyEventStore SQL migrations to db/migrate'
  task 'db:migrations:copy' => 'db:setup' do
+   # Optional data type for `data` and `metadata`
+   data_type = ENV['DATA_TYPE']
+
    Dir[File.join(File.dirname(__FILE__), '../../../../../../', MIGRATIONS_PATH, '/*.rb')].each do |input|
-     name = File.basename(input, '.*').sub(/\d+_/, '')
+     contents = File.read(input)
+     name = File.basename(input, '.*').sub(/\d+_/, '')
+
+     re_data_type = /(ENV.+?DATA_TYPE.+?\|\|=\s*)['"](jsonb?|text)['"]/
+
+     if data_type && contents =~ re_data_type
+       # Search/replace this string: ENV['DATA_TYPE'] ||= 'text'
+       contents = contents.sub(re_data_type, format('\1"%<data_type>s"', data_type: data_type))
+       name += "_with_#{data_type}"
+     end
+
      output = ROM::SQL::RakeSupport.create_migration(name, path: File.join(Dir.pwd, MIGRATIONS_PATH))

-     File.write output, File.read(input)
+     File.write output, contents

      puts "<= migration file created #{output}"
    end
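
In practice this means that running rake db:migrations:copy DATA_TYPE=jsonb copies a migration whose data and metadata columns use jsonb (the values the regexp recognises are json, jsonb and text). The substitution itself is a plain String#sub; an isolated illustration, using the template line the task's comment refers to:

  template = %q{ENV['DATA_TYPE'] ||= 'text'}
  re       = /(ENV.+?DATA_TYPE.+?\|\|=\s*)['"](jsonb?|text)['"]/

  template.sub(re, format('\1"%<data_type>s"', data_type: 'jsonb'))
  # result: ENV['DATA_TYPE'] ||= "jsonb"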
data/lib/ruby_event_store/rom/changesets/create_events.rb
@@ -0,0 +1,29 @@
+ module RubyEventStore
+   module ROM
+     module Changesets
+       class CreateEvents < ::ROM::Changeset::Create
+         module Defaults
+           def self.included(base)
+             base.class_eval do
+               relation :events
+
+               # Convert to Hash
+               map(&:to_h)
+
+               map do
+                 rename_keys event_id: :id
+                 accept_keys %i[id data metadata event_type]
+               end
+
+               map do |tuple|
+                 Hash(created_at: RubyEventStore::ROM::Types::DateTime.call(nil)).merge(tuple)
+               end
+             end
+           end
+         end
+
+         include Defaults
+       end
+     end
+   end
+ end
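
The three map steps define the write-side shape of an event tuple: convert the incoming record to a Hash, rename event_id to the table's id column, keep only the persisted columns, and default created_at when the tuple does not carry one. Roughly, with invented values (and assuming Types::DateTime.call(nil), from the new types.rb, yields a current timestamp):

  # Input tuple, after map(&:to_h) has turned the serialized record into a Hash:
  input  = { event_id: '8cee1139-4f96-483a-a175-2b947283c3c7',
             event_type: 'OrderPlaced', data: '{}', metadata: '{}' }

  # What the remaining steps produce (created_at is only added when absent):
  output = { id: input[:event_id], data: '{}', metadata: '{}',
             event_type: 'OrderPlaced', created_at: Time.now }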
data/lib/ruby_event_store/rom/changesets/create_stream_entries.rb
@@ -0,0 +1,21 @@
+ module RubyEventStore
+   module ROM
+     module Changesets
+       class CreateStreamEntries < ::ROM::Changeset::Create
+         module Defaults
+           def self.included(base)
+             base.class_eval do
+               relation :stream_entries
+
+               map do |tuple|
+                 Hash(created_at: RubyEventStore::ROM::Types::DateTime.call(nil)).merge(tuple)
+               end
+             end
+           end
+         end
+
+         include Defaults
+       end
+     end
+   end
+ end
data/lib/ruby_event_store/rom/changesets/update_events.rb
@@ -0,0 +1,29 @@
+ module RubyEventStore
+   module ROM
+     module Changesets
+       class UpdateEvents < ::ROM::Changeset::Update
+         module Defaults
+           def self.included(base)
+             base.class_eval do
+               relation :events
+
+               # Convert to Hash
+               map(&:to_h)
+
+               map do
+                 rename_keys event_id: :id
+                 accept_keys %i[id data metadata event_type created_at]
+               end
+
+               map do |tuple|
+                 Hash(created_at: RubyEventStore::ROM::Types::DateTime.call(nil)).merge(tuple)
+               end
+             end
+           end
+         end
+
+         include Defaults
+       end
+     end
+   end
+ end
data/lib/ruby_event_store/rom/event_repository.rb
@@ -10,7 +10,7 @@ module RubyEventStore
        def_delegators :@rom, :unit_of_work

        def initialize(rom: ROM.env)
-         raise ArgumentError, "Must specify rom" unless rom && rom.instance_of?(Env)
+         raise ArgumentError, 'Must specify rom' unless rom && rom.instance_of?(Env)

          @rom = rom
          @events = Repositories::Events.new(rom.container)
@@ -46,7 +46,7 @@ module RubyEventStore
          # Validate event IDs
          @events
            .find_nonexistent_pks(event_ids)
-           .each { |id| raise EventNotFound.new(id) }
+           .each { |id| raise EventNotFound, id }

          guard_for(:unique_violation) do
            unit_of_work do |changesets|
@@ -66,9 +66,7 @@ module RubyEventStore
        end

        def has_event?(event_id)
-         !! guard_for(:not_found, event_id, swallow: EventNotFound) do
-           @events.exist?(event_id)
-         end
+         guard_for(:not_found, event_id, swallow: EventNotFound) { @events.exist?(event_id) } || false
        end

        def last_stream_event(stream)
@@ -87,15 +85,27 @@ module RubyEventStore
          @events.count(specification)
        end

+       def update_messages(messages)
+         # Validate event IDs
+         @events
+           .find_nonexistent_pks(messages.map(&:event_id))
+           .each { |id| raise EventNotFound, id }
+
+         unit_of_work do |changesets|
+           changesets << @events.update_changeset(messages)
+         end
+       end
+
        def streams_of(event_id)
          @stream_entries.streams_of(event_id)
-                        .map{|name| Stream.new(name)}
+                        .map { |name| Stream.new(name) }
        end

        private

        def normalize_to_array(events)
          return events if events.is_a?(Enumerable)
+
          [events]
        end
      end
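
update_messages is the new public entry point for rewriting already-stored events; it validates the incoming IDs against existing primary keys and then commits the update changeset (the SQL upsert shown earlier) inside the unit of work. A hedged sketch of a call, assuming a repository wired to a configured ROM env and an existing_id that is already stored; the message object mirrors RubyEventStore's SerializedRecord:

  repository = RubyEventStore::ROM::EventRepository.new(rom: rom_env) # rom_env: a configured ROM::Env

  updated = RubyEventStore::SerializedRecord.new(
    event_id:   existing_id,        # must already exist, otherwise EventNotFound is raised
    event_type: 'OrderPlaced',
    data:       '{"order_id":42}',
    metadata:   '{}'
  )

  repository.update_messages([updated])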
data/lib/ruby_event_store/rom/mappers/event_to_serialized_record.rb
@@ -6,7 +6,7 @@ module RubyEventStore
        class EventToSerializedRecord < ::ROM::Transformer
          relation :events
          register_as :event_to_serialized_record
-
+
          map_array do
            rename_keys id: :event_id
            accept_keys %i[event_id data metadata event_type]
data/lib/ruby_event_store/rom/mappers/stream_entry_to_serialized_record.rb
@@ -6,7 +6,7 @@ module RubyEventStore
        class StreamEntryToSerializedRecord < ::ROM::Transformer
          relation :stream_entries
          register_as :stream_entry_to_serialized_record
-
+
          map_array do
            unwrap :event, %i[data metadata event_type]
            accept_keys %i[event_id data metadata event_type]
data/lib/ruby_event_store/rom/memory.rb
@@ -3,6 +3,9 @@ require 'rom/memory'
  require_relative 'adapters/memory/unit_of_work'
  require_relative 'adapters/memory/relations/events'
  require_relative 'adapters/memory/relations/stream_entries'
+ require_relative 'adapters/memory/changesets/create_events'
+ require_relative 'adapters/memory/changesets/update_events'
+ require_relative 'adapters/memory/changesets/create_stream_entries'

  module RubyEventStore
    module ROM
@@ -22,10 +25,11 @@ module RubyEventStore
        def configure(env)
          env.register_unit_of_work_options(class: UnitOfWork)

-         env.register_error_handler :unique_violation, -> ex {
+         env.register_error_handler :unique_violation, lambda { |ex|
            case ex
            when TupleUniquenessError
              raise EventDuplicatedInStream if ex.message =~ /event_id/
+
              raise WrongExpectedEventVersion
            end
          }
@@ -35,7 +39,7 @@ module RubyEventStore
      class SpecHelper
        attr_reader :env
        attr_reader :connection_pool_size, :close_pool_connection
-
+
        def initialize
          @connection_pool_size = 5
          @env = ROM.setup(:memory)
@@ -54,14 +58,22 @@ module RubyEventStore
        def drop_gateway_schema
          gateway.connection.data.values.each { |v| v.data.clear }
        end
-
+
        def close_gateway_connection
          gateway.disconnect
        end

+       def gateway_type?(name)
+         name == :memory
+       end
+
        def has_connection_pooling?
          true
        end
+
+       def supports_upsert?
+         true
+       end
      end
    end
  end
data/lib/ruby_event_store/rom/repositories/events.rb
@@ -1,29 +1,22 @@
  require_relative '../mappers/event_to_serialized_record'
+ require_relative '../changesets/create_events'
+ require_relative '../changesets/update_events'

  module RubyEventStore
    module ROM
      module Repositories
        class Events < ::ROM::Repository[:events]
-         class Create < ::ROM::Changeset::Create
-           # Convert to Hash
-           map(&:to_h)
-
-           map do
-             rename_keys event_id: :id
-             accept_keys %i[id data metadata event_type]
-           end
-
-           map do |tuple|
-             Hash(created_at: Time.now).merge(tuple)
-           end
+         def create_changeset(serialized_records)
+           events.create_changeset(serialized_records)
          end

-         def create_changeset(serialized_records)
-           events.changeset(Create, serialized_records)
+         def update_changeset(serialized_records)
+           events.update_changeset(serialized_records)
          end

          def find_nonexistent_pks(event_ids)
            return event_ids unless event_ids.any?
+
            event_ids - events.by_pk(event_ids).pluck(:id)
          end

@@ -45,21 +38,19 @@ module RubyEventStore
            query = read_scope(specification)

            if specification.batched?
-             reader = ->(offset, limit) do
-               query_builder(query, offset: offset, limit: limit).to_ary
-             end
-             BatchEnumerator.new(specification.batch_size, specification.limit, reader).each
+             BatchEnumerator.new(
+               specification.batch_size,
+               specification.limit,
+               ->(offset, limit) { query_builder(query, offset: offset, limit: limit).to_ary }
+             ).each
            else
-             limit = specification.limit if specification.limit?
-             query = query_builder(query, limit: limit)
+             query = query_builder(query, limit: (specification.limit if specification.limit?))
              if specification.head?
                specification.first? || specification.last? ? query.first : query.each
+             elsif specification.last?
+               query.to_ary.last
              else
-               if specification.last?
-                 query.to_ary.last
-               else
-                 specification.first? ? query.first : query.each
-               end
+               specification.first? ? query.first : query.each
              end
            end
          end
@@ -70,12 +61,10 @@ module RubyEventStore
            query.count
          end

-         protected
+         protected

          def read_scope(specification)
-           unless specification.head?
-             offset_entry_id = stream_entries.by_stream_and_event_id(specification.stream, specification.start).fetch(:id)
-           end
+           offset_entry_id = stream_entries.by_stream_and_event_id(specification.stream, specification.start).fetch(:id) unless specification.head?

            direction = specification.forward? ? :forward : :backward

@@ -84,7 +73,6 @@ module RubyEventStore
            end

            query = stream_entries.ordered(direction, specification.stream, offset_entry_id)
-
            query = query.by_event_id(specification.with_ids) if specification.with_ids
            query = query.by_event_type(specification.with_types) if specification.with_types?
            query
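
The batched branch hands BatchEnumerator (from ruby_event_store core) a reader lambda with an (offset, limit) signature. A rough, self-contained illustration of that contract, using an in-memory array as a stand-in for query_builder(...).to_ary; the batch sizes shown are what one would expect, not output copied from the gem's test suite:

  require 'ruby_event_store'

  rows   = (1..250).to_a # stand-in for serialized records
  reader = ->(offset, limit) { rows[offset, limit] || [] }

  batches = RubyEventStore::BatchEnumerator.new(100, 250, reader).each
  batches.map(&:size) # expected: [100, 100, 50]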