pg_eventstore 0.2.2 → 0.2.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: b8c492f46288f9c4c5d3912cad3aba0664bf91693a2273785654e6a96f634668
4
- data.tar.gz: 6cbc3ab33fd7d9ed71ae8664ad95b784fffd07ead7d56a7046f481f36eecc531
3
+ metadata.gz: bc158c2f99fee36514e1902199691dfb93e882ecdebe4738bbcf787fadc8f514
4
+ data.tar.gz: a70d00d4bdaef48223e35234f01d938a713f9306aa1bfb45bde99733b3654c39
5
5
  SHA512:
6
- metadata.gz: efe92ab2610b9bbf0e33fc6ed98a7104737ad9f02a7217651b7f7354a7c3b25008a01033b71f7057279dd5ad21846cab3fbd4408941b4f6a6015532651ba40e6
7
- data.tar.gz: cf31608048782d34203643b02374ff52afbed8760e0dbbff7e5026c6c518d26f489c396412f8ec88a52fbaf1dbd22a5788feb1873bfc1c59b7cfce8080b8e4b2
6
+ metadata.gz: bf38d241b001b4244d9fa915d2dfcabec7ac9456aa83832b8d850fe3fd34e48b0ef8b9ef5f0cca91b776124ec3333dea658220f3bd427309b3b1ae09a12714ae
7
+ data.tar.gz: af5b5e4394ee696790145852ac6a7761728e5cc7e7aca127e6cde112b857e5516b121ac2bb7bf2280e7e66c93cf0823e33bcbe1f258cadbff31d50f83d9972a8
data/CHANGELOG.md CHANGED
@@ -1,3 +1,13 @@
1
+ ## [0.2.4] - 2023-12-20
2
+
3
+ Under certain circumstances searching by event type still performed poorly, so I extracted the `type` column from the `events` table into a separate table. **No breaking changes in the public API though.**
4
+
5
+ **Warning** The migrations in this version require you to shut down all applications that use `pg_eventstore` before running `rake pg_eventstore:migrate`.
6
+
7
+ ## [0.2.3] - 2023-12-18
8
+
9
+ - Fix performance when searching by event type only (under certain circumstances PostgreSQL was picking the wrong index).
10
+
1
11
  ## [0.2.2] - 2023-12-14
2
12
 
3
13
  - Fix `pg_eventstore:drop` rake task to also drop `migrations` table
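As the 0.2.4 entry above notes, the public filtering API is unchanged: callers keep passing event type names, and the mapping to `event_type_id` happens internally (see `EventQueries#include_event_types_ids` further down). A minimal sketch, assuming a configured `PgEventstore::Client` instance named `client`; the stream identifiers and the `SomethingChanged` type are illustrative:

```ruby
# Reading with an event-type filter looks the same before and after 0.2.4;
# type names are translated to event_type_ids internally.
stream = PgEventstore::Stream.new(context: 'MyApp', stream_name: 'Order', stream_id: '1')
events = client.read(stream, options: { filter: { event_types: ['SomethingChanged'] } })
```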
@@ -0,0 +1,4 @@
1
+ CREATE INDEX idx_events_global_position_including_type ON public.events USING btree (global_position) INCLUDE (type);
2
+ COMMENT ON INDEX idx_events_global_position_including_type IS 'Usually the "type" column has few distinct values, so a composite index on "type" and "global_position" may not be picked properly by the Query Planner. Including "type" in the "global_position" index allows the Query Planner to pick the correct index.';
3
+
4
+ DROP INDEX idx_events_global_position;
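The comment above explains the idea behind this 0.2.3 fix: with a low-cardinality `type` column the planner tends to favour the `global_position` index anyway, so including `type` lets that same index also satisfy the type filter. An illustrative check, assuming pg_eventstore is already configured; the type name is made up and the plan output depends entirely on the data:

```ruby
# Ask the planner how it would run a type-filtered read ordered by
# global_position; after this migration the covering index can serve it.
PgEventstore.connection.with do |conn|
  conn.exec(<<~SQL).each { |row| puts row['QUERY PLAN'] }
    EXPLAIN
    SELECT * FROM events
    WHERE type = 'SomethingChanged'
    ORDER BY global_position
    LIMIT 100
  SQL
end
```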
@@ -0,0 +1,17 @@
1
+ CREATE TABLE public.event_types
2
+ (
3
+ id bigserial NOT NULL,
4
+ type character varying NOT NULL
5
+ );
6
+
7
+ ALTER TABLE ONLY public.events ADD COLUMN event_type_id bigint;
8
+
9
+ ALTER TABLE ONLY public.event_types
10
+ ADD CONSTRAINT event_types_pkey PRIMARY KEY (id);
11
+
12
+ ALTER TABLE ONLY public.events
13
+ ADD CONSTRAINT events_event_type_fk FOREIGN KEY (event_type_id)
14
+ REFERENCES public.event_types (id);
15
+
16
+ CREATE UNIQUE INDEX idx_event_types_type ON public.event_types USING btree (type);
17
+ CREATE INDEX idx_events_event_type_id ON public.events USING btree (event_type_id);
@@ -0,0 +1,11 @@
1
+ # frozen_string_literal: true
2
+
3
+ PgEventstore.connection.with do |conn|
4
+ types = conn.exec('select type from events group by type').to_a.map { |attrs| attrs['type'] }
5
+ types.each.with_index(1) do |type, index|
6
+ id = conn.exec_params('SELECT id FROM event_types WHERE type = $1', [type]).to_a.dig(0, 'id')
7
+ id ||= conn.exec_params('INSERT INTO event_types (type) VALUES ($1) RETURNING *', [type]).to_a.first['id']
8
+ conn.exec_params('UPDATE events SET event_type_id = $1 WHERE type = $2 AND event_type_id IS NULL', [id, type])
9
+ puts "Processed #{index} types of #{types.size}"
10
+ end
11
+ end
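The `AND event_type_id IS NULL` guard above makes the backfill safe to re-run. A quick sanity check one might run before the later `NOT NULL` migration applies, assuming pg_eventstore is configured (not part of the gem, just a sketch):

```ruby
# Counts rows the backfill has not touched yet; it should be zero before
# 6_change_events_event_type_id_null_constraint.sql runs.
PgEventstore.connection.with do |conn|
  remaining = conn.exec('SELECT count(*) FROM events WHERE event_type_id IS NULL').to_a.dig(0, 'count')
  puts "events still missing event_type_id: #{remaining}"
end
```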
@@ -0,0 +1,6 @@
1
+ CREATE INDEX idx_events_global_position ON public.events USING btree (global_position);
2
+
3
+ DROP INDEX idx_events_stream_id_and_type_and_revision;
4
+ DROP INDEX idx_events_type_and_stream_id_and_position;
5
+ DROP INDEX idx_events_global_position_including_type;
6
+ DROP INDEX idx_events_type_and_position;
@@ -0,0 +1 @@
1
+ ALTER TABLE public.events ALTER COLUMN event_type_id SET NOT NULL;
@@ -0,0 +1 @@
1
+ ALTER TABLE public.events ALTER COLUMN type DROP NOT NULL;
@@ -27,7 +27,11 @@ module PgEventstore
27
27
  # @raise [PgEventstore::WrongExpectedRevisionError]
28
28
  def append_to_stream(stream, events_or_event, options: {}, middlewares: nil)
29
29
  result =
30
- Commands::Append.new(queries(middlewares(middlewares))).call(stream, *events_or_event, options: options)
30
+ Commands::Append.new(
31
+ Queries.new(
32
+ streams: stream_queries, events: event_queries(middlewares(middlewares)), transactions: transaction_queries
33
+ )
34
+ ).call(stream, *events_or_event, options: options)
31
35
  events_or_event.is_a?(Array) ? result : result.first
32
36
  end
33
37
 
@@ -43,7 +47,7 @@ module PgEventstore
43
47
  #
44
48
  # @return the result of the given block
45
49
  def multiple(&blk)
46
- Commands::Multiple.new(queries(middlewares)).call(&blk)
50
+ Commands::Multiple.new(Queries.new(transactions: transaction_queries)).call(&blk)
47
51
  end
48
52
 
49
53
  # Read events from the specific stream or from "all" stream.
@@ -101,7 +105,7 @@ module PgEventstore
101
105
  # @raise [PgEventstore::StreamNotFoundError]
102
106
  def read(stream, options: {}, middlewares: nil)
103
107
  Commands::Read.
104
- new(queries(middlewares(middlewares))).
108
+ new(Queries.new(streams: stream_queries, events: event_queries(middlewares(middlewares)))).
105
109
  call(stream, options: { max_count: config.max_count }.merge(options))
106
110
  end
107
111
 
@@ -120,10 +124,20 @@ module PgEventstore
120
124
  PgEventstore.connection(config.name)
121
125
  end
122
126
 
127
+ # @return [PgEventstore::StreamQueries]
128
+ def stream_queries
129
+ StreamQueries.new(connection)
130
+ end
131
+
132
+ # @return [PgEventstore::TransactionQueries]
133
+ def transaction_queries
134
+ TransactionQueries.new(connection)
135
+ end
136
+
123
137
  # @param middlewares [Array<Object<#serialize, #deserialize>>]
124
- # @return [PgEventstore::Queries]
125
- def queries(middlewares)
126
- Queries.new(
138
+ # @return [PgEventstore::EventQueries]
139
+ def event_queries(middlewares)
140
+ EventQueries.new(
127
141
  connection,
128
142
  EventSerializer.new(middlewares),
129
143
  PgResultDeserializer.new(middlewares, config.event_class_resolver)
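The client changes above replace the single `queries(middlewares)` helper with per-concern query objects assembled into a `Queries` value object: stream and transaction queries are middleware-agnostic, while event queries still carry the (de)serializers built from the configured middlewares. A sketch of that composition, with names taken from the diff; in the real class these are private helpers on `Client`, not a public API:

```ruby
# Shape of the object graph Client now builds per call.
def build_queries(connection, middlewares, config)
  PgEventstore::Queries.new(
    streams: PgEventstore::StreamQueries.new(connection),
    events: PgEventstore::EventQueries.new(
      connection,
      PgEventstore::EventSerializer.new(middlewares),
      PgEventstore::PgResultDeserializer.new(middlewares, config.event_class_resolver)
    ),
    transactions: PgEventstore::TransactionQueries.new(connection)
  )
end
```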
@@ -14,14 +14,14 @@ module PgEventstore
14
14
  def call(stream, *events, options: {})
15
15
  raise SystemStreamError, stream if stream.system?
16
16
 
17
- queries.transaction do
18
- stream = queries.find_or_create_stream(stream)
17
+ queries.transactions.transaction do
18
+ stream = queries.streams.find_or_create_stream(stream)
19
19
  revision = stream.stream_revision
20
20
  assert_expected_revision!(revision, options[:expected_revision]) if options[:expected_revision]
21
21
  events.map.with_index(1) do |event, index|
22
- queries.insert(stream, prepared_event(event, revision + index))
22
+ queries.events.insert(stream, prepared_event(event, revision + index))
23
23
  end.tap do
24
- queries.update_stream_revision(stream, revision + events.size)
24
+ queries.streams.update_stream_revision(stream, revision + events.size)
25
25
  end
26
26
  end
27
27
  end
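Seen from the caller's side, `Commands::Append` behaves as before: the stream is found or created, the optional `expected_revision` is asserted, events are inserted and the stream revision is bumped, all inside one transaction. A hypothetical call, assuming a configured `client` and that `PgEventstore::Event` accepts these attributes as keyword arguments, as elsewhere in the gem; values are illustrative:

```ruby
stream = PgEventstore::Stream.new(context: 'Sales', stream_name: 'Order', stream_id: '123')
event  = PgEventstore::Event.new(type: 'OrderPlaced', data: { 'total' => 100 })

# expected_revision is asserted against the stream's current revision inside
# the same transaction; a mismatch raises WrongExpectedRevisionError.
client.append_to_stream(stream, event, options: { expected_revision: 0 })
```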
@@ -5,7 +5,7 @@ module PgEventstore
5
5
  # @!visibility private
6
6
  class Multiple < AbstractCommand
7
7
  def call(&blk)
8
- queries.transaction do
8
+ queries.transactions.transaction do
9
9
  yield
10
10
  end
11
11
  end
@@ -15,9 +15,9 @@ module PgEventstore
15
15
  # @return [Array<PgEventstore::Event>]
16
16
  # @raise [PgEventstore::StreamNotFoundError]
17
17
  def call(stream, options: {})
18
- stream = queries.find_stream(stream) || raise(StreamNotFoundError, stream) unless stream.all_stream?
18
+ stream = queries.streams.find_stream(stream) || raise(StreamNotFoundError, stream) unless stream.all_stream?
19
19
 
20
- queries.stream_events(stream, options)
20
+ queries.events.stream_events(stream, options)
21
21
  end
22
22
  end
23
23
  end
@@ -0,0 +1,93 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'pg_eventstore/query_builders/events_filtering_query'
4
+
5
+ module PgEventstore
6
+ # @!visibility private
7
+ class EventQueries
8
+ attr_reader :connection, :serializer, :deserializer
9
+ private :connection, :serializer, :deserializer
10
+
11
+ # @param connection [PgEventstore::Connection]
12
+ # @param serializer [PgEventstore::EventSerializer]
13
+ # @param deserializer [PgEventstore::PgResultDeserializer]
14
+ def initialize(connection, serializer, deserializer)
15
+ @connection = connection
16
+ @serializer = serializer
17
+ @deserializer = deserializer
18
+ end
19
+
20
+ # @see PgEventstore::Client#read for more info
21
+ # @param stream [PgEventstore::Stream]
22
+ # @param options [Hash]
23
+ # @return [Array<PgEventstore::Event>]
24
+ def stream_events(stream, options)
25
+ options = include_event_types_ids(options)
26
+ exec_params = events_filtering(stream, options).to_exec_params
27
+ pg_result = connection.with do |conn|
28
+ conn.exec_params(*exec_params)
29
+ end
30
+ deserializer.deserialize_many(pg_result)
31
+ end
32
+
33
+ # @param stream [PgEventstore::Stream] persisted stream
34
+ # @param event [PgEventstore::Event]
35
+ # @return [PgEventstore::Event]
36
+ def insert(stream, event)
37
+ serializer.serialize(event)
38
+
39
+ attributes = event.options_hash.slice(:id, :data, :metadata, :stream_revision, :link_id).compact
40
+ attributes[:stream_id] = stream.id
41
+ attributes[:event_type_id] = event_type_queries.find_or_create_type(event.type)
42
+
43
+ sql = <<~SQL
44
+ INSERT INTO events (#{attributes.keys.join(', ')})
45
+ VALUES (#{positional_vars(attributes.values)})
46
+ RETURNING *, $#{attributes.values.size + 1} as type
47
+ SQL
48
+
49
+ pg_result = connection.with do |conn|
50
+ conn.exec_params(sql, [*attributes.values, event.type])
51
+ end
52
+ deserializer.without_middlewares.deserialize_one(pg_result).tap do |persisted_event|
53
+ persisted_event.stream = stream
54
+ end
55
+ end
56
+
57
+ private
58
+
59
+ # @param stream [PgEventstore::Stream]
60
+ # @param options [Hash]
61
+ # @param offset [Integer]
62
+ # @return [PgEventstore::EventsFilteringQuery]
63
+ def events_filtering(stream, options, offset: 0)
64
+ return QueryBuilders::EventsFiltering.all_stream_filtering(options, offset: offset) if stream.all_stream?
65
+
66
+ QueryBuilders::EventsFiltering.specific_stream_filtering(stream, options, offset: offset)
67
+ end
68
+
69
+ # Replaces filter by event type strings with filter by event type ids
70
+ # @param options [Hash]
71
+ # @return [Hash]
72
+ def include_event_types_ids(options)
73
+ options in { filter: { event_types: Array => event_types } }
74
+ return options unless event_types
75
+
76
+ filter = options[:filter].dup
77
+ filter[:event_type_ids] = event_type_queries.find_event_types(event_types).uniq
78
+ filter.delete(:event_types)
79
+ options.merge(filter: filter)
80
+ end
81
+
82
+ # @param array [Array]
83
+ # @return [String] positional variables, based on array size. Example: "$1, $2, $3"
84
+ def positional_vars(array)
85
+ array.size.times.map { |t| "$#{t + 1}" }.join(', ')
86
+ end
87
+
88
+ # @return [PgEventstore::EventTypeQueries]
89
+ def event_type_queries
90
+ EventTypeQueries.new(connection)
91
+ end
92
+ end
93
+ end
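The `INSERT` built in `EventQueries#insert` leans on `positional_vars` plus one extra placeholder that echoes the event's type name back via `RETURNING`, since the `events` row itself no longer stores the type string. A standalone illustration of the placeholder construction; the helper is copied from the class above and the attribute values are made up:

```ruby
def positional_vars(array)
  array.size.times.map { |t| "$#{t + 1}" }.join(', ')
end

attributes = { id: 'some-uuid', data: '{}', stream_id: 1, event_type_id: 5 }
puts <<~SQL
  INSERT INTO events (#{attributes.keys.join(', ')})
  VALUES (#{positional_vars(attributes.values)})
  RETURNING *, $#{attributes.values.size + 1} as type
SQL
# INSERT INTO events (id, data, stream_id, event_type_id)
# VALUES ($1, $2, $3, $4)
# RETURNING *, $5 as type
```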
@@ -0,0 +1,50 @@
1
+ # frozen_string_literal: true
2
+
3
+ module PgEventstore
4
+ # @!visibility private
5
+ class EventTypeQueries
6
+ attr_reader :connection
7
+ private :connection
8
+
9
+ # @param connection [PgEventstore::Connection]
10
+ def initialize(connection)
11
+ @connection = connection
12
+ end
13
+
14
+ # @param type [String]
15
+ # @return [Integer] event type's id
16
+ def find_or_create_type(type)
17
+ find_type(type) || create_type(type)
18
+ end
19
+
20
+ # @param type [String]
21
+ # @return [Integer, nil] event type's id
22
+ def find_type(type)
23
+ connection.with do |conn|
24
+ conn.exec_params('SELECT id FROM event_types WHERE type = $1', [type])
25
+ end.to_a.dig(0, 'id')
26
+ end
27
+
28
+ # @param type [String]
29
+ # @return [Integer] event type's id
30
+ def create_type(type)
31
+ connection.with do |conn|
32
+ conn.exec_params('INSERT INTO event_types (type) VALUES ($1) RETURNING id', [type])
33
+ end.to_a.dig(0, 'id')
34
+ end
35
+
36
+ # @param types [Array<String>]
37
+ # @return [Array<Integer, nil>]
38
+ def find_event_types(types)
39
+ connection.with do |conn|
40
+ conn.exec_params(<<~SQL, [types])
41
+ SELECT event_types.id, types.type
42
+ FROM event_types
43
+ RIGHT JOIN (
44
+ SELECT unnest($1::varchar[]) type
45
+ ) types ON types.type = event_types.type
46
+ SQL
47
+ end.to_a.map { |attrs| attrs['id'] }
48
+ end
49
+ end
50
+ end
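One subtlety in `find_event_types` above: the `RIGHT JOIN` against `unnest($1::varchar[])` keeps one entry per requested type, so unknown types come back as `nil` rather than silently disappearing. A hypothetical call, assuming pg_eventstore is configured; the ids depend on the `event_types` table contents:

```ruby
type_queries = PgEventstore::EventTypeQueries.new(PgEventstore.connection)
type_queries.find_event_types(['OrderPlaced', 'NoSuchType'])
# => e.g. [42, nil]  # the unknown type is kept as nil instead of being dropped
```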
@@ -0,0 +1,64 @@
1
+ # frozen_string_literal: true
2
+
3
+ module PgEventstore
4
+ # @!visibility private
5
+ class StreamQueries
6
+ attr_reader :connection
7
+ private :connection
8
+
9
+ # @param connection [PgEventstore::Connection]
10
+ def initialize(connection)
11
+ @connection = connection
12
+ end
13
+
14
+ # Finds a stream in the database by the given Stream object
15
+ # @param stream [PgEventstore::Stream]
16
+ # @return [PgEventstore::Stream, nil] persisted stream
17
+ def find_stream(stream)
18
+ builder =
19
+ SQLBuilder.new.
20
+ from('streams').
21
+ where('streams.context = ? AND streams.stream_name = ? AND streams.stream_id = ?', *stream.to_a).
22
+ limit(1)
23
+ pg_result = connection.with do |conn|
24
+ conn.exec_params(*builder.to_exec_params)
25
+ end
26
+ deserialize(pg_result) if pg_result.ntuples == 1
27
+ end
28
+
29
+ # @param stream [PgEventstore::Stream]
30
+ # @return [PgEventstore::RawStream] persisted stream
31
+ def create_stream(stream)
32
+ create_sql = <<~SQL
33
+ INSERT INTO streams (context, stream_name, stream_id) VALUES ($1, $2, $3) RETURNING *
34
+ SQL
35
+ pg_result = connection.with do |conn|
36
+ conn.exec_params(create_sql, stream.to_a)
37
+ end
38
+ deserialize(pg_result)
39
+ end
40
+
41
+ # @return [PgEventstore::Stream] persisted stream
42
+ def find_or_create_stream(stream)
43
+ find_stream(stream) || create_stream(stream)
44
+ end
45
+
46
+ # @param stream [PgEventstore::Stream] persisted stream
47
+ # @return [void]
48
+ def update_stream_revision(stream, revision)
49
+ connection.with do |conn|
50
+ conn.exec_params(<<~SQL, [revision, stream.id])
51
+ UPDATE streams SET stream_revision = $1 WHERE id = $2
52
+ SQL
53
+ end
54
+ end
55
+
56
+ private
57
+
58
+ # @param pg_result [PG::Result]
59
+ # @return [PgEventstore::Stream, nil]
60
+ def deserialize(pg_result)
61
+ PgEventstore::Stream.new(**pg_result.to_a.first.transform_keys(&:to_sym))
62
+ end
63
+ end
64
+ end
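For context, `find_or_create_stream` above is what `Commands::Append` calls inside its transaction; the returned object is the persisted `streams` row, not just the identity triple. A hypothetical direct use, assuming pg_eventstore is configured and with illustrative identifiers:

```ruby
stream_queries = PgEventstore::StreamQueries.new(PgEventstore.connection)
stream = stream_queries.find_or_create_stream(
  PgEventstore::Stream.new(context: 'Sales', stream_name: 'Order', stream_id: '123')
)
stream.id              # bigint primary key of the streams row
stream.stream_revision # the revision Commands::Append asserts and bumps
```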
@@ -0,0 +1,41 @@
1
+ # frozen_string_literal: true
2
+
3
+ module PgEventstore
4
+ # @!visibility private
5
+ class TransactionQueries
6
+ attr_reader :connection
7
+ private :connection
8
+
9
+ # @param connection [PgEventstore::Connection]
10
+ def initialize(connection)
11
+ @connection = connection
12
+ end
13
+
14
+ # @return [void]
15
+ def transaction
16
+ connection.with do |conn|
17
+ # We are inside a transaction already - no need to start another one
18
+ if [PG::PQTRANS_ACTIVE, PG::PQTRANS_INTRANS].include?(conn.transaction_status)
19
+ next yield
20
+ end
21
+
22
+ pg_transaction(conn) do
23
+ yield
24
+ end
25
+ end
26
+ end
27
+
28
+ private
29
+
30
+ # @param pg_connection [PG::Connection]
31
+ # @return [void]
32
+ def pg_transaction(pg_connection)
33
+ pg_connection.transaction do
34
+ pg_connection.exec("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE")
35
+ yield
36
+ end
37
+ rescue PG::TRSerializationFailure, PG::TRDeadlockDetected
38
+ retry
39
+ end
40
+ end
41
+ end
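The extracted `TransactionQueries` carries over the `SERIALIZABLE` isolation and the retry-on-conflict behaviour from the old `Queries#transaction`, and still avoids opening a nested transaction by checking `PG::Connection#transaction_status`. A minimal sketch of the nesting behaviour, assuming pg_eventstore is configured:

```ruby
transactions = PgEventstore::TransactionQueries.new(PgEventstore.connection)

transactions.transaction do
  # Already inside a transaction here, so the nested call does not issue a
  # second BEGIN; it simply yields, and the whole block commits or rolls back
  # as one unit.
  transactions.transaction do
    # run several pg_eventstore commands atomically
  end
end
```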
@@ -1,127 +1,23 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require_relative 'query_builders/events_filtering_query'
3
+ require_relative 'queries/transaction_queries'
4
+ require_relative 'queries/event_queries'
5
+ require_relative 'queries/stream_queries'
6
+ require_relative 'queries/event_type_queries'
4
7
 
5
8
  module PgEventstore
6
9
  # @!visibility private
7
10
  class Queries
8
- attr_reader :connection, :serializer, :deserializer
9
- private :connection, :serializer, :deserializer
10
-
11
- # @param connection [PgEventstore::Connection]
12
- # @param serializer [PgEventstore::EventSerializer]
13
- # @param deserializer [PgEventstore::PgResultDeserializer]
14
- def initialize(connection, serializer, deserializer)
15
- @connection = connection
16
- @serializer = serializer
17
- @deserializer = deserializer
18
- end
19
-
20
- # @return [void]
21
- def transaction
22
- connection.with do |conn|
23
- # We are inside a transaction already - no need to start another one
24
- if [PG::PQTRANS_ACTIVE, PG::PQTRANS_INTRANS].include?(conn.transaction_status)
25
- next yield
26
- end
27
-
28
- conn.transaction do
29
- conn.exec("SET TRANSACTION ISOLATION LEVEL SERIALIZABLE")
30
- yield
31
- end
32
- end
33
- rescue PG::TRSerializationFailure, PG::TRDeadlockDetected => e
34
- retry if [PG::PQTRANS_IDLE, PG::PQTRANS_UNKNOWN].include?(e.connection.transaction_status)
35
- raise
36
- end
37
-
38
- # Finds a stream in the database by the given Stream object
39
- # @param stream [PgEventstore::Stream]
40
- # @return [PgEventstore::Stream, nil] persisted stream
41
- def find_stream(stream)
42
- builder =
43
- SQLBuilder.new.
44
- from('streams').
45
- where('streams.context = ? AND streams.stream_name = ? AND streams.stream_id = ?', *stream.to_a).
46
- limit(1)
47
- pg_result = connection.with do |conn|
48
- conn.exec_params(*builder.to_exec_params)
49
- end
50
- PgEventstore::Stream.new(**pg_result.to_a.first.transform_keys(&:to_sym)) if pg_result.ntuples == 1
51
- end
52
-
53
- # @param stream [PgEventstore::Stream]
54
- # @return [PgEventstore::RawStream] persisted stream
55
- def create_stream(stream)
56
- create_sql = <<~SQL
57
- INSERT INTO streams (context, stream_name, stream_id) VALUES ($1, $2, $3) RETURNING *
58
- SQL
59
- pg_result = connection.with do |conn|
60
- conn.exec_params(create_sql, stream.to_a)
61
- end
62
- PgEventstore::Stream.new(**pg_result.to_a.first.transform_keys(&:to_sym))
63
- end
64
-
65
- # @return [PgEventstore::Stream] persisted stream
66
- def find_or_create_stream(stream)
67
- find_stream(stream) || create_stream(stream)
68
- end
69
-
70
- # @see PgEventstore::Client#read for more info
71
- # @param stream [PgEventstore::Stream]
72
- # @param options [Hash]
73
- # @return [Array<PgEventstore::Event>]
74
- def stream_events(stream, options)
75
- exec_params = events_filtering(stream, options).to_exec_params
76
- pg_result = connection.with do |conn|
77
- conn.exec_params(*exec_params)
78
- end
79
- deserializer.deserialize_many(pg_result)
80
- end
81
-
82
- # @param stream [PgEventstore::Stream] persisted stream
83
- # @param event [PgEventstore::Event]
84
- # @return [PgEventstore::Event]
85
- def insert(stream, event)
86
- serializer.serialize(event)
87
-
88
- attributes = event.options_hash.slice(:id, :type, :data, :metadata, :stream_revision, :link_id).compact
89
- attributes[:stream_id] = stream.id
90
-
91
- sql = <<~SQL
92
- INSERT INTO events (#{attributes.keys.join(', ')})
93
- VALUES (#{(1..attributes.values.size).map { |n| "$#{n}" }.join(', ')})
94
- RETURNING *
95
- SQL
96
-
97
- pg_result = connection.with do |conn|
98
- conn.exec_params(sql, attributes.values)
99
- end
100
- deserializer.without_middlewares.deserialize_one(pg_result).tap do |persisted_event|
101
- persisted_event.stream = stream
102
- end
103
- end
104
-
105
- # @param stream [PgEventstore::Stream] persisted stream
106
- # @return [void]
107
- def update_stream_revision(stream, revision)
108
- connection.with do |conn|
109
- conn.exec_params(<<~SQL, [revision, stream.id])
110
- UPDATE streams SET stream_revision = $1 WHERE id = $2
111
- SQL
112
- end
113
- end
114
-
115
- private
116
-
117
- # @param stream [PgEventstore::Stream]
118
- # @param options [Hash]
119
- # @param offset [Integer]
120
- # @return [PgEventstore::EventsFilteringQuery]
121
- def events_filtering(stream, options, offset: 0)
122
- return QueryBuilders::EventsFiltering.all_stream_filtering(options, offset: offset) if stream.all_stream?
123
-
124
- QueryBuilders::EventsFiltering.specific_stream_filtering(stream, options, offset: offset)
125
- end
11
+ include Extensions::OptionsExtension
12
+
13
+ # @!attribute events
14
+ # @return [PgEventstore::EventQueries, nil]
15
+ attribute(:events)
16
+ # @!attribute streams
17
+ # @return [PgEventstore::StreamQueries, nil]
18
+ attribute(:streams)
19
+ # @!attribute transactions
20
+ # @return [PgEventstore::TransactionQueries, nil]
21
+ attribute(:transactions)
126
22
  end
127
23
  end
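After this rewrite, `Queries` is a plain container built on `Extensions::OptionsExtension`: every attribute is optional, and each command receives only the slices it actually uses (for example, `Commands::Multiple` gets only `transactions`). A sketch of a partial construction, assuming pg_eventstore is configured:

```ruby
queries = PgEventstore::Queries.new(
  transactions: PgEventstore::TransactionQueries.new(PgEventstore.connection)
)
queries.transactions # => the TransactionQueries instance
queries.events       # => nil, because Commands::Multiple never touches events
```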
@@ -23,8 +23,8 @@ module PgEventstore
23
23
  # @return [PgEventstore::QueryBuilders::EventsFiltering]
24
24
  def all_stream_filtering(options, offset: 0)
25
25
  event_filter = new
26
- options in { filter: { event_types: Array => event_types } }
27
- event_filter.add_event_types(event_types)
26
+ options in { filter: { event_type_ids: Array => event_type_ids } }
27
+ event_filter.add_event_types(event_type_ids)
28
28
  event_filter.add_limit(options[:max_count])
29
29
  event_filter.add_offset(offset)
30
30
  event_filter.resolve_links(options[:resolve_link_tos])
@@ -41,8 +41,8 @@ module PgEventstore
41
41
  # @return [PgEventstore::QueryBuilders::EventsFiltering]
42
42
  def specific_stream_filtering(stream, options, offset: 0)
43
43
  event_filter = new
44
- options in { filter: { event_types: Array => event_types } }
45
- event_filter.add_event_types(event_types)
44
+ options in { filter: { event_type_ids: Array => event_type_ids } }
45
+ event_filter.add_event_types(event_type_ids)
46
46
  event_filter.add_limit(options[:max_count])
47
47
  event_filter.add_offset(offset)
48
48
  event_filter.resolve_links(options[:resolve_link_tos])
@@ -58,8 +58,10 @@ module PgEventstore
58
58
  SQLBuilder.new.
59
59
  select('events.*').
60
60
  select('row_to_json(streams.*) as stream').
61
+ select('event_types.type as type').
61
62
  from('events').
62
63
  join('JOIN streams ON streams.id = events.stream_id').
64
+ join('JOIN event_types ON event_types.id = events.event_type_id').
63
65
  limit(DEFAULT_LIMIT).
64
66
  offset(DEFAULT_OFFSET)
65
67
  end
@@ -85,16 +87,16 @@ module PgEventstore
85
87
  @sql_builder.where("streams.id = ?", stream.id)
86
88
  end
87
89
 
88
- # @param event_types [Array, nil]
90
+ # @param event_type_ids [Array<Integer>, nil]
89
91
  # @return [void]
90
- def add_event_types(event_types)
91
- return if event_types.nil?
92
- return if event_types.empty?
93
-
94
- sql = event_types.size.times.map do
95
- "events.type = ?"
96
- end.join(" OR ")
97
- @sql_builder.where(sql, *event_types)
92
+ def add_event_types(event_type_ids)
93
+ return if event_type_ids.nil?
94
+ return if event_type_ids.empty?
95
+
96
+ sql = event_type_ids.size.times.map do
97
+ "?"
98
+ end.join(", ")
99
+ @sql_builder.where("event_types.id IN (#{sql})", *event_type_ids)
98
100
  end
99
101
 
100
102
  # @param revision [Integer, nil]
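Put together, the default scope plus the reworked `add_event_types` mean a filtered read now joins `event_types` and filters on ids instead of comparing `events.type` strings. Roughly the shape of the resulting SQL; this is a paraphrase, not the literal `SQLBuilder` output, and the `LIMIT`/`OFFSET` the builder appends are omitted:

```ruby
puts <<~SQL
  SELECT events.*, row_to_json(streams.*) as stream, event_types.type as type
  FROM events
  JOIN streams ON streams.id = events.stream_id
  JOIN event_types ON event_types.id = events.event_type_id
  WHERE event_types.id IN ($1, $2)
SQL
```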
@@ -31,12 +31,16 @@ namespace :pg_eventstore do
31
31
  latest_migration =
32
32
  conn.exec('SELECT number FROM migrations ORDER BY number DESC LIMIT 1').to_a.dig(0, 'number') || -1
33
33
 
34
- Dir["#{migration_files_root}/*.sql"].each do |f_name|
35
- number = File.basename(f_name).split('_')[0].to_i
36
- next if latest_migration >= number
34
+ Dir.chdir migration_files_root do
35
+ Dir["*.{sql,rb}"].each do |f_name|
36
+ number = File.basename(f_name).split('_')[0].to_i
37
+ next if latest_migration >= number
37
38
 
38
- conn.transaction do
39
- conn.exec(File.read(f_name))
39
+ if File.extname(f_name) == '.rb'
40
+ load f_name
41
+ else
42
+ conn.exec(File.read(f_name))
43
+ end
40
44
  conn.exec_params('INSERT INTO migrations (number) VALUES ($1)', [number])
41
45
  end
42
46
  end
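The reworked task treats `.rb` and `.sql` migrations uniformly: any file whose leading number is greater than the last entry in the `migrations` table gets executed (`load` for Ruby, `conn.exec` for SQL) and then recorded. A standalone sketch of that selection rule, using file names from this release:

```ruby
latest_migration = 3 # the highest number recorded in the migrations table
files = [
  '2_adjust_global_position_index.sql',
  '3_extract_type_into_separate_table.sql',
  '4_populate_event_types.rb',
  '5_adjust_indexes.sql'
]
pending = files.select { |f| File.basename(f).split('_')[0].to_i > latest_migration }
pending # => ["4_populate_event_types.rb", "5_adjust_indexes.sql"]
```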
@@ -53,6 +57,7 @@ namespace :pg_eventstore do
53
57
  conn.exec <<~SQL
54
58
  DROP TABLE IF EXISTS public.events;
55
59
  DROP TABLE IF EXISTS public.streams;
60
+ DROP TABLE IF EXISTS public.event_types;
56
61
  DROP TABLE IF EXISTS public.migrations;
57
62
  DROP EXTENSION IF EXISTS "uuid-ossp";
58
63
  DROP EXTENSION IF EXISTS pgcrypto;
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module PgEventstore
4
- VERSION = "0.2.2"
4
+ VERSION = "0.2.4"
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: pg_eventstore
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.2
4
+ version: 0.2.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Ivan Dzyzenko
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2023-12-14 00:00:00.000000000 Z
11
+ date: 2023-12-20 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: pg
@@ -55,6 +55,12 @@ files:
55
55
  - db/initial/tables.sql
56
56
  - db/migrations/0_improve_all_stream_indexes.sql
57
57
  - db/migrations/1_improve_specific_stream_indexes.sql
58
+ - db/migrations/2_adjust_global_position_index.sql
59
+ - db/migrations/3_extract_type_into_separate_table.sql
60
+ - db/migrations/4_populate_event_types.rb
61
+ - db/migrations/5_adjust_indexes.sql
62
+ - db/migrations/6_change_events_event_type_id_null_constraint.sql
63
+ - db/migrations/7_change_events_type_constraint.sql
58
64
  - docs/appending_events.md
59
65
  - docs/configuration.md
60
66
  - docs/events_and_streams.md
@@ -78,6 +84,10 @@ files:
78
84
  - lib/pg_eventstore/middleware.rb
79
85
  - lib/pg_eventstore/pg_result_deserializer.rb
80
86
  - lib/pg_eventstore/queries.rb
87
+ - lib/pg_eventstore/queries/event_queries.rb
88
+ - lib/pg_eventstore/queries/event_type_queries.rb
89
+ - lib/pg_eventstore/queries/stream_queries.rb
90
+ - lib/pg_eventstore/queries/transaction_queries.rb
81
91
  - lib/pg_eventstore/query_builders/events_filtering_query.rb
82
92
  - lib/pg_eventstore/rspec/has_option_matcher.rb
83
93
  - lib/pg_eventstore/sql_builder.rb