pg_eventstore 1.13.3 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/CHANGELOG.md +20 -3
  4. data/Dockerfile +3 -0
  5. data/README.md +20 -7
  6. data/db/migrations/10_setup_pg_cron.rb +23 -0
  7. data/db/migrations/11_add_events_link_global_position.sql +1 -0
  8. data/db/migrations/12_migrate_legacy_links.rb +83 -0
  9. data/db/migrations/13_remove_events_link_id.sql +6 -0
  10. data/db/migrations/14_remove_ids_events_id_index.sql +1 -0
  11. data/db/migrations/9_create_events_horizon.sql +21 -0
  12. data/docs/appending_events.md +1 -1
  13. data/docs/events_and_streams.md +1 -1
  14. data/docs/multiple_commands.md +16 -1
  15. data/lib/pg_eventstore/callbacks.rb +7 -5
  16. data/lib/pg_eventstore/cli/try_to_delete_subscriptions_set.rb +2 -2
  17. data/lib/pg_eventstore/client.rb +7 -5
  18. data/lib/pg_eventstore/commands/all_stream_read_grouped.rb +3 -3
  19. data/lib/pg_eventstore/commands/append.rb +3 -3
  20. data/lib/pg_eventstore/commands/event_modifiers/prepare_link_event.rb +5 -2
  21. data/lib/pg_eventstore/commands/event_modifiers/prepare_regular_event.rb +1 -1
  22. data/lib/pg_eventstore/commands/link_to.rb +6 -6
  23. data/lib/pg_eventstore/commands/multiple.rb +2 -2
  24. data/lib/pg_eventstore/commands/regular_stream_read_grouped.rb +1 -1
  25. data/lib/pg_eventstore/commands/regular_stream_read_paginated.rb +1 -1
  26. data/lib/pg_eventstore/commands/system_stream_read_paginated.rb +1 -1
  27. data/lib/pg_eventstore/connection.rb +1 -35
  28. data/lib/pg_eventstore/errors.rb +1 -1
  29. data/lib/pg_eventstore/event.rb +7 -5
  30. data/lib/pg_eventstore/extensions/options_extension.rb +40 -11
  31. data/lib/pg_eventstore/maintenance.rb +1 -1
  32. data/lib/pg_eventstore/queries/event_queries.rb +10 -9
  33. data/lib/pg_eventstore/queries/links_resolver.rb +6 -3
  34. data/lib/pg_eventstore/queries/partition_queries.rb +72 -7
  35. data/lib/pg_eventstore/queries/transaction_queries.rb +10 -4
  36. data/lib/pg_eventstore/query_builders/events_filtering.rb +3 -7
  37. data/lib/pg_eventstore/query_builders/partitions_filtering.rb +28 -18
  38. data/lib/pg_eventstore/sql_builder.rb +30 -12
  39. data/lib/pg_eventstore/stream.rb +1 -1
  40. data/lib/pg_eventstore/subscriptions/basic_runner.rb +4 -4
  41. data/lib/pg_eventstore/subscriptions/callback_handlers/subscription_feeder_handlers.rb +1 -1
  42. data/lib/pg_eventstore/subscriptions/callback_handlers/subscription_runner_handlers.rb +2 -2
  43. data/lib/pg_eventstore/subscriptions/events_processor.rb +1 -1
  44. data/lib/pg_eventstore/subscriptions/queries/subscription_command_queries.rb +5 -5
  45. data/lib/pg_eventstore/subscriptions/queries/subscription_queries.rb +3 -2
  46. data/lib/pg_eventstore/subscriptions/queries/subscription_service_queries.rb +78 -0
  47. data/lib/pg_eventstore/subscriptions/queries/subscriptions_set_command_queries.rb +2 -2
  48. data/lib/pg_eventstore/subscriptions/queries/subscriptions_set_queries.rb +1 -1
  49. data/lib/pg_eventstore/subscriptions/subscription.rb +18 -7
  50. data/lib/pg_eventstore/subscriptions/subscription_feeder.rb +8 -2
  51. data/lib/pg_eventstore/subscriptions/subscription_handler_performance.rb +1 -3
  52. data/lib/pg_eventstore/subscriptions/subscription_runner.rb +5 -2
  53. data/lib/pg_eventstore/subscriptions/subscription_runners_feeder.rb +9 -1
  54. data/lib/pg_eventstore/subscriptions/subscriptions_manager.rb +16 -10
  55. data/lib/pg_eventstore/tasks/setup.rake +30 -31
  56. data/lib/pg_eventstore/utils.rb +8 -0
  57. data/lib/pg_eventstore/version.rb +1 -1
  58. data/lib/pg_eventstore/web/application.rb +5 -5
  59. data/lib/pg_eventstore/web/paginator/events_collection.rb +4 -4
  60. data/lib/pg_eventstore/web/paginator/helpers.rb +3 -3
  61. data/lib/pg_eventstore/web/paginator/stream_ids_collection.rb +2 -2
  62. data/lib/pg_eventstore/web/subscriptions/helpers.rb +2 -2
  63. data/lib/pg_eventstore.rb +4 -4
  64. data/pg_eventstore.gemspec +1 -1
  65. data/sig/pg_eventstore/client.rbs +1 -1
  66. data/sig/pg_eventstore/commands/multiple.rbs +1 -1
  67. data/sig/pg_eventstore/event.rbs +7 -5
  68. data/sig/pg_eventstore/extensions/options_extension.rbs +9 -1
  69. data/sig/pg_eventstore/queries/event_queries.rbs +11 -11
  70. data/sig/pg_eventstore/queries/links_resolver.rbs +5 -5
  71. data/sig/pg_eventstore/queries/partition_queries.rbs +5 -1
  72. data/sig/pg_eventstore/queries/transaction_queries.rbs +2 -2
  73. data/sig/pg_eventstore/query_builders/partitions_filtering.rbs +9 -5
  74. data/sig/pg_eventstore/sql_builder.rbs +8 -2
  75. data/sig/pg_eventstore/subscriptions/queries/subscription_queries.rbs +13 -13
  76. data/sig/pg_eventstore/subscriptions/queries/subscription_service_queries.rbs +19 -0
  77. data/sig/pg_eventstore/subscriptions/subscription.rbs +2 -0
  78. data/sig/pg_eventstore/subscriptions/subscription_feeder.rbs +2 -0
  79. data/sig/pg_eventstore/subscriptions/subscription_runner.rbs +0 -2
  80. data/sig/pg_eventstore/subscriptions/subscription_runners_feeder.rbs +10 -3
  81. data/sig/pg_eventstore/subscriptions/subscriptions_manager.rbs +2 -0
  82. data/sig/pg_eventstore/utils.rbs +10 -2
  83. metadata +11 -2
@@ -6,6 +6,8 @@ module PgEventstore
 
     # @return [String] a type of link event
     LINK_TYPE = '$>'
+    # @return [String]
+    PRIMARY_TABLE_NAME = 'events'
 
     # @!attribute id
     #   @return [String] UUIDv4 string
@@ -28,10 +30,10 @@ module PgEventstore
     # @!attribute metadata
     #   @return [Hash] event's metadata
     attribute(:metadata) { {} }
-    # @!attribute link_id
-    #   @return [String, nil] UUIDv4 of an event the current event points to. If it is not nil, then the current
-    #     event is a link
-    attribute(:link_id)
+    # @!attribute link_global_position
+    #   @return [Integer, nil] global_position of an event the current event points to. If it is not nil, then the
+    #     current event is a link
+    attribute(:link_global_position)
     # @!attribute link_partition_id
     #   @return [Integer, nil] a partition id of an event the link event points to. It is used to load original event
     #     when resolve_link_tos: true option is provided when reading events.
@@ -56,7 +58,7 @@ module PgEventstore
     # Detect whether an event is a link event
     # @return [Boolean]
     def link?
-      !link_id.nil?
+      !link_global_position.nil?
     end
 
     # Detect whether an event is a system event
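Note on the hunks above: a link event no longer stores the UUID of the event it points to; it stores that event's global_position (together with link_partition_id), and the original event is looked up by that position when reading with resolve_link_tos: true. A minimal sketch of the resulting behaviour (values are illustrative, not taken from the gem's docs):

    event = PgEventstore::Event.new(type: 'something-happened')
    event.link?  # => false, because link_global_position is nil

    link = PgEventstore::Event.new(link_global_position: 1042, link_partition_id: 7)
    link.link?   # => true; on read, the target is resolved by its global_position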
@@ -1,7 +1,5 @@
 # frozen_string_literal: true
 
-require 'set'
-
 module PgEventstore
   module Extensions
     # A very simple extension that implements a DSL for adding attr_accessors with default values,
@@ -72,16 +70,47 @@ module PgEventstore
         end
       end
 
-      class Options < Set
-        def add(option)
-          @hash[option] = option
-          self
+      class Options
+        include Enumerable
+
+        attr_reader :options
+        protected :options
+
+        # @param options [Array<PgEventstore::Extensions::OptionsExtension::Option>]
+        def initialize(options = [])
+          @options = options.to_h { [_1, true] }
         end
 
-        # @param option [Symbol]
+        # @param option_name [Symbol]
         # @return [PgEventstore::Extensions::OptionsExtension::Option, nil]
-        def [](option)
-          @hash[Option.new(option)]
+        def [](option_name)
+          option = Option.new(option_name)
+          options.find { |key, _| key == option }&.dig(0)
+        end
+
+        # @param other [PgEventstore::Extensions::OptionsExtension::Options]
+        # @return [PgEventstore::Extensions::OptionsExtension::Options]
+        def +(other)
+          self.class.new(options.keys + other.options.keys)
+        end
+
+        # @param option [PgEventstore::Extensions::OptionsExtension::Option]
+        # @return [Boolean]
+        def include?(option)
+          options.key?(option)
+        end
+
+        # @return [Boolean]
+        def dup
+          self.class.new(options.keys)
+        end
+
+        def each(...)
+          options.keys.each(...)
+        end
+
+        def ==(other)
+          options.keys == other.options.keys
         end
       end
 
@@ -92,7 +121,7 @@ module PgEventstore
       #   context of your object to determine the default value of the option
       # @return [Symbol]
       def option(opt_name, metadata: nil, &blk)
-        self.options = (options + Options.new([Option.new(opt_name, metadata: metadata)])).freeze
+        self.options = (options + Options.new([Option.new(opt_name, metadata:)])).freeze
         warn_already_defined(opt_name)
        warn_already_defined(:"#{opt_name}=")
        define_method "#{opt_name}=" do |value|
@@ -112,7 +141,7 @@ module PgEventstore
 
       def inherited(klass)
         super
-        klass.options = Options.new(options).freeze
+        klass.options = options.dup.freeze
       end
 
       private
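For context, the Options collection above backs the `option` DSL; dropping the Set subclass removes the `require 'set'` and makes lookup, merging, and duplication explicit. A rough sketch of how a class built on this extension is typically declared (the class, option names, and the keyword initializer are assumptions for illustration, not a documented example):

    class ExampleConfig
      include PgEventstore::Extensions::OptionsExtension

      option(:max_count) { 1_000 }   # the block supplies the default value
      option(:middlewares) { {} }
    end

    config = ExampleConfig.new(max_count: 5)
    config.max_count    # => 5
    config.middlewares  # => {} (default taken from the block)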
@@ -25,7 +25,7 @@ module PgEventstore
     def delete_event(event, force: false)
       Commands::DeleteEvent.new(
         Queries.new(transactions: transaction_queries, maintenance: maintenance_queries)
-      ).call(event, force: force)
+      ).call(event, force:)
     end
 
     private
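Many of the one-line hunks in this release, like the one above, are the same mechanical modernization: Ruby 3.1's shorthand hash/keyword-argument syntax, where `force: force` becomes `force:`. A self-contained illustration (names are made up):

    def call(event, force:)
      [event, force]
    end

    event = 'some-event'
    force = true
    call(event, force: force)  # classic keyword argument
    call(event, force:)        # Ruby >= 3.1 shorthand: the value is taken from the local variable `force`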
@@ -28,7 +28,8 @@ module PgEventstore
     def event_exists?(event)
       return false if event.id.nil? || event.stream.nil?
 
-      sql_builder = SQLBuilder.new.select('1 as exists').from('events').where('id = ?', event.id).limit(1)
+      sql_builder = SQLBuilder.new.select('1 as exists').from(Event::PRIMARY_TABLE_NAME).where('id = ?', event.id)
+      sql_builder.limit(1)
       sql_builder.where(
         'context = ? and stream_name = ? and type = ?', event.stream.context, event.stream.stream_name, event.type
       )
@@ -40,24 +41,24 @@
     # Takes an array of potentially persisted events and loads their ids from db. Those ids can be later used to check
     #   whether events are actually existing events.
     # @param events [Array<PgEventstore::Event>]
-    # @return [Array<String>]
-    def ids_from_db(events)
-      sql_builder = SQLBuilder.new.from('events').select('id')
+    # @return [Array<Integer>]
+    def global_positions_from_db(events)
+      sql_builder = SQLBuilder.new.from(Event::PRIMARY_TABLE_NAME).select('global_position')
       partition_attrs = events.map { |event| [event.stream&.context, event.stream&.stream_name, event.type] }.uniq
       partition_attrs.each do |context, stream_name, event_type|
         sql_builder.where_or('context = ? and stream_name = ? and type = ?', context, stream_name, event_type)
       end
-      sql_builder.where('id = ANY(?::uuid[])', events.map(&:id))
+      sql_builder.where('global_position = ANY(?::bigint[])', events.map(&:global_position))
       raw_events = PgEventstore.connection.with do |conn|
         conn.exec_params(*sql_builder.to_exec_params)
       end.to_a
-      raw_events.map { |attrs| attrs['id'] }
+      raw_events.map { |attrs| attrs['global_position'] }
     end
 
     # @param stream [PgEventstore::Stream]
     # @return [Integer, nil]
     def stream_revision(stream)
-      sql_builder = SQLBuilder.new.from('events').select('stream_revision')
+      sql_builder = SQLBuilder.new.from(Event::PRIMARY_TABLE_NAME).select('stream_revision')
       sql_builder.where('context = ? and stream_name = ? and stream_id = ?', *stream.to_a)
       sql_builder.order('stream_revision DESC').limit(1)
       connection.with do |conn|
@@ -83,7 +84,7 @@
     # @return [Array<PgEventstore::Event>]
     def insert(stream, events)
       sql_rows_for_insert, values = prepared_statements(stream, events)
-      columns = %w[id data metadata stream_revision link_id link_partition_id type context stream_name stream_id]
+      columns = %w[id data metadata stream_revision link_global_position link_partition_id type context stream_name stream_id]
 
       sql = <<~SQL
         INSERT INTO events (#{columns.join(', ')})
@@ -128,7 +129,7 @@
       sql_rows_for_insert = events.map do |event|
         event = serializer.serialize(event)
         attributes = event.options_hash.slice(
-          :id, :data, :metadata, :stream_revision, :link_id, :link_partition_id, :type
+          :id, :data, :metadata, :stream_revision, :link_global_position, :link_partition_id, :type
         )
 
         attributes = attributes.merge(stream.to_hash)
@@ -20,9 +20,9 @@ module PgEventstore
       link_events = raw_events.select { _1['link_partition_id'] }.group_by { _1['link_partition_id'] }
       return raw_events if link_events.empty?
 
-      original_events = load_original_events(link_events).to_h { |attrs| [attrs['id'], attrs] }
+      original_events = load_original_events(link_events).to_h { |attrs| [attrs['global_position'], attrs] }
       raw_events.map do |attrs|
-        original_event = original_events[attrs['link_id']]
+        original_event = original_events[attrs['link_global_position']]
         next attrs unless original_event
 
         original_event.merge('link' => attrs).merge(attrs.except(*original_event.keys))
@@ -37,7 +37,10 @@
       partitions = partition_queries.find_by_ids(link_events.keys)
       sql_builders = partitions.map do |partition|
         sql_builder = SQLBuilder.new.select('*').from(partition['table_name'])
-        sql_builder.where('id = ANY(?::uuid[])', link_events[partition['id']].map { _1['link_id'] })
+        sql_builder.where(
+          'global_position = ANY(?::bigint[])',
+          link_events[partition['id']].map { _1['link_global_position'] }
+        )
       end
       sql_builder = sql_builders[1..].each_with_object(sql_builders.first) do |builder, top_builder|
         top_builder.union(builder)
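The resolver above now loads original events per partition by global_position rather than by UUID. From the client's point of view nothing changes; a hedged sketch of the read path that exercises this code (stream attributes are illustrative):

    stream = PgEventstore::Stream.new(context: 'MyAwesomeContext', stream_name: 'projection', stream_id: '1')
    # With resolve_link_tos: true, link events in the result are replaced by the events they point to.
    PgEventstore.client.read(stream, options: { resolve_link_tos: true })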
@@ -76,7 +76,7 @@ module PgEventstore
     # @return [Hash] partition attributes
     def create_event_type_partition(stream, event_type, stream_name_partition_name)
       attributes = {
-        context: stream.context, stream_name: stream.stream_name, event_type: event_type,
+        context: stream.context, stream_name: stream.stream_name, event_type:,
         table_name: event_type_partition_name(stream, event_type)
       }
 
@@ -178,14 +178,42 @@
 
     # @param stream_filters [Array<Hash[Symbol, String]>]
     # @param event_filters [Array<String>]
+    # @param scope [Symbol] what kind of partition we want to receive. Available options are :event_type, :context,
+    #   :stream_name and :auto. In :auto mode the scope will be calculated based on stream_filters and event_filters.
     # @return [Array<PgEventstore::Partition>]
-    def partitions(stream_filters, event_filters)
-      partitions_filter = QueryBuilders::PartitionsFiltering.new
-      stream_filters.each { |attrs| partitions_filter.add_stream_attrs(**attrs) }
-      partitions_filter.add_event_types(event_filters)
-      partitions_filter.with_event_types
+    def partitions(stream_filters, event_filters, scope: :event_type)
+      stream_filters = stream_filters.select { QueryBuilders::PartitionsFiltering.correct_stream_filter?(_1) }
+      sql_builder =
+        if event_filters.any?
+          # When event type filters are present - they apply constraints to any stream filter. Thus, we can't look up
+          # partitions by stream attributes separately.
+          filter = QueryBuilders::PartitionsFiltering.new
+          stream_filters.each { |attrs| filter.add_stream_attrs(**attrs) }
+          filter.add_event_types(event_filters)
+          set_partitions_scope(filter, stream_filters, event_filters, scope)
+        else
+          # When event type filters are absent - we can look up partitions by context and context/stream_name
+          # separately, thus potentially producing one-to-one mapping of filter-to-partition with :auto scope. For
+          # example, let's say we have stream attributes filter like
+          # [{ context: 'FooCtx', stream_name: 'Bar'}, { context: 'BarCtx' }], then we would be able to look up
+          # partitions by the exact match, returning only two of them according to the provided filters - stream
+          # partition for first filter and context partition for second filter.
+          builders = stream_filters.map do |attrs|
+            filter = QueryBuilders::PartitionsFiltering.new
+            filter.add_stream_attrs(**attrs)
+            set_partitions_scope(filter, [attrs], event_filters, scope)
+          end
+
+          sql_builder = SQLBuilder.union_builders(builders) if builders.any?
+          sql_builder ||
+            begin
+              builder = QueryBuilders::PartitionsFiltering.new
+              set_partitions_scope(builder, stream_filters, event_filters, scope)
+            end
+        end
+
       connection.with do |conn|
-        conn.exec_params(*partitions_filter.to_exec_params)
+        conn.exec_params(*sql_builder.to_exec_params)
       end.map(&method(:deserialize))
     end
 
@@ -210,6 +238,43 @@
 
     private
 
+    # @param partitions_filter [PgEventstore::QueryBuilders::PartitionsFiltering]
+    # @param stream_filters [Array<Hash[Symbol, String]>]
+    # @param event_filters [Array<String>]
+    # @param scope [Symbol]
+    # @return [PgEventstore::SQLBuilder]
+    def set_partitions_scope(partitions_filter, stream_filters, event_filters, scope)
+      case scope
+      when :event_type
+        partitions_filter.with_event_types
+      when :stream_name
+        filter = QueryBuilders::PartitionsFiltering.new
+        filter.without_event_types
+        filter.with_stream_names
+        builder = filter.to_sql_builder
+        builder.where(
+          '(context, stream_name) in ?',
+          partitions_filter.to_sql_builder.unselect.select('context, stream_name').group('context, stream_name')
+        )
+      when :context
+        filter = QueryBuilders::PartitionsFiltering.new
+        filter.without_event_types
+        filter.without_stream_names
+        builder = filter.to_sql_builder
+        builder.where('context in ?', partitions_filter.to_sql_builder.unselect.select('context').group('context'))
+      when :auto
+        if event_filters.any?
+          set_partitions_scope(partitions_filter, stream_filters, event_filters, :event_type)
+        elsif stream_filters.any? { _1[:stream_name] }
+          set_partitions_scope(partitions_filter, stream_filters, event_filters, :stream_name)
+        else
+          set_partitions_scope(partitions_filter, stream_filters, event_filters, :context)
+        end
+      else
+        raise NotImplementedError, "Don't know how to handle #{scope.inspect} scope!"
+      end
+    end
+
     # @param attrs [Hash]
     # @return [PgEventstore::Partition]
     def deserialize(attrs)
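A sketch of what the new `scope:` keyword buys, reusing the example from the comment above (this is the internal PartitionQueries API; `partition_queries` stands for an instance of it and the filters are illustrative):

    # With no event-type filters and scope: :auto, each stream filter maps to exactly one partition:
    partition_queries.partitions(
      [{ context: 'FooCtx', stream_name: 'Bar' }, { context: 'BarCtx' }],
      [],            # no event type filters
      scope: :auto   # -> the FooCtx/Bar stream_name partition plus the BarCtx context partition
    )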
@@ -23,24 +23,30 @@ module PgEventstore
     end
 
     # @param level [Symbol] transaction isolation level
+    # @param read_only [Boolean] whether transaction is read-only
     # @return [void]
-    def transaction(level = :serializable, &blk)
+    def transaction(level = :serializable, read_only: false, &blk)
       connection.with do |conn|
         # We are inside a transaction already - no need to start another one
         next yield if [PG::PQTRANS_ACTIVE, PG::PQTRANS_INTRANS].include?(conn.transaction_status)
 
-        pg_transaction(ISOLATION_LEVELS[level], conn, &blk)
+        pg_transaction(ISOLATION_LEVELS[level], read_only, conn, &blk)
       end
     end
 
     private
 
     # @param level [String] PostgreSQL transaction isolation level
+    # @param read_only [Boolean]
     # @param pg_connection [PG::Connection]
     # @return [void]
-    def pg_transaction(level, pg_connection, &_blk)
+    def pg_transaction(level, read_only, pg_connection, &)
       pg_connection.transaction do
-        pg_connection.exec("SET TRANSACTION ISOLATION LEVEL #{level}")
+        if read_only
+          pg_connection.exec("SET TRANSACTION ISOLATION LEVEL #{level} READ ONLY")
+        else
+          pg_connection.exec("SET TRANSACTION ISOLATION LEVEL #{level}")
+        end
         yield
       end
     rescue PG::TRSerializationFailure, PG::TRDeadlockDetected
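A short sketch of the new flag (internal TransactionQueries API; `transaction_queries` here stands for an instance of it):

    transaction_queries.transaction(read_only: true) do
      # The connection issues "SET TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY" for this block,
      # so any statement that tries to write is rejected by PostgreSQL.
    end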
@@ -4,10 +4,6 @@ module PgEventstore
   module QueryBuilders
     # @!visibility private
     class EventsFiltering < BasicFiltering
-      # @return [String]
-      TABLE_NAME = 'events'
-      private_constant :TABLE_NAME
-
       # @return [Integer]
       DEFAULT_LIMIT = 1_000
       # @return [Hash<String => String, Symbol => String>]
@@ -94,7 +90,7 @@
          stream_attrs in { context: String | NilClass => context }
          stream_attrs in { stream_name: String | NilClass => stream_name }
          stream_attrs in { stream_id: String | NilClass => stream_id }
-          { context: context, stream_name: stream_name, stream_id: stream_id }
+          { context:, stream_name:, stream_id: }
        end
        streams || []
      end
@@ -107,7 +103,7 @@
 
      # @return [String]
      def to_table_name
-        TABLE_NAME
+        Event::PRIMARY_TABLE_NAME
      end
 
      # @param context [String, nil]
@@ -115,7 +111,7 @@
      # @param stream_id [String, nil]
      # @return [void]
      def add_stream_attrs(context: nil, stream_name: nil, stream_id: nil)
-        stream_attrs = { context: context, stream_name: stream_name, stream_id: stream_id }
+        stream_attrs = { context:, stream_name:, stream_id: }
        return unless correct_stream_filter?(stream_attrs)
 
        stream_attrs.compact!
@@ -24,10 +24,23 @@ module PgEventstore
        streams = streams&.map do |stream_attrs|
          stream_attrs in { context: String | NilClass => context }
          stream_attrs in { stream_name: String | NilClass => stream_name }
-          { context: context, stream_name: stream_name }
+          { context:, stream_name: }
        end
        streams || []
      end
+
+      # @param stream_attrs [Hash]
+      # @return [Boolean]
+      def correct_stream_filter?(stream_attrs)
+        result = (stream_attrs in { context: String, stream_name: String } | { context: String })
+        return true if result
+
+        PgEventstore.logger&.debug(<<~TEXT)
+          Ignoring unsupported stream filter format for grouped read #{stream_attrs.compact.inspect}. \
+          See docs/reading_events.md docs for supported formats.
+        TEXT
+        false
+      end
    end
 
    # @return [String]
@@ -37,10 +50,10 @@
 
    # @param context [String, nil]
    # @param stream_name [String, nil]
-    # @return [void]
+    # @return [PgEventstore::SQLBuilder]
    def add_stream_attrs(context: nil, stream_name: nil)
-      stream_attrs = { context: context, stream_name: stream_name }
-      return unless correct_stream_filter?(stream_attrs)
+      stream_attrs = { context:, stream_name: }
+      return @sql_builder unless self.class.correct_stream_filter?(stream_attrs)
 
      stream_attrs.compact!
      sql = stream_attrs.map do |attr, _|
@@ -50,31 +63,28 @@
    end
 
    # @param event_types [Array<String>]
-    # @return [void]
+    # @return [PgEventstore::SQLBuilder]
    def add_event_types(event_types)
-      return if event_types.empty?
+      return @sql_builder if event_types.empty?
 
      @sql_builder.where("#{to_table_name}.event_type = ANY(?::varchar[])", event_types)
    end
 
-    # @return [void]
+    # @return [PgEventstore::SQLBuilder]
    def with_event_types
      @sql_builder.where('event_type IS NOT NULL')
    end
 
-    private
+    def with_stream_names
+      @sql_builder.where('stream_name IS NOT NULL')
+    end
 
-    # @param stream_attrs [Hash]
-    # @return [Boolean]
-    def correct_stream_filter?(stream_attrs)
-      result = (stream_attrs in { context: String, stream_name: String } | { context: String, stream_name: nil })
-      return true if result
+    def without_event_types
+      @sql_builder.where('event_type IS NULL')
+    end
 
-      PgEventstore.logger&.debug(<<~TEXT)
-        Ignoring unsupported stream filter format for grouped read #{stream_attrs.compact.inspect}. \
-        See docs/reading_events.md docs for supported formats.
-      TEXT
-      false
+    def without_stream_names
+      @sql_builder.where('stream_name IS NULL')
    end
  end
 end
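The filter check above is now a class method and accepts a context-only hash without an explicit `stream_name: nil`. Roughly (made-up values):

    PgEventstore::QueryBuilders::PartitionsFiltering.correct_stream_filter?({ context: 'FooCtx' })
    # => true (1.x required stream_name to be present as a String or an explicit nil)

    PgEventstore::QueryBuilders::PartitionsFiltering.correct_stream_filter?({ stream_name: 'Bar' })
    # => false; a debug message about the unsupported filter format is logged if a logger is configured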
@@ -64,7 +64,7 @@ module PgEventstore
       self
     end
 
-    # @param table_name [String]
+    # @param table_name [String | SQLBuilder]
     # @return [self]
     def from(table_name)
       @from_value = table_name
@@ -132,7 +132,7 @@
       self
     end
 
-    # @return [Array<String, Array<Object>>]
+    # @return [[String, Array<_>]]
    def to_exec_params
      @positional_values.clear
      @positional_values_size = 0
@@ -141,19 +141,27 @@
 
    protected
 
-    # @return [Array<String, Array<Object>>]
+    # @return [[String, Array<_>]]
    def _to_exec_params
      return [single_query_sql, @positional_values] if @union_values.empty?
 
      [union_query_sql, @positional_values]
    end
 
+    # @return [String]
+    def from_sql
+      return @from_value if @from_value.is_a?(String)
+
+      sql = merge(@from_value)
+      "(#{sql}) #{@from_value.from_sql}"
+    end
+
    private
 
    # @return [String]
    def single_query_sql
      where_sql = [where_sql('OR'), where_sql('AND')].reject(&:empty?).map { |sql| "(#{sql})" }.join(' AND ')
-      sql = "SELECT #{select_sql} FROM #{@from_value}"
+      sql = "SELECT #{select_sql} FROM #{from_sql}"
      sql += " #{join_sql}" unless @join_values.empty?
      sql += " WHERE #{where_sql}" unless where_sql.empty?
      sql += " GROUP BY #{@group_values.join(', ')}" unless @group_values.empty?
@@ -168,11 +176,7 @@
      sql = single_query_sql
      union_parts = ["(#{sql})"]
      union_parts += @union_values.map do |builder|
-        builder.positional_values_size = @positional_values_size
-        builder_sql, values = builder._to_exec_params
-        @positional_values.push(*values)
-        @positional_values_size += values.size
-        "(#{builder_sql})"
+        "(#{merge(builder)})"
      end
      union_parts.join(' UNION ALL ')
    end
@@ -200,14 +204,28 @@
      @order_values.join(', ')
    end
 
+    # @param builder [PgEventstore::SQLBuilder]
+    # @return [String]
+    def merge(builder)
+      builder.positional_values_size = @positional_values_size
+      sql_query, positional_values = builder._to_exec_params
+      @positional_values.push(*positional_values)
+      @positional_values_size += positional_values.size
+      sql_query
+    end
+
    # Replaces "?" signs in the given string with positional variables and memorize positional values they refer to.
    # @param sql [String]
    # @return [String]
    def extract_positional_args(sql, *arguments)
      sql.gsub('?').each_with_index do |_, index|
-        @positional_values.push(arguments[index])
-        @positional_values_size += 1
-        "$#{@positional_values_size}"
+        if arguments[index].is_a?(SQLBuilder)
+          "(#{merge(arguments[index])})"
+        else
+          @positional_values.push(arguments[index])
+          @positional_values_size += 1
+          "$#{@positional_values_size}"
+        end
      end
    end
  end
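Taken together, the SQLBuilder changes let one builder be nested inside another: `from` may receive a builder, and a `?` placeholder bound to a builder is inlined as a parenthesized sub-select via `merge`, with its positional values renumbered after the outer query's. A hedged sketch (internal API, illustrative column and table names):

    inner = PgEventstore::SQLBuilder.new.select('global_position').from('events').where('type = ?', 'Foo')
    outer = PgEventstore::SQLBuilder.new.select('*').from('events').where('global_position IN ?', inner)

    sql, params = outer.to_exec_params
    # sql embeds the inner query as "(SELECT global_position FROM events WHERE ...)" and
    # params ends up as ["Foo"], because the inner builder's values are appended after the outer ones.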
@@ -67,7 +67,7 @@ module PgEventstore
     # @param keys [Array<Symbol>, nil]
     # @return [Hash<Symbol => String>]
     def deconstruct_keys(keys)
-      hash = { context: context, stream_name: stream_name, stream_id: stream_id }
+      hash = { context:, stream_name:, stream_id: }
       return hash unless keys
 
       hash.slice(*keys)
@@ -46,7 +46,7 @@ module PgEventstore
   #
   #  def initialize
   #    @basic_runner = PgEventstore::BasicRunner.new(
-  #      run_interval: 1, async_shutdown_time: 2, recovery_strategies: recovery_strategies
+  #      run_interval: 1, async_shutdown_time: 2, recovery_strategies:
   #    )
   #    @jobs_performed = 0
   #    @jobs_limit = 3
@@ -224,7 +224,7 @@
 
     # @param state [Symbol]
     # @return [Object, nil] a result of evaluating of passed block
-    def within_state(state, &_blk)
+    def within_state(state, &)
       synchronize do
         return unless @state.public_send("#{RunnerState::STATES.fetch(state)}?")
 
@@ -248,8 +248,8 @@
 
     private
 
-    def synchronize(&blk)
-      @mutex.synchronize(&blk)
+    def synchronize(&)
+      @mutex.synchronize(&)
     end
 
     # @return [void]
@@ -10,7 +10,7 @@ module PgEventstore
       # @param state [String]
       # @return [void]
       def update_subscriptions_set_state(subscriptions_set_lifecycle, state)
-        subscriptions_set_lifecycle.persisted_subscriptions_set.update(state: state)
+        subscriptions_set_lifecycle.persisted_subscriptions_set.update(state:)
       end
 
       # @param subscriptions_lifecycle [PgEventstore::SubscriptionsLifecycle]
@@ -21,7 +21,7 @@
       def update_subscription_stats(subscription, stats, current_position)
         subscription.update(
           average_event_processing_time: stats.average_event_processing_time,
-          current_position: current_position,
+          current_position:,
           total_processed_events: subscription.total_processed_events + 1
         )
       end
@@ -50,7 +50,7 @@
       # @param state [String]
       # @return [void]
       def update_subscription_state(subscription, state)
-        subscription.update(state: state)
+        subscription.update(state:)
       end
     end
   end
@@ -21,7 +21,7 @@ module PgEventstore
       @basic_runner = BasicRunner.new(
         run_interval: 0,
         async_shutdown_time: graceful_shutdown_timeout,
-        recovery_strategies: recovery_strategies
+        recovery_strategies:
       )
       attach_runner_callbacks
     end
@@ -21,15 +21,15 @@ module PgEventstore
     def find_or_create_by(subscription_id:, subscriptions_set_id:, command_name:, data:)
       transaction_queries.transaction do
         existing = find_by(
-          subscription_id: subscription_id, subscriptions_set_id: subscriptions_set_id, command_name: command_name
+          subscription_id:, subscriptions_set_id:, command_name:
         )
         next existing if existing
 
         create(
-          subscription_id: subscription_id,
-          subscriptions_set_id: subscriptions_set_id,
-          command_name: command_name,
-          data: data
+          subscription_id:,
+          subscriptions_set_id:,
+          command_name:,
+          data:
         )
       end
     end