ruby_event_store 0.27.1 → 0.28.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA1:
- metadata.gz: 7a86c8269c00c7fc0586a26cc129573df691b34a
- data.tar.gz: 49469f4a21d4679acd373e58beea78a8baa7ff51
+ SHA256:
+ metadata.gz: 72a8ebb8ab54701aac0c46c74e2ca5dd58bd4b505c39e2d3323c5b0e67c1408a
+ data.tar.gz: 6ba8a87fb402111fe441dcfb366a59ae8b967e82e884ee9f88beab3e20d8d3ba
  SHA512:
- metadata.gz: 69318af6cb2f0eddc85b0e33afc35643086c0d15b6c8a51fceaec0e5557d2712b866ee0832ad683574a0e0970444f9bb0bd93e0e660c3cd815dd4b545ce26516
- data.tar.gz: 4fe460334ee294978c2a8a8f43d386484617cbd49b907d98fe4832f96ad513f695aac314146ae70eb3764d24727ef530c401c206eb8b7dc5b7d5e7acdcdf4177
+ metadata.gz: 16a7cfe13de7dbfb095198e4a9a93d7b3b26f6a085be0c85c3b28266a494781e6535e16687c23df900f39e83cdefdb62b87e803bd8ec0c735e6ac442e6e54eda
+ data.tar.gz: 428425082d57566878b5736ff838cf15eda1fa970a72758d111b067920227970d073eabf45ca833c197792fdc050bb17c471c975b5d8863d65d8fea48def833f
data/Gemfile CHANGED
@@ -1,3 +1,4 @@
  source 'https://rubygems.org'
  gemspec
- gem 'pry'
+ gem 'pry'
+ gem 'protobuf_nested_struct'
data/Makefile CHANGED
@@ -17,6 +17,12 @@ install: ## Install gem dependencies
  @echo "Installing gem dependencies"
  @bundle install
 
+ remove-lock:
+ @echo "Removing resolved dependency versions"
+ -rm Gemfile.lock
+
+ reinstall: remove-lock install ## Removing resolved dependency versions
+
  test: ## Run unit tests
  @echo "Running unit tests"
  @bundle exec rspec
@@ -28,6 +34,14 @@ mutate: test ## Run mutation tests
  $(addprefix --ignore-subject ,$(IGNORE)) \
  --use rspec "$(SUBJECT)"
 
+ mutate-fast: ## Run mutation tests with --fail-fast
+ @echo "Running mutation tests"
+ @MUTATING=true bundle exec mutant --include lib \
+ $(addprefix --require ,$(REQUIRE)) \
+ $(addprefix --ignore-subject ,$(IGNORE)) \
+ --fail-fast \
+ --use rspec "$(SUBJECT)"
+
  build:
  @echo "Building gem package"
  @gem build -V $(GEM_NAME).gemspec
@@ -36,7 +50,7 @@ build:
 
  push:
  @echo "Pushing package to RubyGems"
- @gem push pkg/$(GEM_NAME)-$(GEM_VERSION).gem
+ @gem push -k dev_arkency pkg/$(GEM_NAME)-$(GEM_VERSION).gem
 
  clean:
  @echo "Removing previously built package"
data/lib/ruby_event_store/client.rb CHANGED
@@ -1,11 +1,13 @@
  module RubyEventStore
  class Client
  def initialize(repository:,
+ mapper: Mappers::Default.new,
  event_broker: PubSub::Broker.new,
  page_size: PAGE_SIZE,
  metadata_proc: nil,
  clock: ->{ Time.now.utc })
  @repository = repository
+ @mapper = mapper
  @event_broker = event_broker
  @page_size = page_size
  @metadata_proc = metadata_proc
@@ -15,7 +17,7 @@ module RubyEventStore
  def publish_events(events, stream_name: GLOBAL_STREAM, expected_version: :any)
  append_to_stream(events, stream_name: stream_name, expected_version: expected_version)
  events.each do |ev|
- @event_broker.notify_subscribers(ev)
+ event_broker.notify_subscribers(ev)
  end
  :ok
  end
@@ -27,62 +29,48 @@ module RubyEventStore
  def append_to_stream(events, stream_name: GLOBAL_STREAM, expected_version: :any)
  events = normalize_to_array(events)
  events.each{|event| enrich_event_metadata(event) }
- @repository.append_to_stream(events, stream_name, expected_version)
+ repository.append_to_stream(serialized_events(events), Stream.new(stream_name), ExpectedVersion.new(expected_version))
  :ok
  end
 
  def link_to_stream(event_ids, stream_name:, expected_version: :any)
- @repository.link_to_stream(event_ids, stream_name, expected_version)
+ repository.link_to_stream(event_ids, Stream.new(stream_name), ExpectedVersion.new(expected_version))
  self
  end
 
  def delete_stream(stream_name)
- raise IncorrectStreamData if stream_name.nil? || stream_name.empty?
- @repository.delete_stream(stream_name)
+ repository.delete_stream(Stream.new(stream_name))
  :ok
  end
 
- def read_events_forward(stream_name, start: :head, count: @page_size)
- raise IncorrectStreamData if stream_name.nil? || stream_name.empty?
- page = Page.new(@repository, start, count)
- @repository.read_events_forward(stream_name, page.start, page.count)
+ def read_events_forward(stream_name, start: :head, count: page_size)
+ deserialized_events(read.stream(stream_name).limit(count).from(start).each)
  end
 
- def read_events_backward(stream_name, start: :head, count: @page_size)
- raise IncorrectStreamData if stream_name.nil? || stream_name.empty?
- page = Page.new(@repository, start, count)
- @repository.read_events_backward(stream_name, page.start, page.count)
+ def read_events_backward(stream_name, start: :head, count: page_size)
+ deserialized_events(read.stream(stream_name).limit(count).from(start).backward.each)
  end
 
  def read_stream_events_forward(stream_name)
- raise IncorrectStreamData if stream_name.nil? || stream_name.empty?
- @repository.read_stream_events_forward(stream_name)
+ deserialized_events(read.stream(stream_name).each)
  end
 
  def read_stream_events_backward(stream_name)
- raise IncorrectStreamData if stream_name.nil? || stream_name.empty?
- @repository.read_stream_events_backward(stream_name)
+ deserialized_events(read.stream(stream_name).backward.each)
  end
 
- def read_all_streams_forward(start: :head, count: @page_size)
- page = Page.new(@repository, start, count)
- @repository.read_all_streams_forward(page.start, page.count)
+ def read_all_streams_forward(start: :head, count: page_size)
+ deserialized_events(read.limit(count).from(start).each)
  end
 
- def read_all_streams_backward(start: :head, count: @page_size)
- page = Page.new(@repository, start, count)
- @repository.read_all_streams_backward(page.start, page.count)
+ def read_all_streams_backward(start: :head, count: page_size)
+ deserialized_events(read.limit(count).from(start).backward.each)
  end
 
  def read_event(event_id)
- @repository.read_event(event_id)
+ deserialize_event(repository.read_event(event_id))
  end
 
- def get_all_streams
- @repository.get_all_streams
- end
-
-
  DEPRECATED_WITHIN = "subscribe(subscriber, event_types, &task) has been deprecated. Use within(&task).subscribe(subscriber, to: event_types).call instead"
  DEPRECATED_TO = "subscribe(subscriber, event_types) has been deprecated. Use subscribe(subscriber, to: event_types) instead"
  # OLD:
@@ -97,7 +85,7 @@ module RubyEventStore
  raise SubscriberNotExist, "subscriber must be first argument or block" unless subscriber || proc
  raise ArgumentError, "list of event types must be second argument or named argument to: , it cannot be both" if event_types
  subscriber ||= proc
- @event_broker.add_subscriber(subscriber, to)
+ event_broker.add_subscriber(subscriber, to)
  else
  if proc
  warn(DEPRECATED_WITHIN)
@@ -124,10 +112,10 @@ module RubyEventStore
  within(&proc).subscribe_to_all_events(subscriber).call
  -> {}
  else
- @event_broker.add_global_subscriber(subscriber)
+ event_broker.add_global_subscriber(subscriber)
  end
  else
- @event_broker.add_global_subscriber(proc)
+ event_broker.add_global_subscriber(proc)
  end
  end
 
@@ -160,7 +148,7 @@ module RubyEventStore
  end
 
  private
-
+
  def add_thread_subscribers
  @subscribers.map do |handler, types|
  @event_broker.add_thread_subscriber(handler, types)
@@ -180,39 +168,43 @@ module RubyEventStore
 
  def within(&block)
  raise ArgumentError if block.nil?
- Within.new(block, @event_broker)
+ Within.new(block, event_broker)
  end
 
  private
 
- def normalize_to_array(events)
- return *events
+ def serialized_events(events)
+ events.map do |ev|
+ mapper.event_to_serialized_record(ev)
+ end
  end
 
- def enrich_event_metadata(event)
- metadata = {}
- metadata[:timestamp] ||= @clock.()
- metadata.merge!(@metadata_proc.call || {}) if @metadata_proc
- metadata.each do |key, value|
- @repository.add_metadata(event, key, value)
+ def deserialized_events(serialized_events)
+ serialized_events.map do |sev|
+ deserialize_event(sev)
  end
  end
 
- class Page
- def initialize(repository, start, count)
- if start.instance_of?(Symbol)
- raise InvalidPageStart unless [:head].include?(start)
- else
- start = start.to_s
- raise InvalidPageStart if start.empty?
- raise EventNotFound.new(start) unless repository.has_event?(start)
- end
- raise InvalidPageSize unless count > 0
- @start = start
- @count = count
+ def deserialize_event(sev)
+ mapper.serialized_record_to_event(sev)
+ end
+
+ def read
+ Specification.new(repository)
+ end
+
+ def normalize_to_array(events)
+ return *events
+ end
+
+ def enrich_event_metadata(event)
+ event.metadata[:timestamp] ||= clock.()
+ if metadata_proc
+ md = metadata_proc.call || {}
+ md.each{|k,v| event.metadata[k]=(v) }
  end
- attr_reader :start, :count
  end
 
+ attr_reader :repository, :mapper, :event_broker, :clock, :metadata_proc, :page_size
  end
  end
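
The Client changes above swap direct repository calls for a mapper plus a Specification-based read path, while keeping the public reader methods intact. Below is a minimal usage sketch, assuming the in-memory repository shipped with this gem and a hypothetical OrderPlaced event class; the mapper keyword is shown only as a comment because Mappers::Default is already the default.

    require 'ruby_event_store'

    # Hypothetical event class, defined only for this sketch.
    OrderPlaced = Class.new(RubyEventStore::Event)

    client = RubyEventStore::Client.new(
      repository: RubyEventStore::InMemoryRepository.new
      # mapper: RubyEventStore::Mappers::Default.new is the default as of 0.28.0
    )

    client.publish_events(
      [OrderPlaced.new(data: { order_id: 42 })],
      stream_name: "Order$42"
    )

    # Readers keep their previous signatures; serialization and
    # deserialization now happen through the mapper behind the scenes.
    client.read_events_forward("Order$42", count: 10).map(&:data)
    # => [{ order_id: 42 }]
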
data/lib/ruby_event_store/constants.rb CHANGED
@@ -1,4 +1,4 @@
  module RubyEventStore
- GLOBAL_STREAM = 'all'.freeze
+ GLOBAL_STREAM = Object.new
  PAGE_SIZE = 100.freeze
  end
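
GLOBAL_STREAM switching from the string 'all' to a bare Object.new turns the global stream into an internal sentinel rather than a name a user could type. A short illustration of the sentinel property this relies on (plain Ruby, not library API):

    GLOBAL_STREAM = Object.new

    GLOBAL_STREAM == "all"          # => false: no user-supplied stream name can ever match it
    GLOBAL_STREAM == GLOBAL_STREAM  # => true:  only the sentinel object itself compares equal
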
data/lib/ruby_event_store/errors.rb CHANGED
@@ -7,6 +7,7 @@ module RubyEventStore
  InvalidPageSize = Class.new(ArgumentError)
  EventDuplicatedInStream = Class.new(StandardError)
  NotSupported = Class.new(StandardError)
+ ReservedInternalName = Class.new(StandardError)
 
  class EventNotFound < StandardError
  attr_reader :event_id
data/lib/ruby_event_store/event.rb CHANGED
@@ -4,16 +4,21 @@ module RubyEventStore
  class Event
  def initialize(event_id: SecureRandom.uuid, metadata: nil, data: nil)
  @event_id = event_id.to_s
- @metadata = metadata.to_h
+ @metadata = Metadata.new(metadata.to_h)
  @data = data.to_h
  end
  attr_reader :event_id, :metadata, :data
 
+ def type
+ self.class.name
+ end
+
  def to_h
  {
  event_id: event_id,
- metadata: metadata,
- data: data
+ metadata: metadata.to_h,
+ data: data,
+ type: type,
  }
  end
 
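
Event now wraps metadata in a Metadata object and gains a #type method (the event's class name), which also appears in #to_h. A quick sketch with the same hypothetical OrderPlaced class:

    OrderPlaced = Class.new(RubyEventStore::Event)

    event = OrderPlaced.new(data: { order_id: 42 })
    event.type      # => "OrderPlaced"
    event.metadata  # => a Metadata instance (empty here) instead of a bare Hash
    event.to_h      # => { event_id: "<uuid>", metadata: {}, data: { order_id: 42 }, type: "OrderPlaced" }
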
data/lib/ruby_event_store/expected_version.rb ADDED
@@ -0,0 +1,73 @@
+ module RubyEventStore
+ class ExpectedVersion
+ POSITION_DEFAULT = -1.freeze
+ NOT_RESOLVED = Object.new.freeze
+
+ def self.any
+ new(:any)
+ end
+
+ def self.none
+ new(:none)
+ end
+
+ def self.auto
+ new(:auto)
+ end
+
+ attr_reader :version
+
+ def initialize(version)
+ @version = version
+ invalid_version! unless [Integer, :any, :none, :auto].any? {|i| i === version}
+ end
+
+ def any?
+ version.equal?(:any)
+ end
+
+ def auto?
+ version.equal?(:auto)
+ end
+
+ def none?
+ version.equal?(:none)
+ end
+
+ def resolve_for(stream, resolver = Proc.new {})
+ invalid_version! if stream.global? && !any?
+
+ case version
+ when Integer
+ version
+ when :none
+ POSITION_DEFAULT
+ when :auto
+ resolver[stream] || POSITION_DEFAULT
+ end
+ end
+
+ BIG_VALUE = 0b110111100100000010010010110011101011000101010101001100100110011
+ private_constant :BIG_VALUE
+
+ def hash
+ [
+ self.class,
+ version
+ ].hash ^ BIG_VALUE
+ end
+
+ def ==(other_expected_version)
+ other_expected_version.instance_of?(self.class) &&
+ other_expected_version.version.equal?(version)
+ end
+
+ alias_method :eql?, :==
+
+ private
+
+ def invalid_version!
+ raise InvalidExpectedVersion
+ end
+ end
+ end
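
The new ExpectedVersion value object captures the :any / :none / :auto / Integer contract that Client now builds from its expected_version keyword. A minimal sketch of how resolve_for behaves, assuming a non-global stream object that responds to #global? (the Stream value object referenced throughout this diff):

    stream = RubyEventStore::Stream.new("Order$42")

    RubyEventStore::ExpectedVersion.none.resolve_for(stream)               # => -1 (POSITION_DEFAULT)
    RubyEventStore::ExpectedVersion.new(5).resolve_for(stream)             # => 5
    RubyEventStore::ExpectedVersion.auto.resolve_for(stream, ->(s) { 2 })  # => 2, the resolver supplies the last known position
    RubyEventStore::ExpectedVersion.any.resolve_for(stream)                # => nil, resolved later by the repository
    RubyEventStore::ExpectedVersion.new(:bogus)                            # raises InvalidExpectedVersion
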
data/lib/ruby_event_store/in_memory_repository.rb CHANGED
@@ -4,95 +4,66 @@ require 'thread'
  module RubyEventStore
  class InMemoryRepository
 
- def initialize(mapper: Mappers::Default.new)
- @all = Array.new
+ def initialize
  @streams = Hash.new
  @mutex = Mutex.new
- @mapper = mapper
+ @global = Array.new
  end
 
- def append_to_stream(events, stream_name, expected_version)
- add_to_stream(events, expected_version, stream_name, true)
+ def append_to_stream(events, stream, expected_version)
+ add_to_stream(events, expected_version, stream, true)
  end
 
- def link_to_stream(event_ids, stream_name, expected_version)
- events = normalize_to_array(event_ids).map{|eid| read_event(eid) }
- add_to_stream(events, expected_version, stream_name, nil)
+ def link_to_stream(event_ids, stream, expected_version)
+ events = normalize_to_array(event_ids).map {|eid| read_event(eid)}
+ add_to_stream(events, expected_version, stream, nil)
  end
 
- def delete_stream(stream_name)
- @streams.delete(stream_name)
+ def delete_stream(stream)
+ streams.delete(stream.name)
  end
 
  def has_event?(event_id)
- @all.any?{ |item| item.event_id.eql?(event_id) }
+ global.any?{ |item| item.event_id.eql?(event_id) }
  end
 
- def last_stream_event(stream_name)
- read_stream_events_forward(stream_name).last
- end
-
- def read_events_forward(stream_name, start_event_id, count)
- source = read_stream_events_forward(stream_name)
- read_batch(source, start_event_id, count)
- end
-
- def read_events_backward(stream_name, start_event_id, count)
- source = read_stream_events_backward(stream_name)
- read_batch(source, start_event_id, count)
- end
-
- def read_stream_events_forward(stream_name)
- @streams[stream_name] || Array.new
- end
-
- def read_stream_events_backward(stream_name)
- read_stream_events_forward(stream_name).reverse
- end
-
- def read_all_streams_forward(start_event_id, count)
- read_batch(@all, start_event_id, count)
- end
-
- def read_all_streams_backward(start_event_id, count)
- read_batch(@all.reverse, start_event_id, count)
+ def last_stream_event(stream)
+ stream_of(stream.name).last
  end
 
  def read_event(event_id)
- @all.find { |e| event_id.eql?(e.event_id) } or raise EventNotFound.new(event_id)
+ global.find {|e| event_id.eql?(e.event_id)} or raise EventNotFound.new(event_id)
  end
 
- def get_all_streams
- [Stream.new("all")] + @streams.keys.map { |name| Stream.new(name) }
- end
-
- def add_metadata(event, key, value)
- @mapper.add_metadata(event, key, value)
+ def read(spec)
+ events = spec.global_stream? ? global : stream_of(spec.stream_name)
+ events = events.reverse if spec.backward?
+ events = read_batch(events, spec.start, spec.count) if spec.limit?
+ events.each
  end
 
  private
 
+ def stream_of(name)
+ streams.fetch(name, Array.new)
+ end
+
  def normalize_to_array(events)
  return *events
  end
 
- def add_to_stream(events, expected_version, stream_name, include_global)
- raise InvalidExpectedVersion if !expected_version.equal?(:any) && stream_name.eql?(GLOBAL_STREAM)
+ def add_to_stream(events, expected_version, stream, include_global)
  events = normalize_to_array(events)
- expected_version = case expected_version
- when :none
- -1
- when :auto
- read_stream_events_forward(stream_name).size - 1
- when Integer, :any
- expected_version
- else
- raise InvalidExpectedVersion
- end
- append_with_synchronize(events, expected_version, stream_name, include_global)
+ append_with_synchronize(events, expected_version, stream, include_global)
  end
 
- def append_with_synchronize(events, expected_version, stream_name, include_global)
+ def last_stream_version(stream)
+ stream_of(stream.name).size - 1
+ end
+
+ def append_with_synchronize(events, expected_version, stream, include_global)
+ resolved_version = expected_version.resolve_for(stream, method(:last_stream_version))
+
  # expected_version :auto assumes external lock is used
  # which makes reading stream before writing safe.
  #
@@ -101,37 +72,38 @@ module RubyEventStore
  # conditions more likely. And we only use mutex.synchronize for writing
  # not for the whole read+write algorithm.
  Thread.pass
- @mutex.synchronize do
- if expected_version == :any
- expected_version = read_stream_events_forward(stream_name).size - 1
- end
- append(events, expected_version, stream_name, include_global)
+ mutex.synchronize do
+ resolved_version = last_stream_version(stream) if expected_version.any?
+ append(events, resolved_version, stream, include_global)
  end
  end
 
- def append(events, expected_version, stream_name, include_global)
- stream = read_stream_events_forward(stream_name)
- raise WrongExpectedEventVersion unless (stream.size - 1).equal?(expected_version)
+ def append(events, resolved_version, stream, include_global)
+ stream_events = stream_of(stream.name)
+ raise WrongExpectedEventVersion unless last_stream_version(stream).equal?(resolved_version)
+
  events.each do |event|
- raise EventDuplicatedInStream if stream.any?{|ev| ev.event_id.eql?(event.event_id) }
+ raise EventDuplicatedInStream if stream_events.any? {|ev| ev.event_id.eql?(event.event_id)}
  if include_global
- raise EventDuplicatedInStream if @all.any?{|ev| ev.event_id.eql?(event.event_id) }
- @all.push(event)
+ raise EventDuplicatedInStream if has_event?(event.event_id)
+ global.push(event)
  end
- stream.push(event)
+ stream_events.push(event)
  end
- @streams[stream_name] = stream
+ streams[stream.name] = stream_events
  self
  end
 
  def read_batch(source, start_event_id, count)
- return source[0..count-1] if start_event_id.equal?(:head)
+ return source[0..count - 1] if start_event_id.equal?(:head)
  start_index = index_of(source, start_event_id)
- source[start_index+1..start_index+count]
+ source[start_index + 1..start_index + count]
  end
 
  def index_of(source, event_id)
- source.index{ |item| item.event_id.eql?(event_id) }
+ source.index {|item| item.event_id.eql?(event_id)}
  end
+
+ attr_reader :streams, :mutex, :global
  end
  end
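
In InMemoryRepository the per-method read API collapses into a single read(spec) entry point, driven by whatever specification object the Client's Specification builds. A sketch with a hand-rolled stand-in (FakeSpec is hypothetical; it only answers the methods read calls in the diff above):

    FakeSpec = Struct.new(:stream_name, :count, :start, :direction) do
      def global_stream?; stream_name.nil?;       end
      def backward?;      direction == :backward; end
      def limit?;         !count.nil?;            end
    end

    repository = RubyEventStore::InMemoryRepository.new
    # (records are added via append_to_stream(serialized_events, stream, expected_version))

    repository.read(FakeSpec.new("Order$42", 5, :head, :backward)).to_a
    # => the last five records of "Order$42", newest first (empty here, nothing appended)
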