aggregate_streams 1.0.0.0.rc4 → 1.0.0.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: ab8ab1a6d681ef30dcfc03fade5e32f3b1c7d54cb8fea4ff3f9e5b22a7c3122f
-  data.tar.gz: d2903306103791d7466069ef61f10652ce08daaea9b6a6371af6002bb9c706b6
+  metadata.gz: cdcf7bac6c03a998987f6cf21d3e88efdf7bcef26f6d00c6e72f19363e1d58a3
+  data.tar.gz: d9cc11c444fd189ddc431009d5c32823144ed7f54b31285477c7ad67ce484791
 SHA512:
-  metadata.gz: e24676f6919e982afaa00badb072dce9b4fe76cb1f86ac90bb3dfc53f55bfd9b45170f779eee70821c6e60752efe14852e51bef89159ab06b42bfc6b511a8c9c
-  data.tar.gz: deb98567a015486f7523419a3f34c1f0d86bb958021155253a037c92fe8b8ba607411ed1d33b7116d5aefdb05f7d98942f2520f0cf696e15f68c0a547743aec3
+  metadata.gz: 75b2874f85d2c020e81065d27923cac9b3b32c20805989ef84e381ed1399bddd427d06bef4a3c87e51feb9a51d040af39e0fcca6f9cef63f780307b8275af318
+  data.tar.gz: 687bbdb915c196352707e7d27fc633083ac86b28bd9c3964c6a3c71ce9c60f419848d9d15eb2af5754db661c0380c0a38a9e86a66f77e1e1c570c741708d5b4a
@@ -1,7 +1,7 @@
 require 'consumer/postgres'
 require 'entity_store'
 require 'entity_snapshot/postgres'
-require 'try'
+require 'retry'

 require 'aggregate_streams/aggregation'
 require 'aggregate_streams/projection'
@@ -31,48 +31,48 @@ module AggregateStreams
     def handle(message_data)
       logger.trace { "Handling message (Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})" }

-      stream_id = Messaging::StreamName.get_id(message_data.stream_name)
+      Retry.(MessageStore::ExpectedVersion::Error, millisecond_intervals: [0, 10, 100, 1000]) do
+        stream_id = Messaging::StreamName.get_id(message_data.stream_name)

-      aggregation, version = store.fetch(stream_id, include: :version)
+        aggregation, version = store.fetch(stream_id, include: :version)

-      if aggregation.processed?(message_data)
-        logger.info(tag: :ignored) { "Message already handled (Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})" }
-        return
-      end
+        if aggregation.processed?(message_data)
+          logger.info(tag: :ignored) { "Message already handled (Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})" }
+          return
+        end

-      raw_input_data = Messaging::Message::Transform::MessageData.read(message_data)
-      input_metadata = Messaging::Message::Metadata.build(raw_input_data[:metadata])
+        raw_input_data = Messaging::Message::Transform::MessageData.read(message_data)
+        input_metadata = Messaging::Message::Metadata.build(raw_input_data[:metadata])

-      output_metadata = raw_metadata(input_metadata)
+        output_metadata = raw_metadata(input_metadata)

-      write_message_data = MessageStore::MessageData::Write.new
+        write_message_data = MessageStore::MessageData::Write.new

-      SetAttributes.(write_message_data, message_data, copy: [:type, :data])
+        SetAttributes.(write_message_data, message_data, copy: [:type, :data])

-      write_message_data.metadata = output_metadata
+        write_message_data.metadata = output_metadata

-      input_category = Messaging::StreamName.get_category(message_data.stream_name)
-      write_message_data = transform(write_message_data, input_category)
+        input_category = Messaging::StreamName.get_category(message_data.stream_name)
+        write_message_data = transform(write_message_data, input_category)

-      if write_message_data
-        assure_message_data(write_message_data)
-      else
-        logger.info(tag: :ignored) { "Message ignored (Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})" }
-        return
-      end
+        if write_message_data
+          assure_message_data(write_message_data)
+        else
+          logger.info(tag: :ignored) { "Message ignored (Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})" }
+          return
+        end

-      Try.(MessageStore::ExpectedVersion::Error) do
         stream_name = stream_name(stream_id)
         write.(write_message_data, stream_name, expected_version: version)
-      end

-      logger.info do
-        message_type = message_data.type
-        unless write_message_data.type == message_type
-          message_type = "#{write_message_data.type} ← #{message_type}"
-        end
+        logger.info do
+          message_type = message_data.type
+          unless write_message_data.type == message_type
+            message_type = "#{write_message_data.type} ← #{message_type}"
+          end

-        "Message copied (Message Type: #{message_type}, Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})"
+          "Message copied (Message Type: #{message_type}, Stream: #{message_data.stream_name}, Global Position: #{message_data.global_position})"
+        end
       end
     end

metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: aggregate_streams
 version: !ruby/object:Gem::Version
-  version: 1.0.0.0.rc4
+  version: 1.0.0.0
 platform: ruby
 authors:
 - Nathan Ladd
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-05-08 00:00:00.000000000 Z
+date: 2021-07-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: evt-consumer-postgres
@@ -53,7 +53,7 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
-  name: evt-try
+  name: evt-retry
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -132,11 +132,11 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '2.7'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">"
+  - - ">="
   - !ruby/object:Gem::Version
-    version: 1.3.1
+    version: '0'
 requirements: []
-rubygems_version: 3.1.6
+rubygems_version: 3.2.15
 signing_key:
 specification_version: 4
 summary: Combine messages from multiple Eventide streams into an aggregate stream
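
The functional change in this release is visible in the handler diff above: the narrow Try.(MessageStore::ExpectedVersion::Error) wrapper around the write is replaced by a Retry actuator (from the new evt-retry dependency) that wraps the whole fetch-and-write sequence, so an expected-version conflict re-runs the block with freshly fetched state after increasing backoff intervals. The sketch below is a minimal plain-Ruby approximation of that retry-with-intervals behavior, not the evt-retry implementation; retry_with_intervals and ConflictError are hypothetical names used only for illustration.

    # Hypothetical stand-ins for illustration; the gem itself uses Retry.()
    # from evt-retry and MessageStore::ExpectedVersion::Error.
    class ConflictError < StandardError; end

    # Run the block; when error_class is raised, sleep for the next interval
    # (in milliseconds) and retry, re-raising once the intervals are exhausted.
    def retry_with_intervals(error_class, intervals_ms)
      attempt = 0
      begin
        yield
      rescue error_class
        raise if attempt >= intervals_ms.length
        sleep(intervals_ms[attempt] / 1000.0)
        attempt += 1
        retry
      end
    end

    # Usage: the whole read-then-write sequence goes inside the block, so each
    # attempt re-reads the current state (and version) before writing again.
    attempts = 0
    retry_with_intervals(ConflictError, [0, 10, 100, 1000]) do
      attempts += 1
      raise ConflictError if attempts < 3 # simulate two expected-version conflicts
      puts "written on attempt #{attempts}"
    end

Compared with the rc4 behavior, the fetch now sits inside the retried block, so each attempt writes against the stream version it just read rather than retrying a write against a stale version.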