nulogy_message_bus_consumer 1.0.0 → 2.0.1

@@ -0,0 +1,54 @@
+ RSpec.describe "End to end" do
+   let(:logger) { NulogyMessageBusConsumer::NullLogger.new }
+   let(:topic) { TestTopic.new }
+   let(:tap) { MiddlewareTap.new }
+   let(:message_handler_spy) { double }
+   subject(:pipeline) do
+     pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(config: topic.config)
+     pipeline.insert(tap, after: NulogyMessageBusConsumer::Steps::ConnectToMessageBus)
+     pipeline.append(message_handler_spy)
+     pipeline
+   end
+
+   after { topic.close }
+
+   it "receives messages using the full pipeline" do
+     create_topic(topic.topic_name)
+
+     called = false
+     expect(message_handler_spy).to receive(:call) do |message:, **_kargs|
+       expect(message).to have_attributes(event_data: {data: "Some Payload"})
+       called = true
+       :success
+     end
+
+     pipeline_thread = start(pipeline, tap)
+
+     topic.produce_one_message(
+       key: "Some Key",
+       payload: message_payload(data: "Some Payload")
+     )
+
+     NulogyMessageBusConsumer::KafkaUtils.wait_for { called }
+     Thread.kill(pipeline_thread)
+   end
+
+   def start(pipeline, tap)
+     thr = Thread.new { pipeline.invoke }
+     wait_for_partition_assignment(tap)
+     thr
+   end
+
+   def wait_for_partition_assignment(tap)
+     NulogyMessageBusConsumer::KafkaUtils.wait_for { tap.arguments.fetch(:kafka_consumer, nil) }
+     NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(tap.arguments[:kafka_consumer])
+   end
+
+   def message_payload(**payload)
+     JSON.dump(
+       id: SecureRandom.uuid,
+       created_at: 1_000,
+       event_json: JSON.dump(payload)
+     )
+   end
+ end
@@ -31,7 +31,7 @@ RSpec.describe NulogyMessageBusConsumer::KafkaUtils do
    it "does not keep the connection open when there are no messages" do
      topic.produce_one_message(payload: "The Only Message")

-     enum = utils.every_message_until_none_are_left(topic.consumer)
+     enum = utils.every_message_until_none_are_left(topic.consumer, Kafka::CONSUMER_POLL_TIMEOUT)

      expect(enum).to match([
        have_attributes(payload: "The Only Message")
@@ -3,22 +3,22 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do
    let(:consumer) { test_topic.consumer }
    let(:logger) { NulogyMessageBusConsumer::NullLogger.new }
    let(:handler) { spy }
+   let(:subscribe_to_partition) { NulogyMessageBusConsumer::Steps::SubscribeToPartition.new }
    let(:pipeline) do
      NulogyMessageBusConsumer::Pipeline.new([
        NulogyMessageBusConsumer::Steps::ConnectToMessageBus.new(test_topic.config, logger, kafka_consumer: consumer),
-       NulogyMessageBusConsumer::Steps::SeekBeginningOfTopic.new,
-       NulogyMessageBusConsumer::Steps::StreamMessagesUntilNoneAreLeft.new(logger),
-       NulogyMessageBusConsumer::Steps::CommitOnSuccess.new,
+       NulogyMessageBusConsumer::Steps::StreamMessagesUntilNoneAreLeft.new(logger, Kafka::CONSUMER_POLL_TIMEOUT),
+       NulogyMessageBusConsumer::Steps::CommitOnSuccess.new(logger),
        handler
      ])
    end
    let(:deduped_pipeline) do
      NulogyMessageBusConsumer::Pipeline.new([
        NulogyMessageBusConsumer::Steps::ConnectToMessageBus.new(test_topic.config, logger, kafka_consumer: consumer),
-       NulogyMessageBusConsumer::Steps::SeekBeginningOfTopic.new,
-       NulogyMessageBusConsumer::Steps::StreamMessagesUntilNoneAreLeft.new(logger),
+       subscribe_to_partition,
+       NulogyMessageBusConsumer::Steps::StreamMessagesUntilNoneAreLeft.new(logger, Kafka::CONSUMER_POLL_TIMEOUT),
+       NulogyMessageBusConsumer::Steps::CommitOnSuccess.new(logger),
        NulogyMessageBusConsumer::Steps::DeduplicateMessages.new(logger),
-       NulogyMessageBusConsumer::Steps::CommitOnSuccess.new,
        handler
      ])
    end
@@ -27,6 +27,8 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do

    context "when successful" do
      it "commits and processes the next message" do
+       create_topic(test_topic.topic_name)
+
        expect(handler).to receive(:call).with(a_message_with(key: "test 1")).and_return(:success)
        expect(handler).to receive(:call).with(a_message_with(key: "test 2")).and_return(:success)
        expect(consumer).to receive(:commit).twice
@@ -40,6 +42,8 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do

    context "when failing by :failure" do
      it "reprocesses the message" do
+       create_topic(test_topic.topic_name)
+
        expect(handler).to receive(:call).with(a_message_with(key: "test 1")).and_return(:failure)
        expect(handler).to receive(:call).with(a_message_with(key: "test 1")).and_return(:success)
        expect(handler).to receive(:call).with(a_message_with(key: "test 2")).and_return(:success)
@@ -55,36 +59,20 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do
    # This test is more illustrative of how we expect it to work.
    # Specifically, testing the "auto.offset.store" setting for the consumer.
    context "when a partition has a failing message" do
-     let(:handler) { ->(message:, **_) { message.event_data[:type] == "good" ? :success : :failure } }
+     let(:handler) { ->(message:, **_) { message.event_data[:type].start_with?("good") ? :success : :failure } }

-     it "processes messages from other partitions without committing offsets for partitions with failing messages" do
-       Kafka.create_topic(test_topic.topic_name)
+     it "it blocks subsequent messages from being processed" do
+       create_topic(test_topic.topic_name)

        # Produce message to a single partition. This partition will be blocked by the second message.
-       test_topic.produce_one_message(partition: 1, event_json: {type: "good"}) # success
+       success_id = test_topic.produce_one_message(partition: 1, event_json: {type: "good 1"}) # success
        test_topic.produce_one_message(partition: 1, event_json: {type: "bad"}) # failure
-       blocked_id = test_topic.produce_one_message(partition: 1, event_json: {type: "good"}) # blocked
-
-       consume_from_partition(1) do
-         deduped_pipeline.invoke
-       end
+       blocked_id = test_topic.produce_one_message(partition: 1, event_json: {type: "good 2"}) # blocked

-       # produce to another partition
-       success_id = test_topic.produce_one_message(partition: 2, event_json: {type: "good"}) # success
-
-       consume_from_partition(2) do
-         deduped_pipeline.invoke
-       end
-
-       # try consuming from all partitions again -- it will fail on the blocked one again
        deduped_pipeline.invoke

-       # Wait for assignment after a reconnect
-       NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(consumer)
-
        lag = consumer.lag(consumer.committed)
-       expect(lag.dig(test_topic.topic_name, 1)).to be >= 1
-       expect(lag.dig(test_topic.topic_name, 2)).to be(0)
+       expect(lag.dig(test_topic.topic_name, 1)).to be 2
        expect(NulogyMessageBusConsumer::ProcessedMessage.exists?(success_id)).to be(true)
        expect(NulogyMessageBusConsumer::ProcessedMessage.exists?(blocked_id)).to be(false)
      end
@@ -93,6 +81,8 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do

    context "when failing by exception" do
      it "reprocesses the message" do
+       create_topic(test_topic.topic_name)
+
        expect(handler).to receive(:call).with(a_message_with(key: "test 1")).and_raise("intentional error")
        expect(handler).to receive(:call).with(a_message_with(key: "test 1")).and_return(:success)
        expect(handler).to receive(:call).with(a_message_with(key: "test 2")).and_return(:success)
@@ -114,18 +104,36 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do
        message: have_attributes(matcher)
      )
    end
+ end

-   def consume_from_partition(partition_number)
-     original_assignment = consumer.assignment
-     topic_partitions = original_assignment
-       .to_h
-       .transform_values { |values| values.select { |t| t.partition == partition_number } }
-     new_assignment = Rdkafka::Consumer::TopicPartitionList.new(topic_partitions)
-
-     consumer.assign(new_assignment)
+ module NulogyMessageBusConsumer
+   module Steps
+     class SubscribeToPartition
+       attr_writer :partition_number
+
+       def call(kafka_consumer:, **_)
+         original_assignment = kafka_consumer.assignment
+         if @partition_number
+           topic_partitions = original_assignment
+             .to_h
+             .transform_values { |values| values.select { |t| t.partition == @partition_number } }
+           new_assignment = Rdkafka::Consumer::TopicPartitionList.new(topic_partitions)
+
+           kafka_consumer.assign(new_assignment)
+           NulogyMessageBusConsumer::KafkaUtils.wait_for do
+             kafka_consumer.assignment == new_assignment
+           end
+         end

-     yield
-   ensure
-     consumer.assign(original_assignment)
+         yield
+       ensure
+         if @partition_number
+           kafka_consumer.assign(original_assignment)
+           NulogyMessageBusConsumer::KafkaUtils.wait_for do
+             kafka_consumer.assignment == original_assignment
+           end
+         end
+       end
+     end
    end
  end
@@ -1,46 +1,32 @@
  RSpec.describe NulogyMessageBusConsumer::Steps::ConnectToMessageBus do
-   subject(:pipeline) do
-     pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(config: config)
-     pipeline.insert(tap, after: NulogyMessageBusConsumer::Steps::ConnectToMessageBus)
-     pipeline.append(message_handler_spy)
-     pipeline
-   end
-
+   let(:logger) { NulogyMessageBusConsumer::NullLogger.new }
    let(:topic) { TestTopic.new }
-   let(:config) { topic.config }
    let(:tap) { MiddlewareTap.new }
    let(:message_handler_spy) { double }
+   let(:pipeline) do
+     NulogyMessageBusConsumer::Pipeline.new([
+       NulogyMessageBusConsumer::Steps::ConnectToMessageBus.new(topic.config, logger, kafka_consumer: topic.consumer),
+       NulogyMessageBusConsumer::Steps::StreamMessagesUntilNoneAreLeft.new(logger, Kafka::CONSUMER_POLL_TIMEOUT),
+       message_handler_spy
+     ])
+   end

    after { topic.close }

-   it "receives messages", skip: flakey_in_ci do
-     called = false
+   it "receives messages" do
+     create_topic(topic.topic_name)
+
      expect(message_handler_spy).to receive(:call) do |message:, **_kargs|
        expect(message).to have_attributes(event_data: {data: "Some Payload"})
-       called = true
        :success
      end

-     pipeline_thread = start(pipeline, tap)
-
      topic.produce_one_message(
        key: "Some Key",
        payload: message_payload(data: "Some Payload")
      )

-     NulogyMessageBusConsumer::KafkaUtils.wait_for { called }
-     Thread.kill(pipeline_thread)
-   end
-
-   def start(pipeline, tap)
-     thr = Thread.new { pipeline.invoke }
-     wait_for_partition_assignment(tap)
-     thr
-   end
-
-   def wait_for_partition_assignment(tap)
-     NulogyMessageBusConsumer::KafkaUtils.wait_for { tap.arguments[:kafka_consumer] }
-     NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(tap.arguments[:kafka_consumer])
+     pipeline.invoke
    end

    def message_payload(**payload)
@@ -5,6 +5,8 @@ RSpec.describe TestTopic do

    context "smoke test for specs" do
      it "publishes and receives messages" do
+       create_topic(topic.topic_name)
+
        topic.produce_one_message(
          key: "Some Key",
          payload: "Some Payload"
@@ -18,6 +20,8 @@ RSpec.describe TestTopic do
      end

      it "receives returns nil when no messages are received" do
+       create_topic(topic.topic_name)
+
        consumer = topic.consumer
        message = consumer.poll(1)
        expect(message).to be(nil)
@@ -3,16 +3,7 @@ require "open3"
  module Kafka
    module_function

-   def kafka_config
-     config = {
-       "auto.offset.reset": "beginning",
-       "bootstrap.servers": test_bootstrap_servers,
-       "enable.auto.commit": false,
-       "group.id": random_consumer_group
-     }
-
-     Rdkafka::Config.new(config)
-   end
+   CONSUMER_POLL_TIMEOUT = 1000

    def random_topic_name
      "test-topic-#{SecureRandom.uuid}"
@@ -27,16 +18,33 @@ module Kafka
    end

    def setup_kafka_producer
-     kafka_config.producer
+     kafka_producer_config.producer
+   end
+
+   def kafka_producer_config
+     config = {"bootstrap.servers": test_bootstrap_servers}
+
+     Rdkafka::Config.new(config)
    end

    def setup_kafka_consumer(topic_name)
-     consumer = kafka_config.consumer
+     consumer = kafka_consumer_config.consumer
      consumer.subscribe(topic_name)
      NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(consumer)
      consumer
    end

+   def kafka_consumer_config
+     config = {
+       "auto.offset.reset": "beginning",
+       "bootstrap.servers": test_bootstrap_servers,
+       "enable.auto.commit": false,
+       "group.id": random_consumer_group
+     }
+
+     Rdkafka::Config.new(config)
+   end
+
    def create_topic(topic_name)
      run("kaf topic create #{topic_name} --brokers #{test_bootstrap_servers} --replicas 1 --partitions 3")
    end
@@ -34,7 +34,7 @@ class TestTopic
    end

    def consume_one_message
-     consumer.poll(250)
+     consumer.poll(Kafka::CONSUMER_POLL_TIMEOUT)
    end

    def config
@@ -1,5 +1,6 @@
  RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do
-   subject(:step) { NulogyMessageBusConsumer::Steps::CommitOnSuccess.new }
+   let(:logger) { NulogyMessageBusConsumer::NullLogger.new }
+   subject(:step) { NulogyMessageBusConsumer::Steps::CommitOnSuccess.new(logger) }

    let(:kafka_consumer) { spy }
    let(:message) { NulogyMessageBusConsumer::Message.new }
@@ -21,6 +22,9 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do

    context "when result is :failure" do
      it "returns the result" do
+       topic_partition_list = {topic_name: %i[partition_1 partition_2]}
+       allow(kafka_consumer).to receive(:assignment).and_return(topic_partition_list)
+
        result = step.call(kafka_consumer: kafka_consumer, message: message) { :failure }

        expect(result).to be(:failure)
@@ -33,6 +37,8 @@ RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do

        expect(kafka_consumer).to receive(:unsubscribe)
        expect(kafka_consumer).to receive(:subscribe).with(:topic_name)
+       expect(kafka_consumer).to receive(:assignment).and_return({})
+       expect(kafka_consumer).to receive(:assignment).and_return(topic_partition_list)

        step.call(kafka_consumer: kafka_consumer, message: message) { :failure }
      end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: nulogy_message_bus_consumer
  version: !ruby/object:Gem::Version
-   version: 1.0.0
+   version: 2.0.1
  platform: ruby
  authors:
  - Nulogy
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-04-26 00:00:00.000000000 Z
+ date: 2023-11-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: activerecord
@@ -136,20 +136,6 @@ dependencies:
      - - '='
        - !ruby/object:Gem::Version
          version: 6.0.3.5
- - !ruby/object:Gem::Dependency
-   name: rake-release
-   requirement: !ruby/object:Gem::Requirement
-     requirements:
-     - - '='
-       - !ruby/object:Gem::Version
-         version: 1.3.0
-   type: :development
-   prerelease: false
-   version_requirements: !ruby/object:Gem::Requirement
-     requirements:
-     - - '='
-       - !ruby/object:Gem::Version
-         version: 1.3.0
  - !ruby/object:Gem::Dependency
    name: rspec
    requirement: !ruby/object:Gem::Requirement
@@ -236,13 +222,12 @@ dependencies:
          version: 1.38.1
  description:
  email:
- - tass@nulogy.com
+ - platform-engineering@nulogy.com
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
  - Rakefile
- - config/credentials/message-bus-us-east-1.key
  - config/credentials/message-bus-us-east-1.yml.enc
  - config/routes.rb
  - db/migrate/20200509095105_create_message_bus_processed_messages.rb
@@ -292,7 +277,6 @@ files:
  - spec/dummy/config/application.rb
  - spec/dummy/config/boot.rb
  - spec/dummy/config/cable.yml
- - spec/dummy/config/credentials/message-bus-us-east-1.key
  - spec/dummy/config/credentials/message-bus-us-east-1.yml.enc
  - spec/dummy/config/database.yml
  - spec/dummy/config/environment.rb
@@ -315,8 +299,6 @@ files:
  - spec/dummy/config/spring.rb
  - spec/dummy/config/storage.yml
  - spec/dummy/db/schema.rb
- - spec/dummy/log/development.log
- - spec/dummy/log/production.log
  - spec/dummy/log/test.log
  - spec/dummy/public/404.html
  - spec/dummy/public/422.html
@@ -326,6 +308,7 @@ files:
  - spec/dummy/public/favicon.ico
  - spec/dummy/tmp/development_secret.txt
  - spec/integration/nulogy_message_bus_consumer/auditor_spec.rb
+ - spec/integration/nulogy_message_bus_consumer/end_to_end_spec.rb
  - spec/integration/nulogy_message_bus_consumer/kafka_utils_spec.rb
  - spec/integration/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb
  - spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb
@@ -347,7 +330,7 @@ files:
  - spec/unit/nulogy_message_bus_consumer/steps/stream_messages_spec.rb
  - spec/unit/nulogy_message_bus_consumer/tasks/calculator_spec.rb
  - spec/unit/nulogy_message_bus_consumer_spec.rb
- homepage: https://github.com/nulogy/message-bus/tree/master/gems/nulogy_message_bus_consumer
+ homepage: https://github.com/nulogy/message-bus/tree/main/gems/nulogy_message_bus_consumer
  licenses: []
  metadata:
    allowed_push_host: https://rubygems.org/
@@ -366,7 +349,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
      - !ruby/object:Gem::Version
        version: '0'
  requirements: []
- rubygems_version: 3.2.15
+ rubygems_version: 3.2.22
  signing_key:
  specification_version: 4
  summary: Code for accessing the Nulogy Message Bus
@@ -391,7 +374,6 @@ test_files:
  - spec/dummy/config/application.rb
  - spec/dummy/config/boot.rb
  - spec/dummy/config/cable.yml
- - spec/dummy/config/credentials/message-bus-us-east-1.key
  - spec/dummy/config/credentials/message-bus-us-east-1.yml.enc
  - spec/dummy/config/database.yml
  - spec/dummy/config/environment.rb
@@ -415,8 +397,6 @@ test_files:
  - spec/dummy/config/storage.yml
  - spec/dummy/config.ru
  - spec/dummy/db/schema.rb
- - spec/dummy/log/development.log
- - spec/dummy/log/production.log
  - spec/dummy/log/test.log
  - spec/dummy/public/404.html
  - spec/dummy/public/422.html
@@ -426,6 +406,7 @@ test_files:
  - spec/dummy/public/favicon.ico
  - spec/dummy/tmp/development_secret.txt
  - spec/integration/nulogy_message_bus_consumer/auditor_spec.rb
+ - spec/integration/nulogy_message_bus_consumer/end_to_end_spec.rb
  - spec/integration/nulogy_message_bus_consumer/kafka_utils_spec.rb
  - spec/integration/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb
  - spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb
@@ -1 +0,0 @@
- dfa19863b2709390893da4c2fb85579a
@@ -1 +0,0 @@
- dfa19863b2709390893da4c2fb85579a
@@ -1,4 +0,0 @@
- Connecting to the MessageBus
- Using consumer group id: test-jasons-partitions-1-audit
- Listening for kafka messages on topic test-jasons-partitions-1
- {"event":"message_processing_errored","class":"ActiveRecord::NoDatabaseError","message":"FATAL: database \"message_bus_consumer_development\" does not exist\n"}
@@ -1,18 +0,0 @@
- D, [2020-11-16T11:34:16.160590 #96117] DEBUG -- :  (2.3ms) SELECT pg_try_advisory_lock(5775182565276900090)
- D, [2020-11-16T11:34:16.267700 #96117] DEBUG -- :  (5.5ms) SELECT "schema_migrations"."version" FROM "schema_migrations" ORDER BY "schema_migrations"."version" ASC
- D, [2020-11-16T11:34:16.332985 #96117] DEBUG -- : ActiveRecord::InternalMetadata Load (5.3ms) SELECT "ar_internal_metadata".* FROM "ar_internal_metadata" WHERE "ar_internal_metadata"."key" = $1 LIMIT $2 [["key", "environment"], ["LIMIT", 1]]
- D, [2020-11-16T11:34:16.340173 #96117] DEBUG -- :  (1.3ms) BEGIN
- D, [2020-11-16T11:34:16.362450 #96117] DEBUG -- : ActiveRecord::InternalMetadata Update (22.0ms) UPDATE "ar_internal_metadata" SET "value" = $1, "updated_at" = $2 WHERE "ar_internal_metadata"."key" = $3 [["value", "production"], ["updated_at", "2020-11-16 16:34:16.338391"], ["key", "environment"]]
- D, [2020-11-16T11:34:16.367993 #96117] DEBUG -- :  (5.1ms) COMMIT
- D, [2020-11-16T11:34:16.370057 #96117] DEBUG -- :  (1.9ms) SELECT pg_advisory_unlock(5775182565276900090)
- D, [2020-11-16T11:40:32.607173 #98237] DEBUG -- :  (1.4ms) SELECT pg_try_advisory_lock(5775182565276900090)
- D, [2020-11-16T11:40:32.621593 #98237] DEBUG -- :  (2.6ms) SELECT "schema_migrations"."version" FROM "schema_migrations" ORDER BY "schema_migrations"."version" ASC
- I, [2020-11-16T11:40:32.622869 #98237] INFO -- : Migrating to CreateMessageBusProcessedMessages (20200509095105)
- D, [2020-11-16T11:40:32.631756 #98237] DEBUG -- :  (2.0ms) BEGIN
- D, [2020-11-16T11:40:32.639446 #98237] DEBUG -- :  (7.5ms) CREATE TABLE "message_bus_processed_messages" ("id" uuid NOT NULL PRIMARY KEY, "created_at" timestamp NOT NULL)
- D, [2020-11-16T11:40:32.640814 #98237] DEBUG -- :  (1.2ms) ROLLBACK
- D, [2020-11-16T11:40:32.642439 #98237] DEBUG -- :  (1.5ms) SELECT pg_advisory_unlock(5775182565276900090)
- D, [2020-11-16T11:41:09.889396 #98796] DEBUG -- :  (1.5ms) SELECT pg_try_advisory_lock(5775182565276900090)
- D, [2020-11-16T11:41:09.902585 #98796] DEBUG -- :  (1.8ms) SELECT "schema_migrations"."version" FROM "schema_migrations" ORDER BY "schema_migrations"."version" ASC
- D, [2020-11-16T11:41:09.912030 #98796] DEBUG -- : ActiveRecord::InternalMetadata Load (1.6ms) SELECT "ar_internal_metadata".* FROM "ar_internal_metadata" WHERE "ar_internal_metadata"."key" = $1 LIMIT $2 [["key", "environment"], ["LIMIT", 1]]
- D, [2020-11-16T11:41:09.919113 #98796] DEBUG -- :  (1.4ms) SELECT pg_advisory_unlock(5775182565276900090)