nulogy_message_bus_producer 2.0.0 → 3.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/README.md +188 -15
  3. data/Rakefile +6 -2
  4. data/db/migrate/20201005150212_rename_tenant_id_and_public.rb +6 -0
  5. data/lib/nulogy_message_bus_producer.rb +47 -19
  6. data/lib/nulogy_message_bus_producer/{base_public_subscription.rb → base_subscription.rb} +1 -1
  7. data/lib/nulogy_message_bus_producer/config.rb +6 -0
  8. data/lib/nulogy_message_bus_producer/repopulate_replication_slots.rb +25 -0
  9. data/lib/nulogy_message_bus_producer/subscriber_graphql_schema_validator.rb +1 -1
  10. data/lib/nulogy_message_bus_producer/{public_subscription.rb → subscription.rb} +1 -2
  11. data/lib/nulogy_message_bus_producer/{public_subscription_event.rb → subscription_event.rb} +1 -1
  12. data/lib/nulogy_message_bus_producer/subscriptions/no_variables.rb +43 -0
  13. data/lib/nulogy_message_bus_producer/subscriptions/postgres_transport.rb +85 -0
  14. data/lib/nulogy_message_bus_producer/subscriptions/risky_subscription_blocker.rb +70 -0
  15. data/lib/nulogy_message_bus_producer/version.rb +1 -1
  16. data/lib/tasks/engine/message_bus_producer.rake +11 -0
  17. data/spec/dummy/config/database.yml +1 -1
  18. data/spec/dummy/config/puma.rb +2 -2
  19. data/spec/dummy/db/migrate/20201005164116_create_active_storage_tables.active_storage.rb +5 -0
  20. data/spec/dummy/db/schema.rb +3 -5
  21. data/spec/dummy/log/development.log +510 -0
  22. data/spec/dummy/log/test.log +18126 -0
  23. data/spec/integration/lib/nulogy_message_bus_producer/repopulate_replication_slots_spec.rb +141 -0
  24. data/spec/integration/lib/nulogy_message_bus_producer/subscriber_graphql_schema_validator_spec.rb +49 -0
  25. data/spec/integration/lib/nulogy_message_bus_producer/subscription_spec.rb +61 -0
  26. data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/no_variables_spec.rb +46 -0
  27. data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/postgres_transport_spec.rb +135 -0
  28. data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/risky_subscription_blocker_spec.rb +49 -0
  29. data/spec/integration_spec_helper.rb +5 -0
  30. data/spec/spec_helper.rb +0 -40
  31. data/spec/support/kafka.rb +105 -0
  32. data/spec/support/kafka_connect.rb +31 -0
  33. data/spec/support/spec_utils.rb +16 -0
  34. data/spec/support/sql_helpers.rb +45 -0
  35. data/spec/support/subscription_helpers.rb +52 -0
  36. data/spec/support/test_graphql_schema.rb +48 -0
  37. metadata +89 -38
  38. data/lib/nulogy_message_bus_producer/postgres_public_subscriptions.rb +0 -117
  39. data/spec/integration/lib/graphql_api/postgres_public_subscriptions_spec.rb +0 -122
  40. data/spec/integration/lib/graphql_api/validators/subscriber_graphql_schema_validator_spec.rb +0 -76
  41. data/spec/unit/lib/graphql_api/models/public_subscription_spec.rb +0 -66
  42. data/spec/unit_spec_helper.rb +0 -6
data/spec/integration/lib/nulogy_message_bus_producer/repopulate_replication_slots_spec.rb
@@ -0,0 +1,141 @@
+ require "integration_spec_helper"
+ require "net/http"
+
+ RSpec.describe NulogyMessageBusProducer::RepopulateReplicationSlots do
+   let(:company_uuid) { SecureRandom.uuid }
+   let(:number_of_messages) { 100 }
+
+   let(:kafka_bootstrap_servers) { "host.docker.internal:39092" }
+   let(:kafka_connect) { KafkaConnect.new("http://localhost:8083", "ruby_specs") }
+   let(:replication_slot_name) { "rspec_replication_slot" }
+   let(:topic_name) { "repopulate_replication_slot_tests" }
+
+   before do
+     cleanup_everything
+   end
+
+   it "generates events" do
+     Kafka.create_topic(topic_name)
+     consumer = Kafka.setup_kafka_consumer(topic_name)
+
+     without_transaction do
+       subscribe_to(event_type: "testCreated", topic_name: topic_name)
+
+       number_of_messages.times { |n| create_event(uuid(n)) }
+
+       start_debezium
+
+       described_class.repopulate
+     end
+
+     message_payloads = Kafka.wait_for_messages(consumer).map(&:payload)
+     expect(message_payloads.count).to eq(number_of_messages)
+     matcher = Array.new(number_of_messages) { |n| include(uuid(n)) }
+     expect(message_payloads).to match(matcher)
+   end
+
+   def cleanup_everything
+     truncate_db
+     begin
+       Kafka.delete_topic(topic_name)
+     rescue StandardError
+       nil
+     end
+     kafka_connect.delete
+     wait_for_replication_slot_cleanup(replication_slot_name)
+   end
+
+   def create_event(entity_uuid)
+     root_object = {
+       company_uuid: company_uuid,
+       foo: {
+         id: entity_uuid
+       }
+     }
+     trigger_event("testCreated", root_object)
+   end
+
+   def start_debezium # rubocop:disable Metrics/AbcSize
+     config = build_debezium_config
+
+     response = kafka_connect.configure(config)
+
+     expect(response.code).to eq("201"), <<~MESSAGE
+       Creating a Debezium config in Kafka Connect has failed. HTTP Request returned:
+       Code: #{response.code}
+       #{JSON.parse(response.body).pretty_inspect}
+       #{config.pretty_inspect}
+     MESSAGE
+     wait_for_tasks_to_start(kafka_connect)
+     wait_for_replication_slot(replication_slot_name)
+   end
+
+   def wait_for_tasks_to_start(kafka_connect)
+     SpecUtils.wait_for(attempts: 10, interval: 0.5) do
+       tasks = kafka_connect.status[:tasks]
+       next false if tasks.blank?
+
+       expect(tasks.all? { |task| task[:state] == "RUNNING" }).to eq(true), <<~MESSAGE
+         Expected the Kafka Connect tasks to be running. Instead found:
+         #{tasks.pretty_inspect}
+       MESSAGE
+     end
+   end
+
+   def build_debezium_config # rubocop:disable Metrics/MethodLength
+     db_config = Rails.configuration.database_configuration[Rails.env]
+     events_table = NulogyMessageBusProducer::SubscriptionEvent.table_name
+
+     {
+       "bootstrap.servers": kafka_bootstrap_servers,
+
+       "database.dbname": db_config["database"],
+       "database.hostname": db_config["host"] == "localhost" ? "host.docker.internal" : db_config["host"],
+       "database.password": db_config["password"],
+       "database.port": db_config["port"] || 5432,
+       "database.server.name": "test-environment",
+       "database.user": db_config["username"],
+       "slot.name": replication_slot_name,
+
+       "behavior.on.null.values": "delete",
+       "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
+       "database.initial.statements": <<~SQL,
+         DO $$BEGIN
+         IF NOT EXISTS(SELECT FROM pg_publication WHERE pubname = 'debezium_public_events')
+         THEN CREATE PUBLICATION debezium_public_events FOR TABLE #{events_table} WITH (publish = 'insert');; END IF;; END$$;
+       SQL
+       "errors.log.enable": "true",
+       "errors.log.include.messages": "true",
+       "heartbeat.interval.ms": "30000",
+       "plugin.name": "pgoutput",
+       "publication.name": "debezium_public_events",
+       "slot.drop.on.stop": "true",
+       "snapshot.mode": "never",
+       "table.whitelist": "public.#{events_table}",
+
+       "transforms": "requireTopicName,unwrap,extractTopicName,extractPartitionKey,removeFields",
+
+       "transforms.requireTopicName.type": "io.confluent.connect.transforms.Filter$Value",
+       "transforms.requireTopicName.filter.condition": "$.after.topic_name",
+       "transforms.requireTopicName.filter.type": "include",
+       "transforms.requireTopicName.missing.or.null.behavior": "exclude",
+
+       "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
+       "transforms.unwrap.drop.tombstones": "true",
+
+       "transforms.extractTopicName.type": "io.confluent.connect.transforms.ExtractTopic$Value",
+       "transforms.extractTopicName.field": "topic_name",
+
+       "transforms.extractPartitionKey.type": "org.apache.kafka.connect.transforms.ValueToKey",
+       "transforms.extractPartitionKey.fields": "partition_key",
+
+       "transforms.removeFields.type": "org.apache.kafka.connect.transforms.ReplaceField$Value",
+       "transforms.removeFields.blacklist": "topic_name,partition_key",
+
+       "key.converter": "org.apache.kafka.connect.json.JsonConverter",
+       "key.converter.schemas.enable": "false",
+       "value.converter": "org.apache.kafka.connect.json.JsonConverter",
+       "value.converter.schemas.enable": "false"
+     }
+   end
+ end
data/spec/integration/lib/nulogy_message_bus_producer/subscriber_graphql_schema_validator_spec.rb
@@ -0,0 +1,49 @@
+ require "integration_spec_helper"
+
+ RSpec.describe NulogyMessageBusProducer::SubscriberGraphqlSchemaValidator do
+   subject(:validator) { described_class.new }
+
+   describe "#validate" do
+     context "when a valid query is present" do
+       it "returns true" do
+         subscribe_to(query: <<~GRAPHQL)
+           foo {
+             id
+           }
+         GRAPHQL
+
+         expect(validator.validate).to be(true)
+       end
+     end
+
+     context "when an invalid query is present" do
+       let(:subscription_with_error) do
+         subscription = subscribe_to(query: <<~GRAPHQL)
+           foo {
+             id
+           }
+         GRAPHQL
+
+         subscription.query.gsub!(/\bid\b/, "a_field_that_does_not_exist")
+         subscription.save(validate: false)
+         subscription
+       end
+
+       it "returns false" do
+         subscription_with_error
+
+         expect(validator.validate).to be(false)
+       end
+
+       it "has errors" do
+         subscription_with_error
+
+         validator.validate
+
+         expect(validator.errors).to contain_exactly(
+           "Field 'a_field_that_does_not_exist' doesn't exist on type 'testObject' (id: #{subscription_with_error.id})"
+         )
+       end
+     end
+   end
+ end
data/spec/integration/lib/nulogy_message_bus_producer/subscription_spec.rb
@@ -0,0 +1,61 @@
+ require "integration_spec_helper"
+
+ RSpec.describe NulogyMessageBusProducer::Subscription do
+   context "when validating" do
+     it "is invalid with a blank query" do
+       model = build_subscription(query: "")
+
+       model.validate
+
+       expect(model.errors[:query]).to contain_exactly("can't be blank")
+     end
+
+     it "is invalid with blank schema_key" do
+       model = build_subscription(schema_key: "")
+
+       model.validate
+
+       expect(model.errors[:schema_key]).to contain_exactly("can't be blank")
+     end
+
+     it "is invalid with an invalid schema_key" do
+       model = build_subscription(schema_key: "invalid")
+
+       model.validate
+
+       expect(model.errors[:query]).to contain_exactly(/Could not find a schema for schema_key 'invalid'/)
+     end
+
+     it "is invalid with an invalid query" do
+       model = build_subscription(
+         query: subscription_query(query: "foo { a_field_that_does_not_exist }")
+       )
+
+       model.validate
+
+       expect(model).not_to be_valid
+       expect(model.errors[:query]).to contain_exactly(
+         "Field 'a_field_that_does_not_exist' doesn't exist on type 'testObject' (id: <new_record>)"
+       )
+     end
+
+     it "is valid with a valid query" do
+       model = build_subscription(
+         query: subscription_query(query: "foo { id }")
+       )
+
+       model.validate
+
+       expect(model.errors).not_to include(:query)
+     end
+   end
+
+   def build_subscription(overrides = {})
+     attrs = {
+       schema_key: "test",
+       query: subscription_query
+     }.merge(overrides)
+
+     NulogyMessageBusProducer::Subscription.new(attrs)
+   end
+ end
data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/no_variables_spec.rb
@@ -0,0 +1,46 @@
+ require "integration_spec_helper"
+
+ RSpec.describe NulogyMessageBusProducer::Subscriptions::NoVariables do
+   it "blocks subscriptions with variables" do
+     query = <<~SUBSCRIPTION
+       subscription($sub_id: ID!, $sg_id: ID!, $topic: String!) {
+         testCreated(subscriptionId: $sub_id, subscriptionGroupId: $sg_id, topicName: $topic) {
+           foo { id }
+         }
+       }
+     SUBSCRIPTION
+
+     result = execute_graphql(
+       query,
+       NulogyMessageBusProducer::Specs::TestSchema,
+       variables: {
+         sub_id: SecureRandom.uuid,
+         sg_id: SecureRandom.uuid,
+         topic: "test_topic"
+       }
+     )
+
+     expect(result).to include_json(
+       errors: [{ message: include("Subscriptions should not be created with arguments") }]
+     )
+   end
+
+   it "does not block subscriptions without variables" do
+     query = <<~SUBSCRIPTION
+       subscription {
+         testCreated(
+           subscriptionId: "#{SecureRandom.uuid}",
+           subscriptionGroupId: "#{SecureRandom.uuid}",
+           topicName: "test_topic"
+         ) { foo { id } }
+       }
+     SUBSCRIPTION
+
+     result = execute_graphql(
+       query,
+       NulogyMessageBusProducer::Specs::TestSchema
+     )
+
+     expect(result).not_to include(:errors)
+   end
+ end
data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/postgres_transport_spec.rb
@@ -0,0 +1,135 @@
+ require "integration_spec_helper"
+
+ RSpec.describe NulogyMessageBusProducer::Subscriptions::PostgresTransport do
+   context "when subscription is triggered" do
+     let(:company_uuid) { SecureRandom.uuid }
+
+     it "generates an event for a subscription" do
+       subscription = subscribe_to(
+         event_type: "testCreated",
+         topic_name: "some_topic"
+       )
+       root_object = {
+         foo: { id: "some id" },
+         company_uuid: company_uuid
+       }
+
+       trigger_event("testCreated", root_object)
+
+       event = NulogyMessageBusProducer::SubscriptionEvent.find_by!(subscription_id: subscription.id)
+       expect(event).to have_attributes(
+         partition_key: "#{subscription.subscription_group_id},#{company_uuid}",
+         topic_name: "some_topic",
+         event_json: include_json(testCreated: { foo: { id: "some id" } }),
+         company_uuid: company_uuid
+       )
+     end
+
+     it "allows configuring context" do
+       NulogyMessageBusProducer.config.context_for_subscription = lambda do |_|
+         { context_data: "some contextual information" }
+       end
+       subscription = subscribe_to(
+         event_type: "testCreated",
+         query: <<~GRAPHQL
+           foo {
+             contextData
+           }
+         GRAPHQL
+       )
+       root_object = {
+         foo: {},
+         company_uuid: company_uuid
+       }
+
+       trigger_event("testCreated", root_object)
+
+       event = NulogyMessageBusProducer::SubscriptionEvent.find_by!(subscription_id: subscription.id)
+       expect(event).to have_attributes(
+         event_json: include_json(testCreated: { foo: { contextData: "some contextual information" } })
+       )
+     end
+
+     it "generates one event per subscription" do
+       subscribe_to(event_type: "testCreated")
+       subscribe_to(event_type: "testCreated")
+
+       expect do
+         root_object = {
+           foo: { id: "some id" },
+           company_uuid: company_uuid
+         }
+         trigger_event("testCreated", root_object)
+       end.to change(NulogyMessageBusProducer::SubscriptionEvent, :count).by(2)
+
+       event_data = NulogyMessageBusProducer::SubscriptionEvent.pluck(:event_json)
+       expect(event_data).to all(
+         include_json(testCreated: { foo: { id: "some id" } })
+       )
+     end
+   end
+
+   context "when the class is not registered" do
+     it "raises" do
+       expect do
+         Class.new(GraphQL::Schema) do
+           use(NulogyMessageBusProducer::Subscriptions::PostgresTransport)
+         end
+       end.to raise_error(KeyError, /The schema registry did not contain an entry/)
+     end
+   end
+
+   context "when a subscription error occurs" do
+     context "when failing with raise" do
+       before do
+         NulogyMessageBusProducer.config.producing_events_fails_with(:raise)
+       end
+
+       it "raises an exception" do
+         expect { trigger_erroneous_subscription }.to raise_error(/A subscription event could not be produced/)
+       end
+     end
+
+     context "when failing with a soft fail" do
+       before do
+         NulogyMessageBusProducer.config.producing_events_fails_with(:soft_fail)
+       end
+
+       it "does not raise an error" do
+         trigger_erroneous_subscription
+       end
+
+       it "calls the provided block" do
+         called = false
+
+         NulogyMessageBusProducer.config.producing_events_fails_with(:soft_fail) do |_|
+           called = true
+         end
+
+         trigger_erroneous_subscription
+
+         expect(called).to be(true)
+       end
+     end
+   end
+
+   def trigger_erroneous_subscription
+     event_type = "testCreated"
+
+     subscription = subscribe_to(event_type: event_type)
+     simulate_invalid_query(subscription)
+
+     trigger_event(event_type, uuid: SecureRandom.uuid, company_uuid: SecureRandom.uuid)
+   end
+
+   def simulate_invalid_query(subscription)
+     subscription.query = <<~GRAPHQL
+       query ($id: UUID!) {
+         foo (id: $id) {
+           a_field_that_does_not_exist
+         }
+       }
+     GRAPHQL
+     subscription.save(validate: false)
+   end
+ end
data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/risky_subscription_blocker_spec.rb
@@ -0,0 +1,49 @@
+ require "integration_spec_helper"
+
+ RSpec.describe NulogyMessageBusProducer::Subscriptions::RiskySubscriptionBlocker do
+   it "blocks subscriptions with arguments" do
+     query = <<~GRAPHQL
+       foo {
+         fieldWithArguments(first: "value")
+       }
+     GRAPHQL
+
+     result = attempt_subscription(query)
+
+     expect(result).to include_json(
+       errors: [{
+         message: "Arguments may not be used:\nfieldWithArguments"
+       }]
+     )
+   end
+
+   it "blocks subscriptions which would expand lists" do
+     query = <<~GRAPHQL
+       fooList {
+         id
+       }
+     GRAPHQL
+
+     result = attempt_subscription(query)
+
+     expect(result).to include_json(
+       errors: [{
+         message: "Lists may not be queried:\nfooList"
+       }]
+     )
+   end
+
+   def attempt_subscription(query)
+     execute_graphql(<<~GRAPHQL, NulogyMessageBusProducer::Specs::TestSchema)
+       subscription {
+         testCreated (
+           subscriptionId: "#{SecureRandom.uuid}",
+           subscriptionGroupId: "#{SecureRandom.uuid}",
+           topicName: "some_topic"
+         ) {
+           #{query}
+         }
+       }
+     GRAPHQL
+   end
+ end