nulogy_message_bus_producer 1.0.4 → 3.2.0
- checksums.yaml +4 -4
- data/README.md +183 -15
- data/Rakefile +6 -2
- data/db/migrate/20201005150212_rename_tenant_id_and_public.rb +6 -0
- data/lib/nulogy_message_bus_producer.rb +61 -25
- data/lib/nulogy_message_bus_producer/{base_public_subscription.rb → base_subscription.rb} +1 -1
- data/lib/nulogy_message_bus_producer/config.rb +72 -0
- data/lib/nulogy_message_bus_producer/repopulate_replication_slots.rb +23 -0
- data/lib/nulogy_message_bus_producer/subscriber_graphql_schema_validator.rb +1 -1
- data/lib/nulogy_message_bus_producer/{public_subscription.rb → subscription.rb} +4 -3
- data/lib/nulogy_message_bus_producer/{public_subscription_event.rb → subscription_event.rb} +1 -1
- data/lib/nulogy_message_bus_producer/subscriptions/postgres_transport.rb +85 -0
- data/lib/nulogy_message_bus_producer/subscriptions/risky_subscription_blocker.rb +58 -0
- data/lib/nulogy_message_bus_producer/version.rb +1 -1
- data/lib/tasks/engine/message_bus_producer.rake +11 -0
- data/spec/dummy/config/database.yml +1 -1
- data/spec/dummy/db/migrate/20201005164116_create_active_storage_tables.active_storage.rb +5 -0
- data/spec/dummy/db/schema.rb +3 -5
- data/spec/dummy/log/development.log +2217 -31
- data/spec/dummy/log/test.log +27556 -16
- data/spec/integration/lib/nulogy_message_bus_producer/repopulate_replication_slots_spec.rb +133 -0
- data/spec/integration/lib/nulogy_message_bus_producer/subscriber_graphql_schema_validator_spec.rb +49 -0
- data/spec/integration/lib/nulogy_message_bus_producer/subscription_spec.rb +63 -0
- data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/postgres_transport_spec.rb +137 -0
- data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/risky_subscription_blocker_spec.rb +51 -0
- data/spec/integration_spec_helper.rb +6 -0
- data/spec/spec_helper.rb +0 -25
- data/spec/support/kafka.rb +98 -0
- data/spec/support/kafka_connect.rb +31 -0
- data/spec/support/spec_utils.rb +15 -0
- data/spec/support/sql_helpers.rb +47 -0
- data/spec/support/subscription_helpers.rb +52 -0
- data/spec/support/test_graphql_schema.rb +47 -0
- metadata +88 -39
- data/lib/nulogy_message_bus_producer/postgres_public_subscriptions.rb +0 -102
- data/spec/integration/lib/graphql_api/postgres_public_subscriptions_spec.rb +0 -16
- data/spec/integration/lib/graphql_api/validators/subscriber_graphql_schema_validator_spec.rb +0 -76
- data/spec/unit/lib/graphql_api/models/public_subscription_spec.rb +0 -56
- data/spec/unit_spec_helper.rb +0 -6
@@ -0,0 +1,133 @@
require "integration_spec_helper"
require 'net/http'

RSpec.describe NulogyMessageBusProducer::RepopulateReplicationSlots do
  let(:company_uuid) { SecureRandom.uuid }
  let(:number_of_messages) { 100 }

  let(:kafka_bootstrap_servers) { "host.docker.internal:39092" }
  let(:kafka_connect) { KafkaConnect.new("http://localhost:8083", "ruby_specs") }
  let(:replication_slot_name) { "rspec_replication_slot" }
  let(:topic_name) { "repopulate_replication_slot_tests" }

  before do
    cleanup_everything
  end

  it "generates events" do
    Kafka.create_topic(topic_name)
    consumer = Kafka.setup_kafka_consumer(topic_name)

    without_transaction do
      subscribe_to(event_type: "testCreated", topic_name: topic_name)

      number_of_messages.times { |n| create_event(uuid(n)) }

      configure_debezium

      NulogyMessageBusProducer::RepopulateReplicationSlots.repopulate
    end

    message_payloads = Kafka.wait_for_messages(consumer).map(&:payload)
    matcher = number_of_messages.times.map { |n| include(uuid(n)) }
    expect(message_payloads.count).to eq(number_of_messages)
    expect(message_payloads).to match(matcher)
  end

  def cleanup_everything
    truncate_db
    Kafka.delete_topic(topic_name) rescue nil
    kafka_connect.delete
    wait_for_replication_slot_cleanup(replication_slot_name)
  end

  def create_event(entity_uuid)
    root_object = {
      company_uuid: company_uuid,
      foo: {
        id: entity_uuid
      }
    }
    trigger_event("testCreated", root_object)
  end

  def configure_debezium
    config = build_debezium_config

    response = kafka_connect.configure(config)

    expect(response.code).to eq("201"), <<~MESSAGE
      Creating a Debezium config in Kafka Connect has failed. HTTP Request returned:
      Code: #{response.code}
      #{JSON.parse(response.body).pretty_inspect}
      #{config.pretty_inspect}
    MESSAGE
    wait_for_tasks_to_start(kafka_connect)
    wait_for_replication_slot(replication_slot_name)
  end

  def wait_for_tasks_to_start(kafka_connect)
    SpecUtils.wait_for(attempts: 10, interval: 0.5) do
      tasks = kafka_connect.status[:tasks]
      next false if tasks.blank?

      expect(tasks.all? { |task| task[:state] == "RUNNING" }).to eq(true), <<~MESSAGE
        Expected the Kafka Connect tasks to be running. Instead found:
        #{tasks.pretty_inspect}
      MESSAGE
    end
  end

  def build_debezium_config
    db_config = Rails.configuration.database_configuration[Rails.env]
    events_table = NulogyMessageBusProducer::SubscriptionEvent.table_name

    {
      "bootstrap.servers": kafka_bootstrap_servers,

      "database.dbname": db_config["database"],
      "database.hostname": db_config["host"] == "localhost" ? "host.docker.internal" : db_config["host"],
      "database.password": db_config["password"],
      "database.port": db_config["port"] || 5432,
      "database.server.name": "test-environment",
      "database.user": db_config["username"],
      "slot.name": replication_slot_name,

      "behavior.on.null.values": "delete",
      "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
      "database.initial.statements": "DO $$BEGIN IF not exists(select from pg_publication where pubname = 'debezium_public_events') THEN CREATE PUBLICATION debezium_public_events FOR TABLE #{events_table} WITH (publish = 'insert');; END IF;; END$$;",
      "errors.log.enable": "true",
      "errors.log.include.messages": "true",
      "heartbeat.interval.ms": "30000",
      "plugin.name": "pgoutput",
      "publication.name": "debezium_public_events",
      "slot.drop.on.stop": "true",
      "snapshot.mode": "never",
      "table.whitelist": "public.#{events_table}",

      "transforms": "requireTopicName,unwrap,extractTopicName,extractPartitionKey,removeFields",

      "transforms.requireTopicName.type": "io.confluent.connect.transforms.Filter$Value",
      "transforms.requireTopicName.filter.condition": "$.after.topic_name",
      "transforms.requireTopicName.filter.type": "include",
      "transforms.requireTopicName.missing.or.null.behavior": "exclude",

      "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
      "transforms.unwrap.drop.tombstones": "true",

      "transforms.extractTopicName.type": "io.confluent.connect.transforms.ExtractTopic$Value",
      "transforms.extractTopicName.field": "topic_name",

      "transforms.extractPartitionKey.type": "org.apache.kafka.connect.transforms.ValueToKey",
      "transforms.extractPartitionKey.fields": "partition_key",

      "transforms.removeFields.type": "org.apache.kafka.connect.transforms.ReplaceField$Value",
      "transforms.removeFields.blacklist": "topic_name,partition_key",

      "key.converter": "org.apache.kafka.connect.json.JsonConverter",
      "key.converter.schemas.enable": "false",
      "value.converter": "org.apache.kafka.connect.json.JsonConverter",
      "value.converter.schemas.enable": "false"
    }
  end
end
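
The KafkaConnect helper used above lives in data/spec/support/kafka_connect.rb and is not shown in this hunk. As a rough, editor-supplied sketch (an assumption, not the gem's helper), configuring the Debezium connector comes down to POSTing the connector name and config to the Kafka Connect REST API and expecting HTTP 201:

# Sketch only (not the gem's KafkaConnect helper): register a connector via the
# Kafka Connect REST API. build_debezium_config is the spec method above.
require "json"
require "net/http"

def create_connector(base_url, name, config)
  uri = URI("#{base_url}/connectors")
  body = { name: name, config: config }.to_json
  Net::HTTP.post(uri, body, "Content-Type" => "application/json")
end

response = create_connector("http://localhost:8083", "ruby_specs", build_debezium_config)
raise "Connector was not created: #{response.code}" unless response.code == "201"
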
data/spec/integration/lib/nulogy_message_bus_producer/subscriber_graphql_schema_validator_spec.rb
ADDED
@@ -0,0 +1,49 @@
require "integration_spec_helper"

RSpec.describe NulogyMessageBusProducer::SubscriberGraphqlSchemaValidator do
  subject(:validator) { NulogyMessageBusProducer::SubscriberGraphqlSchemaValidator.new }

  describe "#validate" do
    context "when a valid query is present" do
      it "return true" do
        subscribe_to(query: <<~GRAPHQL)
          foo {
            id
          }
        GRAPHQL

        expect(validator.validate).to be(true)
      end
    end

    context "when an invalid query is present" do
      let(:subscription_with_error) do
        subscription = subscribe_to(query: <<~GRAPHQL)
          foo {
            id
          }
        GRAPHQL

        subscription.query.gsub!(/\bid\b/, 'a_field_that_does_not_exist')
        subscription.save(validate: false)
        subscription
      end

      it "returns false" do
        subscription_with_error

        expect(validator.validate).to be(false)
      end

      it "has errors" do
        subscription_with_error

        validator.validate

        expect(validator.errors).to contain_exactly(
          "Field 'a_field_that_does_not_exist' doesn't exist on type 'testObject' (id: #{subscription_with_error.id})"
        )
      end
    end
  end
end
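
Distilled from the spec above (an editor's sketch, not gem documentation): the validator re-checks every stored subscription query against its registered schema and collects readable error messages.

# Sketch based on the spec above: flag stored subscriptions whose queries no
# longer match the schema.
validator = NulogyMessageBusProducer::SubscriberGraphqlSchemaValidator.new

unless validator.validate
  # e.g. ["Field 'a_field_that_does_not_exist' doesn't exist on type 'testObject' (id: ...)"]
  puts validator.errors
end
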
data/spec/integration/lib/nulogy_message_bus_producer/subscription_spec.rb
ADDED
@@ -0,0 +1,63 @@
require "integration_spec_helper"

RSpec.describe NulogyMessageBusProducer::Subscription do
  context "when validating" do
    it "is invalid with a blank query" do
      model = build_subscription(query: "")

      model.validate

      expect(model.errors[:query]).to contain_exactly("can't be blank")
    end

    it "is invalid with blank schema_key" do
      model = build_subscription(schema_key: "")

      model.validate

      expect(model.errors[:schema_key]).to contain_exactly("can't be blank")
    end

    it "is invalid with an invalid schema_key" do
      model = build_subscription(schema_key: "invalid")

      model.validate

      expect(model.errors[:query]).to contain_exactly(/Could not find a schema for schema_key 'invalid'/)
    end

    it "is invalid with an invalid query" do
      model = build_subscription(
        schema_key: "test",
        query: subscription_query(query: " foo { a_field_that_does_not_exist }")
      )

      model.validate

      expect(model).to_not be_valid
      expect(model.errors[:query]).to contain_exactly(
        "Field 'a_field_that_does_not_exist' doesn't exist on type 'testObject' (id: <new_record>)"
      )
    end

    it "valid with a valid query" do
      model = build_subscription(
        schema_key: "test",
        query: subscription_query(query: "foo { id }")
      )

      model.validate

      expect(model.errors).to_not include(:query)
    end
  end

  def build_subscription(overrides = {})
    attrs = {
      schema_key: "test",
      query: subscription_query
    }.merge(overrides)

    NulogyMessageBusProducer::Subscription.new(attrs)
  end
end
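
For orientation, an editor-supplied sketch (not the gem's documented API) of building a Subscription record: the schema_key selects a registered schema and the GraphQL subscription query is validated against it. The query shape is inferred from the subscription helpers and the risky_subscription_blocker spec below, so treat it as an assumption.

require "securerandom"

# Sketch only: a Subscription pairs a schema_key with a GraphQL subscription
# query; validation runs the query against the registered schema.
subscription = NulogyMessageBusProducer::Subscription.new(
  schema_key: "test",
  query: <<~GRAPHQL
    subscription {
      testCreated(
        subscriptionId: "#{SecureRandom.uuid}",
        subscriptionGroupId: "#{SecureRandom.uuid}",
        topicName: "some_topic"
      ) {
        foo { id }
      }
    }
  GRAPHQL
)
subscription.valid? # => true when the query matches the "test" schema
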
data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/postgres_transport_spec.rb
ADDED
@@ -0,0 +1,137 @@
require "integration_spec_helper"

RSpec.describe NulogyMessageBusProducer::Subscriptions::PostgresTransport do
  context "when subscription is triggered" do
    let(:company_uuid) { SecureRandom.uuid }

    it "generates an event for a subscription" do
      subscription = subscribe_to(
        event_type: "testCreated",
        topic_name: "some_topic"
      )
      root_object = {
        foo: { id: "some id" },
        company_uuid: company_uuid
      }

      trigger_event("testCreated", root_object)

      event = NulogyMessageBusProducer::SubscriptionEvent.find_by!(subscription_id: subscription.id)
      expect(event).to have_attributes(
        partition_key: "#{subscription.subscription_group_id},#{company_uuid}",
        topic_name: "some_topic",
        event_json: include_json(testCreated: { foo: { id: "some id" } }),
        company_uuid: company_uuid
      )
    end

    it "allows configuring context" do
      NulogyMessageBusProducer.config.context_for_subscription = lambda do |_|
        { context_data: "some contextual information" }
      end
      subscription = subscribe_to(
        event_type: "testCreated",
        query: <<~GRAPHQL
          foo {
            contextData
          }
        GRAPHQL
      )
      root_object = {
        foo: {},
        company_uuid: company_uuid
      }

      trigger_event("testCreated", root_object)

      event = NulogyMessageBusProducer::SubscriptionEvent.find_by!(subscription_id: subscription.id)
      expect(event).to have_attributes(
        event_json: include_json(
          testCreated: { foo: { contextData: "some contextual information" } }
        )
      )
    end

    it "generates one event per subscription" do
      subscribe_to(event_type: "testCreated")
      subscribe_to(event_type: "testCreated")

      expect do
        root_object = {
          foo: { id: "some id" },
          company_uuid: company_uuid
        }
        trigger_event("testCreated", root_object)
      end.to change(NulogyMessageBusProducer::SubscriptionEvent, :count).by(2)

      event_data = NulogyMessageBusProducer::SubscriptionEvent.pluck(:event_json)
      expect(event_data).to all(
        include_json(testCreated: { foo: { id: "some id" } })
      )
    end
  end

  context "when the class is not registered" do
    it "raises" do
      expect do
        Class.new(GraphQL::Schema) do
          use(NulogyMessageBusProducer::Subscriptions::PostgresTransport)
        end
      end.to raise_error(KeyError, /The schema registry did not contain an entry/)
    end
  end

  context "when a subscription error occurs" do
    context "when failing with raise" do
      before do
        NulogyMessageBusProducer.config.producing_events_fails_with(:raise)
      end

      it "raises an exception" do
        expect { trigger_erroneous_subscription }.to raise_error(/A subscription event could not be produced/)
      end
    end

    context "when failing with a soft fail" do
      before do
        NulogyMessageBusProducer.config.producing_events_fails_with(:soft_fail)
      end

      it "does not raise an error" do
        trigger_erroneous_subscription
      end

      it "calls the provided block" do
        called = false

        NulogyMessageBusProducer.config.producing_events_fails_with(:soft_fail) do |_|
          called = true
        end

        trigger_erroneous_subscription

        expect(called).to be(true)
      end
    end
  end

  def trigger_erroneous_subscription
    event_type = "testCreated"

    subscription = subscribe_to(event_type: event_type)
    simulate_invalid_query(subscription)

    trigger_event(event_type, uuid: SecureRandom.uuid, company_uuid: SecureRandom.uuid)
  end

  def simulate_invalid_query(subscription)
    subscription.query = <<~GRAPHQL
      query ($id: UUID!) {
        foo (id: $id) {
          a_field_that_does_not_exist
        }
      }
    GRAPHQL
    subscription.save(validate: false)
  end
end
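
The two configuration hooks exercised above, written as a host application might set them in an initializer (an editor's sketch; the initializer path and exactly what is yielded to the soft-fail block are assumptions):

# Sketch of the hooks shown in the spec above; assumed to live in something
# like config/initializers/message_bus_producer.rb in the host app.
NulogyMessageBusProducer.config.context_for_subscription = lambda do |_subscription|
  # In the spec above, values returned here surface in the event payload
  # through the contextData field.
  { context_data: "some contextual information" }
end

# :raise re-raises when an event cannot be produced; :soft_fail suppresses the
# error and calls the optional block (presumably with the error) instead.
NulogyMessageBusProducer.config.producing_events_fails_with(:soft_fail) do |error|
  Rails.logger.warn("message bus event not produced: #{error.inspect}")
end
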
data/spec/integration/lib/nulogy_message_bus_producer/subscriptions/risky_subscription_blocker_spec.rb
ADDED
@@ -0,0 +1,51 @@
require "integration_spec_helper"

RSpec.describe NulogyMessageBusProducer::Subscriptions::RiskySubscriptionBlocker do
  it "blocks subscriptions with arguments" do
    query = <<~GRAPHQL
      foo {
        fieldWithArguments(first: "value")
      }
    GRAPHQL

    result = attempt_subscription(query)

    expect(result).to include_json(
      errors: [{
        message: "Arguments may not be used:\nfieldWithArguments"
      }]
    )
  end

  it "blocks subscriptions which would expand lists" do
    query = <<~GRAPHQL
      fooList {
        id
      }
    GRAPHQL

    result = attempt_subscription(query)

    expect(result).to include_json(
      errors: [{
        message: "Lists may not be queried:\nfooList"
      }]
    )
  end

  def attempt_subscription(query)
    gql = <<~GRAPHQL
      subscription {
        testCreated (
          subscriptionId: "#{SecureRandom.uuid}",
          subscriptionGroupId: "#{SecureRandom.uuid}",
          topicName: "some_topic"
        ) {
          #{query}
        }
      }
    GRAPHQL

    execute_graphql(gql, NulogyMessageBusProducer::Specs::TestSchema)
  end
end
data/spec/integration_spec_helper.rb
@@ -6,7 +6,9 @@ ENV["RAILS_ENV"] ||= "test"
 require File.expand_path("dummy/config/environment.rb", __dir__)
 
 require "rspec/rails"
+require "rspec/json_expectations"
 require "active_record"
+require "spec_helper"
 
 Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].sort.each { |f| require f }
 
@@ -32,4 +34,8 @@ RSpec.configure do |config|
   config.expect_with(:rspec) do |c|
     c.max_formatted_output_length = 1_000_000
   end
+
+  config.include(SpecUtils)
+  config.include(SqlHelpers)
+  config.include(SubscriptionHelpers)
 end