kafka_rest_client 0.2.2 → 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +25 -2
- data/kafka_rest_client.gemspec +1 -1
- data/lib/kafka_rest_client/avro_producer.rb +66 -9
- data/lib/kafka_rest_client/errors.rb +2 -0
- data/lib/kafka_rest_client/utils.rb +26 -0
- metadata +2 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA1:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: a2547fd6407a534cd9dcc57623b629f2a4dc3f67
|
4
|
+
data.tar.gz: 46678ca71d81e2ab8d4618c54ef6ae89ac1a912c
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: ebf98c38adac856ae66952a02081451aebcac5f3406ce157c7656719dd4582ebce5ef39eecc65623b90349c5b92a83f25f92054d14ae32c7c0187c1eb1b87649
|
7
|
+
data.tar.gz: 69768766374ee6d1d765251888e5c9dae0173470fdccaea8015681768b33743ffd08f3814459ef75d1dc30a9fd5ec22402113a569421b1588470fbb0e30cc309
|
data/README.md
CHANGED
@@ -2,7 +2,7 @@
|
|
2
2
|
|
3
3
|
A Ruby client to interact with [Kafka REST Proxy](http://docs.confluent.io/1.0.1/kafka-rest/docs/index.html)
|
4
4
|
|
5
|
-
**Current Version:** 0.2.2
|
5
|
+
**Current Version:** 0.3.0
|
6
6
|
|
7
7
|
**Supported Ruby versions:** 2.0, 2.1, 2.2
|
8
8
|
|
@@ -11,7 +11,7 @@ A Ruby client to interact with [Kafka REST Proxy](http://docs.confluent.io/1.0.1
|
|
11
11
|
Add this line to your application's Gemfile:
|
12
12
|
|
13
13
|
```ruby
|
14
|
-
gem 'kafka_rest_client', git: 'git@github.com:FundingCircle/kafka_rest_client.git', tag: 'v0.2.2'
|
14
|
+
gem 'kafka_rest_client', git: 'git@github.com:FundingCircle/kafka_rest_client.git', tag: 'v0.3.0'
|
15
15
|
```
|
16
16
|
|
17
17
|
## Usage
|
@@ -39,6 +39,9 @@ producer = KafkaRestClient::AvroProducer.new
|
|
39
39
|
# with the "#{topic}-value" name
|
40
40
|
producer.produce('ice-cream-melted', { temperature: 35, unit: 'celsius' })
|
41
41
|
|
42
|
+
# This would post a request to the REST Proxy e.g. :
|
43
|
+
# {"id": 1, "temperature": 32, "unit": "celsius"}
|
44
|
+
|
42
45
|
# Produce multiple events
|
43
46
|
# The schema_id will be fetched from the schema registry by looking up a schema
|
44
47
|
# with the "#{topic}-value" name
|
@@ -70,6 +73,26 @@ end
|
|
70
73
|
|
71
74
|
producer.produce('ice-cream-melted', [IceCreamMeltedEvent.new(35, 'celsius')])
|
72
75
|
|
76
|
+
# All of the above examples expect events to have union types explicitly defined.
|
77
|
+
# For example, if you have a nullable string field, the type annotation needs to be:
|
78
|
+
# {"field_name": { "string": "field_value"}}
|
79
|
+
# You can provide the following option to automatically annotate nullable fields.
|
80
|
+
# ⚠️ This does not currently support nested documents
|
81
|
+
producer.produce('ice-cream-melted',
|
82
|
+
{ temperature: 35, unit: 'celsius' },
|
83
|
+
annotate_optional_fields: true,
|
84
|
+
value_schema: {"namespace": "com.fundingcircle",
|
85
|
+
"type": "record",
|
86
|
+
"name": "IceCreamMelted",
|
87
|
+
"fields": [{"name": "id", "type": "string"},
|
88
|
+
{"name": "temperature","type": "int"},
|
89
|
+
{"name": "unit",
|
90
|
+
"type": ["null", "string"]
|
91
|
+
}]
|
92
|
+
})
|
93
|
+
# This would post a request to the REST Proxy with the correct type annotation, e.g. :
|
94
|
+
# {"id": 1, "temperature": 32, "unit": { "string": "celsius"}}
|
95
|
+
|
73
96
|
# Exception handling
|
74
97
|
begin
|
75
98
|
producer.produce('ice-cream-melted', { temperature: 35, unit: 'celsius' })
|
data/kafka_rest_client.gemspec
CHANGED
@@ -4,7 +4,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
|
|
4
4
|
|
5
5
|
Gem::Specification.new do |spec|
|
6
6
|
spec.name = 'kafka_rest_client'
|
7
|
-
spec.version = '0.2.2'
|
7
|
+
spec.version = '0.3.0'
|
8
8
|
spec.authors = ['Funding Circle Engineering']
|
9
9
|
spec.email = ['engineering+kafka_rest_client@fundingcircle.com']
|
10
10
|
spec.licenses = ['BSD-3-Clause']
|
@@ -1,7 +1,7 @@
|
|
1
|
-
require 'kafka_rest_client/errors'
|
2
1
|
require 'kafka_rest_client/configuration'
|
2
|
+
require 'kafka_rest_client/errors'
|
3
|
+
require 'kafka_rest_client/utils'
|
3
4
|
require 'httparty'
|
4
|
-
require 'json'
|
5
5
|
|
6
6
|
module KafkaRestClient
|
7
7
|
class AvroProducer
|
@@ -24,7 +24,11 @@ module KafkaRestClient
|
|
24
24
|
|
25
25
|
def produce(topic, events, options = {})
|
26
26
|
if enabled?
|
27
|
-
|
27
|
+
events = [events].flatten.map { |event| event_to_hash event }
|
28
|
+
if options[:annotate_optional_fields]
|
29
|
+
serialized_events = annotate_events_optional_fields(topic, events, options)
|
30
|
+
end
|
31
|
+
payload = build_event_payload(topic, (serialized_events || events), options)
|
28
32
|
response = post_event_to_kafka(topic, payload)
|
29
33
|
config.logger.debug("Produced to Kafka topic #{topic}: #{payload.inspect}")
|
30
34
|
response
|
@@ -33,6 +37,52 @@ module KafkaRestClient
|
|
33
37
|
|
34
38
|
private
|
35
39
|
|
40
|
+
def event_to_hash(event)
|
41
|
+
if event.respond_to? :as_json
|
42
|
+
event.as_json
|
43
|
+
else
|
44
|
+
Utils.deep_stringify_keys event
|
45
|
+
end
|
46
|
+
end
|
47
|
+
|
48
|
+
def annotate_optional_fields(event, fields)
|
49
|
+
event.each do |k, v|
|
50
|
+
if v.is_a?(Hash)
|
51
|
+
fail NestedHashSerializationNotImplemented
|
52
|
+
elsif v && fields[k]
|
53
|
+
event[k] = { fields[k]['type'] => event[k] }
|
54
|
+
end
|
55
|
+
end
|
56
|
+
end
|
57
|
+
|
58
|
+
def select_schema(topic, options)
|
59
|
+
if options[:value_schema]
|
60
|
+
options[:value_schema]
|
61
|
+
elsif options[:value_schema_id]
|
62
|
+
get_schema_by_id(options[:value_schema_id]).fetch('schema')
|
63
|
+
else
|
64
|
+
get_latest_schema(topic).fetch('schema')
|
65
|
+
end
|
66
|
+
end
|
67
|
+
|
68
|
+
# Transforms an optional schema attribute
|
69
|
+
# From {"name"=>"temperature", "type"=>["null", "double"]}
|
70
|
+
# To {"temperature"=>{"type"=>"double"}}
|
71
|
+
def schema_optional_fields(raw_schema)
|
72
|
+
JSON.parse(raw_schema).fetch('fields').inject({}) do |hash, attribute|
|
73
|
+
if attribute['type'].include? 'null'
|
74
|
+
hash[attribute['name']] = { 'type' => (attribute['type'] - ['null']).first }
|
75
|
+
end
|
76
|
+
hash
|
77
|
+
end
|
78
|
+
end
|
79
|
+
|
80
|
+
def annotate_events_optional_fields(topic, events, options)
|
81
|
+
raw_schema = select_schema(topic, options)
|
82
|
+
optional_fields = schema_optional_fields(raw_schema)
|
83
|
+
events.map { |event| annotate_optional_fields(event, optional_fields) }
|
84
|
+
end
|
85
|
+
|
36
86
|
def build_event_payload(topic, events, options)
|
37
87
|
payload = { records: events.map { |event| { value: event } } }.merge(
|
38
88
|
options.select { |k, _| [:value_schema_id, :value_schema].include?(k) }
|
@@ -45,16 +95,13 @@ module KafkaRestClient
|
|
45
95
|
payload
|
46
96
|
end
|
47
97
|
|
48
|
-
def get_latest_schema(topic)
|
49
|
-
response = HTTParty.get(
|
50
|
-
"#{config.schema_registry_url}/subjects/#{topic}-value/versions/latest",
|
51
|
-
timeout: config.timeout
|
52
|
-
)
|
98
|
+
def get_schema(id, url)
|
99
|
+
response = HTTParty.get(url, timeout: config.timeout)
|
53
100
|
|
54
101
|
if response.code == 200
|
55
102
|
JSON.parse(response.body)
|
56
103
|
elsif response.code == 404
|
57
|
-
fail SchemaNotFoundError, "Schema for #{topic} not found: #{response.body}"
|
104
|
+
fail SchemaNotFoundError, "Schema for #{id} not found: #{response.body}"
|
58
105
|
elsif response.code == 500
|
59
106
|
fail InternalServerError, "Schema registry internal error: #{response.body}"
|
60
107
|
elsif response.code == 503
|
@@ -64,6 +111,16 @@ module KafkaRestClient
|
|
64
111
|
end
|
65
112
|
end
|
66
113
|
|
114
|
+
def get_schema_by_id(id)
|
115
|
+
url = "#{config.schema_registry_url}/schemas/ids/#{id}"
|
116
|
+
get_schema(id, url)
|
117
|
+
end
|
118
|
+
|
119
|
+
def get_latest_schema(topic)
|
120
|
+
url = "#{config.schema_registry_url}/subjects/#{topic}-value/versions/latest"
|
121
|
+
get_schema('latest', url)
|
122
|
+
end
|
123
|
+
|
67
124
|
def post_event_to_kafka(topic, message)
|
68
125
|
response = HTTParty.post(
|
69
126
|
"#{config.kafka_proxy_url}/topics/#{topic}",
|
@@ -0,0 +1,26 @@
|
|
1
|
+
module KafkaRestClient
|
2
|
+
module Utils
|
3
|
+
# Courtersy of avdi
|
4
|
+
# Source https://gist.github.com/avdi/239567
|
5
|
+
def transform_hash(original, options={}, &block)
|
6
|
+
original.inject({}){|result, (key,value)|
|
7
|
+
value = if (options[:deep] && Hash === value)
|
8
|
+
transform_hash(value, options, &block)
|
9
|
+
else
|
10
|
+
value
|
11
|
+
end
|
12
|
+
block.call(result,key,value)
|
13
|
+
result
|
14
|
+
}
|
15
|
+
end
|
16
|
+
|
17
|
+
def deep_stringify_keys(hash)
|
18
|
+
transform_hash(hash, :deep => true) {|hash, key, value|
|
19
|
+
hash[key.to_s] = value
|
20
|
+
}
|
21
|
+
end
|
22
|
+
|
23
|
+
module_function :transform_hash
|
24
|
+
module_function :deep_stringify_keys
|
25
|
+
end
|
26
|
+
end
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: kafka_rest_client
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.2.2
|
4
|
+
version: 0.3.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Funding Circle Engineering
|
@@ -144,6 +144,7 @@ files:
|
|
144
144
|
- lib/kafka_rest_client/avro_producer.rb
|
145
145
|
- lib/kafka_rest_client/configuration.rb
|
146
146
|
- lib/kafka_rest_client/errors.rb
|
147
|
+
- lib/kafka_rest_client/utils.rb
|
147
148
|
homepage: http://github.com/FundingCircle/kakfa_rest_client
|
148
149
|
licenses:
|
149
150
|
- BSD-3-Clause
|