faraday_json 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,219 @@
1
+ #
2
+ # FaradayJSON
3
+ # https://github.com/spriteCloud/faraday_json
4
+ #
5
+ # Copyright (c) 2015 spriteCloud B.V. and other FaradayJSON contributors.
6
+ # All rights reserved.
7
+ #
8
+
9
+ module FaradayJSON
10
+
11
+ # Character encoding helper functions
12
+ module Encoding
13
+
14
+ # Two versions of transcode: one for Ruby 1.8 and one for later versions.
15
+
16
+ if RUBY_VERSION.start_with?("1.8")
17
+
18
+ def transcode(data, input_charset, output_charset, opts = {})
19
+ # In Ruby 1.8, we pretty much have to believe the given charsets; there's
20
+ # not a lot of choice.
21
+
22
+ # If we don't have an input charset, we can't do better than US-ASCII.
23
+ if input_charset.nil? or input_charset.empty?
24
+ input_charset = opts.fetch('default_input_charset', 'us-ascii')
25
+ end
26
+
27
+ # The default output charset, on the other hand, should be UTF-8.
28
+ if output_charset.nil? or output_charset.empty?
29
+ output_charset = opts.fetch('default_output_charset', 'UTF-8//IGNORE')
30
+ end
31
+
32
+ # Transcode using iconv
33
+ require 'iconv'
34
+ return ::Iconv.conv(output_charset, input_charset, data)
35
+ end
36
+
37
+ else # end ruby 1.8/start ruby > 1.8
38
+
39
+ def transcode(data, input_charset, output_charset, opts = {})
40
+ # Strings have an encode function in Ruby > 1.8
41
+ if not data.respond_to?(:encode)
42
+ return data
43
+ end
44
+
45
+ # If we don't have a charset, just use whatever is in the string
46
+ # currently. If we do have a charset, we'll have to run some extra
47
+ # checks.
48
+ if not (input_charset.nil? or input_charset.empty?)
49
+ # Check that the passed charset is *understood* by looking it up. If the
50
+ # lookup fails, an exception is raised, which is exactly what we want.
51
+ canonical = ::Encoding.find(input_charset)
52
+
53
+ # Second, ensure the canonical charset and the actual string encoding
54
+ # are identical. If not, we'll have to do a little more than just
55
+ # transcode to UTF-8.
56
+ if canonical != data.encoding
57
+ if opts.fetch('force_input_charset', false)
58
+ data.force_encoding(canonical)
59
+ else
60
+ raise "Provided charset was #{canonical}, but data was #{data.encoding}"
61
+ end
62
+ end
63
+ end
64
+
65
+ # If there's no output charset, we should default to UTF-8.
66
+ if output_charset.nil? or output_charset.empty?
67
+ output_charset = opts.fetch('default_output_charset', 'UTF-8')
68
+ end
69
+
70
+ # Transcode!
71
+ return data.encode(output_charset)
72
+ end
73
+
74
+ end # ruby > 1.8
75
+
76
+ # Convenience helper. Output is UTF-8. Input is either a string or some other
77
+ # data structure. There's a Ruby 1.8 version mostly because it has to
78
+ # iteratively convert any included strings.
79
+ if RUBY_VERSION.start_with?("1.8")
80
+
81
+ def to_utf8(data, charset, opts = {})
82
+ if data.is_a? Hash
83
+ transcoded = {}
84
+ data.each do |key, value|
85
+ transcoded[to_utf8(key, charset, opts)] = to_utf8(value, charset, opts)
86
+ end
87
+ return transcoded
88
+ elsif data.is_a? Array
89
+ transcoded = []
90
+ data.each do |value|
91
+ transcoded << to_utf8(value, charset, opts)
92
+ end
93
+ return transcoded
94
+ elsif data.is_a? String
95
+ return transcode(data, charset, 'UTF-8//IGNORE', opts)
96
+ else
97
+ return data
98
+ end
99
+ end
100
+
101
+ else # end ruby 1.8/start ruby > 1.8
102
+
103
+ def to_utf8(data, charset, opts = {})
104
+ return transcode(data, charset, 'UTF-8', opts)
105
+ end
106
+
107
+ end # ruby > 1.8
108
+
109
+
110
+ # Helper function; strips a BOM for Unicode encodings (UTF-8, UTF-16, UTF-32)
111
+ def strip_bom(data, charset, opts = {})
112
+ # Only need to do this on Strings
113
+ if not data.is_a? String
114
+ return data
115
+ end
116
+
117
+ # If the charset is given, it overrides string internal encoding.
118
+ enc = get_dominant_encoding(data, charset, opts)
119
+
120
+ # Make the encoding canonical (if we can find out about that).
121
+ canonical = get_canonical_encoding(enc)
122
+
123
+ # Determine what a BOM would look like.
124
+ bom = get_bom(canonical)
125
+
126
+ # We can't operate on the string directly; we need a byte array.
127
+ arr = data.each_byte.to_a
128
+
129
+ # Match BOM
130
+ found = true
131
+ bom.each_index do |i|
132
+ if bom[i] != arr[i]
133
+ found = false
134
+ break
135
+ end
136
+ end
137
+
138
+ # So we may have found a BOM! Strip it.
139
+ if found
140
+ ret = arr[bom.length..-1].pack('c*')
141
+ if ret.respond_to? :force_encoding
142
+ ret.force_encoding(canonical)
143
+ end
144
+ return ret
145
+ end
146
+
147
+ # No BOM
148
+ return data
149
+ end
150
+
151
+ # Given a String that may carry an encoding (depending on the Ruby version)
152
+ # and a charset from a content-type header (which may be nil), determines
153
+ # the dominant encoding. The charset, if given, overrides the string's
154
+ # internal encoding.
155
+ def get_dominant_encoding(str, charset, opts = {})
156
+ enc = nil
157
+ if str.respond_to? :encoding
158
+ enc = str.encoding
159
+ end
160
+
161
+ if charset.nil? or charset.empty?
162
+ if enc.nil?
163
+ default_encoding = opts.fetch('default_encoding', nil)
164
+ if default_encoding.nil?
165
+ raise "No charset provided, don't know what to do!" # FIXME
166
+ end
167
+ enc = default_encoding
168
+ end
169
+ else
170
+ enc = charset
171
+ end
172
+
173
+ return enc
174
+ end
175
+
176
+
177
+ # Returns a canonical version of an encoding.
178
+ def get_canonical_encoding(enc)
179
+ if defined? ::Encoding and ::Encoding.respond_to? :find
180
+ # Oh... Ruby 1.9.2 doesn't like passing an Encoding to find()...
181
+ if not enc.is_a? ::Encoding
182
+ enc = ::Encoding.find(enc)
183
+ end
184
+ return enc.to_s.downcase
185
+ end
186
+ return enc.downcase
187
+ end
188
+
189
+
190
+ # Given a (canonical) encoding, returns a BOM as an array of byte values. If
191
+ # the given encoding does not have a BOM, an empty array is returned.
192
+ def get_bom(enc)
193
+ bom = []
194
+ if enc.start_with?('utf16be') or enc.start_with?('utf-16be')
195
+ bom = [0xfe, 0xff]
196
+ elsif enc.start_with?('utf16le') or enc.start_with?('utf-16le')
197
+ bom = [0xff, 0xfe]
198
+ elsif enc.start_with?('utf8') or enc.start_with?('utf-8')
199
+ bom = [0xef, 0xbb, 0xbf]
200
+ elsif enc.start_with?('utf32be') or enc.start_with?('utf-32be')
201
+ bom = [0x00, 0x00, 0xfe, 0xff]
202
+ elsif enc.start_with?('utf32le') or enc.start_with?('utf-32le')
203
+ bom = [0xff, 0xfe, 0x00, 0x00]
204
+ end
205
+ return bom
206
+ end
207
+
208
+
209
+
210
+ # Helper function for testing
211
+ def bin_to_hex(data)
212
+ if data.respond_to? :each_byte
213
+ return data.each_byte.map { |b| b.to_s(16) }.join
214
+ end
215
+ return data
216
+ end
217
+
218
+ end # module Encoding
219
+ end # module FaradayJSON
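
The Encoding module above is a plain mixin, so its helpers can be exercised outside the middleware. Below is a minimal sketch for Ruby > 1.8; the EncodingProbe harness class and the sample UTF-16BE bytes are illustrative assumptions, not part of the gem.

    # EncodingProbe is a hypothetical harness; only the mixin comes from the gem.
    require 'faraday_json/encoding'

    class EncodingProbe
      include FaradayJSON::Encoding
    end

    probe = EncodingProbe.new

    # UTF-16BE bytes for {"a":1}, prefixed with the 0xFE 0xFF byte-order mark.
    raw = "\xFE\xFF\x00{\x00\"\x00a\x00\"\x00:\x001\x00}".force_encoding('utf-16be')

    # strip_bom drops the BOM (see get_bom above); to_utf8 then transcodes to UTF-8.
    stripped = probe.strip_bom(raw, 'utf-16be')
    puts probe.to_utf8(stripped, 'utf-16be')   # => {"a":1}
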
@@ -0,0 +1,165 @@
1
+ #
2
+ # FaradayJSON
3
+ # https://github.com/spriteCloud/faraday_json
4
+ #
5
+ # Copyright (c) 2015 spriteCloud B.V. and other FaradayJSON contributors.
6
+ # All rights reserved.
7
+ #
8
+
9
+ require 'faraday_json/encoding'
10
+
11
+ module FaradayJSON
12
+ # Public: Parse response bodies as JSON.
13
+ class ParseJson < Faraday::Middleware
14
+ CONTENT_TYPE = 'Content-Type'.freeze
15
+
16
+ include ::FaradayJSON::Encoding
17
+
18
+ dependency do
19
+ require 'json' unless defined?(::JSON)
20
+ end
21
+
22
+ def initialize(app = nil, options = {})
23
+ super(app)
24
+ @options = options
25
+ @content_types = Array(options[:content_type])
26
+ end
27
+
28
+ def call(environment)
29
+ @app.call(environment).on_complete do |env|
30
+ if process_response_type?(response_type(env)) and parse_response?(env)
31
+ process_response(env)
32
+ end
33
+ end
34
+ end
35
+
36
+ def process_response(env)
37
+ env[:raw_body] = env[:body] if preserve_raw?(env)
38
+ body = env[:body]
39
+
40
+ # Body will be in an unknown encoding. Use charset field to coerce it to
41
+ # internal UTF-8.
42
+ charset = response_charset(env)
43
+
44
+ # We must ensure we're interpreting the body as the right charset. First,
45
+ # strip the BOM (if any).
46
+ body = strip_bom(body, charset, { 'default_encoding' => 'us-ascii' })
47
+
48
+ # Transcode to UTF-8
49
+ body = to_utf8(body, charset, { 'force_input_charset' => true })
50
+
51
+ # Now that's done, parse the JSON.
52
+ ret = nil
53
+ begin
54
+ ret = ::JSON.parse(body) unless body.strip.empty?
55
+ rescue StandardError, SyntaxError => err
56
+ raise err if err.is_a? SyntaxError and err.class.name != 'Psych::SyntaxError'
57
+ raise Faraday::Error::ParsingError, err
58
+ end
59
+ env[:body] = ret
60
+ end
61
+
62
+ def response_type(env)
63
+ type = env[:response_headers][CONTENT_TYPE].to_s
64
+ type = type.split(';', 2).first if type.index(';')
65
+ type
66
+ end
67
+
68
+ def response_charset(env)
69
+ header = env[:response_headers][CONTENT_TYPE].to_s
70
+ if header.index(';')
71
+ header.split(';').each do |part|
72
+ if part.index('charset=')
73
+ return part.split('charset=', 2).last
74
+ end
75
+ end
76
+ end
77
+ return nil
78
+ end
79
+
80
+ def process_response_type?(type)
81
+ @content_types.empty? or @content_types.any? { |pattern|
82
+ pattern.is_a?(Regexp) ? type =~ pattern : type == pattern
83
+ }
84
+ end
85
+
86
+ def parse_response?(env)
87
+ env[:body].respond_to? :to_str
88
+ end
89
+
90
+ def preserve_raw?(env)
91
+ env[:request].fetch(:preserve_raw, @options[:preserve_raw])
92
+ end
93
+
94
+
95
+
96
+ # DRAGONS
97
+ module OptionsExtension
98
+ attr_accessor :preserve_raw
99
+
100
+ def to_hash
101
+ super.update(:preserve_raw => preserve_raw)
102
+ end
103
+
104
+ def each
105
+ return to_enum(:each) unless block_given?
106
+ super
107
+ yield :preserve_raw, preserve_raw
108
+ end
109
+
110
+ def fetch(key, *args)
111
+ if :preserve_raw == key
112
+ value = __send__(key)
113
+ value.nil? ? args.fetch(0) : value
114
+ else
115
+ super
116
+ end
117
+ end
118
+ end
119
+
120
+ if defined?(Faraday::RequestOptions)
121
+ begin
122
+ Faraday::RequestOptions.from(:preserve_raw => true)
123
+ rescue NoMethodError
124
+ Faraday::RequestOptions.send(:include, OptionsExtension)
125
+ end
126
+ end
127
+ end # class ParseJson
128
+
129
+ # Public: Override the content-type of the response with "application/json"
130
+ # if the response body looks like it might be JSON, i.e. starts with an
131
+ # open bracket.
132
+ #
133
+ # This is to fix responses from certain API providers that insist on serving
134
+ # JSON with wrong MIME-types such as "text/javascript".
135
+ class ParseJsonMimeTypeFix < ParseJson
136
+ MIME_TYPE = 'application/json'.freeze
137
+
138
+ def process_response(env)
139
+ old_type = env[:response_headers][CONTENT_TYPE].to_s
140
+ new_type = MIME_TYPE.dup
141
+ new_type << ';' << old_type.split(';', 2).last if old_type.index(';')
142
+ env[:response_headers][CONTENT_TYPE] = new_type
143
+ end
144
+
145
+ BRACKETS = %w- [ { -
146
+ WHITESPACE = [ " ", "\n", "\r", "\t" ]
147
+
148
+ def parse_response?(env)
149
+ super and BRACKETS.include? first_char(env[:body])
150
+ end
151
+
152
+ def first_char(body)
153
+ idx = -1
154
+ begin
155
+ char = body[idx += 1]
156
+ char = char.chr if char
157
+ end while char and WHITESPACE.include? char
158
+ char
159
+ end
160
+ end # class ParseJsonMimeTypeFix
161
+
162
+ end
163
+
164
+ # deprecated alias
165
+ Faraday::Response::ParseJson = FaradayJSON::ParseJson
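
A minimal sketch of wiring the ParseJson middleware above into a Faraday connection; this is not taken from the gem's README, and the URL and the :content_type restriction are illustrative assumptions. Registering the class with builder.use relies only on the class name visible in this diff, not on any :json symbol the gem may or may not register with Faraday.

    require 'faraday'
    require 'faraday_json/parse_json'

    conn = Faraday.new(:url => 'https://api.example.com') do |builder|
      # Only parse bodies whose MIME type matches; regexes are accepted,
      # see process_response_type? above.
      builder.use FaradayJSON::ParseJson, :content_type => /\bjson$/
      builder.adapter Faraday.default_adapter
    end

    response = conn.get('/widgets')
    response.body   # => Ruby Hash/Array once the middleware has parsed the JSON
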
@@ -0,0 +1,10 @@
1
+ #
2
+ # FaradayJSON
3
+ # https://github.com/spriteCloud/faraday_json
4
+ #
5
+ # Copyright (c) 2015 spriteCloud B.V. and other FaradayJSON contributors.
6
+ # All rights reserved.
7
+ #
8
+ module FaradayJSON
9
+ VERSION = "0.1.0"
10
+ end
@@ -0,0 +1 @@
1
+ {"a":"Hell�, W�rld!"}
Binary file
Binary file
@@ -0,0 +1 @@
1
+ {"a":"Hellö, Wörld!"}
@@ -0,0 +1,376 @@
1
+ # encoding: utf-8
2
+
3
+ require 'helper'
4
+ require 'faraday_json/encode_json'
5
+
6
+ describe FaradayJSON::EncodeJson do
7
+ let(:middleware) { described_class.new(lambda{|env| env}) }
8
+
9
+ def process(body, content_type = nil)
10
+ env = {:body => body, :request_headers => Faraday::Utils::Headers.new}
11
+ env[:request_headers]['content-type'] = content_type if content_type
12
+ middleware.call(faraday_env(env))
13
+ end
14
+
15
+ def result_body() result[:body] end
16
+ def result_type() result[:request_headers]['content-type'] end
17
+ def result_length() result[:request_headers]['content-length'].to_i end
18
+
19
+ context "no body" do
20
+ let(:result) { process(nil) }
21
+
22
+ it "doesn't change body" do
23
+ expect(result_body).to be_nil
24
+ end
25
+
26
+ it "doesn't add content type" do
27
+ expect(result_type).to be_nil
28
+ end
29
+ end
30
+
31
+ context "empty body" do
32
+ let(:result) { process('') }
33
+
34
+ it "doesn't change body" do
35
+ expect(result_body).to be_empty
36
+ end
37
+
38
+ it "doesn't add content type" do
39
+ expect(result_type).to be_nil
40
+ end
41
+ end
42
+
43
+ context "string body" do
44
+ let(:result) { process('{"a":1}') }
45
+
46
+ it "doesn't change body" do
47
+ expect(result_body).to eq('{"a":1}')
48
+ end
49
+
50
+ it "adds content type" do
51
+ expect(result_type).to eq('application/json; charset=utf-8')
52
+ end
53
+ end
54
+
55
+ context "object body" do
56
+ let(:result) { process({:a => 1}) }
57
+
58
+ it "encodes body" do
59
+ expect(result_body).to eq('{"a":1}')
60
+ end
61
+
62
+ it "adds content type" do
63
+ expect(result_type).to eq('application/json; charset=utf-8')
64
+ end
65
+ end
66
+
67
+ context "empty object body" do
68
+ let(:result) { process({}) }
69
+
70
+ it "encodes body" do
71
+ expect(result_body).to eq('{}')
72
+ end
73
+ end
74
+
75
+ context "object body with json type" do
76
+ let(:result) { process({:a => 1}, 'application/json; charset=utf-8') }
77
+
78
+ it "encodes body" do
79
+ expect(result_body).to eq('{"a":1}')
80
+ end
81
+
82
+ it "doesn't change content type" do
83
+ expect(result_type).to eq('application/json; charset=utf-8')
84
+ end
85
+ end
86
+
87
+ context "object body with incompatible type" do
88
+ let(:result) { process({:a => 1}, 'application/xml; charset=utf-8') }
89
+
90
+ it "doesn't change body" do
91
+ expect(result_body).to eq({:a => 1})
92
+ end
93
+
94
+ it "doesn't change content type" do
95
+ expect(result_type).to eq('application/xml; charset=utf-8')
96
+ end
97
+ end
98
+
99
+ ### Unicode test cases
100
+ # Ruby 1.8 will almost certainly fail if there is no charset given in a header.
101
+ # In Ruby >1.8, there are more ways to make a sensible guess.
102
+
103
+ ### All Ruby versions should work with a charset given.
104
+ context "utf-8 in string body" do
105
+ let(:result) { process('{"a":"ä"}', 'application/json; charset=utf-8') }
106
+
107
+ it "doesn't change body" do
108
+ expect(result_body).to eq('{"a":"ä"}')
109
+ end
110
+
111
+ it "doesn't change content type" do
112
+ expect(result_type).to eq('application/json; charset=utf-8')
113
+ end
114
+
115
+ it "adds content length" do
116
+ expect(result_length).to eq(10)
117
+ end
118
+ end
119
+
120
+ context "utf-8 in object body" do
121
+ let(:result) { process({:a => "ä"}, 'application/json; charset=utf-8') }
122
+
123
+ it "encodes body" do
124
+ expect(result_body).to eq('{"a":"ä"}')
125
+ end
126
+
127
+ it "doesn't change content type" do
128
+ expect(result_type).to eq('application/json; charset=utf-8')
129
+ end
130
+
131
+ it "adds content length" do
132
+ expect(result_length).to eq(10)
133
+ end
134
+ end
135
+
136
+ context "non-unicode in string body" do
137
+ let(:result) {
138
+ process(test_encode('{"a":"ä"}', 'iso-8859-15'), 'application/json; charset=iso-8859-15')
139
+ }
140
+
141
+ it "changes body" do
142
+ expect(result_body).to eq('{"a":"ä"}')
143
+ end
144
+
145
+ it "changes content type" do
146
+ expect(result_type).to eq('application/json; charset=utf-8')
147
+ end
148
+
149
+ it "adds content length" do
150
+ expect(result_length).to eq(10)
151
+ end
152
+ end
153
+
154
+ context "non-unicode in object body" do
155
+ let(:result) {
156
+ process({:a => test_encode('ä', 'iso-8859-15')}, 'application/json; charset=iso-8859-15')
157
+ }
158
+
159
+ it "encodes body" do
160
+ expect(result_body).to eq('{"a":"ä"}')
161
+ end
162
+
163
+ it "changes content type" do
164
+ expect(result_type).to eq('application/json; charset=utf-8')
165
+ end
166
+
167
+ it "adds content length" do
168
+ expect(result_length).to eq(10)
169
+ end
170
+ end
171
+
172
+ context "non-utf-8 in string body" do
173
+ let(:result) {
174
+ process(test_encode('{"a":"ä"}', 'utf-16be'), 'application/json; charset=utf-16be')
175
+ }
176
+
177
+ it "changes body" do
178
+ expect(result_body).to eq('{"a":"ä"}')
179
+ end
180
+
181
+ it "changes content type" do
182
+ expect(result_type).to eq('application/json; charset=utf-8')
183
+ end
184
+
185
+ it "adds content length" do
186
+ expect(result_length).to eq(10)
187
+ end
188
+ end
189
+
190
+ context "non-utf-8 in object body" do
191
+ let(:result) {
192
+ process({:a => test_encode('ä', 'utf-16le')}, 'application/json; charset=utf-16le')
193
+ }
194
+
195
+ it "encodes body" do
196
+ expect(result_body).to eq('{"a":"ä"}')
197
+ end
198
+
199
+ it "changes content type" do
200
+ expect(result_type).to eq('application/json; charset=utf-8')
201
+ end
202
+
203
+ it "adds content length" do
204
+ expect(result_length).to eq(10)
205
+ end
206
+ end
207
+
208
+
209
+ ### Ruby versions > 1.8 should be able to guess missing charsets at times.
210
+ if not RUBY_VERSION.start_with?("1.8")
211
+ context "utf-8 in string body without content type" do
212
+ let(:result) { process('{"a":"ä"}') }
213
+
214
+ it "doesn't change body" do
215
+ expect(result_body).to eq('{"a":"ä"}')
216
+ end
217
+
218
+ it "adds content type" do
219
+ expect(result_type).to eq('application/json; charset=utf-8')
220
+ end
221
+
222
+ it "adds content length" do
223
+ expect(result_length).to eq(10)
224
+ end
225
+ end
226
+
227
+ context "utf-8 in object body without content type" do
228
+ let(:result) { process({:a => "ä"}) }
229
+
230
+ it "encodes body" do
231
+ expect(result_body).to eq('{"a":"ä"}')
232
+ end
233
+
234
+ it "adds content type" do
235
+ expect(result_type).to eq('application/json; charset=utf-8')
236
+ end
237
+
238
+ it "adds content length" do
239
+ expect(result_length).to eq(10)
240
+ end
241
+ end
242
+
243
+ context "non-unicode in string body without content type" do
244
+ let(:result) {
245
+ process(test_encode('{"a":"ä"}', 'iso-8859-15'))
246
+ }
247
+
248
+ it "doesn't change body" do
249
+ expect(result_body).to eq('{"a":"ä"}')
250
+ end
251
+
252
+ it "adds content type" do
253
+ expect(result_type).to eq('application/json; charset=utf-8')
254
+ end
255
+
256
+ it "adds content length" do
257
+ expect(result_length).to eq(10)
258
+ end
259
+ end
260
+
261
+ context "non-unicode in object body without content type" do
262
+ let(:result) {
263
+ process({:a => test_encode('ä', 'iso-8859-15')})
264
+ }
265
+
266
+ it "encodes body" do
267
+ expect(result_body).to eq('{"a":"ä"}')
268
+ end
269
+
270
+ it "adds content type" do
271
+ expect(result_type).to eq('application/json; charset=utf-8')
272
+ end
273
+
274
+ it "adds content length" do
275
+ expect(result_length).to eq(10)
276
+ end
277
+ end
278
+
279
+ context "non-utf-8 in string body without content type" do
280
+ let(:result) {
281
+ process(test_encode('{"a":"ä"}', 'utf-16be'))
282
+ }
283
+
284
+
285
+ it "doesn't change body" do
286
+ expect(result_body).to eq('{"a":"ä"}')
287
+ end
288
+
289
+ it "adds content type" do
290
+ expect(result_type).to eq('application/json; charset=utf-8')
291
+ end
292
+
293
+ it "adds content length" do
294
+ expect(result_length).to eq(10)
295
+ end
296
+ end
297
+
298
+ context "non-utf-8 in object body without content type" do
299
+ let(:result) {
300
+ process({:a => test_encode('ä', 'utf-16le')})
301
+ }
302
+
303
+ it "encodes body" do
304
+ expect(result_body).to eq('{"a":"ä"}')
305
+ end
306
+
307
+ it "adds content type" do
308
+ expect(result_type).to eq('application/json; charset=utf-8')
309
+ end
310
+
311
+ it "adds content length" do
312
+ expect(result_length).to eq(10)
313
+ end
314
+ end
315
+
316
+ ### Dealing with files in various encoding should ideally be easy
317
+ FILES = {
318
+ 'spec/data/iso8859-15_file.json' => 'iso-8859-15',
319
+ 'spec/data/utf16be_file.json' => 'utf-16be',
320
+ 'spec/data/utf16le_file.json' => 'utf-16le',
321
+ 'spec/data/utf8_file.json' => 'utf-8',
322
+ }
323
+
324
+
325
+ FILES.each do |fname, enc|
326
+ context "reading #{enc} encoded file '#{fname}'" do
327
+ # Read the string from file; read binary/with encoding. Ruby 1.8 will
328
+ # ignore this, but must still work.
329
+ data = File.new(fname, "rb:#{enc}").read
330
+
331
+ # Passing that data without an explicit charset should still do the right thing, because the string carries its encoding.
332
+ let(:result) {
333
+ process(data)
334
+ }
335
+
336
+ it "encodes body" do
337
+ expect(result_body).to eq("{\"a\":\"Hellö, Wörld!\"}\n")
338
+ end
339
+
340
+ it "adds content type" do
341
+ expect(result_type).to eq('application/json; charset=utf-8')
342
+ end
343
+
344
+ it "adds content length" do
345
+ expect(result_length).to eq(24)
346
+ end
347
+ end
348
+ end
349
+
350
+ FILES.each do |fname, enc|
351
+ context "reading #{enc} encoded file '#{fname}' as binary" do
352
+ # Read the string from file as raw binary, without an encoding hint; the
353
+ # charset is supplied via the content-type header instead.
354
+ data = File.new(fname, "rb").read
355
+
356
+ # Passing that data with a charset should do the right thing.
357
+ let(:result) {
358
+ process(data, "application/json; charset=#{enc}")
359
+ }
360
+
361
+ it "encodes body" do
362
+ expect(result_body).to eq("{\"a\":\"Hellö, Wörld!\"}\n")
363
+ end
364
+
365
+ it "adds content type" do
366
+ expect(result_type).to eq('application/json; charset=utf-8')
367
+ end
368
+
369
+ it "adds content length" do
370
+ expect(result_length).to eq(24)
371
+ end
372
+ end
373
+ end
374
+
375
+ end
376
+ end
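
The spec above pulls in a test_encode helper via require 'helper'; that file is not part of this diff. A plausible sketch, assuming the helper simply re-encodes a UTF-8 literal into the target charset and degrades to a no-op on Ruby 1.8, where strings carry no encoding:

    # Hypothetical reconstruction of the spec helper; not taken from the gem.
    def test_encode(str, charset)
      return str unless str.respond_to?(:encode)  # Ruby 1.8: nothing to do
      str.encode(charset)
    end

With that in place, test_encode('ä', 'utf-16le') yields the bytes 0xE4 0x00, which is the kind of non-UTF-8 input the contexts above feed into process.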