json-emitter 0.0.2 → 0.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +2 -7
- data/lib/json-emitter/buffered_stream.rb +9 -0
- data/lib/json-emitter/stream.rb +1 -12
- data/lib/json-emitter/version.rb +1 -1
- data/lib/json-emitter.rb +12 -16
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d39d9a362efddd2cb39412926290328feb042228099e2b737d6d8450fad28cdd
+  data.tar.gz: ff8a418af3119910c46d78320e8b4dcac3cd6c0282202e1bae970c56fadf3e12
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b5cc83b5e2018d7f9b05f98f3cbfa9ab3f45176ff96e121f4336303466abdfb1836db673c5b567ae3579de7cdf9bf3d95b7879488ef35a25d34422d11177c1ed
+  data.tar.gz: 33cc0cf57bedd84fda214c19859a32289c31d08e5e1d587d55485e73b5dab81030cd4fc12b14ed0c6e8234238821492fc694e603ad4d7ee8193f479de867ad6c
data/README.md
CHANGED
@@ -50,11 +50,6 @@ File.open("/tmp/foo.json", "w+") { |file|
   stream.each { |json_chunk|
     ...
   }
-
-  # this will buffer the JSON into roughly 8k chunks
-  stream.buffered(8).each { |json_8k_chunk|
-    ...
-  }
 ```
 
 # HTTP Chunked Transfer (a.k.a streaming)
@@ -90,7 +85,7 @@ get :orders do
 
   stream JsonEmitter.array(enumerator) { |order|
     ApiV1::Entities::Order.new(order)
-  }
+  }
 end
 ```
 
@@ -104,7 +99,7 @@ app = ->(env) {
 
   stream = JsonEmitter.array(enumerator) { |order|
     order.to_h
-  }
+  }
 
   [200, {"Content-Type" => "application/json"}, stream]
 }
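The README hunks above drop the old `stream.buffered(8)` example. Based on the json-emitter.rb changes further down, buffering in 0.0.3 appears to be configured directly on the top-level helper instead. A minimal sketch of that updated usage, assuming the new `buffer_size:`/`buffer_unit:` keywords shown in this diff; the enumerator and output path are illustrative:

```ruby
require "json-emitter"

# Illustrative data source; any Enumerable of JSON-compatible values works.
enumerator = (1..10_000).lazy.map { |i| { id: i } }

# The buffer hint now rides on JsonEmitter.array itself
# (previously: JsonEmitter.array(enumerator).buffered(8)).
stream = JsonEmitter.array(enumerator, buffer_size: 8, buffer_unit: :kb) { |record|
  record # each element may be mapped before it is serialized
}

File.open("/tmp/foo.json", "w+") do |file|
  # Chunks are roughly 8 KB each; a size hint, not a hard limit.
  stream.each { |json_chunk| file.write(json_chunk) }
end
```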
data/lib/json-emitter/stream.rb
CHANGED
@@ -1,7 +1,7 @@
 module JsonEmitter
   #
   # Represents a stream of JSON to be generated and yielded. It can be treated like any Enumerable.
-  #
+  # Unlike UnbufferedStream, the size of the yielded strings can vary from 1 to 1000's.
   #
   class Stream
     include Enumerable
@@ -15,17 +15,6 @@ module JsonEmitter
       @enum = enum
     end
 
-    #
-    # Returns a new stream that will buffer the output. You can perform the same "write" or "each" operations
-    # on the new stream, but the chunks of output will be (roughly) uniform in size.
-    #
-    # @param buffer_size [Integer] The buffer size in kb. This is a size *hint*, not a hard limit.
-    # @return [JsonEmitter::BufferedStream]
-    #
-    def buffered(buffer_size = 16, unit: :kb)
-      BufferedStream.new(@enum, buffer_size, unit: unit)
-    end
-
     #
     # Write the stream to the specified IO object.
     #
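With `Stream#buffered` removed, the buffer size and unit hints travel through whatever builds the `BufferedStream`; per the json-emitter.rb hunks below, that is now the top-level helpers. A short before/after sketch under that assumption, using the `:bytes` unit listed in the new `@param unit` docs; the enumerator is illustrative:

```ruby
require "json-emitter"

enumerator = Array.new(100) { |i| { n: i } }.each # illustrative data

# 0.0.2 (removed): JsonEmitter.array(enumerator).buffered(16).each { |chunk| ... }

# 0.0.3: the helper returns the buffered stream directly; here with a
# ~512-byte buffer hint instead of the default 16 KB.
stream = JsonEmitter.array(enumerator, buffer_size: 512, buffer_unit: :bytes)

# Per the removed docs, the buffered stream supports the same "each"/"write"
# operations, with chunks of roughly uniform size.
stream.each { |chunk| $stdout.write(chunk) }
```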
data/lib/json-emitter/version.rb
CHANGED
data/lib/json-emitter.rb
CHANGED
@@ -12,8 +12,8 @@ require 'json-emitter/buffered_stream'
 # Primitive values will be serialized to JSON using MultiJson.dump. MultiJson finds and uses the most efficient
 # JSON generator you have on your system (e.g. oj) and falls back to the stdlib JSON library.
 #
-# The emitter can be used to output to anything (files, network sockets, etc)
-#
+# The emitter can be used to output to anything (files, network sockets, etc). It works very well with so-called
+# "HTTP chunked responses" in Rack/Rails/Sinatra/Grape/etc.
 #
 module JsonEmitter
   class << self
@@ -47,17 +47,15 @@ module JsonEmitter
   # # do something with each json chunk
   # end
   #
-  # # if you need the outputted chunks to be (roughly) equal in size, call "buffered"
-  # # and pass in the buffer size in kb.
-  # buffered_stream = stream.buffered(16)
-  #
   # @param enum [Enumerable] Something that can be enumerated over, like an Array or Enumerator. Each element should be something that can be rendered as JSON (e.g. a number, string, boolean, Array, or Hash).
+  # @param buffer_size [Integer] The buffer size in kb. This is a size *hint*, not a hard limit.
+  # @param unit [Symbol] :bytes | :kb (default) | :mb
   # @yield If a block is given, it will be yielded each value in the array. The return value from the block will be converted to JSON instead of the original value.
-  # @return [JsonEmitter::
+  # @return [JsonEmitter::BufferedStream]
   #
-  def self.array(enum, &mapper)
+  def self.array(enum, buffer_size: 16, buffer_unit: :kb, &mapper)
     emitter = Emitter.new.array(enum, &mapper)
-
+    BufferedStream.new(emitter, buffer_size, unit: buffer_unit)
   end
 
   #
@@ -93,16 +91,14 @@ module JsonEmitter
   # # do something with each json chunk
   # end
   #
-  # # if you need the outputted chunks to be (roughly) equal in size, call "buffered"
-  # # and pass in the buffer size in kb.
-  # buffered_stream = stream.buffered(16)
-  #
   # @param hash [Hash] Keys should be Strings or Symbols and values should be any JSON-compatible value like a number, string, boolean, Array, or Hash.
-  # @
+  # @param buffer_size [Integer] The buffer size in kb. This is a size *hint*, not a hard limit.
+  # @param unit [Symbol] :bytes | :kb (default) | :mb
+  # @return [JsonEmitter::BufferedStream]
   #
-  def self.object(hash)
+  def self.object(hash, buffer_size: 16, buffer_unit: :kb)
     emitter = Emitter.new.object(hash)
-
+    BufferedStream.new(emitter, buffer_size, unit: buffer_unit)
   end
 
   # Wrap the enumeration in a Proc. It will be passed a callback which it must call to continue.
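The `JsonEmitter.object` changes mirror the `array` ones: it also returns a `JsonEmitter::BufferedStream` and takes the same buffer keywords. A hedged sketch of how that pairs with the Rack-style chunked responses the updated module docs mention; the `app` lambda and the data are illustrative, and the response wiring follows the README's Rack example:

```ruby
require "json-emitter"

# Illustrative payload: String/Symbol keys with JSON-compatible values
# (numbers, strings, Arrays, Hashes), as the docs above describe.
orders = Array.new(1_000) { |i| { id: i, total_cents: i * 100 } }

app = ->(env) {
  # JsonEmitter.object now returns a BufferedStream (per the new @return tag)
  # and accepts the same buffer_size/buffer_unit keywords as JsonEmitter.array.
  stream = JsonEmitter.object({ count: orders.size, orders: orders },
                              buffer_size: 16, buffer_unit: :kb)

  # A Rack body only needs to respond to #each; the stream yields ~16 KB chunks.
  [200, { "Content-Type" => "application/json" }, stream]
}
```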