io-stream 0.6.1 → 0.8.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/lib/io/stream/buffered.rb +46 -22
- data/lib/io/stream/generic.rb +19 -323
- data/lib/io/stream/openssl.rb +15 -0
- data/lib/io/stream/readable.rb +358 -0
- data/lib/io/stream/shim/buffered.rb +16 -0
- data/lib/io/stream/shim/readable.rb +8 -0
- data/lib/io/stream/string_buffer.rb +5 -0
- data/lib/io/stream/version.rb +1 -1
- data/lib/io/stream/writable.rb +92 -0
- data/lib/io/stream.rb +5 -0
- data/license.md +1 -1
- data/readme.md +58 -4
- data/releases.md +75 -0
- data.tar.gz.sig +0 -0
- metadata +7 -11
- metadata.gz.sig +0 -0
- data/lib/io/stream/shim/shim.md +0 -4
- data/lib/io/stream/shim/timeout.rb +0 -17
data/lib/io/stream/readable.rb
ADDED
@@ -0,0 +1,358 @@

# frozen_string_literal: true

# Released under the MIT License.
# Copyright, 2023-2024, by Samuel Williams.

require_relative "string_buffer"

module IO::Stream
	# The default block size for IO buffers. Defaults to 256KB (optimized for modern SSDs and networks).
	BLOCK_SIZE = ENV.fetch("IO_STREAM_BLOCK_SIZE", 1024*256).to_i
	
	# The minimum read size for efficient I/O operations. Defaults to the same as BLOCK_SIZE.
	MINIMUM_READ_SIZE = ENV.fetch("IO_STREAM_MINIMUM_READ_SIZE", BLOCK_SIZE).to_i
	
	# The maximum read size for a single read operation. This limit exists because:
	# 1. System calls like read() cannot handle requests larger than SSIZE_MAX
	# 2. Very large reads can cause memory pressure and poor interactive performance
	# 3. Most socket buffers and pipe capacities are much smaller anyway
	# On 64-bit systems SSIZE_MAX is ~8.8 million MB, on 32-bit it's ~2GB.
	# Our default of 16MB provides a good balance of throughput and responsiveness, and is page aligned.
	# It is also a multiple of the minimum read size, so that we can read in chunks without exceeding the maximum.
	MAXIMUM_READ_SIZE = ENV.fetch("IO_STREAM_MAXIMUM_READ_SIZE", MINIMUM_READ_SIZE * 64).to_i
	
	# A module providing readable stream functionality.
	#
	# You must implement the `sysread` method to read data from the underlying IO.
	module Readable
		# Initialize readable stream functionality.
		# @parameter minimum_read_size [Integer] The minimum size for read operations.
		# @parameter maximum_read_size [Integer] The maximum size for read operations.
		# @parameter block_size [Integer] Legacy parameter, use minimum_read_size instead.
		def initialize(minimum_read_size: MINIMUM_READ_SIZE, maximum_read_size: MAXIMUM_READ_SIZE, block_size: nil, **, &block)
			@done = false
			@read_buffer = StringBuffer.new
			# Used as destination buffer for underlying reads.
			@input_buffer = StringBuffer.new
			
			# Support legacy block_size parameter for backwards compatibility
			@minimum_read_size = block_size || minimum_read_size
			@maximum_read_size = maximum_read_size
			
			super(**, &block) if defined?(super)
		end
		
		attr_accessor :minimum_read_size
		
		# Legacy accessor for backwards compatibility
		# @returns [Integer] The minimum read size.
		def block_size
			@minimum_read_size
		end
		
		# Legacy setter for backwards compatibility
		# @parameter value [Integer] The minimum read size.
		def block_size=(value)
			@minimum_read_size = value
		end
		
		# Read data from the stream.
		# @parameter size [Integer | Nil] The number of bytes to read. If nil, read until end of stream.
		# @returns [String] The data read from the stream.
		def read(size = nil)
			return String.new(encoding: Encoding::BINARY) if size == 0
			
			if size
				until @done or @read_buffer.bytesize >= size
					# Compute the amount of data we need to read from the underlying stream:
					read_size = size - @read_buffer.bytesize
					
					# Don't read less than @minimum_read_size to avoid lots of small reads:
					fill_read_buffer(read_size > @minimum_read_size ? read_size : @minimum_read_size)
				end
			else
				until @done
					fill_read_buffer
				end
			end
			
			return consume_read_buffer(size)
		end
		
		# Read at most `size` bytes from the stream. Will avoid reading from the underlying stream if possible.
		def read_partial(size = nil)
			return String.new(encoding: Encoding::BINARY) if size == 0
			
			if !@done and @read_buffer.empty?
				fill_read_buffer
			end
			
			return consume_read_buffer(size)
		end
		
		# Read exactly the specified number of bytes.
		# @parameter size [Integer] The number of bytes to read.
		# @parameter exception [Class] The exception to raise if not enough data is available.
		# @returns [String] The data read from the stream.
		def read_exactly(size, exception: EOFError)
			if buffer = read(size)
				if buffer.bytesize != size
					raise exception, "Could not read enough data!"
				end
				
				return buffer
			end
			
			raise exception, "Encountered done while reading data!"
		end
		
		# This is a compatibility shim for existing code that uses `readpartial`.
		def readpartial(size = nil)
			read_partial(size) or raise EOFError, "Encountered done while reading data!"
		end
		
		# Find the index of a pattern in the read buffer, reading more data if needed.
		# @parameter pattern [String] The pattern to search for.
		# @parameter offset [Integer] The offset to start searching from.
		# @parameter limit [Integer | Nil] The maximum number of bytes to read while searching.
		# @returns [Integer | Nil] The index of the pattern, or nil if not found.
		private def index_of(pattern, offset, limit, discard = false)
			# We don't want to split on the pattern, so we subtract the size of the pattern.
			split_offset = pattern.bytesize - 1
			
			until index = @read_buffer.index(pattern, offset)
				offset = @read_buffer.bytesize - split_offset
				
				offset = 0 if offset < 0
				
				if limit and offset >= limit
					return nil
				end
				
				unless fill_read_buffer
					return nil
				end
				
				if discard
					# If we are discarding, we should consume the read buffer up to the offset:
					consume_read_buffer(offset)
					offset = 0
				end
			end
			
			return index
		end
		
		# Efficiently read data from the stream until encountering pattern.
		# @parameter pattern [String] The pattern to match.
		# @parameter offset [Integer] The offset to start searching from.
		# @parameter limit [Integer] The maximum number of bytes to read, including the pattern (even if chomped).
		# @parameter chomp [Boolean] Whether to remove the pattern from the returned data.
		# @returns [String | Nil] The contents of the stream up until the pattern, or nil if the pattern was not found.
		def read_until(pattern, offset = 0, limit: nil, chomp: true)
			if index = index_of(pattern, offset, limit)
				return nil if limit and index >= limit
				
				@read_buffer.freeze
				matched = @read_buffer.byteslice(0, index+(chomp ? 0 : pattern.bytesize))
				@read_buffer = @read_buffer.byteslice(index+pattern.bytesize, @read_buffer.bytesize)
				
				return matched
			end
		end
		
		# Efficiently discard data from the stream until encountering pattern.
		# @parameter pattern [String] The pattern to match.
		# @parameter offset [Integer] The offset to start searching from.
		# @parameter limit [Integer] The maximum number of bytes to read, including the pattern.
		# @returns [String | Nil] The contents of the stream up until the pattern, or nil if the pattern was not found.
		def discard_until(pattern, offset = 0, limit: nil)
			if index = index_of(pattern, offset, limit, true)
				@read_buffer.freeze
				
				if limit and index >= limit
					@read_buffer = @read_buffer.byteslice(limit, @read_buffer.bytesize)
					
					return nil
				end
				
				matched = @read_buffer.byteslice(0, index+pattern.bytesize)
				@read_buffer = @read_buffer.byteslice(index+pattern.bytesize, @read_buffer.bytesize)
				
				return matched
			end
		end
		
		# Peek at data in the buffer without consuming it.
		# @parameter size [Integer | Nil] The number of bytes to peek at. If nil, peek at all available data.
		# @returns [String] The data in the buffer without consuming it.
		def peek(size = nil)
			if size
				until @done or @read_buffer.bytesize >= size
					# Compute the amount of data we need to read from the underlying stream:
					read_size = size - @read_buffer.bytesize
					
					# Don't read less than @minimum_read_size to avoid lots of small reads:
					fill_read_buffer(read_size > @minimum_read_size ? read_size : @minimum_read_size)
				end
				return @read_buffer[..([size, @read_buffer.size].min - 1)]
			end
			until (block_given? && yield(@read_buffer)) or @done
				fill_read_buffer
			end
			return @read_buffer
		end
		
		# Read a line from the stream, similar to IO#gets.
		# @parameter separator [String] The line separator to search for.
		# @parameter limit [Integer | Nil] The maximum number of bytes to read.
		# @parameter chomp [Boolean] Whether to remove the separator from the returned line.
		# @returns [String | Nil] The line read from the stream, or nil if at end of stream.
		def gets(separator = $/, limit = nil, chomp: false)
			# Compatibility with IO#gets:
			if separator.is_a?(Integer)
				limit = separator
				separator = $/
			end
			
			# We don't want to split in the middle of the separator, so we subtract the size of the separator from the start of the search:
			split_offset = separator.bytesize - 1
			
			offset = 0
			
			until index = @read_buffer.index(separator, offset)
				offset = @read_buffer.bytesize - split_offset
				offset = 0 if offset < 0
				
				# If a limit was given, and the offset is beyond the limit, we should return up to the limit:
				if limit and offset >= limit
					# As we didn't find the separator, there is nothing to chomp either.
					return consume_read_buffer(limit)
				end
				
				# If we can't read any more data, we should return what we have:
				return consume_read_buffer unless fill_read_buffer
			end
			
			# If the index of the separator was beyond the limit:
			if limit and index >= limit
				# Return up to the limit:
				return consume_read_buffer(limit)
			end
			
			# Freeze the read buffer, as this enables us to use byteslice without generating a hidden copy:
			@read_buffer.freeze
			
			line = @read_buffer.byteslice(0, index+(chomp ? 0 : separator.bytesize))
			@read_buffer = @read_buffer.byteslice(index+separator.bytesize, @read_buffer.bytesize)
			
			return line
		end
		
		# Determines if the stream has consumed all available data. May block if the stream is not readable.
		# See {readable?} for a non-blocking alternative.
		#
		# @returns [Boolean] If the stream is at end of file, which means there is no more data to be read.
		def done?
			if !@read_buffer.empty?
				return false
			elsif @done
				return true
			else
				return !self.fill_read_buffer
			end
		end
		
		alias eof? done?
		
		# Mark the stream as done and raise `EOFError`.
		def done!
			@read_buffer.clear
			@done = true
			
			raise EOFError
		end
		
		alias eof! done!
		
		# Whether there is a chance that a read operation will succeed or not.
		# @returns [Boolean] If the stream is readable, i.e. a `read` operation has a chance of success.
		def readable?
			# If we are at the end of the file, we can't read any more data:
			if @done
				return false
			end
			
			# If the read buffer is not empty, we can read more data:
			if !@read_buffer.empty?
				return true
			end
			
			# If the underlying stream is readable, we can read more data:
			return !closed?
		end
		
		# Close the read end of the stream.
		def close_read
		end
		
		private
		
		# Fills the buffer from the underlying stream.
		def fill_read_buffer(size = @minimum_read_size)
			# Limit the read size to avoid exceeding SSIZE_MAX and to manage memory usage.
			# Very large reads can also hurt interactive performance by blocking for too long.
			if size > @maximum_read_size
				size = @maximum_read_size
			end
			
			# This effectively ties the input and output stream together.
			flush
			
			if @read_buffer.empty?
				if sysread(size, @read_buffer)
					# Console.info(self, name: "read") {@read_buffer.inspect}
					return true
				end
			else
				if chunk = sysread(size, @input_buffer)
					@read_buffer << chunk
					# Console.info(self, name: "read") {@read_buffer.inspect}
					
					return true
				end
			end
			
			# else for both cases above:
			@done = true
			return false
		end
		
		# Consumes at most `size` bytes from the buffer.
		# @parameter size [Integer | Nil] The amount of data to consume. If nil, consume entire buffer.
		def consume_read_buffer(size = nil)
			# If we are at done, and the read buffer is empty, we can't consume anything.
			return nil if @done && @read_buffer.empty?
			
			result = nil
			
			if size.nil? or size >= @read_buffer.bytesize
				# Consume the entire read buffer:
				result = @read_buffer
				@read_buffer = StringBuffer.new
			else
				# This approach uses more memory.
				# result = @read_buffer.slice!(0, size)
				
				# We know that we are not going to reuse the original buffer.
				# But byteslice will generate a hidden copy. So let's freeze it first:
				@read_buffer.freeze
				
				result = @read_buffer.byteslice(0, size)
				@read_buffer = @read_buffer.byteslice(size, @read_buffer.bytesize)
			end
			
			return result
		end
	end
end
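A rough sketch of how `Readable` is intended to be composed (illustrative only — `StringSource` and its `sysread`/`flush` are assumptions, not part of this package): a class supplies `sysread(size, buffer)` that fills `buffer` and returns it, or `nil` at end of stream, plus a `flush` for `fill_read_buffer` to call, and in return gets the buffered `read`, `gets` and `read_until` behaviour.

```ruby
require "io/stream/readable"

# Hypothetical adapter that serves data from an in-memory string.
class StringSource
	include IO::Stream::Readable
	
	def initialize(data, **options)
		@data = data.dup
		super(**options)
	end
	
	# Called by Readable#fill_read_buffer; return nil once the data is exhausted.
	def sysread(size, buffer)
		return nil if @data.empty?
		buffer.replace(@data.slice!(0, size))
		buffer
	end
	
	# Readable#fill_read_buffer flushes pending writes first; a read-only source has none.
	def flush
	end
end

source = StringSource.new("GET / HTTP/1.1\r\nHost: example.com\r\n\r\nbody")
source.read_until("\r\n")        # => "GET / HTTP/1.1"
source.gets("\r\n", chomp: true) # => "Host: example.com"
source.read                      # => "\r\nbody" (the blank line plus the body)
```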
data/lib/io/stream/shim/buffered.rb
CHANGED
@@ -5,10 +5,14 @@
 
 unless IO.method_defined?(:buffered?, false)
 	class IO
+		# Check if the IO is buffered.
+		# @returns [Boolean] True if the IO is buffered (not synchronized).
 		def buffered?
 			return !self.sync
 		end
 		
+		# Set the buffered state of the IO.
+		# @parameter value [Boolean] True to enable buffering, false to disable.
 		def buffered=(value)
 			self.sync = !value
 		end
@@ -18,13 +22,18 @@ end
 require "socket"
 
 unless BasicSocket.method_defined?(:buffered?, false)
+	# Socket extensions for buffering support.
 	class BasicSocket
+		# Check if this socket uses TCP protocol.
+		# @returns [Boolean] True if the socket is TCP over IPv4 or IPv6.
 		def ip_protocol_tcp?
 			local_address = self.local_address
 			
 			return (local_address.afamily == ::Socket::AF_INET || local_address.afamily == ::Socket::AF_INET6) && local_address.socktype == ::Socket::SOCK_STREAM
 		end
 		
+		# Check if the socket is buffered.
+		# @returns [Boolean] True if the socket is buffered.
 		def buffered?
 			return false unless super
 			
@@ -35,6 +44,8 @@ unless BasicSocket.method_defined?(:buffered?, false)
 			end
 		end
 		
+		# Set the buffered state of the socket.
+		# @parameter value [Boolean] True to enable buffering, false to disable.
 		def buffered=(value)
 			super
 			
@@ -53,11 +64,16 @@ end
 require "stringio"
 
 unless StringIO.method_defined?(:buffered?, false)
+	# StringIO extensions for buffering support.
 	class StringIO
+		# Check if the StringIO is buffered.
+		# @returns [Boolean] True if the StringIO is buffered (not synchronized).
 		def buffered?
 			return !self.sync
 		end
 		
+		# Set the buffered state of the StringIO.
+		# @parameter value [Boolean] True to enable buffering, false to disable.
 		def buffered=(value)
 			self.sync = !value
 		end
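On plain `IO` objects, the shim above defines `buffered?` as `!sync` and `buffered=` as the inverse assignment; a small usage illustration (assumed usage, not part of the diff):

```ruby
require "io/stream/shim/buffered"

reader, writer = IO.pipe

writer.sync = true
writer.buffered?     # => false, because buffered? is defined as !sync
writer.buffered = true
writer.sync          # => false, because buffered= assigns sync = !value
```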
data/lib/io/stream/shim/readable.rb
CHANGED
@@ -5,6 +5,8 @@
 
 class IO
 	unless method_defined?(:readable?, false)
+		# Check if the IO is readable.
+		# @returns [Boolean] True if the IO is readable (not closed).
 		def readable?
 			# Do not call `eof?` here as it is not concurrency-safe and it can block.
 			!closed?
@@ -16,6 +18,8 @@ require "socket"
 
 class BasicSocket
 	unless method_defined?(:readable?, false)
+		# Check if the socket is readable.
+		# @returns [Boolean] True if the socket is readable.
 		def readable?
 			# If we can wait for the socket to become readable, we know that the socket may still be open.
 			result = self.recv_nonblock(1, ::Socket::MSG_PEEK, exception: false)
@@ -36,6 +40,8 @@ require "stringio"
 
 class StringIO
 	unless method_defined?(:readable?, false)
+		# Check if the StringIO is readable.
+		# @returns [Boolean] True if the StringIO is readable (not at EOF).
 		def readable?
 			!eof?
 		end
@@ -46,6 +52,8 @@ require "openssl"
 
 class OpenSSL::SSL::SSLSocket
 	unless method_defined?(:readable?, false)
+		# Check if the SSL socket is readable.
+		# @returns [Boolean] True if the SSL socket is readable.
 		def readable?
 			to_io.readable?
 		end
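The `readable?` shims give a consistent, non-blocking answer to "might a read succeed?" across IO types; for example (assumed usage):

```ruby
require "io/stream/shim/readable"
require "stringio"

reader, writer = IO.pipe
reader.readable?    # => true, a plain IO is readable while it is not closed
reader.close
reader.readable?    # => false

buffer = StringIO.new("data")
buffer.readable?    # => true, defined as !eof?
buffer.read
buffer.readable?    # => false once everything has been consumed
```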
data/lib/io/stream/string_buffer.rb
CHANGED
@@ -4,15 +4,20 @@
 # Copyright, 2023-2024, by Samuel Williams.
 
 module IO::Stream
+	# A specialized string buffer for binary data with automatic encoding handling.
 	class StringBuffer < String
 		BINARY = Encoding::BINARY
 		
+		# Initialize a new string buffer with binary encoding.
 		def initialize
 			super
 			
 			force_encoding(BINARY)
 		end
 		
+		# Append a string to the buffer, converting to binary encoding if necessary.
+		# @parameter string [String] The string to append.
+		# @returns [StringBuffer] Self for method chaining.
 		def << string
 			if string.encoding == BINARY
 				super(string)
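`StringBuffer` is what the read and write buffers above are built on: it always stays in binary encoding. A quick illustration of the binary append path shown in this hunk (assumed usage):

```ruby
require "io/stream/string_buffer"

buffer = IO::Stream::StringBuffer.new
buffer.encoding        # => #<Encoding:ASCII-8BIT> (binary)

# An already-binary string is appended directly via String#<<:
buffer << "\x00\x01data".b
buffer.bytesize        # => 6
buffer.encoding        # => still binary
```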
data/lib/io/stream/version.rb
CHANGED
data/lib/io/stream/writable.rb
ADDED
@@ -0,0 +1,92 @@

# frozen_string_literal: true

# Released under the MIT License.
# Copyright, 2023-2024, by Samuel Williams.

require_relative "readable"

module IO::Stream
	# The minimum write size before flushing. Defaults to 64KB.
	MINIMUM_WRITE_SIZE = ENV.fetch("IO_STREAM_MINIMUM_WRITE_SIZE", BLOCK_SIZE).to_i
	
	# A module providing writable stream functionality.
	#
	# You must implement the `syswrite` method to write data to the underlying IO.
	module Writable
		# Initialize writable stream functionality.
		# @parameter minimum_write_size [Integer] The minimum buffer size before flushing.
		def initialize(minimum_write_size: MINIMUM_WRITE_SIZE, **, &block)
			@writing = ::Thread::Mutex.new
			@write_buffer = StringBuffer.new
			@minimum_write_size = minimum_write_size
			
			super(**, &block) if defined?(super)
		end
		
		attr_accessor :minimum_write_size
		
		# Flushes buffered data to the stream.
		def flush
			return if @write_buffer.empty?
			
			@writing.synchronize do
				self.drain(@write_buffer)
			end
		end
		
		# Writes `string` to the buffer. When the buffer is full or #sync is true the
		# buffer is flushed to the underlying `io`.
		# @parameter string [String] the string to write to the buffer.
		# @returns [Integer] the number of bytes appended to the buffer.
		def write(string, flush: false)
			@writing.synchronize do
				@write_buffer << string
				
				flush |= (@write_buffer.bytesize >= @minimum_write_size)
				
				if flush
					self.drain(@write_buffer)
				end
			end
			
			return string.bytesize
		end
		
		# Appends `string` to the buffer and returns self for method chaining.
		# @parameter string [String] the string to write to the stream.
		def <<(string)
			write(string)
			
			return self
		end
		
		# Write arguments to the stream followed by a separator and flush immediately.
		# @parameter arguments [Array] The arguments to write to the stream.
		# @parameter separator [String] The separator to append after each argument.
		def puts(*arguments, separator: $/)
			return if arguments.empty?
			
			@writing.synchronize do
				arguments.each do |argument|
					@write_buffer << argument << separator
				end
				
				self.drain(@write_buffer)
			end
		end
		
		# Close the write end of the stream by flushing any remaining data.
		def close_write
			flush
		end
		
		private def drain(buffer)
			begin
				syswrite(buffer)
			ensure
				# If the write operation fails, we still need to clear this buffer, and the data is essentially lost.
				buffer.clear
			end
		end
	end
end
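A matching sketch for `Writable` (illustrative only — `ChunkSink` is a made-up name): the including class implements `syswrite(buffer)`, and the module provides the buffering, `flush`, `puts` and the `minimum_write_size` threshold.

```ruby
require "io/stream/writable"

# Hypothetical sink that records each drained chunk.
class ChunkSink
	include IO::Stream::Writable
	
	attr_reader :chunks
	
	def initialize(**options)
		@chunks = []
		super(**options)
	end
	
	# Writable#drain hands the whole internal buffer to syswrite, then clears it.
	def syswrite(buffer)
		@chunks << buffer.dup
	end
end

sink = ChunkSink.new(minimum_write_size: 8)
sink.write("ab")        # 2 bytes: stays buffered, below the 8 byte threshold
sink.write("cdefghij")  # crosses the threshold, so the buffer is drained
sink.puts("done")       # puts always drains
sink.chunks             # => ["abcdefghij", "done\n"]
```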
data/lib/io/stream.rb
CHANGED
@@ -6,10 +6,15 @@
 require_relative "stream/version"
 require_relative "stream/buffered"
 
+# @namespace
 class IO
+	# @namespace
 	module Stream
 	end
 	
+	# Convert any IO-like object into a buffered stream.
+	# @parameter io [IO] The IO object to wrap.
+	# @returns [IO::Stream::Buffered] A buffered stream wrapper.
 	def self.Stream(io)
 		if io.is_a?(Stream::Buffered)
 			io
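The `IO.Stream()` helper above wraps an arbitrary IO in `IO::Stream::Buffered` and returns already-wrapped streams unchanged. A usage sketch, assuming `Buffered` exposes the `Readable`/`Writable` API described in the release notes below:

```ruby
require "io/stream"

reader, writer = IO.pipe
writer.write("hello\nworld\n")
writer.close

stream = IO.Stream(reader)          # => an IO::Stream::Buffered wrapping the pipe
IO.Stream(stream).equal?(stream)    # => true, per the is_a?(Stream::Buffered) check above

stream.gets(chomp: true)            # => "hello"
stream.read                         # => "world\n"
```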
data/license.md
CHANGED
data/readme.md
CHANGED
@@ -8,6 +8,64 @@ Provide a buffered stream implementation for Ruby, independent of the underlying
 
 Please see the [project documentation](https://socketry.github.io/io-stream) for more details.
 
+## Releases
+
+Please see the [project releases](https://socketry.github.io/io-stream/releases/index) for all releases.
+
+### v0.8.0
+
+- On Ruby v3.3+, use `IO#write` directly instead of `IO#write_nonblock`, for better performance.
+- Introduce support for `Readable#discard_until` method to discard data until a specific pattern is found.
+
+### v0.7.0
+
+- Split stream functionality into separate `Readable` and `Writable` modules for better modularity and composition.
+- Remove unused timeout shim functionality.
+- 100% documentation coverage.
+
+### v0.6.1
+
+- Fix compatibility with Ruby v3.3.0 - v3.3.6 where broken `@io.close` could hang.
+
+### v0.6.0
+
+- Improve compatibility of `gets` implementation to better match Ruby's IO\#gets behavior.
+
+### v0.5.0
+
+- Add support for `read_until(limit:)` parameter to limit the amount of data read.
+- Minor documentation improvements.
+
+### v0.4.3
+
+- Add comprehensive tests for `buffered?` method on `SSLSocket`.
+- Ensure TLS connections have correct buffering behavior.
+- Improve test suite organization and readability.
+
+### v0.4.2
+
+- Add external test suite for better integration testing.
+- Update dependencies and improve code style with RuboCop.
+
+### v0.4.1
+
+- Add compatibility fix for `SSLSocket` raising `EBADF` errors.
+- Fix `IO#close` hang issue in certain scenarios.
+- Add `#to_io` method to `IO::Stream::Buffered` for better compatibility.
+- Modernize gem structure and dependencies.
+
+### v0.4.0
+
+- Add convenient `IO.Stream()` constructor method for creating buffered streams.
+
+### v0.3.0
+
+- Add support for timeouts with compatibility shims for various IO types.
+
+## See Also
+
+- [async-io](https://github.com/socketry/async-io) — Where this implementation originally came from.
+
 ## Contributing
 
 We welcome contributions to this project.
@@ -25,7 +83,3 @@ In order to protect users of this project, we require all contributors to comply
 ### Community Guidelines
 
 This project is best served by a collaborative and respectful environment. Treat each other professionally, respect differing viewpoints, and engage constructively. Harassment, discrimination, or harmful behavior is not tolerated. Communicate clearly, listen actively, and support one another. If any issues arise, please inform the project maintainers.
-
-## See Also
-
-- [async-io](https://github.com/socketry/async-io) — Where this implementation originally came from.