messagepack 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.adoc +773 -0
- data/Rakefile +8 -0
- data/docs/Gemfile +7 -0
- data/docs/README.md +85 -0
- data/docs/_config.yml +137 -0
- data/docs/_guides/index.adoc +14 -0
- data/docs/_guides/io-streaming.adoc +226 -0
- data/docs/_guides/migration.adoc +218 -0
- data/docs/_guides/performance.adoc +189 -0
- data/docs/_pages/buffer.adoc +85 -0
- data/docs/_pages/extension-types.adoc +117 -0
- data/docs/_pages/factory-pattern.adoc +115 -0
- data/docs/_pages/index.adoc +20 -0
- data/docs/_pages/serialization.adoc +159 -0
- data/docs/_pages/streaming.adoc +97 -0
- data/docs/_pages/symbol-extension.adoc +69 -0
- data/docs/_pages/timestamp-extension.adoc +88 -0
- data/docs/_references/api.adoc +360 -0
- data/docs/_references/extensions.adoc +198 -0
- data/docs/_references/format.adoc +301 -0
- data/docs/_references/index.adoc +14 -0
- data/docs/_tutorials/extension-types.adoc +170 -0
- data/docs/_tutorials/getting-started.adoc +165 -0
- data/docs/_tutorials/index.adoc +14 -0
- data/docs/_tutorials/thread-safety.adoc +157 -0
- data/docs/index.adoc +77 -0
- data/docs/lychee.toml +42 -0
- data/lib/messagepack/bigint.rb +131 -0
- data/lib/messagepack/buffer.rb +534 -0
- data/lib/messagepack/core_ext.rb +34 -0
- data/lib/messagepack/error.rb +24 -0
- data/lib/messagepack/extensions/base.rb +55 -0
- data/lib/messagepack/extensions/registry.rb +154 -0
- data/lib/messagepack/extensions/symbol.rb +38 -0
- data/lib/messagepack/extensions/timestamp.rb +110 -0
- data/lib/messagepack/extensions/value.rb +38 -0
- data/lib/messagepack/factory.rb +349 -0
- data/lib/messagepack/format.rb +99 -0
- data/lib/messagepack/packer.rb +702 -0
- data/lib/messagepack/symbol.rb +4 -0
- data/lib/messagepack/time.rb +29 -0
- data/lib/messagepack/timestamp.rb +4 -0
- data/lib/messagepack/unpacker.rb +1418 -0
- data/lib/messagepack/version.rb +5 -0
- data/lib/messagepack.rb +81 -0
- metadata +94 -0
data/lib/messagepack/buffer.rb
@@ -0,0 +1,534 @@
# frozen_string_literal: true

require_relative 'format'

module Messagepack
  # BinaryBuffer manages binary data for reading and writing MessagePack data.
  #
  # This class provides:
  # - Chunk-based storage for efficient write operations
  # - Streaming support via IO integration
  # - Big-endian binary primitives for MessagePack format
  #
  class BinaryBuffer
    DEFAULT_IO_BUFFER_SIZE = 32 * 1024
    # Coalescing threshold: chunks smaller than this will be merged with the previous chunk
    # This reduces the number of chunks and improves to_s performance
    COALESCE_THRESHOLD = 512

    attr_reader :io

    # Disable dup and clone as they have weird semantics
    undef_method :dup
    undef_method :clone

    def initialize(io = nil, io_buffer_size: DEFAULT_IO_BUFFER_SIZE)
      @io = io
      @io_buffer_size = io_buffer_size
      @io_buffer = nil
      @chunks = []
      @position = 0
      @length = 0
      @saved_position = nil # For save/restore position
    end

    # Write a single byte
    def write_byte(byte)
      append_to_last_chunk((byte & 0xFF).chr.force_encoding(Encoding::BINARY))
    end

    # Write multiple bytes
    def write_bytes(bytes)
      return self if bytes.nil? || bytes.empty?

      append_to_last_chunk(bytes.dup.force_encoding(Encoding::BINARY))
    end

    # Append data to the last chunk if it's small enough, otherwise create a new chunk
    # This coalesces small chunks to reduce the total number of chunks
    def append_to_last_chunk(data)
      data_bytesize = data.bytesize
      data.force_encoding(Encoding::BINARY)

      # Check if we should coalesce with the last chunk
      if !@chunks.empty? && should_coalesce?(data_bytesize)
        # Append to the last chunk instead of creating a new one
        @chunks[-1] << data
        @length += data_bytesize
      else
        # Create a new chunk
        @chunks << data
        @length += data_bytesize
      end

      self
    end

    # Check if we should coalesce new data with the last chunk
    # Coalesce if: both the new data AND the last chunk are below the threshold
    def should_coalesce?(new_data_bytesize)
      last_chunk_bytesize = @chunks.last.bytesize

      # Coalesce if both are below the threshold
      last_chunk_bytesize < COALESCE_THRESHOLD && new_data_bytesize < COALESCE_THRESHOLD
    end

    # Write 16-bit unsigned big-endian integer
    def write_big_endian_uint16(value)
      append_to_last_chunk([value].pack('n'))
    end

    # Write 32-bit unsigned big-endian integer
    def write_big_endian_uint32(value)
      append_to_last_chunk([value].pack('N'))
    end

    # Write 64-bit unsigned big-endian integer
    def write_big_endian_uint64(value)
      append_to_last_chunk([value].pack('Q>'))
    end

    # Write 64-bit signed big-endian integer
    def write_big_endian_int64(value)
      append_to_last_chunk([value].pack('q>'))
    end

    # Write 32-bit float (IEEE 754 binary32, big-endian)
    def write_float32(value)
      append_to_last_chunk([value].pack('g'))
    end

    # Write 64-bit float (IEEE 754 binary64, big-endian)
    def write_float64(value)
      append_to_last_chunk([value].pack('G'))
    end

    # Write data to buffer and return bytes written
    def write(data)
      return 0 if data.nil? || data.empty?

      append_to_last_chunk(data.dup.force_encoding(Encoding::BINARY))
      data.bytesize
    end

    # Read n bytes as a string
    # Returns nil if buffer is empty
    # Returns all available data if requested more than available
    def read(n = nil)
      return "" if n == 0 # Special case: read(0) returns empty string

      if n.nil?
        # Read all available data
        # Try to read from IO if buffer is empty
        if @chunks.empty? && @io
          # Read all data from IO when called with no arguments
          while @io
            data = @io.read(@io_buffer_size)
            break unless data # EOF
            feed(data)
          end
        elsif @io
          # Continue reading from IO until all data is consumed
          # (when called with no arguments, read all data from IO)
          while @io
            data = @io.read(@io_buffer_size)
            break unless data # EOF
            feed(data)
          end
        end

        # Return empty string (not nil) for empty buffers
        return "" if @chunks.empty? || @position >= @length

        result = String.new(capacity: @length - @position)
        while @position < @length
          chunk_index, offset = chunk_and_offset(@position)
          chunk = @chunks[chunk_index]
          bytes_to_read = chunk.bytesize - offset
          result << chunk.byteslice(offset, bytes_to_read)
          @position += bytes_to_read
        end
        return result.empty? ? nil : result
      end

      return nil if @position >= @length && !@io
      ensure_readable(n)
      available = @length - @position

      if available == 0
        return nil
      elsif n > available
        # Return all available data if more than available requested
        read_bytes_internal(available)
      else
        read_bytes_internal(n)
      end
    end

    # Read n bytes as a string
    # Raises EOFError if not enough data available
    def read_all(n = nil)
      return "" if n == 0 # Special case: read_all(0) returns empty string

      if n.nil?
        # Read all available data
        # Try to read from IO if buffer is empty
        if @chunks.empty? && @io
          # Read all data from IO when called with no arguments
          while @io
            data = @io.read(@io_buffer_size)
            break unless data # EOF
            feed(data)
          end
        elsif @io
          # Continue reading from IO until all data is consumed
          # (when called with no arguments, read all data from IO)
          while @io
            data = @io.read(@io_buffer_size)
            break unless data # EOF
            feed(data)
          end
        end

        # Return empty string if buffer is empty (not nil)
        return "" if @chunks.empty? || @position >= @length

        result = String.new(capacity: @length - @position)
        while @position < @length
          chunk_index, offset = chunk_and_offset(@position)
          chunk = @chunks[chunk_index]
          bytes_to_read = chunk.bytesize - offset
          result << chunk.byteslice(offset, bytes_to_read)
          @position += bytes_to_read
        end
        return result
      end

      ensure_readable(n)
      available = @length - @position

      if n > available
        raise EOFError, "not enough data: requested #{n} but only #{available} available"
      end

      read_bytes_internal(n)
    end

    # Read a single byte, or nil if no data available
    def read_byte
      ensure_readable(1)
      return nil if @position >= @length

      chunk_index, offset = chunk_and_offset(@position)
      chunk = @chunks[chunk_index]
      byte = chunk.getbyte(offset)
      @position += 1
      byte
    end

    # Read n bytes as a string (internal method)
    def read_bytes_internal(n)
      ensure_readable(n)
      return nil if n > @length - @position

      result = String.new(capacity: n)
      remaining = n

      while remaining > 0
        chunk_index, offset = chunk_and_offset(@position)
        chunk = @chunks[chunk_index]
        available = chunk.bytesize - offset
        to_read = [remaining, available].min

        result << chunk.byteslice(offset, to_read)
        @position += to_read
        remaining -= to_read
      end

      result
    end

    # Read n bytes as a string (backward compatibility alias)
    def read_bytes(n)
      read_bytes_internal(n)
    end

    # Read 16-bit unsigned big-endian integer
    def read_big_endian_uint16
      data = read_bytes(2)
      return nil if data.nil?

      data.unpack1('n')
    end

    # Read 32-bit unsigned big-endian integer
    def read_big_endian_uint32
      data = read_bytes(4)
      return nil if data.nil?

      data.unpack1('N')
    end

    # Read 64-bit unsigned big-endian integer
    def read_big_endian_uint64
      data = read_bytes(8)
      return nil if data.nil?

      data.unpack1('Q>')
    end

    # Read 64-bit signed big-endian integer
    def read_big_endian_int64
      data = read_bytes(8)
      return nil if data.nil?

      data.unpack1('q>')
    end

    # Read 32-bit float (IEEE 754 binary32, big-endian)
    def read_float32
      data = read_bytes(4)
      return nil if data.nil?

      data.unpack1('g')
    end

    # Read 64-bit float (IEEE 754 binary64, big-endian)
    def read_float64
      data = read_bytes(8)
      return nil if data.nil?

      data.unpack1('G')
    end

    # Look at next byte without consuming it
    def peek_byte
      return nil if @position >= @length

      chunk_index, offset = chunk_and_offset(@position)
      @chunks[chunk_index].getbyte(offset)
    end

    # Skip n bytes, returns bytes actually skipped
    # If more bytes requested than available, skips all available
    def skip(n)
      return 0 if n == 0 # Special case: skip(0) returns 0
      ensure_readable(n)
      available = @length - @position

      actual = [n, available].min
      @position += actual
      actual
    end

    # Skip n bytes, raises EOFError if not enough data
    def skip_all(n)
      return self if n == 0 # Special case: skip_all(0) returns self
      ensure_readable(n)
      available = @length - @position

      if n > available
        raise EOFError, "not enough data: requested #{n} but only #{available} available"
      end

      @position += n
      self
    end

    # Skip n bytes (internal method, returns self)
    def skip_bytes(n)
      @position += n
      self
    end

    # Check if bytes are available for reading
    def bytes_available?
      @position < @length
    end

    # Get the number of bytes available for reading
    def bytes_available
      @length - @position
    end

    # Check if buffer is empty
    def empty?
      @length - @position == 0
    end

    # Check if at EOF (no more data and no IO to read from)
    def eof?
      @position >= @length && !@io
    end

    # Convert all buffer data to a single string (does not consume data)
    def to_s
      # Fast-path: if position is 0, we can join all chunks directly
      if @position == 0
        result = @chunks.empty? ? String.new : @chunks.join
        result.force_encoding(Encoding::BINARY)
        return result
      end

      # General case: skip bytes before @position
      result = String.new(capacity: @length)
      offset = 0
      @chunks.each do |chunk|
        chunk_bytes = chunk.bytesize
        # Chunk lies entirely before the read position: skip it, but still
        # advance the running offset so later chunks are sliced correctly
        if offset + chunk_bytes <= @position
          offset += chunk_bytes
          next
        end

        start_offset = [@position - offset, 0].max
        result << chunk.byteslice(start_offset, chunk_bytes - start_offset)
        offset += chunk_bytes
        break if offset >= @length
      end
      result.force_encoding(Encoding::BINARY)
    end

    # Convert to array of chunks
    def to_a
      @chunks.dup
    end

    # Clear buffer to empty state
    def clear
      @chunks.clear
      @position = 0
      @length = 0
      @io_buffer = nil
      self
    end

    # Reset buffer to empty state
    def reset
      @chunks.clear
      @position = 0
      @length = 0
      @io_buffer = nil
      @saved_position = nil
      self
    end

    # Get current size of buffer (available bytes)
    def size
      @length - @position
    end

    # Write buffer contents to an IO object
    def write_to(io)
      return 0 if @length - @position == 0 # Nothing to write

      bytes_written = 0
      remaining = @length - @position

      chunk_index, offset = chunk_and_offset(@position)

      # Write each chunk from current position
      (chunk_index...@chunks.length).each do |idx|
        start_offset = (idx == chunk_index) ? offset : 0
        chunk = @chunks[idx]
        data = chunk.byteslice(start_offset, chunk.bytesize - start_offset)
        next if data.empty?

        io.write(data)
        bytes_written += data.bytesize
      end

      @position = @length
      bytes_written
    end

    # Flush buffer contents to the internal IO (if present)
    def flush
      return self unless @io

      data = to_s
      @io.write(data)
      reset
      self
    end

    # Feed more data for streaming (typically from IO)
    def feed(data)
      return self if data.nil? || data.empty?

      @chunks << data.dup.force_encoding(Encoding::BINARY)
      @length += data.bytesize
      self
    end

    alias << feed

    # Close the IO if present
    def close
      @io.close if @io && !@io.closed?
      @io = nil
      self
    end

    # Save the current read position for later restoration
    # This is useful for peek-ahead operations where we might need to roll back
    def save_position
      @saved_position = @position
      self
    end

    # Restore a previously saved position
    # Returns true if position was restored, false if no position was saved
    def restore_position
      return false if @saved_position.nil?
      @position = @saved_position
      @saved_position = nil
      true
    end

    # Discard a saved position without restoring
    def discard_saved_position
      @saved_position = nil
      self
    end

    # Check if a position is currently saved
    def position_saved?
      !@saved_position.nil?
    end

    private

    # Ensure n bytes are available for reading
    def ensure_readable(n)
      return unless @io

      while @length - @position < n
        break unless feed_from_io
      end
    end

    # Read from @io and add to buffer
    def feed_from_io
      @io_buffer ||= String.new(capacity: @io_buffer_size)
      @io_buffer.clear

      data = @io.read(@io_buffer_size)
      return false if data.nil?

      feed(data)
      true
    end

    # Find chunk index and offset for a given position
    def chunk_and_offset(pos)
      offset = 0
      @chunks.each_with_index do |chunk, index|
        chunk_size = chunk.bytesize
        if pos < offset + chunk_size
          return [index, pos - offset]
        end
        offset += chunk_size
      end

      # Position is beyond all chunks (should not happen with proper ensure_readable)
      [@chunks.length - 1, @chunks.last.bytesize - 1]
    end
  end
end
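BinaryBuffer is the byte queue the packer and unpacker are built on: writes append binary chunks (coalescing small ones below COALESCE_THRESHOLD), reads walk the chunks and refill from the wrapped IO on demand. A minimal usage sketch, using only methods shown in this hunk; it assumes that require 'messagepack' (the lib/messagepack.rb entry point in the listing above) loads this class:

require 'stringio'
require 'messagepack'  # assumption: the gem's entry point requires buffer.rb

# In-memory round trip with the big-endian primitives defined above.
buf = Messagepack::BinaryBuffer.new
buf.write_byte(0xC3)
buf.write_big_endian_uint16(65_535)
buf.write_float64(1.5)
packed = buf.to_s            # peek at all bytes without consuming them

buf.read_byte                # => 195
buf.read_big_endian_uint16   # => 65535
buf.read_float64             # => 1.5

# Streaming: wrap an IO and reads are refilled from it on demand.
streamed = Messagepack::BinaryBuffer.new(StringIO.new(packed))
streamed.read_byte           # => 195
streamed.bytes_available     # bytes already pulled from the IO but not yet read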
data/lib/messagepack/core_ext.rb
@@ -0,0 +1,34 @@
# frozen_string_literal: true

require_relative 'packer'

module Messagepack
  # Core extensions for to_msgpack method.
  #
  # This module is included in core Ruby classes to provide
  # a convenient to_msgpack method.
  #
  module CoreExt
    def to_msgpack(packer_or_io = nil)
      if packer_or_io.is_a?(Packer)
        to_msgpack_with_packer(packer_or_io)
      elsif packer_or_io
        Messagepack.pack(self, packer_or_io)
      else
        Messagepack.pack(self)
      end
    end

    private

    def to_msgpack_with_packer(packer)
      packer.write(self)
      packer
    end
  end
end

# Include in core classes
[NilClass, TrueClass, FalseClass, Integer, Float, String, Array, Hash, Symbol].each do |klass|
  klass.include(Messagepack::CoreExt) unless klass.include?(Messagepack::CoreExt)
end
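A short sketch of what the core extension enables. The no-argument form is exactly Messagepack.pack(self) per the code above; the IO behaviour is an assumption, since Messagepack.pack's second argument is not shown in this diff:

require 'messagepack'  # assumed entry point from the file list

42.to_msgpack                                    # equivalent to Messagepack.pack(42)
{ "name" => "pen", "price" => 2.5 }.to_msgpack
[1, nil, true, :ok].to_msgpack                   # Symbol is patched as well

# A non-Packer argument is forwarded as Messagepack.pack(self, packer_or_io),
# presumably an IO to stream the packed bytes into.
require 'stringio'
io = StringIO.new
{ "a" => 1 }.to_msgpack(io)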
data/lib/messagepack/error.rb
@@ -0,0 +1,24 @@
# frozen_string_literal: true

module Messagepack
  # Base exception class for all MessagePack errors
  class Error < StandardError; end

  # Exception raised during unpacking/deserialization
  class UnpackError < Error; end
end

# Malformed MessagePack format data
class Messagepack::MalformedFormatError < Messagepack::UnpackError; end

# Stack overflow or underflow during unpacking
class Messagepack::StackError < Messagepack::UnpackError; end

# Type mismatch during unpacking
class Messagepack::TypeError < Messagepack::UnpackError; end

# Unexpected type during unpacking
class Messagepack::UnexpectedTypeError < Messagepack::TypeError; end

# Unknown extension type during unpacking
class Messagepack::UnknownExtTypeError < Messagepack::UnpackError; end
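Since every specific error inherits from Messagepack::UnpackError (which in turn inherits from Messagepack::Error), callers can rescue at whichever level they care about. A sketch of the intended usage, assuming a Messagepack.unpack entry point (not shown in this hunk) that raises these errors on bad input:

# Hypothetical bad payload: 0xC1 is the one byte the MessagePack spec never assigns.
payload = "\xC1".b

begin
  Messagepack.unpack(payload)
rescue Messagepack::MalformedFormatError => e
  warn "corrupt payload: #{e.message}"
rescue Messagepack::UnpackError
  # Unknown ext types, unexpected types and stack errors all funnel here.
  raise
end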
data/lib/messagepack/extensions/base.rb
@@ -0,0 +1,55 @@
# frozen_string_literal: true

module Messagepack
  module Extension
    # Base module for extension types.
    #
    # Extension types allow custom Ruby objects to be serialized
    # and deserialized with MessagePack.
    #
    # To create a custom extension:
    #
    #   class MyType
    #     include Messagepack::Extension::Base
    #
    #     attr_reader :data
    #
    #     def initialize(data)
    #       @data = data
    #     end
    #
    #     def to_msgpack_ext
    #       @data # Return binary string representation
    #     end
    #
    #     def self.from_msgpack_ext(data)
    #       new(data) # Reconstruct from binary string
    #     end
    #   end
    #
    #   # Register the extension
    #   MyType.register_as_extension(42)
    #
    module Base
      def self.included(base)
        base.extend(ClassMethods)
      end

      module ClassMethods
        # Register this class as an extension type.
        #
        # @param type_id [Integer] The extension type ID (-128 to 127)
        # @param recursive [Boolean] Whether packer/unpacker is passed to proc
        def register_as_extension(type_id, recursive: false)
          Messagepack::DefaultFactory.register_type(
            type_id,
            self,
            packer: ->(obj) { obj.to_msgpack_ext },
            unpacker: ->(data) { from_msgpack_ext(data) },
            recursive: recursive
          )
        end
      end
    end
  end
end
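The doc comment above already outlines the flow; here it is as a concrete sketch, assuming that types registered on Messagepack::DefaultFactory are honoured by Messagepack.pack and Messagepack.unpack (the factory and registry files appear in the listing but not in this diff excerpt):

require 'messagepack'  # assumed entry point

class Money
  include Messagepack::Extension::Base

  attr_reader :cents

  def initialize(cents)
    @cents = cents
  end

  def to_msgpack_ext
    [cents].pack('q>')          # application-chosen binary payload
  end

  def self.from_msgpack_ext(data)
    new(data.unpack1('q>'))
  end
end

Money.register_as_extension(42) # type ID fits in the single signed byte the format allows

price = Money.new(1_999)
restored = Messagepack.unpack(Messagepack.pack(price))
restored.cents                  # => 1999, assuming pack/unpack consult DefaultFactory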