riffola 0.0.1

checksums.yaml.gz ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 2be335b138c805162b56fb07b14d589a1c90f00fa0a6fed6c1cb9dba482151f4
+   data.tar.gz: c393edd260fc4b43a94feb327449e059a25f46a4189c2b42c1516029619703a8
+ SHA512:
+   metadata.gz: fc246a663893068f9c96fcdcbe842112a0c160b63f475b98ad357e8c775613f0f279eb413ad7aad135b9644d44570824c9980493390fc4821c8514a5dd9ffb14
+   data.tar.gz: dd1956dc6c17bffcb1e37e24d33abbb3c79553a600f11a6b26267434f7467bfe283034aad8b63c8f9bf55d3de60d3341500e926febaaf26c171a45fc9314ece1
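These published checksums can be re-verified locally. A minimal sketch (not part of the gem), assuming riffola-0.0.1.gem has been untarred first (a .gem file is a plain tar archive containing metadata.gz, data.tar.gz and checksums.yaml.gz):

  require 'digest'

  # Compare the unpacked archives against the published SHA256 values above
  {
    'metadata.gz' => '2be335b138c805162b56fb07b14d589a1c90f00fa0a6fed6c1cb9dba482151f4',
    'data.tar.gz' => 'c393edd260fc4b43a94feb327449e059a25f46a4189c2b42c1516029619703a8'
  }.each do |file, expected_sha256|
    actual = Digest::SHA256.file(file).hexdigest
    puts "#{file}: #{actual == expected_sha256 ? 'OK' : "MISMATCH (#{actual})"}"
  end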
lib/riffola.rb ADDED
@@ -0,0 +1,240 @@
+ module Riffola
+
+   # Return a list of RIFF chunk objects stored in a file, from a given offset.
+   # A chunk is defined as:
+   # * A name (4 bytes)
+   # * A size (4 bytes by default)
+   # * A header (0 bytes by default)
+   # * Data (Size bytes)
+   #
+   # Parameters::
+   # * *file* (String): The file name
+   # * *offset* (Integer): The offset to read the file from [default: 0]
+   # * *chunks_format* (Hash<String, Hash<Symbol,Object> >): Chunks format. See Chunk#initialize for details. [default = {}]
+   # * *max_size* (Integer): Maximum readable size (starting from the file's offset) to retrieve chunks, or nil to read till the end of the file [default: nil]
+   # * *parent_chunk* (Chunk or nil): Parent chunk, or nil if none [default: nil]
+   # * *warnings* (Boolean): Do we activate warnings? [default: true]
+   # * *debug* (Boolean): Do we activate debugging logs? [default: false]
+   # * Proc: Optional code called for each chunk being decoded
+   #   * Parameters::
+   #     * *chunk* (Chunk): Chunk being decoded
+   #   * Result::
+   #     * Boolean: Do we continue decoding chunks?
+   # Result::
+   # * Array<Chunk>: The chunks list
+   def self.read(file, offset: 0, chunks_format: {}, max_size: nil, parent_chunk: nil, warnings: true, debug: false)
+     chunks = []
+     max_size = File.size(file) - offset if max_size.nil?
+     chunk = max_size > 0 ? Chunk.new(file,
+       offset: offset,
+       chunks_format: chunks_format,
+       max_size: max_size,
+       parent_chunk: parent_chunk,
+       warnings: warnings,
+       debug: debug
+     ) : nil
+     while !chunk.nil?
+       chunks << chunk
+       break if block_given? && !yield(chunk)
+       chunk = chunk.next
+     end
+     chunks
+   end
+
+   class Chunk
+
+     # Define default chunk format properties
+     DEFAULT_CHUNK_FORMAT = {
+       size_length: 4,
+       header_size: 0,
+       data_size_correction: 0
+     }
+
+     # Constructor
+     #
+     # Parameters::
+     # * *file_name* (String): The file name
+     # * *offset* (Integer): The offset to read the file from [default: 0]
+     # * *chunks_format* (Hash<String, Hash<Symbol,Object> >): Format of a given set of chunk names (use '*' for all chunks). For each chunk name, the following can be specified: [default = {}]
+     #   * *size_length* (Integer): Number of bytes encoding the size of the chunk [default: 4]
+     #   * *header_size* (Integer): Size of the chunk's header [default: 0]
+     #   * *data_size_correction* (Integer): Correction to apply to the data size read [default: 0]
+     #   Each property can also be a Proc taking the file handle (positioned at the beginning of the chunk) and returning the real value:
+     #   * Parameters::
+     #     * *file* (IO): The file IO, positioned at the beginning of the chunk (at the name)
+     #   * Result::
+     #     * Object: The corresponding property value
+     # * *max_size* (Integer): Maximum readable size (starting from the file's offset) to retrieve chunks, or nil to read till the end of the file [default: nil]
+     # * *parent_chunk* (Chunk or nil): Parent chunk, or nil if none [default: nil]
+     # * *warnings* (Boolean): Do we activate warnings? [default: true]
+     # * *debug* (Boolean): Do we activate debugging logs? [default: false]
+     def initialize(file_name, offset: 0, chunks_format: {}, max_size: nil, parent_chunk: nil, warnings: true, debug: false)
+       @file_name = file_name
+       @offset = offset
+       @chunks_format = chunks_format
+       # Fill the default format if not present
+       @chunks_format['*'] = {} unless @chunks_format.key?('*')
+       DEFAULT_CHUNK_FORMAT.each do |format_property, default_property_value|
+         @chunks_format['*'][format_property] = default_property_value unless @chunks_format['*'].key?(format_property)
+       end
+       @parent_chunk = parent_chunk
+       @max_size = max_size.nil? ? File.size(@file_name) - @offset : max_size
+       @warnings = warnings
+       @debug = debug
+       # Get chunk format in instance variables named after the property
+       chunk_name = self.name
+       DEFAULT_CHUNK_FORMAT.keys.each do |format_property|
+         property_value = @chunks_format.key?(chunk_name) && @chunks_format[chunk_name].key?(format_property) ? @chunks_format[chunk_name][format_property] : @chunks_format['*'][format_property]
+         if property_value.is_a?(Proc)
+           File.open(@file_name) do |file|
+             file.seek(@offset)
+             property_value = property_value.call(file)
+           end
+         end
+         instance_variable_set(:"@#{format_property}", property_value)
+       end
+       puts "[DEBUG] - Read chunk from #{@file_name}@#{@offset}/#{@max_size}: #{chunk_name} (size length: #{@size_length}, header size: #{@header_size}, data size: #{self.size}/#{@max_size})" if @debug
+       # puts "[DEBUG] - Chunks format: #{@chunks_format.inspect}" if @debug
+     end
+
+     # Return the name of this chunk
+     #
+     # Result::
+     # * String: Chunk name
+     def name
+       chunk_name = nil
+       File.open(@file_name) do |file|
+         file.seek(@offset, IO::SEEK_CUR)
+         chunk_name = file.read(4)
+       end
+       puts "[WARNING] - Doesn't look like a valid chunk name: #{chunk_name}" if @warnings && chunk_name !~ /^[\w ]{4}$/
+       chunk_name
+     end
+
+     # Return the size of this chunk
+     #
+     # Result::
+     # * Integer: Chunk size in bytes
+     def size
+       chunk_size = nil
+       File.open(@file_name) do |file|
+         file.seek(@offset + 4, IO::SEEK_CUR)
+         case @size_length
+         when 4
+           chunk_size = file.read(@size_length).unpack('L').first
+         when 2
+           chunk_size = file.read(@size_length).unpack('S').first
+         else
+           raise "Can't decode size field of length #{@size_length}"
+         end
+       end
+       chunk_size + @data_size_correction
+     end
+
+     # Return the header of this chunk
+     #
+     # Result::
+     # * String: Header
+     def header
+       chunk_header = nil
+       File.open(@file_name) do |file|
+         file.seek(@offset + 4 + @size_length, IO::SEEK_CUR)
+         chunk_header = file.read(@header_size)
+       end
+       chunk_header
+     end
+
+     # Return the data of this chunk
+     #
+     # Result::
+     # * String: Data
+     def data
+       chunk_data = nil
+       data_size = self.size
+       complete_header_size = 4 + @size_length + @header_size
+       puts "[WARNING] - Data size is #{data_size} but the maximum readable size is #{@max_size} and the headers already take #{complete_header_size} bytes" if @warnings && complete_header_size + data_size > @max_size
+       File.open(@file_name) do |file|
+         file.seek(@offset + complete_header_size, IO::SEEK_CUR)
+         chunk_data = file.read(data_size)
+       end
+       chunk_data
+     end
+
+     # Return the parent chunk, or nil if none
+     #
+     # Result::
+     # * Chunk or nil: The parent chunk, or nil if none
+     def parent_chunk
+       @parent_chunk
+     end
+
+     # Return a string representation of this chunk
+     #
+     # Result::
+     # * String: String representation of this chunk
+     def to_s
+       "<Riffola-Chunk #{self.name} (#{@file_name}@#{@offset})>"
+     end
+
+     # Return the next chunk
+     #
+     # Result::
+     # * Chunk or nil: The next chunk, or nil if none
+     def next
+       complete_chunk_size = 4 + @size_length + @header_size + self.size
+       remaining_size = @max_size - complete_chunk_size
+       raise "#{self} - Invalid remaining size for next chunk: #{remaining_size}" if remaining_size < 0
+       remaining_size > 0 ? Chunk.new(@file_name,
+         offset: @offset + complete_chunk_size,
+         chunks_format: @chunks_format,
+         max_size: remaining_size,
+         parent_chunk: @parent_chunk,
+         warnings: @warnings,
+         debug: @debug
+       ) : nil
+     end
+
+     # Return this chunk's data as a list of sub-chunks
+     #
+     # Parameters::
+     # * *data_offset* (Integer): The offset to read the sub-chunks from this chunk's data [default: 0]
+     # * *sub_chunks_format* (Hash<String, Hash<Symbol,Object> >): Chunks format. See Chunk#initialize for details. [default = @chunks_format]
+     # * *warnings* (Boolean): Do we activate warnings? [default: @warnings]
+     # * *debug* (Boolean): Do we activate debugging logs? [default: @debug]
+     # * Proc: Optional code called for each chunk being decoded
+     #   * Parameters::
+     #     * *chunk* (Chunk): Chunk being decoded
+     #   * Result::
+     #     * Boolean: Do we continue decoding chunks?
+     # Result::
+     # * Array<Chunk>: List of sub-chunks
+     def sub_chunks(data_offset: 0, sub_chunk_size_length: @size_length, sub_chunk_header_size: @header_size, sub_chunks_format: @chunks_format, warnings: @warnings, debug: @debug, &callback)
+       data_size = self.size
+       data_size > 0 ? Riffola.read(@file_name,
+         offset: @offset + 4 + @size_length + @header_size + data_offset,
+         chunks_format: sub_chunks_format,
+         max_size: data_size - data_offset,
+         parent_chunk: self,
+         warnings: warnings,
+         debug: debug,
+         &callback
+       ) : []
+     end
+
+     # Compare Chunks
+     #
+     # Parameters::
+     # * *other* (Object): Other object
+     # Result::
+     # * Boolean: Are objects equal?
+     def ==(other)
+       other.is_a?(Chunk) &&
+         other.name == self.name &&
+         other.size == self.size &&
+         other.header == self.header &&
+         other.data == self.data
+     end
+
+   end
+
+ end
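As a quick illustration of how the API above fits together, here is a minimal usage sketch (not part of the gem's files). The file name, chunk names and format values are hypothetical; Riffola.read, Chunk#name, Chunk#size, Chunk#sub_chunks and the chunks_format keys are the ones defined above:

  require 'riffola'

  # Hypothetical layout: 'ODD ' chunks encode their size on 2 bytes only,
  # and the 'CONT' chunk carries nested sub-chunks in its data.
  chunks = Riffola.read(
    'my_file.bin',
    chunks_format: {
      'ODD ' => { size_length: 2 },
      # Any format property can also be a Proc receiving the file handle,
      # positioned at the beginning of the chunk (at its name)
      'HDRD' => { header_size: proc { |file| file.read(8); file.read(4).unpack('L').first } }
    }
  ) do |chunk|
    puts "Found chunk #{chunk.name} (#{chunk.size} bytes)"
    true # continue decoding
  end

  # Nested chunks are read from a parent chunk's data
  container = chunks.find { |chunk| chunk.name == 'CONT' }
  container.sub_chunks.each { |sub_chunk| puts "  #{sub_chunk}" } if container

Note that every accessor (name, size, header, data) re-opens the file and seeks to the chunk's offset, which is what lets Riffola walk huge files without keeping their content in memory.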
lib/riffola/version.rb ADDED
@@ -0,0 +1,5 @@
+ module Riffola
+
+   VERSION = '0.0.1'
+
+ end
spec/riffola_spec.rb ADDED
@@ -0,0 +1,523 @@
+ require 'riffola'
+ require 'tempfile'
+ require 'hex_string'
+
+ describe Riffola do
+
+   # Are we in debug mode?
+   #
+   # Result::
+   # * Boolean: Are we in debug mode?
+   def debug?
+     ENV['TEST_DEBUG'] == '1'
+   end
+
+   HEX_DUMP_CHARS_SIZE = 16
+
+   # Convert a given string into a debuggable hexadecimal format
+   #
+   # Parameters::
+   # * *str* (String): String to be converted
+   # Result::
+   # * String: The output
+   def hex_dump(str)
+     str.scan(/.{1,#{HEX_DUMP_CHARS_SIZE}}/m).map do |line|
+       "#{"%-#{HEX_DUMP_CHARS_SIZE * 3}s" % line.to_hex_string}| #{line.gsub(/[^[:print:]]/, '.')}"
+     end.join("\n")
+   end
+
+   # Get a string encoding chunks
+   #
+   # Parameters::
+   # * *chunks* (Array< Hash<Symbol,Object> >): List of chunks data:
+   #   * *name* (String): Chunk's name
+   #   * *data* (String): Chunk's data
+   #   * *data_size* (Integer): Chunk's data size [default = data.size]
+   #   * *header* (String): Chunk's header [default = '']
+   #   * *size_length* (Integer): Size in bytes for size encoding [default = 4]
+   # Result::
+   # * String: The encoded chunks
+   def chunks_to_str(chunks)
+     chunks.map do |chunk_info|
+       chunk_info[:header] = '' unless chunk_info.key?(:header)
+       chunk_info[:size_length] = 4 unless chunk_info.key?(:size_length)
+       chunk_info[:data_size] = chunk_info[:data].size unless chunk_info.key?(:data_size)
+       size_pack_code =
+         case chunk_info[:size_length]
+         when 2
+           'S'
+         when 4
+           'L'
+         else
+           raise "Unknown size length to encode: #{chunk_info[:size_length]}"
+         end
+       "#{chunk_info[:name]}#{[chunk_info[:data_size]].pack(size_pack_code)}#{chunk_info[:header]}#{chunk_info[:data]}"
+     end.join
+   end
+
+   # Create a file with some chunks content and call code with its file name
+   #
+   # Parameters::
+   # * *chunks* (Array< Hash<Symbol,Object> >): List of chunks data (see chunks_to_str for details) [default = []]
+   # * Proc: Code called once the file has been created. File is deleted after code execution.
+   #   * Parameters::
+   #     * *file* (String): File name
+   def with_file_content(chunks = [])
+     Tempfile.open do |tmp_file|
+       tmp_file.write(chunks_to_str(chunks))
+       tmp_file.flush
+       puts "[Test Debug] - File #{tmp_file.path} has content:\n#{hex_dump(File.read(tmp_file))}" if debug?
+       yield tmp_file.path
+     end
+   end
+
+   # Return the chunks read from a file content
+   #
+   # Parameters::
+   # * *chunks* (Array< Hash<Symbol,Object> >): List of chunks data. See with_file_content to understand the structure. [default = []]
+   # * *chunks_format* (Object): The chunks_format parameter given to Riffola.read [default: {}]
+   # * Proc: Code called with the chunks decoded from the file
+   #   * Parameters::
+   #     * *chunks* (Array<Riffola::Chunk>): List of chunks read from the file
+   def read_chunks(chunks = [], chunks_format: {})
+     with_file_content(chunks) do |file|
+       yield Riffola.read(file, chunks_format: chunks_format, debug: debug?)
+     end
+   end
+
+   it 'reads an empty file' do
+     read_chunks do |chunks|
+       expect(chunks).to eq []
+     end
+   end
+
+   it 'reads a file containing 1 chunk' do
+     chunk_name = 'ABCD'
+     chunk_data = 'TestData'
+     read_chunks([
+       {
+         name: chunk_name,
+         data: chunk_data
+       }
+     ]) do |chunks|
+       expect(chunks.size).to eq 1
+       chunk = chunks.first
+       expect(chunk.name).to eq chunk_name
+       expect(chunk.size).to eq chunk_data.size
+       expect(chunk.data).to eq chunk_data
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks' do
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1'
+       },
+       {
+         name: 'CHK2',
+         data: 'ChunkData2'
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3'
+       }
+     ]) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with size encoded in 2 bytes' do
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1',
+         size_length: 2
+       },
+       {
+         name: 'CHK2',
+         data: 'ChunkData2',
+         size_length: 2
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3',
+         size_length: 2
+       }
+     ], chunks_format: { '*' => { size_length: 2 } }) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with headers' do
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1',
+         header: 'ChunkHeader1'
+       },
+       {
+         name: 'CHK2',
+         data: 'ChunkData2',
+         header: 'ChunkHeader2'
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3',
+         header: 'ChunkHeader3'
+       }
+     ], chunks_format: { '*' => { header_size: 12 } }) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq 'ChunkHeader1'
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData2'
+       expect(chunk.header).to eq 'ChunkHeader2'
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq 'ChunkHeader3'
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with data size correction' do
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1',
+         data_size: 4
+       },
+       {
+         name: 'CHK2',
+         data: 'ChunkData2',
+         data_size: 4
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3',
+         data_size: 4
+       }
+     ], chunks_format: { '*' => { data_size_correction: 6 } }) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with data size correction given as a Proc' do
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1',
+         data_size: 4
+       },
+       {
+         name: 'CHK2',
+         data: 'ChunkData2',
+         data_size: 4
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3',
+         data_size: 4
+       }
+     ], chunks_format: { '*' => { data_size_correction: proc { |_file| 6 } } }) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with data size correction on some chunks only' do
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1'
+       },
+       {
+         name: 'CHK2',
+         data: 'ChunkData2',
+         data_size: 4
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3',
+         data_size: 14
+       }
+     ], chunks_format: { 'CHK2' => { data_size_correction: 6 }, 'CHK3' => { data_size_correction: -4 } }) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with sub-chunks' do
+     chunk2_data = chunks_to_str([
+       {
+         name: 'SCK1',
+         data: 'SubChunkData1'
+       },
+       {
+         name: 'SCK2',
+         data: 'SubChunkData2'
+       },
+       {
+         name: 'SCK3',
+         data: 'SubChunkData3'
+       }
+     ])
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1'
+       },
+       {
+         name: 'CHK2',
+         data: chunk2_data
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3'
+       }
+     ]) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq chunk2_data.size
+       expect(chunk.data).to eq chunk2_data
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+       sub_chunks = chunks[1].sub_chunks
+       expect(sub_chunks.size).to eq 3
+       chunk = sub_chunks.first
+       expect(chunk.name).to eq 'SCK1'
+       expect(chunk.size).to eq 13
+       expect(chunk.data).to eq 'SubChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq chunks[1]
+       expect(chunk.next).to eq sub_chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'SCK2'
+       expect(chunk.size).to eq 13
+       expect(chunk.data).to eq 'SubChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq chunks[1]
+       expect(chunk.next).to eq sub_chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'SCK3'
+       expect(chunk.size).to eq 13
+       expect(chunk.data).to eq 'SubChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq chunks[1]
+       expect(chunk.next).to eq nil
+     end
+   end
+
+   it 'reads a file containing several chunks with sub-chunks having specific formats' do
+     chunk2_data = chunks_to_str([
+       {
+         name: 'SCK1',
+         data: 'SubChunkData1'
+       },
+       {
+         name: 'SCK2',
+         data: 'SubChunkData2',
+         data_size: 7
+       },
+       {
+         name: 'SCK3',
+         data: 'SubChunkData3'
+       }
+     ])
+     read_chunks([
+       {
+         name: 'CHK1',
+         data: 'ChunkData1'
+       },
+       {
+         name: 'CHK2',
+         data: chunk2_data
+       },
+       {
+         name: 'CHK3',
+         data: 'ChunkData3'
+       }
+     ], chunks_format: { 'SCK2' => { data_size_correction: 6 } }) do |chunks|
+       expect(chunks.size).to eq 3
+       chunk = chunks.first
+       expect(chunk.name).to eq 'CHK1'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK2'
+       expect(chunk.size).to eq chunk2_data.size
+       expect(chunk.data).to eq chunk2_data
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'CHK3'
+       expect(chunk.size).to eq 10
+       expect(chunk.data).to eq 'ChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq nil
+       expect(chunk.next).to eq nil
+       sub_chunks = chunks[1].sub_chunks
+       expect(sub_chunks.size).to eq 3
+       chunk = sub_chunks.first
+       expect(chunk.name).to eq 'SCK1'
+       expect(chunk.size).to eq 13
+       expect(chunk.data).to eq 'SubChunkData1'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq chunks[1]
+       expect(chunk.next).to eq sub_chunks[1]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'SCK2'
+       expect(chunk.size).to eq 13
+       expect(chunk.data).to eq 'SubChunkData2'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq chunks[1]
+       expect(chunk.next).to eq sub_chunks[2]
+       chunk = chunk.next
+       expect(chunk.name).to eq 'SCK3'
+       expect(chunk.size).to eq 13
+       expect(chunk.data).to eq 'SubChunkData3'
+       expect(chunk.header).to eq ''
+       expect(chunk.parent_chunk).to eq chunks[1]
+       expect(chunk.next).to eq nil
+     end
+   end
+
+ end
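For reference, the byte layout that chunks_to_str builds above (and that Riffola parses) is simply the 4-byte name, the packed size, the optional header, then the data. A tiny sketch with hypothetical values, using the same 'L' pack code as the helper:

  encoded = 'ABCD' + [8].pack('L') + 'TestData'
  encoded.bytesize # => 16 (4 bytes of name + 4 bytes of size + 8 bytes of data)
  # Read back from a file with this content, Riffola.read would return a single
  # chunk named 'ABCD', whose size is 8 and whose data is 'TestData'.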
spec/spec_helper.rb ADDED
@@ -0,0 +1,100 @@
+ # This file was generated by the `rspec --init` command. Conventionally, all
+ # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
+ # The generated `.rspec` file contains `--require spec_helper` which will cause
+ # this file to always be loaded, without a need to explicitly require it in any
+ # files.
+ #
+ # Given that it is always loaded, you are encouraged to keep this file as
+ # light-weight as possible. Requiring heavyweight dependencies from this file
+ # will add to the boot time of your test suite on EVERY test run, even for an
+ # individual file that may not need all of that loaded. Instead, consider making
+ # a separate helper file that requires the additional dependencies and performs
+ # the additional setup, and require it from the spec files that actually need
+ # it.
+ #
+ # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
+ RSpec.configure do |config|
+   # rspec-expectations config goes here. You can use an alternate
+   # assertion/expectation library such as wrong or the stdlib/minitest
+   # assertions if you prefer.
+   config.expect_with :rspec do |expectations|
+     # This option will default to `true` in RSpec 4. It makes the `description`
+     # and `failure_message` of custom matchers include text for helper methods
+     # defined using `chain`, e.g.:
+     #     be_bigger_than(2).and_smaller_than(4).description
+     #     # => "be bigger than 2 and smaller than 4"
+     # ...rather than:
+     #     # => "be bigger than 2"
+     expectations.include_chain_clauses_in_custom_matcher_descriptions = true
+   end
+
+   # rspec-mocks config goes here. You can use an alternate test double
+   # library (such as bogus or mocha) by changing the `mock_with` option here.
+   config.mock_with :rspec do |mocks|
+     # Prevents you from mocking or stubbing a method that does not exist on
+     # a real object. This is generally recommended, and will default to
+     # `true` in RSpec 4.
+     mocks.verify_partial_doubles = true
+   end
+
+   # This option will default to `:apply_to_host_groups` in RSpec 4 (and will
+   # have no way to turn it off -- the option exists only for backwards
+   # compatibility in RSpec 3). It causes shared context metadata to be
+   # inherited by the metadata hash of host groups and examples, rather than
+   # triggering implicit auto-inclusion in groups with matching metadata.
+   config.shared_context_metadata_behavior = :apply_to_host_groups
+
+   # The settings below are suggested to provide a good initial experience
+   # with RSpec, but feel free to customize to your heart's content.
+ =begin
+   # This allows you to limit a spec run to individual examples or groups
+   # you care about by tagging them with `:focus` metadata. When nothing
+   # is tagged with `:focus`, all examples get run. RSpec also provides
+   # aliases for `it`, `describe`, and `context` that include `:focus`
+   # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
+   config.filter_run_when_matching :focus
+
+   # Allows RSpec to persist some state between runs in order to support
+   # the `--only-failures` and `--next-failure` CLI options. We recommend
+   # you configure your source control system to ignore this file.
+   config.example_status_persistence_file_path = "spec/examples.txt"
+
+   # Limits the available syntax to the non-monkey patched syntax that is
+   # recommended. For more details, see:
+   #   - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
+   #   - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
+   #   - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
+   config.disable_monkey_patching!
+
+   # This setting enables warnings. It's recommended, but in some cases may
+   # be too noisy due to issues in dependencies.
+   config.warnings = true
+
+   # Many RSpec users commonly either run the entire suite or an individual
+   # file, and it's useful to allow more verbose output when running an
+   # individual spec file.
+   if config.files_to_run.one?
+     # Use the documentation formatter for detailed output,
+     # unless a formatter has already been configured
+     # (e.g. via a command-line flag).
+     config.default_formatter = "doc"
+   end
+
+   # Print the 10 slowest examples and example groups at the
+   # end of the spec run, to help surface which specs are running
+   # particularly slow.
+   config.profile_examples = 10
+
+   # Run specs in random order to surface order dependencies. If you find an
+   # order dependency and want to debug it, you can fix the order by providing
+   # the seed, which is printed after each run.
+   #     --seed 1234
+   config.order = :random
+
+   # Seed global randomization in this process using the `--seed` CLI option.
+   # Setting this allows you to use `--seed` to deterministically reproduce
+   # test failures related to randomization by passing the same `--seed` value
+   # as the one that triggered the failure.
+   Kernel.srand config.seed
+ =end
+ end
metadata ADDED
@@ -0,0 +1,122 @@
+ --- !ruby/object:Gem::Specification
+ name: riffola
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - Muriel Salvan
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2018-10-11 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: hex_string
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: byebug
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rubocop
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: ruby-prof
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: Library reading an extended RIFF format, supporting huge files. The
+   RIFF format is composed of a list of chunks, each chunk being an identifier, an
+   encoded data size, an optional header and the chunk data itself. Riffola can deal
+   with RIFF files that take some liberties with the RIFF format (additional headers,
+   wrong chunk sizes...).
+ email:
+ - muriel@x-aeon.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - lib/riffola.rb
+ - lib/riffola/version.rb
+ - spec/riffola_spec.rb
+ - spec/spec_helper.rb
+ homepage: https://github.com/Muriel-Salvan/riffola
+ licenses:
+ - BSD-4-Clause
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.7.3
+ signing_key:
+ specification_version: 4
+ summary: Riffola - Reading extended RIFF files
+ test_files: []