cabriolet 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. checksums.yaml +7 -0
  2. data/ARCHITECTURE.md +799 -0
  3. data/CHANGELOG.md +44 -0
  4. data/LICENSE +29 -0
  5. data/README.adoc +1207 -0
  6. data/exe/cabriolet +6 -0
  7. data/lib/cabriolet/auto.rb +173 -0
  8. data/lib/cabriolet/binary/bitstream.rb +148 -0
  9. data/lib/cabriolet/binary/bitstream_writer.rb +180 -0
  10. data/lib/cabriolet/binary/chm_structures.rb +213 -0
  11. data/lib/cabriolet/binary/hlp_structures.rb +66 -0
  12. data/lib/cabriolet/binary/kwaj_structures.rb +74 -0
  13. data/lib/cabriolet/binary/lit_structures.rb +107 -0
  14. data/lib/cabriolet/binary/oab_structures.rb +112 -0
  15. data/lib/cabriolet/binary/structures.rb +56 -0
  16. data/lib/cabriolet/binary/szdd_structures.rb +60 -0
  17. data/lib/cabriolet/cab/compressor.rb +382 -0
  18. data/lib/cabriolet/cab/decompressor.rb +510 -0
  19. data/lib/cabriolet/cab/extractor.rb +357 -0
  20. data/lib/cabriolet/cab/parser.rb +264 -0
  21. data/lib/cabriolet/chm/compressor.rb +513 -0
  22. data/lib/cabriolet/chm/decompressor.rb +436 -0
  23. data/lib/cabriolet/chm/parser.rb +254 -0
  24. data/lib/cabriolet/cli.rb +776 -0
  25. data/lib/cabriolet/compressors/base.rb +34 -0
  26. data/lib/cabriolet/compressors/lzss.rb +250 -0
  27. data/lib/cabriolet/compressors/lzx.rb +581 -0
  28. data/lib/cabriolet/compressors/mszip.rb +315 -0
  29. data/lib/cabriolet/compressors/quantum.rb +446 -0
  30. data/lib/cabriolet/constants.rb +75 -0
  31. data/lib/cabriolet/decompressors/base.rb +39 -0
  32. data/lib/cabriolet/decompressors/lzss.rb +138 -0
  33. data/lib/cabriolet/decompressors/lzx.rb +726 -0
  34. data/lib/cabriolet/decompressors/mszip.rb +390 -0
  35. data/lib/cabriolet/decompressors/none.rb +27 -0
  36. data/lib/cabriolet/decompressors/quantum.rb +456 -0
  37. data/lib/cabriolet/errors.rb +39 -0
  38. data/lib/cabriolet/format_detector.rb +156 -0
  39. data/lib/cabriolet/hlp/compressor.rb +272 -0
  40. data/lib/cabriolet/hlp/decompressor.rb +198 -0
  41. data/lib/cabriolet/hlp/parser.rb +131 -0
  42. data/lib/cabriolet/huffman/decoder.rb +79 -0
  43. data/lib/cabriolet/huffman/encoder.rb +108 -0
  44. data/lib/cabriolet/huffman/tree.rb +138 -0
  45. data/lib/cabriolet/kwaj/compressor.rb +479 -0
  46. data/lib/cabriolet/kwaj/decompressor.rb +237 -0
  47. data/lib/cabriolet/kwaj/parser.rb +183 -0
  48. data/lib/cabriolet/lit/compressor.rb +255 -0
  49. data/lib/cabriolet/lit/decompressor.rb +250 -0
  50. data/lib/cabriolet/models/cabinet.rb +81 -0
  51. data/lib/cabriolet/models/chm_file.rb +28 -0
  52. data/lib/cabriolet/models/chm_header.rb +67 -0
  53. data/lib/cabriolet/models/chm_section.rb +38 -0
  54. data/lib/cabriolet/models/file.rb +119 -0
  55. data/lib/cabriolet/models/folder.rb +102 -0
  56. data/lib/cabriolet/models/folder_data.rb +21 -0
  57. data/lib/cabriolet/models/hlp_file.rb +45 -0
  58. data/lib/cabriolet/models/hlp_header.rb +37 -0
  59. data/lib/cabriolet/models/kwaj_header.rb +98 -0
  60. data/lib/cabriolet/models/lit_header.rb +55 -0
  61. data/lib/cabriolet/models/oab_header.rb +95 -0
  62. data/lib/cabriolet/models/szdd_header.rb +72 -0
  63. data/lib/cabriolet/modifier.rb +326 -0
  64. data/lib/cabriolet/oab/compressor.rb +353 -0
  65. data/lib/cabriolet/oab/decompressor.rb +315 -0
  66. data/lib/cabriolet/parallel.rb +333 -0
  67. data/lib/cabriolet/repairer.rb +288 -0
  68. data/lib/cabriolet/streaming.rb +221 -0
  69. data/lib/cabriolet/system/file_handle.rb +107 -0
  70. data/lib/cabriolet/system/io_system.rb +87 -0
  71. data/lib/cabriolet/system/memory_handle.rb +105 -0
  72. data/lib/cabriolet/szdd/compressor.rb +217 -0
  73. data/lib/cabriolet/szdd/decompressor.rb +184 -0
  74. data/lib/cabriolet/szdd/parser.rb +127 -0
  75. data/lib/cabriolet/validator.rb +332 -0
  76. data/lib/cabriolet/version.rb +5 -0
  77. data/lib/cabriolet.rb +104 -0
  78. metadata +157 -0
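For orientation, a minimal usage sketch of the archive-opening API that the code below relies on. Cabriolet::Auto.open, #files, #name and #data are used here exactly as they appear in the diffs that follow, while "setup.cab" is a placeholder path, so treat this as an illustration rather than documented behaviour:

require "cabriolet"

# Open an archive; the format is detected automatically (see auto.rb / format_detector.rb)
archive = Cabriolet::Auto.open("setup.cab")

# Enumerate entries; each entry exposes at least #name and #data
archive.files.each do |file|
  puts "#{file.name} (#{file.data.bytesize} bytes)"
end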
data/lib/cabriolet/parallel.rb
@@ -0,0 +1,333 @@
+ # frozen_string_literal: true
+
+ module Cabriolet
+   # Parallel extraction for multi-core performance
+   module Parallel
+     # Parallel extractor for archives
+     class Extractor
+       DEFAULT_WORKERS = 4
+
+       def initialize(archive, output_dir, workers: DEFAULT_WORKERS, **options)
+         @archive = archive
+         @output_dir = output_dir
+         @workers = [workers, 1].max # At least 1 worker
+         @options = options
+         @preserve_paths = options.fetch(:preserve_paths, true)
+         @overwrite = options.fetch(:overwrite, false)
+         @queue = Queue.new
+         @stats = { extracted: 0, skipped: 0, failed: 0, bytes: 0 }
+         @stats_mutex = Mutex.new
+       end
+
+       # Extract all files using parallel workers
+       #
+       # @return [Hash] Extraction statistics
+       #
+       # @example
+       #   extractor = Cabriolet::Parallel::Extractor.new(cab, 'output/', workers: 8)
+       #   stats = extractor.extract_all
+       def extract_all
+         FileUtils.mkdir_p(@output_dir)
+
+         # Queue all files
+         @archive.files.each { |file| @queue << file }
+
+         # Add termination signals
+         @workers.times { @queue << :done }
+
+         # Start worker threads
+         threads = Array.new(@workers) do |worker_id|
+           Thread.new { worker_loop(worker_id) }
+         end
+
+         # Wait for all workers to complete
+         threads.each(&:join)
+
+         @stats
+       end
+
+       # Extract files with progress callback
+       #
+       # @yield [current, total, file] Progress callback
+       # @return [Hash] Extraction statistics
+       #
+       # @example
+       #   extractor.extract_with_progress do |current, total, file|
+       #     puts "#{current}/#{total}: #{file.name}"
+       #   end
+       def extract_with_progress(&block)
+         return extract_all unless block
+
+         total = @archive.files.count
+         current = 0
+         current_mutex = Mutex.new
+
+         FileUtils.mkdir_p(@output_dir)
+
+         # Queue all files
+         @archive.files.each { |file| @queue << file }
+         @workers.times { @queue << :done }
+
+         # Start worker threads with progress
+         threads = Array.new(@workers) do |_worker_id|
+           Thread.new do
+             loop do
+               file = @queue.pop
+               break if file == :done
+
+               extract_file(file)
+
+               current_mutex.synchronize do
+                 current += 1
+                 yield(current, total, file)
+               end
+             end
+           end
+         end
+
+         threads.each(&:join)
+         @stats
+       end
+
+       private
+
+       def worker_loop(_worker_id)
+         loop do
+           file = @queue.pop
+           break if file == :done
+
+           extract_file(file)
+         end
+       end
+
+       def extract_file(file)
+         output_path = build_output_path(file.name)
+
+         if File.exist?(output_path) && !@overwrite
+           update_stats(:skipped)
+           return
+         end
+
+         begin
+           # Create directory (thread-safe)
+           FileUtils.mkdir_p(File.dirname(output_path))
+
+           # Extract file data
+           data = file.data
+
+           # Write file (one at a time per file)
+           File.write(output_path, data, mode: "wb")
+
+           # Preserve timestamps if available
+           if file.respond_to?(:datetime) && file.datetime
+             File.utime(File.atime(output_path), file.datetime, output_path)
+           end
+
+           update_stats(:extracted, data.bytesize)
+         rescue StandardError => e
+           update_stats(:failed)
+           warn "Worker error extracting #{file.name}: #{e.message}"
+         end
+       end
+
+       def build_output_path(filename)
+         if @preserve_paths
+           clean_name = filename.gsub("\\", "/")
+           File.join(@output_dir, clean_name)
+         else
+           base_name = File.basename(filename.gsub("\\", "/"))
+           File.join(@output_dir, base_name)
+         end
+       end
+
+       def update_stats(stat_type, bytes = 0)
+         @stats_mutex.synchronize do
+           @stats[stat_type] += 1
+           @stats[:bytes] += bytes if bytes.positive?
+         end
+       end
+     end
+
+     # Parallel batch processor
+     class BatchProcessor
+       def initialize(workers: Extractor::DEFAULT_WORKERS)
+         @workers = workers
+         @stats = { total: 0, successful: 0, failed: 0 }
+         @stats_mutex = Mutex.new
+       end
+
+       # Process multiple archives in parallel
+       #
+       # @param archive_paths [Array<String>] Paths to archives
+       # @param output_base [String] Base output directory
+       # @yield [archive_path, stats] Optional callback per archive
+       # @return [Hash] Overall statistics
+       #
+       # @example
+       #   processor = Cabriolet::Parallel::BatchProcessor.new(workers: 8)
+       #   stats = processor.process_all(Dir.glob('*.cab'), 'output/')
+       def process_all(archive_paths, output_base, &block)
+         queue = Queue.new
+         archive_paths.each { |path| queue << path }
+         @workers.times { queue << :done }
+
+         threads = Array.new(@workers) do
+           Thread.new { process_loop(queue, output_base, &block) }
+         end
+
+         threads.each(&:join)
+         @stats
+       end
+
+       private
+
+       def process_loop(queue, output_base, &block)
+         loop do
+           archive_path = queue.pop
+           break if archive_path == :done
+
+           process_one(archive_path, output_base, &block)
+         end
+       end
+
+       def process_one(archive_path, output_base)
+         update_stats(:total)
+
+         begin
+           archive = Cabriolet::Auto.open(archive_path)
+           output_dir = File.join(output_base, File.basename(archive_path, ".*"))
+
+           extractor = Extractor.new(archive, output_dir, workers: 2)
+           stats = extractor.extract_all
+
+           update_stats(:successful)
+
+           yield(archive_path, stats) if block_given?
+         rescue StandardError => e
+           update_stats(:failed)
+           warn "Failed to process #{archive_path}: #{e.message}"
+         end
+       end
+
+       def update_stats(stat_type)
+         @stats_mutex.synchronize do
+           @stats[stat_type] += 1
+         end
+       end
+
+       attr_reader :stats
+     end
+
+     # Thread pool for custom parallel operations
+     class ThreadPool
+       def initialize(size: Extractor::DEFAULT_WORKERS)
+         @size = size
+         @queue = Queue.new
+         @threads = []
+         @running = false
+       end
+
+       # Start the thread pool
+       def start
+         return if @running
+
+         @running = true
+         @threads = Array.new(@size) do
+           Thread.new { worker_loop }
+         end
+       end
+
+       # Submit a task to the pool
+       #
+       # @yield Task to execute
+       def submit(&block)
+         start unless @running
+         @queue << block
+       end
+
+       # Shutdown the thread pool
+       #
+       # @param wait [Boolean] Wait for pending tasks to complete
+       def shutdown(wait: true)
+         return unless @running
+
+         if wait
+           # Wait for queue to empty
+           sleep 0.01 until @queue.empty?
+         end
+
+         # Send termination signals
+         @size.times { @queue << :shutdown }
+
+         # Wait for threads to finish
+         @threads.each(&:join)
+         @threads.clear
+         @running = false
+       end
+
+       # Execute tasks in parallel with automatic cleanup
+       #
+       # @param items [Array] Items to process
+       # @yield [item] Process each item
+       # @return [Array] Results from each task
+       def map(items)
+         start
+         results = []
+         results_mutex = Mutex.new
+
+         items.each_with_index do |item, index|
+           submit do
+             result = yield(item)
+             results_mutex.synchronize do
+               results[index] = result
+             end
+           end
+         end
+
+         shutdown(wait: true)
+         results
+       end
+
+       private
+
+       def worker_loop
+         loop do
+           task = @queue.pop
+           break if task == :shutdown
+
+           begin
+             task.call
+           rescue StandardError => e
+             warn "Thread pool worker error: #{e.message}"
+           end
+         end
+       end
+     end
+
+     class << self
+       # Extract archive using parallel workers
+       #
+       # @param archive [Object] Archive object
+       # @param output_dir [String] Output directory
+       # @param workers [Integer] Number of parallel workers
+       # @return [Hash] Extraction statistics
+       def extract(archive, output_dir, workers: Extractor::DEFAULT_WORKERS,
+                   **options)
+         extractor = Extractor.new(archive, output_dir, workers: workers,
+                                   **options)
+         extractor.extract_all
+       end
+
+       # Process multiple archives in parallel
+       #
+       # @param paths [Array<String>] Archive paths
+       # @param output_base [String] Base output directory
+       # @param workers [Integer] Number of parallel workers
+       # @return [Hash] Processing statistics
+       def process_batch(paths, output_base, workers: Extractor::DEFAULT_WORKERS)
+         processor = BatchProcessor.new(workers: workers)
+         processor.process_all(paths, output_base)
+       end
+     end
+   end
+ end
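For reference, a minimal usage sketch of the parallel API added in this file. It assumes Cabriolet::Auto.open returns an archive exposing #files, as BatchProcessor above already relies on; "example.cab" and the output directories are placeholder paths:

require "cabriolet"
require "digest"

archive = Cabriolet::Auto.open("example.cab") # placeholder input

# Module-level helper: extract with eight workers and inspect the stats hash
stats = Cabriolet::Parallel.extract(archive, "output/", workers: 8)
puts "extracted=#{stats[:extracted]} skipped=#{stats[:skipped]} failed=#{stats[:failed]}"

# Batch mode: one output directory per archive under batch_out/
Cabriolet::Parallel.process_batch(Dir.glob("*.cab"), "batch_out/", workers: 4)

# Generic thread pool: hash every entry concurrently
pool = Cabriolet::Parallel::ThreadPool.new(size: 4)
digests = pool.map(archive.files) { |file| Digest::SHA256.hexdigest(file.data) }

ThreadPool#map collects results under a mutex and returns them only after shutdown(wait: true), which drains the queue and then joins the worker threads.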
data/lib/cabriolet/repairer.rb
@@ -0,0 +1,288 @@
+ # frozen_string_literal: true
+
+ module Cabriolet
+   # Archive repair and recovery functionality
+   class Repairer
+     def initialize(path, **options)
+       @path = path
+       @options = options
+       @format = FormatDetector.detect(path)
+       @recovery_stats = { recovered: 0, failed: 0, partial: 0 }
+     end
+
+     # Attempt to repair the archive
+     #
+     # @param output [String] Output path for repaired archive
+     # @param options [Hash] Repair options
+     # @option options [Boolean] :salvage_mode (true) Enable salvage mode
+     # @option options [Boolean] :skip_corrupted (true) Skip corrupted files
+     # @option options [Boolean] :rebuild_index (true) Rebuild file index
+     # @return [RepairReport] Repair report
+     #
+     # @example
+     #   repairer = Cabriolet::Repairer.new('corrupted.cab')
+     #   report = repairer.repair(output: 'repaired.cab')
+     def repair(output:, **options)
+       salvage_mode = options.fetch(:salvage_mode, true)
+       skip_corrupted = options.fetch(:skip_corrupted, true)
+       rebuild_index = options.fetch(:rebuild_index, true)
+
+       begin
+         # Parse with salvage mode enabled
+         parser_class = FormatDetector.format_to_parser(@format)
+         unless parser_class
+           raise UnsupportedFormatError,
+                 "No parser for format: #{@format}"
+         end
+
+         archive = parser_class.new(
+           salvage_mode: salvage_mode,
+           skip_checksum: true,
+           continue_on_error: true,
+         ).parse(@path)
+
+         # Extract recoverable files
+         recovered_files = extract_recoverable_files(archive, skip_corrupted)
+
+         # Rebuild archive
+         rebuild_archive(recovered_files, output) if rebuild_index
+
+         RepairReport.new(
+           success: true,
+           original_file: @path,
+           repaired_file: output,
+           stats: @recovery_stats,
+           recovered_files: recovered_files.map(&:name),
+         )
+       rescue StandardError => e
+         RepairReport.new(
+           success: false,
+           original_file: @path,
+           repaired_file: output,
+           stats: @recovery_stats,
+           error: e.message,
+         )
+       end
+     end
+
+     # Salvage files from corrupted archive
+     #
+     # @param output_dir [String] Directory to save recovered files
+     # @return [SalvageReport] Salvage report with statistics
+     #
+     # @example
+     #   repairer = Cabriolet::Repairer.new('corrupted.cab')
+     #   report = repairer.salvage(output_dir: 'recovered/')
+     def salvage(output_dir:)
+       FileUtils.mkdir_p(output_dir)
+
+       parser_class = FormatDetector.format_to_parser(@format)
+       archive = parser_class.new(
+         salvage_mode: true,
+         skip_checksum: true,
+         continue_on_error: true,
+       ).parse(@path)
+
+       salvaged_files = []
+
+       archive.files.each do |file|
+         data = file.data
+         output_path = File.join(output_dir, sanitize_filename(file.name))
+         FileUtils.mkdir_p(File.dirname(output_path))
+         File.write(output_path, data, mode: "wb")
+
+         @recovery_stats[:recovered] += 1
+         salvaged_files << file.name
+       rescue StandardError => e
+         @recovery_stats[:failed] += 1
+         warn "Could not salvage #{file.name}: #{e.message}"
+       end
+
+       SalvageReport.new(
+         output_dir: output_dir,
+         stats: @recovery_stats,
+         salvaged_files: salvaged_files,
+       )
+     end
+
+     private
+
+     def extract_recoverable_files(archive, skip_corrupted)
+       recovered = []
+
+       archive.files.each do |file|
+         # Try to decompress file data
+         data = file.data
+
+         # Verify data integrity if possible
+         if file.respond_to?(:size) && data.bytesize == file.size
+           recovered << RecoveredFile.new(file, data, :complete)
+           @recovery_stats[:recovered] += 1
+         elsif skip_corrupted
+           @recovery_stats[:failed] += 1
+         else
+           recovered << RecoveredFile.new(file, data, :partial)
+           @recovery_stats[:partial] += 1
+         end
+       rescue StandardError => e
+         @recovery_stats[:failed] += 1
+         warn "Failed to recover #{file.name}: #{e.message}" unless skip_corrupted
+       end
+
+       recovered
+     end
+
+     def rebuild_archive(files, output_path)
+       # Rebuild based on format
+       case @format
+       when :cab
+         rebuild_cab(files, output_path)
+       else
+         # For other formats, just extract the files
+         # Full rebuild may not be supported
+         raise UnsupportedOperationError, "Rebuild not supported for #{@format}"
+       end
+     end
+
+     def rebuild_cab(files, output_path)
+       require_relative "cab/compressor"
+
+       compressor = CAB::Compressor.new(
+         output: output_path,
+         compression: :mszip, # Use safe compression
+       )
+
+       files.each do |recovered_file|
+         compressor.add_file_data(
+           recovered_file.name,
+           recovered_file.data,
+           attributes: recovered_file.attributes,
+           date: recovered_file.date,
+           time: recovered_file.time,
+         )
+       end
+
+       compressor.compress
+     end
+
+     def sanitize_filename(filename)
+       # Remove path traversal attempts and dangerous characters
+       filename.gsub("\\", "/").gsub("..", "_").gsub(%r{^/}, "")
+     end
+
+     # Recovered file wrapper
+     class RecoveredFile
+       attr_reader :name, :data, :status, :attributes, :date, :time
+
+       def initialize(original_file, data, status)
+         @name = original_file.name
+         @data = data
+         @status = status # :complete or :partial
+         @attributes = original_file.attributes if original_file.respond_to?(:attributes)
+         @date = original_file.date if original_file.respond_to?(:date)
+         @time = original_file.time if original_file.respond_to?(:time)
+       end
+
+       def complete?
+         @status == :complete
+       end
+
+       def partial?
+         @status == :partial
+       end
+     end
+   end
+
+   # Repair report
+   class RepairReport
+     attr_reader :success, :original_file, :repaired_file, :stats,
+                 :recovered_files, :error
+
+     def initialize(success:, original_file:, repaired_file:, stats:,
+                    recovered_files: [], error: nil)
+       @success = success
+       @original_file = original_file
+       @repaired_file = repaired_file
+       @stats = stats
+       @recovered_files = recovered_files
+       @error = error
+     end
+
+     def success?
+       @success
+     end
+
+     def summary
+       if success?
+         "Repair successful: #{@stats[:recovered]} files recovered, #{@stats[:failed]} failed"
+       else
+         "Repair failed: #{@error}"
+       end
+     end
+
+     def detailed_report
+       report = ["=" * 60]
+       report << "Archive Repair Report"
+       report << ("=" * 60)
+       report << "Original: #{@original_file}"
+       report << "Repaired: #{@repaired_file}"
+       report << "Status: #{success? ? 'SUCCESS' : 'FAILED'}"
+       report << ""
+       report << "Statistics:"
+       report << " Recovered: #{@stats[:recovered]}"
+       report << " Partial: #{@stats[:partial]}"
+       report << " Failed: #{@stats[:failed]}"
+       report << ""
+
+       if @error
+         report << "Error: #{@error}"
+         report << ""
+       end
+
+       if @recovered_files.any?
+         report << "Recovered Files:"
+         @recovered_files.each { |f| report << " - #{f}" }
+         report << ""
+       end
+
+       report << ("=" * 60)
+       report.join("\n")
+     end
+   end
+
+   # Salvage report
+   class SalvageReport
+     attr_reader :output_dir, :stats, :salvaged_files
+
+     def initialize(output_dir:, stats:, salvaged_files:)
+       @output_dir = output_dir
+       @stats = stats
+       @salvaged_files = salvaged_files
+     end
+
+     def summary
+       "Salvaged #{@stats[:recovered]} files to #{@output_dir}, #{@stats[:failed]} failed"
+     end
+
+     def detailed_report
+       report = ["=" * 60]
+       report << "Salvage Operation Report"
+       report << ("=" * 60)
+       report << "Output Directory: #{@output_dir}"
+       report << ""
+       report << "Statistics:"
+       report << " Salvaged: #{@stats[:recovered]}"
+       report << " Failed: #{@stats[:failed]}"
+       report << ""
+
+       if @salvaged_files.any?
+         report << "Salvaged Files:"
+         @salvaged_files.each { |f| report << " - #{f}" }
+         report << ""
+       end
+
+       report << ("=" * 60)
+       report.join("\n")
+     end
+   end
+ end