zip-merge 7.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +0 -0
- data/lib/zip/merge/version.rb +11 -0
- data/lib/zip/merge.rb +311 -0
- data/lib/zip-merge.rb +3 -0
- data.tar.gz.sig +0 -0
- metadata +96 -0
- metadata.gz.sig +0 -0
checksums.yaml
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
---
|
|
2
|
+
SHA256:
|
|
3
|
+
metadata.gz: a859c92350627feaf0b728b47e735675f19c339d50c6bf709a5c516e6fd252ec
|
|
4
|
+
data.tar.gz: 54d03cd74d3ec83b9050bdcad173320f5a9c00dd49a657a04f736f92c3ce53bc
|
|
5
|
+
SHA512:
|
|
6
|
+
metadata.gz: 96e218946eb50b84c11e3d1fff6354626fd44f50d1672e32275aa3eb18e960821566845a0a4fd6adcd1a68a7c2be9d2de6255c2933da91b266105309e3369b20
|
|
7
|
+
data.tar.gz: 882dadf7d82282213faf7d43691eee6f4382a14eb261c5b4aeb7659db609c9532b1c86a688a5bdd2f0c034a1c1eaf37c8db98f31536f726c65f490488763e4d5
|
checksums.yaml.gz.sig
ADDED
|
Binary file
|
data/lib/zip/merge.rb
ADDED
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "stringio"
|
|
4
|
+
require "zlib"
|
|
5
|
+
require "tree_haver"
|
|
6
|
+
require_relative "merge/version"
|
|
7
|
+
|
|
8
|
+
module Zip
|
|
9
|
+
module Merge
|
|
10
|
+
LOCAL = 0x04034b50
|
|
11
|
+
CENTRAL = 0x02014b50
|
|
12
|
+
EOCD = 0x06054b50
|
|
13
|
+
DOS_EPOCH = [0, 0, 0x21, 0].pack("C*")
|
|
14
|
+
|
|
15
|
+
# Raised when ZIP rendering cannot proceed. Wraps the structured
# diagnostic that explains the failure; the diagnostic's message doubles
# as the exception message.
class RenderError < StandardError
  attr_reader :diagnostic

  # @param diagnostic [#message] structured diagnostic describing the failure
  def initialize(diagnostic)
    @diagnostic = diagnostic
    super(diagnostic.message)
  end
end
|
|
23
|
+
|
|
24
|
+
module_function
|
|
25
|
+
|
|
26
|
+
# Builds a structural inventory of a ZIP archive from its raw bytes.
#
# Walks the central directory, pairs each record with its local file
# header, and emits one TreeHaver::ZipArchiveEntry per member, sorted by
# the byte offset of the local header. All byte ranges are half-open
# [start_byte, end_byte).
#
# @param source [String] complete archive bytes (re-read as binary via #b)
# @return [TreeHaver::ZipFamilyReport] inventory with entries, unsafe-entry
#   findings, no member decisions, and an empty merge report
def parse_zip_inventory(source)
  bytes = source.b
  central = scan_central_directory(bytes)
  locals = scan_local_headers(bytes, central[:records])
  entries = central[:records].map do |name, record|
    local = locals.fetch(name)
    TreeHaver::ZipArchiveEntry.new(
      path: name,
      normalized_path: normalize_zip_path(name),
      # ZIP convention: directory members carry a trailing slash.
      directory: name.end_with?("/"),
      compression: compression_name(record[:method]),
      compressed_size: record[:compressed_size],
      uncompressed_size: record[:uncompressed_size],
      # CRC reported as an 8-character lowercase hex string.
      crc32: "%08x" % record[:crc32],
      # Local header runs from its recorded offset to the first payload byte.
      local_header_range: TreeHaver::ByteRange.new(start_byte: record[:local_offset], end_byte: local[:data_start]),
      data_range: TreeHaver::ByteRange.new(start_byte: local[:data_start], end_byte: local[:data_start] + record[:compressed_size]),
      central_directory_range: record[:range]
    )
  end.sort_by { |entry| entry.local_header_range.start_byte }

  TreeHaver::ZipFamilyReport.new(
    archive: TreeHaver::ZipArchiveInfo.new(format: "zip", schema: "zip.ksy", entry_count: entries.length, central_directory_range: central[:range]),
    entries: entries,
    member_decisions: [],
    unsafe_entries: unsafe_entries(entries, central[:records]),
    merge_report: empty_report
  )
end
|
|
54
|
+
|
|
55
|
+
# Plans a three-way merge of ZIP inventories (ancestor/current/incoming).
#
# Produces a report whose member_decisions classify every path seen in any
# of the three archives. Branch order matters: unsafe findings win over
# add/delete, which win over preserve, which wins over nested delegation.
#
# @param ancestor [TreeHaver::ZipFamilyReport] common-ancestor inventory
# @param current [TreeHaver::ZipFamilyReport] current-side inventory
# @param incoming [TreeHaver::ZipFamilyReport] incoming-side inventory
# @return [TreeHaver::ZipFamilyReport] incoming's archive/entries plus the
#   computed decisions and merge report
def plan_zip_merge(ancestor, current, incoming)
  report = TreeHaver::ZipFamilyReport.new(
    archive: incoming.archive,
    entries: incoming.entries,
    member_decisions: [],
    unsafe_entries: incoming.unsafe_entries || [],
    merge_report: empty_report
  )
  ancestor_entries = entries_by_path(ancestor.entries)
  current_entries = entries_by_path(current.entries)
  incoming_entries = entries_by_path(incoming.entries)
  # Only incoming-side unsafe findings are consulted when classifying paths.
  unsafe_by_path = report.unsafe_entries.to_h { |entry| [entry.normalized_path, entry] }

  # Union of every path seen on any side, visited in sorted order so the
  # resulting decision list is deterministic.
  (ancestor_entries.keys | current_entries.keys | incoming_entries.keys).sort.each do |path|
    ancestor_entry = ancestor_entries[path]
    current_entry = current_entries[path]
    incoming_entry = incoming_entries[path]
    if unsafe_by_path[path]
      # Unsafe members are rejected outright and surfaced as diagnostics.
      unsafe = unsafe_by_path[path]
      report.member_decisions << TreeHaver::ZipMemberDecision.new(normalized_path: path, operation: "reject", disposition: "unsafe", reason: unsafe.reason)
      report.merge_report.diagnostics << diagnostic(unsafe.category, schema_path(path), unsafe.reason)
    elsif current_entry.nil? && incoming_entry
      # Present only in incoming: the renderer must add it.
      decision(report, path, "add", "requires_renderer", "member exists only in incoming archive")
    elsif current_entry && incoming_entry.nil?
      # Present only in current: incoming removed it, so the renderer deletes.
      decision(report, path, "delete", "requires_renderer", "member was removed from incoming archive")
    elsif ancestor_entry && same_entry?(current_entry, ancestor_entry) && same_entry?(incoming_entry, ancestor_entry)
      # Untouched on both sides: the raw bytes can be preserved verbatim.
      report.member_decisions << TreeHaver::ZipMemberDecision.new(normalized_path: path, operation: "preserve", disposition: "safe", reason: "member is unchanged from ancestor")
      report.merge_report.preserved_ranges.concat([current_entry.local_header_range, current_entry.data_range])
    elsif (family = nested_family(path))
      # Structured member (markdown/json/yaml/xml): delegate to a nested
      # merge family before ZIP rendering; its CRC must be recomputed.
      report.member_decisions << TreeHaver::ZipMemberDecision.new(normalized_path: path, operation: "delegate", disposition: "requires_renderer", nested_family: family, reason: "structured member can be merged by a nested family before ZIP rendering")
      report.merge_report.nested_dispatches << TreeHaver::BinaryNestedDispatch.new(schema_path: "#{schema_path(path)}/data", family: family, status: "planned")
      report.merge_report.rewritten_nodes << schema_path(path)
      report.merge_report.checksum_updates << "#{schema_path(path)}/crc32"
    else
      # Changed opaque member: rewrite wholesale and recompute its CRC.
      decision(report, path, "rewrite", "requires_renderer", "member bytes or metadata changed")
      report.merge_report.checksum_updates << "#{schema_path(path)}/crc32"
    end
    report.merge_report.matched_schema_paths << schema_path(path)
  end
  # Any rewrite anywhere invalidates the central directory's size/offset.
  unless report.merge_report.rewritten_nodes.empty? && report.merge_report.checksum_updates.empty?
    report.merge_report.rewritten_nodes << "/central_directory"
    report.merge_report.checksum_updates.concat(["/central_directory/size", "/central_directory/offset"])
  end
  report
end
|
|
100
|
+
|
|
101
|
+
# Renders a merged ZIP archive, copying "preserve" members byte-for-byte
# from the source archive and re-encoding added/rewritten/delegated
# members from supplied content.
#
# @param source [String] original archive bytes (re-read as binary)
# @param plan [TreeHaver::ZipFamilyReport] output of plan_zip_merge
# @param member_bytes [Hash{String=>String}] new content keyed by
#   normalized path for add/rewrite/delegate members
# @param compression [Integer] method for re-encoded members: 0 (stored)
#   or 8 (deflate) only
# @return [Array(String, TreeHaver::ZipFamilyReport, Object)] the rendered
#   bytes, a fresh inventory of them, and the plan's updated merge report
# @raise [RenderError] on unsupported compression, rejected members, or
#   preserve-validation failures
def render_with_raw_preservation(source:, plan:, member_bytes: {}, compression: 0)
  raise render_error("unsupported_compression", "/render/options/compression", "unsupported raw-preserving compression method") unless [0, 8].include?(compression)

  source = source.b
  source_inventory = parse_zip_inventory(source)
  central = scan_central_directory(source)
  source_entries = entries_by_path(source_inventory.entries)
  raw_ranges = raw_local_record_ranges(source, source_entries)
  output = +"".b
  central_records = []
  entries = entries_by_path(plan.entries)
  plan.member_decisions.each do |member|
    entry = entries[member.normalized_path]
    case member.operation
    when "reject"
      # A plan containing rejects cannot be rendered at all.
      raise render_error("rejected_member", schema_path(member.normalized_path), member.reason)
    when "delete"
      # Deleted members are simply omitted from the output.
      next
    when "preserve"
      # Copy the original local record bytes unchanged after checking the
      # member has no features the raw copier cannot reproduce.
      source_entry = source_entries.fetch(member.normalized_path)
      validate_raw_preserve_entry!(source, central, source_entry)
      range = raw_ranges.fetch(member.normalized_path)
      offset = output.bytesize
      output << source.byteslice(range.start_byte...range.end_byte)
      central_records << central_record_from_entry(source_entry, offset)
    when "add", "rewrite", "delegate"
      # Re-encode from caller-supplied bytes; KeyError here means the
      # caller failed to provide content for a planned member.
      content = member_bytes.fetch(member.normalized_path)
      rendered, record = rendered_local_record(entry, content.b, compression, output.bytesize)
      output << rendered
      central_records << record
    else
      raise "unsupported ZIP render operation #{member.operation.inspect}"
    end
  end
  # Append the central directory and the end-of-central-directory record.
  central_start = output.bytesize
  central_records.each { |record| output << central_directory_record(record) }
  central_size = output.bytesize - central_start
  output << eocd_record(central_records.length, central_size, central_start)
  # Re-inventory the rendered bytes to report what was actually produced.
  report = parse_zip_inventory(output)
  merge_report = plan.merge_report
  # Preserved ranges are restated in source-archive coordinates.
  merge_report.preserved_ranges = plan.member_decisions.filter_map { |member| raw_ranges[member.normalized_path] if member.operation == "preserve" }
  [output, report, merge_report]
end
|
|
144
|
+
|
|
145
|
+
# Builds a brand-new archive from a name => content hash, storing every
# member uncompressed (method 0). Members are laid out in sorted-name
# order, followed by the central directory and the EOCD record.
#
# @param entries [Hash{String=>String}] member path => member bytes
# @return [String] complete binary ZIP archive
def new_stored_zip(entries)
  buffer = +"".b
  records = entries.keys.sort.map do |name|
    content = entries[name].b
    local_bytes, record = rendered_local_record(path_entry(name, content), content, 0, buffer.bytesize)
    buffer << local_bytes
    record
  end
  directory_start = buffer.bytesize
  records.each { |record| buffer << central_directory_record(record) }
  directory_size = buffer.bytesize - directory_start
  buffer << eocd_record(records.length, directory_size, directory_start)
  buffer
end
|
|
158
|
+
|
|
159
|
+
# Fresh, all-empty merge report for the ZIP family — the starting point
# every planning/rendering path mutates in place.
#
# @return [TreeHaver::BinaryMergeReport]
def empty_report
  TreeHaver::BinaryMergeReport.new(
    format: "zip",
    schema: "zip.ksy",
    matched_schema_paths: [],
    preserved_ranges: [],
    rewritten_nodes: [],
    checksum_updates: [],
    nested_dispatches: [],
    diagnostics: []
  )
end
|
|
162
|
+
|
|
163
|
+
# Locates the end-of-central-directory (EOCD) record and parses every
# central directory entry.
#
# The EOCD is found by scanning backwards from the end of the archive,
# which tolerates a trailing archive comment. All multi-byte fields are
# little-endian ("V" = uint32, "v" = uint16); offsets follow the ZIP
# central-directory layout (fixed 46-byte header).
#
# @param source [String] binary archive bytes
# @return [Hash] :range (ByteRange covering the central directory),
#   :records (member name => parsed header fields),
#   :archive_comment (true when a trailing comment exists)
# @raise [RuntimeError] if no EOCD signature is found or a record at the
#   expected offset lacks the central-directory signature
def scan_central_directory(source)
  # Minimal EOCD is 22 bytes; start there and slide back over any comment.
  eocd = source.bytesize - 22
  eocd -= 1 while eocd >= 0 && source.byteslice(eocd, 4).unpack1("V") != EOCD
  raise "missing ZIP end of central directory" if eocd.negative?
  size = source.byteslice(eocd + 12, 4).unpack1("V")
  offset = source.byteslice(eocd + 16, 4).unpack1("V")
  comment_length = source.byteslice(eocd + 20, 2).unpack1("v")
  records = {}
  cursor = offset
  while cursor < offset + size
    raise "unexpected central directory record" unless source.byteslice(cursor, 4).unpack1("V") == CENTRAL
    name_len = source.byteslice(cursor + 28, 2).unpack1("v")
    extra_len = source.byteslice(cursor + 30, 2).unpack1("v")
    comment_len = source.byteslice(cursor + 32, 2).unpack1("v")
    name = source.byteslice(cursor + 46, name_len)
    records[name] = {
      # Fixed 46-byte header plus the three variable-length tails.
      range: TreeHaver::ByteRange.new(start_byte: cursor, end_byte: cursor + 46 + name_len + extra_len + comment_len),
      flags: source.byteslice(cursor + 8, 2).unpack1("v"),
      method: source.byteslice(cursor + 10, 2).unpack1("v"),
      crc32: source.byteslice(cursor + 16, 4).unpack1("V"),
      compressed_size: source.byteslice(cursor + 20, 4).unpack1("V"),
      uncompressed_size: source.byteslice(cursor + 24, 4).unpack1("V"),
      extra_length: extra_len,
      comment_length: comment_len,
      local_offset: source.byteslice(cursor + 42, 4).unpack1("V")
    }
    cursor = records[name][:range].end_byte
  end
  # NOTE(review): duplicate raw names silently overwrite earlier records
  # here; duplicate detection happens downstream in unsafe_entries on
  # normalized paths — confirm that is the intended division of labor.
  { range: TreeHaver::ByteRange.new(start_byte: offset, end_byte: offset + size), records: records, archive_comment: comment_length.positive? }
end
|
|
193
|
+
|
|
194
|
+
# Resolves each central-directory record's local file header and computes
# where its payload begins.
#
# @param source [String] binary archive bytes
# @param records [Hash] name => central record (needs :local_offset)
# @return [Hash] name => { data_start:, extra_length: }
# @raise [RuntimeError] when the bytes at a local offset lack the
#   local-file-header signature
def scan_local_headers(source, records)
  records.transform_values do |record|
    offset = record[:local_offset]
    signature = source.byteslice(offset, 4).unpack1("V")
    raise "unexpected ZIP local header" unless signature == LOCAL
    # Name and extra lengths sit at bytes 26 and 28 of the 30-byte header.
    name_length, extra_length = source.byteslice(offset + 26, 4).unpack("vv")
    {
      data_start: offset + 30 + name_length + extra_length,
      extra_length: extra_length
    }
  end
end
|
|
203
|
+
|
|
204
|
+
# Guards raw byte-for-byte preservation of a member: raises a RenderError
# for any archive/member feature the raw copier cannot faithfully carry
# over (archive comments, encryption, exotic compression, extra fields,
# member comments).
#
# @param source [String] binary archive bytes (for the local extra field)
# @param central [Hash] result of scan_central_directory
# @param entry [TreeHaver::ZipArchiveEntry] member being preserved
# @raise [RenderError] with a category-specific diagnostic on the first
#   unsupported feature found
def validate_raw_preserve_entry!(source, central, entry)
  raise render_error("archive_comment", "/archive/comment", "raw-preserving ZIP renderer does not yet preserve archive comments") if central[:archive_comment]
  record = central[:records].fetch(entry.path)
  # General-purpose bit 0 marks an encrypted member.
  raise render_error("encrypted_member", schema_path(entry.normalized_path), "raw-preserving ZIP renderer rejects encrypted member #{entry.normalized_path}") unless (record[:flags] & 0x1).zero?
  # Only stored (0) and deflate (8) members can be copied verbatim.
  raise render_error("unsupported_compression", schema_path(entry.normalized_path), "raw-preserving ZIP renderer rejects unsupported compression #{entry.compression.inspect}") unless [0, 8].include?(record[:method])
  raise render_error("central_directory_extra_field", schema_path(entry.normalized_path), "raw-preserving ZIP renderer does not yet preserve central-directory extra fields for #{entry.normalized_path}") unless record[:extra_length].zero?
  raise render_error("member_comment", schema_path(entry.normalized_path), "raw-preserving ZIP renderer does not yet preserve member comments for #{entry.normalized_path}") unless record[:comment_length].zero?
  # Local header's extra-field length lives at byte 28 of the header.
  local_extra = source.byteslice(entry.local_header_range.start_byte + 28, 2).unpack1("v")
  raise render_error("local_header_extra_field", schema_path(entry.normalized_path), "raw-preserving ZIP renderer does not yet preserve local extra fields for #{entry.normalized_path}") unless local_extra.zero?
end
|
|
214
|
+
|
|
215
|
+
# Flags entries the default provider refuses to handle. A single entry
# can yield multiple findings. Categories produced: path_traversal,
# duplicate_normalized_path (two distinct raw paths normalizing to the
# same path), encrypted_member (general-purpose bit 0 set), and
# signing_sensitive_member (JAR-style signature files under META-INF/).
#
# @param entries [Array<TreeHaver::ZipArchiveEntry>] parsed members
# @param records [Hash] raw path => central record (for the :flags field)
# @return [Array<TreeHaver::ZipUnsafeEntry>] findings in entry order
def unsafe_entries(entries, records)
  # Tracks the first raw path seen for each normalized path so later
  # colliding entries can be flagged.
  seen = {}
  entries.flat_map do |entry|
    list = []
    list << TreeHaver::ZipUnsafeEntry.new(path: entry.path, normalized_path: entry.normalized_path, category: "path_traversal", reason: "entry escapes the archive root") if escapes_root?(entry.path)
    list << TreeHaver::ZipUnsafeEntry.new(path: entry.path, normalized_path: entry.normalized_path, category: "duplicate_normalized_path", reason: "normalized path collides with an existing entry") if seen[entry.normalized_path] && seen[entry.normalized_path] != entry.path
    list << TreeHaver::ZipUnsafeEntry.new(path: entry.path, normalized_path: entry.normalized_path, category: "encrypted_member", reason: "encrypted member cannot be rendered by the default provider") unless (records[entry.path][:flags] & 0x1).zero?
    list << TreeHaver::ZipUnsafeEntry.new(path: entry.path, normalized_path: entry.normalized_path, category: "signing_sensitive_member", reason: "signature-bearing member mutation is not enabled") if signing_sensitive?(entry.normalized_path)
    seen[entry.normalized_path] = entry.path
    list
  end
end
|
|
227
|
+
|
|
228
|
+
# Encodes one member as a local-file-header + payload byte string and the
# matching central-directory record fields.
#
# Fix: ZIP members must carry a *raw* deflate stream (RFC 1951).
# Zlib::Deflate.deflate produces a zlib-wrapped stream (RFC 1950 header
# plus adler32 trailer), which standard ZIP readers cannot decompress.
# Passing a negative window size to Zlib::Deflate.new makes zlib omit the
# wrapper.
#
# @param entry [#path] member whose path names the record
# @param content [String] uncompressed member bytes (binary)
# @param method [Integer] 0 (stored) or 8 (deflate)
# @param offset [Integer] byte offset of this local record in the output
# @return [Array(String, Hash)] rendered local record bytes and the
#   central-directory fields (:name, :method, :crc32, :compressed_size,
#   :uncompressed_size, :offset, :flags)
def rendered_local_record(entry, content, method, offset)
  payload =
    if method == 8
      deflater = Zlib::Deflate.new(Zlib::DEFAULT_COMPRESSION, -Zlib::MAX_WBITS)
      deflated = deflater.deflate(content, Zlib::FINISH)
      deflater.close
      deflated
    else
      content
    end
  # CRC-32 is always computed over the uncompressed bytes.
  crc = Zlib.crc32(content)
  header = [LOCAL, 20, 0, method].pack("Vvvv") + DOS_EPOCH + [crc, payload.bytesize, content.bytesize, entry.path.bytesize, 0].pack("VVVvv") + entry.path
  [header + payload, { name: entry.path, method: method, crc32: crc, compressed_size: payload.bytesize, uncompressed_size: content.bytesize, offset: offset, flags: 0 }]
end
|
|
234
|
+
|
|
235
|
+
# Serializes one central-directory record (fixed 46-byte header plus the
# member name; no extra field or comment is emitted).
#
# @param record [Hash] fields produced by rendered_local_record or
#   central_record_from_entry
# @return [String] binary central-directory record
def central_directory_record(record)
  prefix = [CENTRAL, 20, 20, record[:flags], record[:method]].pack("Vvvvv")
  body = [record[:crc32], record[:compressed_size], record[:uncompressed_size], record[:name].bytesize, 0, 0, 0, 0, 0, record[:offset]].pack("VVVvvvvvVV")
  prefix + DOS_EPOCH + body + record[:name]
end
|
|
238
|
+
|
|
239
|
+
# Serializes the 22-byte end-of-central-directory record (single-disk
# archive, no comment).
#
# @param entries [Integer] total member count
# @param size [Integer] central directory size in bytes
# @param offset [Integer] byte offset where the central directory starts
# @return [String] binary EOCD record
def eocd_record(entries, size, offset)
  fields = [EOCD, 0, 0, entries, entries, size, offset, 0]
  fields.pack("VvvvvVVv")
end
|
|
242
|
+
|
|
243
|
+
# Computes, per member, the byte range of its complete local record
# (header + payload + any trailing data such as a data descriptor). Each
# record ends where the next local record begins; the last record ends at
# the start of the central directory.
#
# @param source [String] binary archive bytes (unused directly; kept for
#   interface symmetry with the other scanners)
# @param entries [Hash] normalized path => ZipArchiveEntry
# @return [Hash] normalized path => TreeHaver::ByteRange
def raw_local_record_ranges(source, entries)
  ordered = entries.values.sort_by { |candidate| candidate.local_header_range.start_byte }
  result = {}
  ordered.each_with_index do |entry, index|
    successor = ordered[index + 1]
    stop = successor ? successor.local_header_range.start_byte : entry.central_directory_range.start_byte
    result[entry.normalized_path] = TreeHaver::ByteRange.new(start_byte: entry.local_header_range.start_byte, end_byte: stop)
  end
  result
end
|
|
250
|
+
|
|
251
|
+
# Rebuilds central-directory record fields for a preserved member at its
# new offset in the output archive. The hex CRC string from the inventory
# is converted back to an integer.
#
# @param entry [TreeHaver::ZipArchiveEntry] preserved member
# @param offset [Integer] new local-record offset in the output
# @return [Hash] fields consumable by central_directory_record
def central_record_from_entry(entry, offset)
  method = (entry.compression == "deflate") ? 8 : 0
  {
    name: entry.path,
    method: method,
    crc32: entry.crc32.to_i(16),
    compressed_size: entry.compressed_size,
    uncompressed_size: entry.uncompressed_size,
    offset: offset,
    flags: 0
  }
end
|
|
254
|
+
|
|
255
|
+
# Wraps an in-memory (name, content) pair as a stored ZipArchiveEntry.
# All byte ranges are zero placeholders because the entry has not yet
# been laid out in an archive; sizes and CRC come from the content.
#
# @param name [String] member path inside the archive
# @param content [String] member bytes
# @return [TreeHaver::ZipArchiveEntry]
def path_entry(name, content)
  TreeHaver::ZipArchiveEntry.new(path: name, normalized_path: normalize_zip_path(name), directory: name.end_with?("/"), compression: "stored", compressed_size: content.bytesize, uncompressed_size: content.bytesize, crc32: "%08x" % Zlib.crc32(content), local_header_range: TreeHaver::ByteRange.new(start_byte: 0, end_byte: 0), data_range: TreeHaver::ByteRange.new(start_byte: 0, end_byte: 0), central_directory_range: TreeHaver::ByteRange.new(start_byte: 0, end_byte: 0))
end
|
|
258
|
+
|
|
259
|
+
# Records a member decision on the report and marks the member's schema
# path as rewritten (all callers pass operations that force a rewrite).
#
# @param report [TreeHaver::ZipFamilyReport] report being built
# @param path [String] normalized member path
# @param operation [String] e.g. "add", "delete", "rewrite"
# @param disposition [String] e.g. "requires_renderer"
# @param reason [String] human-readable justification
def decision(report, path, operation, disposition, reason)
  member = TreeHaver::ZipMemberDecision.new(
    normalized_path: path,
    operation: operation,
    disposition: disposition,
    reason: reason
  )
  report.member_decisions << member
  report.merge_report.rewritten_nodes << schema_path(path)
end
|
|
263
|
+
|
|
264
|
+
# Canonicalizes a ZIP member path: backslashes become slashes, "." segments
# are dropped, and ".." pops the previous segment (popping past the root
# is silently ignored here — escapes_root? flags that case separately).
#
# @param path [String] raw member path
# @return [String] normalized path joined with "/"
def normalize_zip_path(path)
  stack = []
  path.tr("\\", "/").split("/").each do |segment|
    next if segment == "."
    if segment == ".."
      stack.pop
    else
      stack << segment
    end
  end
  stack.join("/")
end
|
|
267
|
+
|
|
268
|
+
# Maps a numeric ZIP compression method to a human-readable name.
# Unknown methods fall back to "method-<n>".
#
# @param method [Integer] ZIP compression method id
# @return [String]
def compression_name(method)
  names = { 0 => "stored", 8 => "deflate" }
  names.fetch(method) { "method-#{method}" }
end
|
|
275
|
+
|
|
276
|
+
# True when a member path would resolve outside the archive root: either
# an absolute path, or ".." segments that at any point outnumber the
# preceding real segments. "." segments are neutral.
#
# @param path [String] raw member path
# @return [Boolean]
def escapes_root?(path)
  return true if path.start_with?("/")
  depth = 0
  path.tr("\\", "/").split("/").each do |segment|
    next if segment == "."
    depth += (segment == ".." ? -1 : 1)
    return true if depth.negative?
  end
  false
end
|
|
279
|
+
|
|
280
|
+
# True for JAR-style signature members under META-INF/ (.RSA, .DSA, .EC,
# .SF), matched case-insensitively. Mutating these would invalidate the
# archive's signature, so they are treated as unsafe.
#
# Fix: the original recomputed path.upcase for every suffix probe (up to
# five allocations per call); the upcased path is now computed once.
#
# @param path [String] normalized member path
# @return [Boolean]
def signing_sensitive?(path)
  upcased = path.upcase
  upcased.start_with?("META-INF/") && [".RSA", ".DSA", ".EC", ".SF"].any? { |suffix| upcased.end_with?(suffix) }
end
|
|
283
|
+
|
|
284
|
+
# True when two inventory entries describe byte-identical members: same
# raw path, compression, sizes, and CRC. Falsy when either side is nil
# (nil propagates through the &&-chain, matching the original contract).
#
# @param left [TreeHaver::ZipArchiveEntry, nil]
# @param right [TreeHaver::ZipArchiveEntry, nil]
# @return [Boolean, nil] truthy only when every compared field matches
def same_entry?(left, right)
  left && right && %i[path compression compressed_size uncompressed_size crc32].all? do |attribute|
    left.public_send(attribute) == right.public_send(attribute)
  end
end
|
|
287
|
+
|
|
288
|
+
# Indexes inventory entries by normalized path. Later entries with the
# same normalized path overwrite earlier ones, mirroring Hash semantics.
#
# @param entries [Array<TreeHaver::ZipArchiveEntry>]
# @return [Hash{String=>TreeHaver::ZipArchiveEntry}]
def entries_by_path(entries)
  entries.each_with_object({}) { |entry, index| index[entry.normalized_path] = entry }
end
|
|
291
|
+
|
|
292
|
+
# Identifies the nested merge family for a structured member based on its
# file extension, or nil for opaque members.
#
# Fix: extension matching was inconsistent — markdown and yaml used
# case-insensitive regexes while json and xml used case-sensitive
# end_with?, so "A.JSON" was treated as opaque but "A.YAML" was not.
# All four families now match case-insensitively.
#
# @param path [String] normalized member path
# @return [String, nil] "markdown", "json", "yaml", "xml", or nil
def nested_family(path)
  case path
  when /\.m(?:d|arkdown)\z/i then "markdown"
  when /\.json\z/i then "json"
  when /\.ya?ml\z/i then "yaml"
  when /\.xml\z/i then "xml"
  end
end
|
|
298
|
+
|
|
299
|
+
# Schema-path addressing a member within the archive report, used in
# decisions, diagnostics, and nested dispatches.
#
# @param path [String] normalized member path
# @return [String] "/entries/by_path/<path>"
def schema_path(path)
  format("/entries/by_path/%s", path)
end
|
|
302
|
+
|
|
303
|
+
# Builds an error-severity diagnostic for the merge report.
#
# @param category [String] machine-readable failure category
# @param schema_path [String] schema path the diagnostic refers to
# @param message [String] human-readable description
# @return [TreeHaver::BinaryDiagnostic]
def diagnostic(category, schema_path, message)
  TreeHaver::BinaryDiagnostic.new(
    severity: "error",
    category: category,
    message: message,
    schema_path: schema_path
  )
end
|
|
306
|
+
|
|
307
|
+
# Builds a RenderError carrying a structured diagnostic. Returned, not
# raised — callers raise the result themselves.
#
# @param category [String] machine-readable failure category
# @param schema_path [String] schema path the failure refers to
# @param message [String] human-readable description
# @return [RenderError]
def render_error(category, schema_path, message)
  detail = diagnostic(category, schema_path, message)
  RenderError.new(detail)
end
|
|
310
|
+
end
|
|
311
|
+
end
|
data/lib/zip-merge.rb
ADDED
data.tar.gz.sig
ADDED
|
Binary file
|
metadata
ADDED
|
@@ -0,0 +1,96 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: zip-merge
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
version: 7.0.0
|
|
5
|
+
platform: ruby
|
|
6
|
+
authors:
|
|
7
|
+
- Peter H. Boling
|
|
8
|
+
bindir: bin
|
|
9
|
+
cert_chain:
|
|
10
|
+
- |
|
|
11
|
+
-----BEGIN CERTIFICATE-----
|
|
12
|
+
MIIEgDCCAuigAwIBAgIBATANBgkqhkiG9w0BAQsFADBDMRUwEwYDVQQDDAxwZXRl
|
|
13
|
+
ci5ib2xpbmcxFTATBgoJkiaJk/IsZAEZFgVnbWFpbDETMBEGCgmSJomT8ixkARkW
|
|
14
|
+
A2NvbTAeFw0yNTA1MDQxNTMzMDlaFw00NTA0MjkxNTMzMDlaMEMxFTATBgNVBAMM
|
|
15
|
+
DHBldGVyLmJvbGluZzEVMBMGCgmSJomT8ixkARkWBWdtYWlsMRMwEQYKCZImiZPy
|
|
16
|
+
LGQBGRYDY29tMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAruUoo0WA
|
|
17
|
+
uoNuq6puKWYeRYiZekz/nsDeK5x/0IEirzcCEvaHr3Bmz7rjo1I6On3gGKmiZs61
|
|
18
|
+
LRmQ3oxy77ydmkGTXBjruJB+pQEn7UfLSgQ0xa1/X3kdBZt6RmabFlBxnHkoaGY5
|
|
19
|
+
mZuZ5+Z7walmv6sFD9ajhzj+oIgwWfnEHkXYTR8I6VLN7MRRKGMPoZ/yvOmxb2DN
|
|
20
|
+
coEEHWKO9CvgYpW7asIihl/9GMpKiRkcYPm9dGQzZc6uTwom1COfW0+ZOFrDVBuV
|
|
21
|
+
FMQRPswZcY4Wlq0uEBLPU7hxnCL9nKK6Y9IhdDcz1mY6HZ91WImNslOSI0S8hRpj
|
|
22
|
+
yGOWxQIhBT3fqCBlRIqFQBudrnD9jSNpSGsFvbEijd5ns7Z9ZMehXkXDycpGAUj1
|
|
23
|
+
to/5cuTWWw1JqUWrKJYoifnVhtE1o1DZ+LkPtWxHtz5kjDG/zR3MG0Ula0UOavlD
|
|
24
|
+
qbnbcXPBnwXtTFeZ3C+yrWpE4pGnl3yGkZj9SMTlo9qnTMiPmuWKQDatAgMBAAGj
|
|
25
|
+
fzB9MAkGA1UdEwQCMAAwCwYDVR0PBAQDAgSwMB0GA1UdDgQWBBQE8uWvNbPVNRXZ
|
|
26
|
+
HlgPbc2PCzC4bjAhBgNVHREEGjAYgRZwZXRlci5ib2xpbmdAZ21haWwuY29tMCEG
|
|
27
|
+
A1UdEgQaMBiBFnBldGVyLmJvbGluZ0BnbWFpbC5jb20wDQYJKoZIhvcNAQELBQAD
|
|
28
|
+
ggGBAJbnUwfJQFPkBgH9cL7hoBfRtmWiCvdqdjeTmi04u8zVNCUox0A4gT982DE9
|
|
29
|
+
wmuN12LpdajxZONqbXuzZvc+nb0StFwmFYZG6iDwaf4BPywm2e/Vmq0YG45vZXGR
|
|
30
|
+
L8yMDSK1cQXjmA+ZBKOHKWavxP6Vp7lWvjAhz8RFwqF9GuNIdhv9NpnCAWcMZtpm
|
|
31
|
+
GUPyIWw/Cw/2wZp74QzZj6Npx+LdXoLTF1HMSJXZ7/pkxLCsB8m4EFVdb/IrW/0k
|
|
32
|
+
kNSfjtAfBHO8nLGuqQZVH9IBD1i9K6aSs7pT6TW8itXUIlkIUI2tg5YzW6OFfPzq
|
|
33
|
+
QekSkX3lZfY+HTSp/o+YvKkqWLUV7PQ7xh1ZYDtocpaHwgxe/j3bBqHE+CUPH2vA
|
|
34
|
+
0V/FwdTRWcwsjVoOJTrYcff8pBZ8r2MvtAc54xfnnhGFzeRHfcltobgFxkAXdE6p
|
|
35
|
+
DVjBtqT23eugOqQ73umLcYDZkc36vnqGxUBSsXrzY9pzV5gGr2I8YUxMqf6ATrZt
|
|
36
|
+
L9nRqA==
|
|
37
|
+
-----END CERTIFICATE-----
|
|
38
|
+
date: 1980-01-02 00:00:00.000000000 Z
|
|
39
|
+
dependencies:
|
|
40
|
+
- !ruby/object:Gem::Dependency
|
|
41
|
+
name: tree_haver
|
|
42
|
+
requirement: !ruby/object:Gem::Requirement
|
|
43
|
+
requirements:
|
|
44
|
+
- - '='
|
|
45
|
+
- !ruby/object:Gem::Version
|
|
46
|
+
version: 7.0.0
|
|
47
|
+
type: :runtime
|
|
48
|
+
prerelease: false
|
|
49
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
50
|
+
requirements:
|
|
51
|
+
- - '='
|
|
52
|
+
- !ruby/object:Gem::Version
|
|
53
|
+
version: 7.0.0
|
|
54
|
+
description: Portable ZIP inventory, planning, nested dispatch, and raw-preservation
|
|
55
|
+
rendering helpers for Structured Merge.
|
|
56
|
+
email:
|
|
57
|
+
- info@structuredmerge.org
|
|
58
|
+
executables: []
|
|
59
|
+
extensions: []
|
|
60
|
+
extra_rdoc_files: []
|
|
61
|
+
files:
|
|
62
|
+
- lib/zip-merge.rb
|
|
63
|
+
- lib/zip/merge.rb
|
|
64
|
+
- lib/zip/merge/version.rb
|
|
65
|
+
homepage: https://github.com/structuredmerge/structuredmerge-ruby
|
|
66
|
+
licenses:
|
|
67
|
+
- AGPL-3.0-only
|
|
68
|
+
- PolyForm-Small-Business-1.0.0
|
|
69
|
+
metadata:
|
|
70
|
+
homepage_uri: https://structuredmerge.org
|
|
71
|
+
source_code_uri: https://github.com/structuredmerge/structuredmerge-ruby/tree/v7.0.0
|
|
72
|
+
changelog_uri: https://github.com/structuredmerge/structuredmerge-ruby/blob/v7.0.0/CHANGELOG.md
|
|
73
|
+
bug_tracker_uri: https://github.com/structuredmerge/structuredmerge-ruby/issues
|
|
74
|
+
documentation_uri: https://www.rubydoc.info/gems/zip-merge/7.0.0
|
|
75
|
+
funding_uri: https://github.com/sponsors/pboling
|
|
76
|
+
wiki_uri: https://github.com/structuredmerge/structuredmerge-ruby/wiki
|
|
77
|
+
discord_uri: https://discord.gg/3qme4XHNKN
|
|
78
|
+
rubygems_mfa_required: 'true'
|
|
79
|
+
rdoc_options: []
|
|
80
|
+
require_paths:
|
|
81
|
+
- lib
|
|
82
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
83
|
+
requirements:
|
|
84
|
+
- - ">="
|
|
85
|
+
- !ruby/object:Gem::Version
|
|
86
|
+
version: 4.0.0
|
|
87
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
88
|
+
requirements:
|
|
89
|
+
- - ">="
|
|
90
|
+
- !ruby/object:Gem::Version
|
|
91
|
+
version: '0'
|
|
92
|
+
requirements: []
|
|
93
|
+
rubygems_version: 4.0.10
|
|
94
|
+
specification_version: 4
|
|
95
|
+
summary: Structured Merge ZIP merge planning and rendering helpers for Ruby
|
|
96
|
+
test_files: []
|
metadata.gz.sig
ADDED
|
Binary file
|