gitki 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/ChangeLog +0 -0
- data/README.rdoc +74 -0
- data/Rakefile +59 -0
- data/app.rb +90 -0
- data/bin/gitki +3 -0
- data/bin/gitki.ru +13 -0
- data/config.ru +3 -0
- data/console +13 -0
- data/lib/gitki.png +0 -0
- data/lib/gitki.rb +116 -0
- data/lib/home_template.haml +17 -0
- data/lib/navigation_template.haml +4 -0
- data/public/background.png +0 -0
- data/public/favicon.ico +0 -0
- data/setting.yml +3 -0
- data/spec/gitki_spec.rb +104 -0
- data/vendor/git_store/LICENSE +18 -0
- data/vendor/git_store/README.md +147 -0
- data/vendor/git_store/Rakefile +35 -0
- data/vendor/git_store/TODO +3 -0
- data/vendor/git_store/git_store.gemspec +40 -0
- data/vendor/git_store/lib/git_store.rb +373 -0
- data/vendor/git_store/lib/git_store/blob.rb +32 -0
- data/vendor/git_store/lib/git_store/commit.rb +65 -0
- data/vendor/git_store/lib/git_store/diff.rb +76 -0
- data/vendor/git_store/lib/git_store/handlers.rb +36 -0
- data/vendor/git_store/lib/git_store/pack.rb +425 -0
- data/vendor/git_store/lib/git_store/tag.rb +40 -0
- data/vendor/git_store/lib/git_store/tree.rb +183 -0
- data/vendor/git_store/lib/git_store/user.rb +29 -0
- data/vendor/git_store/test/bare_store_spec.rb +33 -0
- data/vendor/git_store/test/benchmark.rb +30 -0
- data/vendor/git_store/test/commit_spec.rb +81 -0
- data/vendor/git_store/test/git_store_spec.rb +257 -0
- data/vendor/git_store/test/helper.rb +18 -0
- data/vendor/git_store/test/tree_spec.rb +92 -0
- data/views/layout.haml +23 -0
- data/views/page.haml +7 -0
- data/views/pages.haml +9 -0
- data/views/styles.sass +87 -0
- metadata +103 -0
data/vendor/git_store/lib/git_store/blob.rb
@@ -0,0 +1,32 @@
+class GitStore
+
+  # This class stores the raw string data of a blob, but also the
+  # deserialized data object.
+  class Blob
+
+    attr_accessor :store, :id, :data, :mode, :object
+
+    # Initialize a Blob
+    def initialize(store, id = nil, data = nil)
+      @store = store
+      @id = id || store.id_for('blob', data)
+      @data = data
+      @mode = "100644"
+    end
+
+    def ==(other)
+      Blob === other and id == other.id
+    end
+
+    def dump
+      @data
+    end
+
+    # Write the data to the git object store
+    def write
+      @id = store.put(self)
+    end
+
+  end
+
+end
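For orientation, a minimal sketch of how the Blob class above could be exercised on its own. The real backing object is GitStore (see git_store.rb, not reproduced here); FakeStore below is a hypothetical stand-in that implements only the two calls Blob makes, id_for and put:

require 'digest/sha1'
require 'git_store/blob'   # assumes the vendored lib directory is on the load path

# Hypothetical stand-in for GitStore: id_for mimics git's blob hashing,
# put just returns an id instead of writing to .git/objects.
class FakeStore
  def id_for(type, data)
    Digest::SHA1.hexdigest("#{type} #{data.length}\0#{data}")
  end

  def put(obj)
    id_for('blob', obj.data)
  end
end

blob = GitStore::Blob.new(FakeStore.new, nil, "hello gitki\n")
puts blob.id     # sha1 the store derived for the data
puts blob.dump   # => "hello gitki\n"
blob.write       # hands the blob back to the store's put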
data/vendor/git_store/lib/git_store/commit.rb
@@ -0,0 +1,65 @@
+class GitStore
+
+  class Commit
+    attr_accessor :store, :id, :tree, :parent, :author, :committer, :message
+
+    def initialize(store, id = nil, data = nil)
+      @store = store
+      @id = id
+      @parent = []
+
+      parse(data) if data
+    end
+
+    def ==(other)
+      Commit === other and id == other.id
+    end
+
+    def parse(data)
+      headers, @message = data.split(/\n\n/, 2)
+
+      headers.split(/\n/).each do |header|
+        key, value = header.split(/ /, 2)
+        case key
+        when 'parent'
+          @parent << value
+
+        when 'author'
+          @author = User.parse(value)
+
+        when 'committer'
+          @committer = User.parse(value)
+
+        when 'tree'
+          @tree = store.get(value)
+        end
+      end
+
+      self
+    end
+
+    def diff(commit, path = nil)
+      commit = commit.id if Commit === commit
+      Diff.exec(store, "git diff --full-index #{commit} #{id} -- #{path}")
+    end
+
+    def diffs(path = nil)
+      diff(parent.first, path)
+    end
+
+    def write
+      @id = store.put(self)
+    end
+
+    def dump
+      [ "tree #{ tree.id }",
+        parent.map { |parent| "parent #{parent}" },
+        "author #{ author.dump }",
+        "committer #{ committer.dump }",
+        '',
+        message ].flatten.join("\n")
+    end
+
+  end
+
+end
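As a reference point for Commit#parse above, a minimal sketch of the raw commit text it expects and of the header/message split it performs. The shas and identity below are hypothetical placeholders; the real method additionally resolves the tree via store.get and the author/committer via User.parse:

raw = "tree #{'a' * 40}\n" \
      "parent #{'b' * 40}\n" \
      "author Example Author <author@example.com> 1234567890 +0000\n" \
      "committer Example Author <author@example.com> 1234567890 +0000\n" \
      "\n" \
      "first page\n"

headers, message = raw.split(/\n\n/, 2)   # split at the first blank line
headers.split(/\n/).each do |header|
  key, value = header.split(/ /, 2)
  puts "#{key}: #{value}"                 # tree, parent, author, committer
end
puts message                              # => "first page\n"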
data/vendor/git_store/lib/git_store/diff.rb
@@ -0,0 +1,76 @@
+class GitStore
+
+  # adapted from Grit
+  class Diff
+    attr_reader :store
+    attr_reader :a_path, :b_path
+    attr_reader :a_blob, :b_blob
+    attr_reader :a_mode, :b_mode
+    attr_reader :new_file, :deleted_file
+    attr_reader :diff
+
+    def initialize(store, a_path, b_path, a_blob, b_blob, a_mode, b_mode, new_file, deleted_file, diff)
+      @store = store
+      @a_path = a_path
+      @b_path = b_path
+      @a_blob = a_blob =~ /^0{40}$/ ? nil : store.get(a_blob)
+      @b_blob = b_blob =~ /^0{40}$/ ? nil : store.get(b_blob)
+      @a_mode = a_mode
+      @b_mode = b_mode
+      @new_file = new_file
+      @deleted_file = deleted_file
+      @diff = diff
+    end
+
+    def self.exec(store, cmd)
+      list(store, IO.popen(cmd) { |io| io.read })
+    end
+
+    def self.list(store, text)
+      lines = text.split("\n")
+
+      diffs = []
+
+      while !lines.empty?
+        m, a_path, b_path = *lines.shift.match(%r{^diff --git a/(.+?) b/(.+)$})
+
+        if lines.first =~ /^old mode/
+          m, a_mode = *lines.shift.match(/^old mode (\d+)/)
+          m, b_mode = *lines.shift.match(/^new mode (\d+)/)
+        end
+
+        if lines.empty? || lines.first =~ /^diff --git/
+          diffs << Diff.new(store, a_path, b_path, nil, nil, a_mode, b_mode, false, false, nil)
+          next
+        end
+
+        new_file = false
+        deleted_file = false
+
+        if lines.first =~ /^new file/
+          m, b_mode = lines.shift.match(/^new file mode (.+)$/)
+          a_mode = nil
+          new_file = true
+        elsif lines.first =~ /^deleted file/
+          m, a_mode = lines.shift.match(/^deleted file mode (.+)$/)
+          b_mode = nil
+          deleted_file = true
+        end
+
+        m, a_blob, b_blob, b_mode = *lines.shift.match(%r{^index ([0-9A-Fa-f]+)\.\.([0-9A-Fa-f]+) ?(.+)?$})
+        b_mode.strip! if b_mode
+
+        diff_lines = []
+        while lines.first && lines.first !~ /^diff/
+          diff_lines << lines.shift
+        end
+        diff = diff_lines.join("\n")
+
+        diffs << Diff.new(store, a_path, b_path, a_blob, b_blob, a_mode, b_mode, new_file, deleted_file, diff)
+      end
+
+      diffs
+    end
+  end
+
+end
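A minimal sketch of feeding Diff.list above a hand-written `git diff --full-index` fragment. StubStore is a hypothetical stand-in, since Diff#initialize calls store.get for any blob id that is not forty zeros; the sha below is a placeholder:

class StubStore
  def get(sha)
    sha   # echo the id instead of loading the real blob
  end
end

sample = <<DIFF
diff --git a/home b/home
new file mode 100644
index 0000000000000000000000000000000000000000..#{'a' * 40}
--- /dev/null
+++ b/home
@@ -0,0 +1 @@
+Welcome to gitki
DIFF

d = GitStore::Diff.list(StubStore.new, sample).first
puts d.a_path      # => "home"
puts d.new_file    # => true
puts d.diff        # the collected patch body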
data/vendor/git_store/lib/git_store/handlers.rb
@@ -0,0 +1,36 @@
+
+# This fix ensures sorted yaml maps.
+class Hash
+  def to_yaml( opts = {} )
+    YAML::quick_emit( object_id, opts ) do |out|
+      out.map( taguri, to_yaml_style ) do |map|
+        sort_by { |k, v| k.to_s }.each do |k, v|
+          map.add( k, v )
+        end
+      end
+    end
+  end
+end
+
+class GitStore
+
+  class DefaultHandler
+    def read(data)
+      data
+    end
+
+    def write(data)
+      data.to_s
+    end
+  end
+
+  class YAMLHandler
+    def read(data)
+      YAML.load(data)
+    end
+
+    def write(data)
+      data.to_yaml
+    end
+  end
+end
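A minimal sketch of the two handlers above in use. The Hash#to_yaml patch targets the old Syck engine (YAML::quick_emit and taguri); under a modern Psych-based Ruby the patched method would fail because quick_emit no longer exists, so this sketch assumes the 1.8-era YAML environment the gem was written for:

require 'yaml'

default = GitStore::DefaultHandler.new
puts default.write(42)               # => "42"  (everything is stored as a string)
puts default.read("raw page body")   # => "raw page body"

yamlh  = GitStore::YAMLHandler.new
dumped = yamlh.write('title' => 'Home', 'author' => 'gitki')
puts dumped                          # YAML map, key-sorted by the Hash#to_yaml patch
p yamlh.read(dumped)                 # => {"author"=>"gitki", "title"=>"Home"}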
data/vendor/git_store/lib/git_store/pack.rb
@@ -0,0 +1,425 @@
+#
+# converted from the gitrb project
+#
+# authors:
+#    Matthias Lederhofer <matled@gmx.net>
+#    Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
+#    Scott Chacon <schacon@gmail.com>
+#
+# provides native ruby access to git objects and pack files
+#
+
+require 'zlib'
+
+class GitStore
+  PACK_SIGNATURE = "PACK"
+  PACK_IDX_SIGNATURE = "\377tOc"
+
+  OBJ_NONE = 0
+  OBJ_COMMIT = 1
+  OBJ_TREE = 2
+  OBJ_BLOB = 3
+  OBJ_TAG = 4
+
+  OBJ_TYPES = [nil, 'commit', 'tree', 'blob', 'tag'].freeze
+
+  class Mmap
+    def initialize(file, version = 1)
+      @file = file
+      @offset = nil
+      if version == 2
+        @global_offset = 8
+      else
+        @global_offset = 0
+      end
+    end
+
+    def unmap
+      @file = nil
+    end
+
+    def [](*idx)
+      idx = idx[0] if idx.length == 1
+      case idx
+      when Range
+        offset = idx.first
+        len = idx.last - idx.first + idx.exclude_end? ? 0 : 1
+      when Fixnum
+        offset = idx
+        len = nil
+      when Array
+        offset, len = idx
+      else
+        raise RuntimeError, "invalid index param: #{idx.class}"
+      end
+      if @offset != offset
+        @file.seek(offset + @global_offset)
+      end
+      @offset = offset + len ? len : 1
+      if not len
+        @file.read(1)[0]
+      else
+        @file.read(len)
+      end
+    end
+  end
+
+  class PackFormatError < StandardError
+  end
+
+  class PackStorage
+    OBJ_OFS_DELTA = 6
+    OBJ_REF_DELTA = 7
+
+    FanOutCount = 256
+    SHA1Size = 20
+    IdxOffsetSize = 4
+    OffsetSize = 4
+    CrcSize = 4
+    OffsetStart = FanOutCount * IdxOffsetSize
+    SHA1Start = OffsetStart + OffsetSize
+    EntrySize = OffsetSize + SHA1Size
+    EntrySizeV2 = SHA1Size + CrcSize + OffsetSize
+
+    def initialize(file)
+      if file =~ /\.idx$/
+        file = file[0...-3] + 'pack'
+      end
+      @name = file
+      @cache = {}
+      init_pack
+    end
+
+    def with_idx(index_file = nil)
+      if !index_file
+        index_file = @name
+        idxfile = File.open(@name[0...-4]+'idx')
+      else
+        idxfile = File.open(index_file)
+      end
+
+      # read header
+      sig = idxfile.read(4)
+      ver = idxfile.read(4).unpack("N")[0]
+
+      if sig == PACK_IDX_SIGNATURE
+        if(ver != 2)
+          raise PackFormatError, "pack #@name has unknown pack file version #{ver}"
+        end
+        @version = 2
+      else
+        @version = 1
+      end
+
+      idx = Mmap.new(idxfile, @version)
+      yield idx
+      idx.unmap
+      idxfile.close
+    end
+
+    def with_packfile
+      packfile = File.open(@name)
+      result = yield packfile
+      packfile.close
+
+      result
+    end
+
+    def cache_objects
+      @cache = {}
+      with_packfile do |packfile|
+        each_entry do |sha, offset|
+          data, type = unpack_object(packfile, offset, {:caching => true})
+          if data
+            @cache[sha] = [type, data]
+          end
+        end
+      end
+    end
+
+    def name
+      @name
+    end
+
+    def close
+      # shouldnt be anything open now
+    end
+
+    # given an index file, list out the shas that it's packfile contains
+    def get_shas
+      shas = []
+      each_sha1 { |sha| shas << sha.unpack("H*")[0] }
+      shas
+    end
+
+    def [](sha1)
+      if obj = @cache[sha1]
+        return obj
+      end
+
+      offset = find_object(sha1)
+      return nil if !offset
+      @cache[sha1] = obj = parse_object(offset)
+      return obj
+    end
+
+    def init_pack
+      with_idx do |idx|
+        @offsets = [0]
+        FanOutCount.times do |i|
+          pos = idx[i * IdxOffsetSize,IdxOffsetSize].unpack('N')[0]
+          if pos < @offsets[i]
+            raise PackFormatError, "pack #@name has discontinuous index #{i}"
+          end
+          @offsets << pos
+        end
+        @size = @offsets[-1]
+      end
+    end
+
+    def each_entry
+      with_idx do |idx|
+        if @version == 2
+          data = read_data_v2(idx)
+          data.each do |sha1, crc, offset|
+            yield sha1, offset
+          end
+        else
+          pos = OffsetStart
+          @size.times do
+            offset = idx[pos,OffsetSize].unpack('N')[0]
+            sha1 = idx[pos+OffsetSize,SHA1Size]
+            pos += EntrySize
+            yield sha1, offset
+          end
+        end
+      end
+    end
+
+    def read_data_v2(idx)
+      data = []
+      pos = OffsetStart
+      @size.times do |i|
+        data[i] = [idx[pos,SHA1Size], 0, 0]
+        pos += SHA1Size
+      end
+      @size.times do |i|
+        crc = idx[pos,CrcSize]
+        data[i][1] = crc
+        pos += CrcSize
+      end
+      @size.times do |i|
+        offset = idx[pos,OffsetSize].unpack('N')[0]
+        data[i][2] = offset
+        pos += OffsetSize
+      end
+      data
+    end
+    private :read_data_v2
+
+    def each_sha1
+      with_idx do |idx|
+        if @version == 2
+          data = read_data_v2(idx)
+          data.each do |sha1, crc, offset|
+            yield sha1
+          end
+        else
+          pos = SHA1Start
+          @size.times do
+            sha1 = idx[pos,SHA1Size]
+            pos += EntrySize
+            yield sha1
+          end
+        end
+      end
+    end
+
+    def find_object_in_index(idx, sha1)
+      slot = sha1[0]
+      return nil if !slot
+      first, last = @offsets[slot,2]
+      while first < last
+        mid = (first + last) / 2
+        if @version == 2
+          midsha1 = idx[OffsetStart + (mid * SHA1Size), SHA1Size]
+          cmp = midsha1 <=> sha1
+
+          if cmp < 0
+            first = mid + 1
+          elsif cmp > 0
+            last = mid
+          else
+            pos = OffsetStart + (@size * (SHA1Size + CrcSize)) + (mid * OffsetSize)
+            offset = idx[pos, OffsetSize].unpack('N')[0]
+            return offset
+          end
+        else
+          midsha1 = idx[SHA1Start + mid * EntrySize,SHA1Size]
+          cmp = midsha1 <=> sha1
+
+          if cmp < 0
+            first = mid + 1
+          elsif cmp > 0
+            last = mid
+          else
+            pos = OffsetStart + mid * EntrySize
+            offset = idx[pos,OffsetSize].unpack('N')[0]
+            return offset
+          end
+        end
+      end
+      nil
+    end
+
+    def find_object(sha1)
+      obj = nil
+      with_idx do |idx|
+        obj = find_object_in_index(idx, sha1)
+      end
+      obj
+    end
+    private :find_object
+
+    def parse_object(offset)
+      data, type = with_packfile do |packfile|
+        unpack_object(packfile, offset)
+      end
+
+      return data, OBJ_TYPES[type]
+    end
+
+    def unpack_object(packfile, offset, options = {})
+      obj_offset = offset
+      packfile.seek(offset)
+
+      c = packfile.read(1)[0]
+      size = c & 0xf
+      type = (c >> 4) & 7
+      shift = 4
+      offset += 1
+      while c & 0x80 != 0
+        c = packfile.read(1)[0]
+        size |= ((c & 0x7f) << shift)
+        shift += 7
+        offset += 1
+      end
+
+      return [false, false] if !(type == OBJ_COMMIT || type == OBJ_TREE) && options[:caching]
+
+      case type
+      when OBJ_OFS_DELTA, OBJ_REF_DELTA
+        data, type = unpack_deltified(packfile, type, offset, obj_offset, size, options)
+        #puts type
+      when OBJ_COMMIT, OBJ_TREE, OBJ_BLOB, OBJ_TAG
+        data = unpack_compressed(offset, size)
+      else
+        raise PackFormatError, "invalid type #{type}"
+      end
+      [data, type]
+    end
+    private :unpack_object
+
+    def unpack_deltified(packfile, type, offset, obj_offset, size, options = {})
+      packfile.seek(offset)
+      data = packfile.read(SHA1Size)
+
+      if type == OBJ_OFS_DELTA
+        i = 0
+        c = data[i]
+        base_offset = c & 0x7f
+        while c & 0x80 != 0
+          c = data[i += 1]
+          base_offset += 1
+          base_offset <<= 7
+          base_offset |= c & 0x7f
+        end
+        base_offset = obj_offset - base_offset
+        offset += i + 1
+      else
+        base_offset = find_object(data)
+        offset += SHA1Size
+      end
+
+      base, type = unpack_object(packfile, base_offset)
+
+      return [false, false] if !(type == OBJ_COMMIT || type == OBJ_TREE) && options[:caching]
+
+      delta = unpack_compressed(offset, size)
+      [patch_delta(base, delta), type]
+    end
+    private :unpack_deltified
+
+    def unpack_compressed(offset, destsize)
+      outdata = ""
+      with_packfile do |packfile|
+        packfile.seek(offset)
+        zstr = Zlib::Inflate.new
+        while outdata.size < destsize
+          indata = packfile.read(4096)
+          if indata.size == 0
+            raise PackFormatError, 'error reading pack data'
+          end
+          outdata += zstr.inflate(indata)
+        end
+        if outdata.size > destsize
+          raise PackFormatError, 'error reading pack data'
+        end
+        zstr.close
+      end
+      outdata
+    end
+    private :unpack_compressed
+
+    def patch_delta(base, delta)
+      src_size, pos = patch_delta_header_size(delta, 0)
+      if src_size != base.size
+        raise PackFormatError, 'invalid delta data'
+      end
+
+      dest_size, pos = patch_delta_header_size(delta, pos)
+      dest = ""
+      while pos < delta.size
+        c = delta[pos]
+        pos += 1
+        if c & 0x80 != 0
+          pos -= 1
+          cp_off = cp_size = 0
+          cp_off = delta[pos += 1] if c & 0x01 != 0
+          cp_off |= delta[pos += 1] << 8 if c & 0x02 != 0
+          cp_off |= delta[pos += 1] << 16 if c & 0x04 != 0
+          cp_off |= delta[pos += 1] << 24 if c & 0x08 != 0
+          cp_size = delta[pos += 1] if c & 0x10 != 0
+          cp_size |= delta[pos += 1] << 8 if c & 0x20 != 0
+          cp_size |= delta[pos += 1] << 16 if c & 0x40 != 0
+          cp_size = 0x10000 if cp_size == 0
+          pos += 1
+          dest += base[cp_off,cp_size]
+        elsif c != 0
+          dest += delta[pos,c]
+          pos += c
+        else
+          raise PackFormatError, 'invalid delta data'
+        end
+      end
+      dest
+    end
+    private :patch_delta
+
+    def patch_delta_header_size(delta, pos)
+      size = 0
+      shift = 0
+      begin
+        c = delta[pos]
+        if c == nil
+          raise PackFormatError, 'invalid delta header'
+        end
+        pos += 1
+        size |= (c & 0x7f) << shift
+        shift += 7
+      end while c & 0x80 != 0
+      [size, pos]
+    end
+    private :patch_delta_header_size
+  end
+end
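Finally, a minimal sketch of driving PackStorage above by hand. The pack path is hypothetical, and the code assumes the Ruby 1.8 semantics this file was written against (String#[] returning a byte Fixnum, the Fixnum constant, and so on):

# Point at an existing pack file; with_idx derives the matching .idx by swapping the extension.
pack = GitStore::PackStorage.new('/tmp/example-repo/.git/objects/pack/pack-1234.pack')

hex_shas = pack.get_shas                # hex ids of every object listed in the index
puts hex_shas.size

raw_sha = [hex_shas.first].pack('H*')   # the lookup key is the raw 20-byte sha
data, type = pack[raw_sha]              # deltified objects are resolved via patch_delta
puts type                               # => "commit", "tree", "blob" or "tag"
puts data.length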