git_store 0.3.1
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +5 -0
- data/LICENSE +18 -0
- data/README.md +147 -0
- data/Rakefile +35 -0
- data/git_store.gemspec +40 -0
- data/lib/git_store/blob.rb +32 -0
- data/lib/git_store/commit.rb +65 -0
- data/lib/git_store/diff.rb +76 -0
- data/lib/git_store/handlers.rb +36 -0
- data/lib/git_store/pack.rb +425 -0
- data/lib/git_store/tag.rb +40 -0
- data/lib/git_store/tree.rb +183 -0
- data/lib/git_store/user.rb +29 -0
- data/lib/git_store.rb +392 -0
- data/test/bare_store_spec.rb +33 -0
- data/test/benchmark.rb +30 -0
- data/test/commit_spec.rb +81 -0
- data/test/git_store_spec.rb +257 -0
- data/test/tree_spec.rb +92 -0
- metadata +79 -0
@@ -0,0 +1,425 @@
|
|
1
|
+
#
|
2
|
+
# converted from the gitrb project
|
3
|
+
#
|
4
|
+
# authors:
|
5
|
+
# Matthias Lederhofer <matled@gmx.net>
|
6
|
+
# Simon 'corecode' Schubert <corecode@fs.ei.tum.de>
|
7
|
+
# Scott Chacon <schacon@gmail.com>
|
8
|
+
#
|
9
|
+
# provides native ruby access to git objects and pack files
|
10
|
+
#
|
11
|
+
|
12
|
+
require 'zlib'
|
13
|
+
|
14
|
+
class GitStore
|
15
|
+
# Magic bytes identifying a pack file and a version-2 pack index.
PACK_SIGNATURE     = "PACK"
PACK_IDX_SIGNATURE = "\377tOc"

# Numeric object-type codes used in pack entry headers.
OBJ_NONE   = 0
OBJ_COMMIT = 1
OBJ_TREE   = 2
OBJ_BLOB   = 3
OBJ_TAG    = 4

# Maps a type code to its git object type name.
OBJ_TYPES = [nil, 'commit', 'tree', 'blob', 'tag'].freeze
|
25
|
+
|
26
|
+
# Minimal substitute for a memory map: serves byte and substring reads
# from an underlying IO, remembering the current offset to avoid
# redundant seeks. An optional global offset transparently skips the
# 8 byte header of a version-2 pack index.
class Mmap
  def initialize(file, version = 1)
    @file = file
    @offset = nil
    # Version-2 indexes carry an 8 byte header (signature + version)
    # before the fan-out table.
    if version == 2
      @global_offset = 8
    else
      @global_offset = 0
    end
  end

  # Drop the reference to the underlying IO.
  def unmap
    @file = nil
  end

  # Array-like read access:
  #   m[offset]          -> single byte value (Integer)
  #   m[range]           -> String covering the range
  #   m[offset, length]  -> String of +length+ bytes
  def [](*idx)
    idx = idx[0] if idx.length == 1
    case idx
    when Range
      offset = idx.first
      # BUG FIX: the ternary must be parenthesized — `a + b ? x : y`
      # parses as `(a + b) ? x : y`, which raised a TypeError here.
      len = idx.last - idx.first + (idx.exclude_end? ? 0 : 1)
    when Integer  # was `Fixnum`, which is removed in Ruby 3.2+
      offset = idx
      len = nil
    when Array
      offset, len = idx
    else
      raise RuntimeError, "invalid index param: #{idx.class}"
    end
    if @offset != offset
      @file.seek(offset + @global_offset)
    end
    # BUG FIX: same precedence problem as above — the intent is to
    # advance past the bytes about to be read.
    @offset = offset + (len ? len : 1)
    if not len
      # BUG FIX: String#[] returns a 1-char String on Ruby 1.9+;
      # getbyte returns the Integer byte value callers expect.
      @file.read(1).getbyte(0)
    else
      @file.read(len)
    end
  end
end
|
66
|
+
|
67
|
+
# Raised when a pack or index file is corrupt or has an unsupported
# layout.
PackFormatError = Class.new(StandardError)
|
69
|
+
|
70
|
+
class PackStorage
|
71
|
+
# Delta entry type codes (extending the plain OBJ_* object types).
OBJ_OFS_DELTA = 6
OBJ_REF_DELTA = 7

# Index layout constants. A version-1 index is a 256-entry fan-out
# table of 4 byte counts followed by (offset, sha1) records; version-2
# stores sha1, CRC and offset in separate consecutive tables.
FanOutCount   = 256
SHA1Size      = 20
IdxOffsetSize = 4
OffsetSize    = 4
CrcSize       = 4
OffsetStart   = FanOutCount * IdxOffsetSize
SHA1Start     = OffsetStart + OffsetSize
EntrySize     = OffsetSize + SHA1Size
EntrySizeV2   = SHA1Size + CrcSize + OffsetSize
|
83
|
+
|
84
|
+
# Accepts the path of either the .pack or the .idx file and
# normalizes it to the pack path, then reads the index fan-out table.
def initialize(file)
  file = file[0...-3] + 'pack' if file =~ /\.idx$/
  @name  = file
  @cache = {}
  init_pack
end
|
92
|
+
|
93
|
+
# Open the index file (derived from the pack name unless one is
# given), read and verify its header, and yield a Mmap view of it.
#
# BUG FIX: the file handle used to leak when the block raised; the
# close now happens in an ensure. Also removed a dead
# `index_file = @name` assignment that was never used.
def with_idx(index_file = nil)
  idxfile = File.open(index_file || @name[0...-4] + 'idx')
  begin
    # read header
    sig = idxfile.read(4)
    ver = idxfile.read(4).unpack("N")[0]

    if sig == PACK_IDX_SIGNATURE
      if ver != 2
        raise PackFormatError, "pack #@name has unknown pack file version #{ver}"
      end
      @version = 2
    else
      # Version-1 indexes have no signature; the first 8 bytes are
      # fan-out data, which Mmap re-reads from offset 0.
      @version = 1
    end

    idx = Mmap.new(idxfile, @version)
    yield idx
    idx.unmap
  ensure
    idxfile.close
  end
end
|
119
|
+
|
120
|
+
# Open the pack file, yield it, and return the block's result.
# BUG FIX: File.open with a block guarantees the handle is closed
# even when the block raises (the old code leaked it on error).
def with_packfile(&block)
  File.open(@name, &block)
end
|
127
|
+
|
128
|
+
# Pre-load every commit and tree object of this pack into the
# in-memory cache (the :caching flag makes unpack_object skip blobs
# and tags by returning false data).
def cache_objects
  @cache = {}
  with_packfile do |packfile|
    each_entry do |sha, offset|
      data, type = unpack_object(packfile, offset, :caching => true)
      @cache[sha] = [type, data] if data
    end
  end
end
|
139
|
+
|
140
|
+
def name
|
141
|
+
@name
|
142
|
+
end
|
143
|
+
|
144
|
+
# No-op: files are opened and closed per operation, so nothing is
# held open to release here.
def close
end
|
147
|
+
|
148
|
+
# Given an index file, list the hex sha1 strings of every object its
# packfile contains.
def get_shas
  [].tap do |shas|
    each_sha1 { |sha| shas << sha.unpack("H*")[0] }
  end
end
|
154
|
+
|
155
|
+
# Look up an object by binary sha1. Returns the unpacked
# [data, type] pair, or nil when the pack does not contain the
# object. Results are memoized in @cache.
def [](sha1)
  cached = @cache[sha1]
  return cached if cached

  offset = find_object(sha1)
  return nil unless offset
  @cache[sha1] = parse_object(offset)
end
|
165
|
+
|
166
|
+
# Read the 256-entry fan-out table from the index into the cumulative
# @offsets array; its final entry is the total object count (@size).
def init_pack
  with_idx do |idx|
    @offsets = [0]
    FanOutCount.times do |i|
      pos = idx[i * IdxOffsetSize, IdxOffsetSize].unpack('N')[0]
      # Fan-out counts must be monotonically non-decreasing.
      raise PackFormatError, "pack #@name has discontinuous index #{i}" if pos < @offsets[i]
      @offsets << pos
    end
    @size = @offsets[-1]
  end
end
|
179
|
+
|
180
|
+
# Yield [binary_sha1, pack_offset] for every object recorded in the
# index, handling both index versions.
def each_entry
  with_idx do |idx|
    if @version == 2
      read_data_v2(idx).each { |sha1, _crc, offset| yield sha1, offset }
    else
      pos = OffsetStart
      @size.times do
        offset = idx[pos, OffsetSize].unpack('N')[0]
        sha1 = idx[pos + OffsetSize, SHA1Size]
        pos += EntrySize
        yield sha1, offset
      end
    end
  end
end
|
198
|
+
|
199
|
+
# Parse the three consecutive tables of a version-2 index (sha1s,
# then CRCs, then offsets) into an array of [sha1, crc, offset]
# triples. CRCs are kept as raw bytes; offsets are unpacked.
def read_data_v2(idx)
  data = Array.new(@size) { [nil, 0, 0] }
  pos = OffsetStart
  @size.times do |i|
    data[i][0] = idx[pos, SHA1Size]
    pos += SHA1Size
  end
  @size.times do |i|
    data[i][1] = idx[pos, CrcSize]
    pos += CrcSize
  end
  @size.times do |i|
    data[i][2] = idx[pos, OffsetSize].unpack('N')[0]
    pos += OffsetSize
  end
  data
end
private :read_data_v2
|
219
|
+
|
220
|
+
# Yield the binary sha1 of every object recorded in the index.
def each_sha1
  with_idx do |idx|
    if @version == 2
      read_data_v2(idx).each { |sha1, _crc, _offset| yield sha1 }
    else
      pos = SHA1Start
      @size.times do
        yield idx[pos, SHA1Size]
        pos += EntrySize
      end
    end
  end
end
|
237
|
+
|
238
|
+
# Binary-search the index for a binary sha1. The fan-out table
# (@offsets) narrows the search to entries whose first byte matches.
# Returns the pack file offset of the object, or nil when absent.
def find_object_in_index(idx, sha1)
  # BUG FIX: String#[] returns a 1-char String on Ruby 1.9+, which
  # broke the fan-out slot lookup; getbyte yields the Integer byte
  # (and nil for an empty string, preserving the guard below).
  slot = sha1.getbyte(0)
  return nil if !slot
  first, last = @offsets[slot, 2]
  while first < last
    mid = (first + last) / 2
    if @version == 2
      midsha1 = idx[OffsetStart + (mid * SHA1Size), SHA1Size]
      cmp = midsha1 <=> sha1

      if cmp < 0
        first = mid + 1
      elsif cmp > 0
        last = mid
      else
        # v2: offsets live in a separate table after the sha1 and
        # CRC tables.
        pos = OffsetStart + (@size * (SHA1Size + CrcSize)) + (mid * OffsetSize)
        offset = idx[pos, OffsetSize].unpack('N')[0]
        return offset
      end
    else
      midsha1 = idx[SHA1Start + mid * EntrySize, SHA1Size]
      cmp = midsha1 <=> sha1

      if cmp < 0
        first = mid + 1
      elsif cmp > 0
        last = mid
      else
        pos = OffsetStart + mid * EntrySize
        offset = idx[pos, OffsetSize].unpack('N')[0]
        return offset
      end
    end
  end
  nil
end
|
274
|
+
|
275
|
+
# Resolve a binary sha1 to its pack file offset via the index;
# returns nil when the pack does not contain the object.
def find_object(sha1)
  result = nil
  with_idx { |idx| result = find_object_in_index(idx, sha1) }
  result
end
private :find_object
|
283
|
+
|
284
|
+
# Unpack the object stored at +offset+ and return [data, type_name],
# mapping the numeric pack type to its git object name.
def parse_object(offset)
  data, type = with_packfile { |packfile| unpack_object(packfile, offset) }
  [data, OBJ_TYPES[type]]
end
|
291
|
+
|
292
|
+
# Read the variable-length entry header at +offset+ (type in bits
# 4-6 of the first byte, size in 7-bit little-endian groups) and
# inflate the entry. Returns [data, numeric_type]; with
# options[:caching] it short-circuits to [false, false] for anything
# but commits and trees.
def unpack_object(packfile, offset, options = {})
  obj_offset = offset
  packfile.seek(offset)

  # BUG FIX: read(1)[0] is a 1-char String on Ruby 1.9+; IO#getbyte
  # returns the Integer the bit arithmetic below requires.
  c = packfile.getbyte
  size = c & 0xf
  type = (c >> 4) & 7
  shift = 4
  offset += 1
  # Size continuation bytes: 7 payload bits each, high bit = "more".
  while c & 0x80 != 0
    c = packfile.getbyte
    size |= ((c & 0x7f) << shift)
    shift += 7
    offset += 1
  end

  return [false, false] if !(type == OBJ_COMMIT || type == OBJ_TREE) && options[:caching]

  case type
  when OBJ_OFS_DELTA, OBJ_REF_DELTA
    data, type = unpack_deltified(packfile, type, offset, obj_offset, size, options)
  when OBJ_COMMIT, OBJ_TREE, OBJ_BLOB, OBJ_TAG
    data = unpack_compressed(offset, size)
  else
    raise PackFormatError, "invalid type #{type}"
  end
  [data, type]
end
private :unpack_object
|
322
|
+
|
323
|
+
# Resolve a deltified entry: locate its base object (by relative
# offset for OBJ_OFS_DELTA, by sha1 for OBJ_REF_DELTA), unpack the
# base, then apply the delta to it.
def unpack_deltified(packfile, type, offset, obj_offset, size, options = {})
  packfile.seek(offset)
  data = packfile.read(SHA1Size)

  if type == OBJ_OFS_DELTA
    # Variable-length negative offset, 7 bits per byte, high bit =
    # continuation, with the +1 bias git uses for ofs-deltas.
    # BUG FIX: data[i] is a 1-char String on Ruby 1.9+; getbyte
    # gives the Integer byte value the arithmetic needs.
    i = 0
    c = data.getbyte(i)
    base_offset = c & 0x7f
    while c & 0x80 != 0
      c = data.getbyte(i += 1)
      base_offset += 1
      base_offset <<= 7
      base_offset |= c & 0x7f
    end
    base_offset = obj_offset - base_offset
    offset += i + 1
  else
    # ref-delta: the 20 bytes are the base object's binary sha1.
    base_offset = find_object(data)
    offset += SHA1Size
  end

  base, type = unpack_object(packfile, base_offset)

  return [false, false] if !(type == OBJ_COMMIT || type == OBJ_TREE) && options[:caching]

  delta = unpack_compressed(offset, size)
  [patch_delta(base, delta), type]
end
private :unpack_deltified
|
352
|
+
|
353
|
+
# Inflate exactly +destsize+ bytes of zlib data starting at +offset+
# in the pack file. Raises PackFormatError when the stream is
# truncated or longer than expected.
def unpack_compressed(offset, destsize)
  outdata = ""
  with_packfile do |packfile|
    packfile.seek(offset)
    zstr = Zlib::Inflate.new
    begin
      while outdata.size < destsize
        indata = packfile.read(4096)
        # BUG FIX: IO#read returns nil at EOF, which previously
        # crashed with NoMethodError instead of reporting the
        # corrupt pack.
        if indata.nil? || indata.size == 0
          raise PackFormatError, 'error reading pack data'
        end
        outdata += zstr.inflate(indata)
      end
      if outdata.size > destsize
        raise PackFormatError, 'error reading pack data'
      end
    ensure
      # BUG FIX: release the zlib stream even when an error is raised.
      zstr.close
    end
  end
  outdata
end
private :unpack_compressed
|
373
|
+
|
374
|
+
# Apply a git binary delta to +base+, returning the reconstructed
# object. The delta starts with the expected source and target sizes,
# followed by copy opcodes (high bit set) and insert opcodes.
def patch_delta(base, delta)
  src_size, pos = patch_delta_header_size(delta, 0)
  if src_size != base.size
    raise PackFormatError, 'invalid delta data'
  end

  dest_size, pos = patch_delta_header_size(delta, pos)
  dest = ""
  while pos < delta.size
    # BUG FIX: delta[pos] is a 1-char String on Ruby 1.9+; getbyte
    # returns the Integer opcode byte required by the bit tests.
    c = delta.getbyte(pos)
    pos += 1
    if c & 0x80 != 0
      # Copy opcode: bits 0-3 select offset bytes, bits 4-6 select
      # size bytes, each present byte read little-endian.
      pos -= 1
      cp_off = cp_size = 0
      cp_off = delta.getbyte(pos += 1) if c & 0x01 != 0
      cp_off |= delta.getbyte(pos += 1) << 8 if c & 0x02 != 0
      cp_off |= delta.getbyte(pos += 1) << 16 if c & 0x04 != 0
      cp_off |= delta.getbyte(pos += 1) << 24 if c & 0x08 != 0
      cp_size = delta.getbyte(pos += 1) if c & 0x10 != 0
      cp_size |= delta.getbyte(pos += 1) << 8 if c & 0x20 != 0
      cp_size |= delta.getbyte(pos += 1) << 16 if c & 0x40 != 0
      # A zero size encodes the maximum copy length of 0x10000.
      cp_size = 0x10000 if cp_size == 0
      pos += 1
      dest += base[cp_off, cp_size]
    elsif c != 0
      # Insert opcode: the next +c+ bytes are literal data.
      dest += delta[pos, c]
      pos += c
    else
      raise PackFormatError, 'invalid delta data'
    end
  end
  dest
end
private :patch_delta
|
408
|
+
|
409
|
+
# Decode a little-endian variable-length size at +pos+ in +delta+
# (7 payload bits per byte, high bit = continuation).
# Returns [size, position_after_header].
def patch_delta_header_size(delta, pos)
  size = 0
  shift = 0
  begin
    # BUG FIX: String#[] returns a 1-char String on Ruby 1.9+;
    # getbyte returns the Integer byte, and nil past the end — which
    # the truncation check below relies on.
    c = delta.getbyte(pos)
    if c == nil
      raise PackFormatError, 'invalid delta header'
    end
    pos += 1
    size |= (c & 0x7f) << shift
    shift += 7
  end while c & 0x80 != 0
  [size, pos]
end
private :patch_delta_header_size
|
424
|
+
end
|
425
|
+
end
|
@@ -0,0 +1,40 @@
|
|
1
|
+
class GitStore

  # A git tag object: points at another stored object and carries a
  # type, tagger and message.
  class Tag
    attr_accessor :store, :id, :object, :type, :tagger, :message

    def initialize(store, id = nil, data = nil)
      @store = store
      @id = id

      parse(data) if data
    end

    # Two tags are equal when both are tags with the same object id.
    def ==(other)
      other.is_a?(Tag) && id == other.id
    end

    # Populate this tag from the raw object payload: a block of
    # "key value" header lines separated from the message by a blank
    # line.
    def parse(data)
      headers, @message = data.split(/\n\n/, 2)

      headers.split(/\n/).each do |header|
        key, value = header.split(/ /, 2)
        case key
        when 'type'   then @type   = value
        when 'object' then @object = store.get(value)
        when 'tagger' then @tagger = User.parse(value)
        end
      end

      self
    end

  end

end
|
@@ -0,0 +1,183 @@
|
|
1
|
+
class GitStore

  # In-memory representation of a git tree: a table mapping entry
  # names to Blob or Tree children, written back lazily to the store.
  class Tree
    include Enumerable

    attr_reader :store, :table
    attr_accessor :id, :data, :mode

    # Initialize a tree
    def initialize(store, id = nil, data = nil)
      @store = store
      @id = id
      @table = {}
      @mode = "040000"
      parse(data) if data
    end

    def ==(other)
      Tree === other and id == other.id
    end

    # Has this tree been modified?
    def modified?
      @modified or @table.values.any? { |entry| Tree === entry and entry.modified? }
    end

    # Find or create a subtree with specified name.
    def tree(name)
      get(name) or put(name, Tree.new(store))
    end

    # Read the contents of a raw git object: a sequence of
    # "<mode> <name>\0<20-byte binary sha1>" records.
    def parse(data)
      @table.clear

      while data.size > 0
        mode, data = data.split(" ", 2)
        name, data = data.split("\0", 2)
        id = data.slice!(0, 20).unpack("H*").first

        @table[name] = store.get(id)
      end
    end

    # Serialize the table into the raw git tree format.
    # BUG FIX: git requires tree entries to be sorted by name; dump
    # now sorts them (as #each already did), so written trees match
    # the on-disk format git produces.
    def dump
      @table.sort.map { |k, v| "#{ v.mode } #{ k }\0#{ [v.write].pack("H*") }" }.join
    end

    # Write this tree back to the git repository.
    #
    # Returns the object id of the tree.
    def write
      return id if not modified?
      @modified = false
      @id = store.put(self)
    end

    # Read entry with specified name. Blobs are deserialized lazily
    # via the handler registered for the name; subtrees are returned
    # as-is; missing names yield nil.
    def get(name)
      entry = @table[name]

      case entry
      when Blob
        entry.object ||= handler_for(name).read(entry.data)

      when Tree
        entry
      end
    end

    def handler_for(name)
      store.handler_for(name)
    end

    # Write entry with specified name. Non-tree values are serialized
    # through the name's handler into a fresh Blob.
    def put(name, value)
      @modified = true

      if value.is_a?(Tree)
        @table[name] = value
      else
        @table[name] = Blob.new(store, nil, handler_for(name).write(value))
      end

      value
    end

    # Remove entry with specified name.
    # NOTE(review): keys are normalized with to_s here and in
    # has_key?, but not in put/get — confirm callers always pass
    # string names.
    def remove(name)
      @modified = true
      @table.delete(name.to_s)
    end

    # Does this key exist in the table?
    def has_key?(name)
      @table.has_key?(name.to_s)
    end

    # Split a slash-separated path (optional leading slash) into its
    # segments.
    def normalize_path(path)
      (path[0, 1] == '/' ? path[1..-1] : path).split('/')
    end

    # Read a value on specified path.
    def [](path)
      normalize_path(path).inject(self) do |tree, key|
        tree.get(key) or return nil
      end
    end

    # Write a value on specified path, creating intermediate subtrees
    # as needed.
    def []=(path, value)
      list = normalize_path(path)
      tree = list[0..-2].to_a.inject(self) { |tree, name| tree.tree(name) }
      tree.put(list.last, value)
    end

    # Delete a value on specified path.
    def delete(path)
      list = normalize_path(path)

      tree = list[0..-2].to_a.inject(self) do |tree, key|
        tree.get(key) or return
      end

      tree.remove(list.last)
    end

    # Iterate over all objects found in this subtree, yielding
    # [joined path, deserialized object] pairs in sorted name order.
    def each(path = [], &block)
      @table.sort.each do |name, entry|
        child_path = path + [name]
        case entry
        when Blob
          entry.object ||= handler_for(name).read(entry.data)
          yield child_path.join("/"), entry.object

        when Tree
          entry.each(child_path, &block)
        end
      end
    end

    # Like #each, but yields the raw Blob entries instead of their
    # deserialized objects.
    def each_blob(path = [], &block)
      @table.sort.each do |name, entry|
        child_path = path + [name]

        case entry
        when Blob
          yield child_path.join("/"), entry

        when Tree
          entry.each_blob(child_path, &block)
        end
      end
    end

    # All paths contained in this subtree.
    def paths
      map { |path, data| path }
    end

    # All deserialized values contained in this subtree.
    def values
      map { |path, data| data }
    end

    # Convert this tree into a hash object.
    def to_hash
      @table.inject({}) do |hash, (name, entry)|
        if entry.is_a?(Tree)
          hash[name] = entry.to_hash
        else
          hash[name] = entry.object ||= handler_for(name).read(entry.data)
        end
        hash
      end
    end

    def inspect
      "#<GitStore::Tree #{id} #{mode} #{to_hash.inspect}>"
    end

  end

end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
class GitStore

  # Author/committer identity: name, email and timestamp, matching
  # git's "Name <email> <epoch> <zone>" format.
  class User
    attr_accessor :name, :email, :time

    def initialize(name, email, time)
      @name, @email, @time = name, email, time
    end

    # Serialize in git's identity format, e.g.
    # "Ada <ada@example.com> 1234567890 +0100". The zone comes from
    # the Time object's own offset.
    def dump
      "#{ name } <#{email}> #{ time.to_i } #{ time.strftime('%z') }"
    end

    # Build a User for the current moment from the local git
    # configuration (shells out to `git config`).
    def self.from_config
      name = IO.popen("git config user.name") { |io| io.gets.chomp }
      email = IO.popen("git config user.email") { |io| io.gets.chomp }

      new name, email, Time.now
    end

    # Parse a git identity string; returns nil when it doesn't match.
    #
    # BUG FIX: the epoch seconds already identify the absolute
    # instant — the zone field is display-only. The old code added
    # the zone to the epoch (and mis-read "+0130" as 130 hours), so
    # parse(dump(u)) shifted the time. The instant is now preserved
    # exactly.
    def self.parse(user)
      if match = user.match(/(.*)<(.*)> (\d+) ([+-]\d+)/)
        new match[1].strip, match[2].strip, Time.at(match[3].to_i)
      end
    end

  end

end
|