gitroom 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/bin/gitfs +38 -0
- data/bin/gitserve +5 -0
- data/lib/gitroom.rb +5 -0
- data/lib/gitroom/admin.rb +33 -0
- data/lib/gitroom/concerns/node_cached.rb +108 -0
- data/lib/gitroom/connect.rb +23 -0
- data/lib/gitroom/models/binary_file.rb +6 -0
- data/lib/gitroom/models/chunk.rb +6 -0
- data/lib/gitroom/models/log.rb +4 -0
- data/lib/gitroom/models/node.rb +194 -0
- data/lib/gitroom/mounter.rb +232 -0
- data/lib/gitroom/server.rb +32 -0
- metadata +64 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA512:
+  metadata.gz: 9b70a187ec96be19efadab5ec788a0b12695cf2e3b909fa00cb652a8967919f0033839a95391bb578a26171b73cceca5152867ac246b99edad2fa82bdb77109a
+  data.tar.gz: f4c16482a278339f43310ceb14d6b279d8aa75918985bc51e09d6257c15d7f0f4353a09459d766d0a2d1e3ebfd06eb823db46b018d1c9b204ccbff992d71376a
+SHA1:
+  metadata.gz: 8a10e96a237e14edb7fec3a971c7fbbadb7cb112
+  data.tar.gz: aaaf090b281e4875d6beec32b4c7ea0d6e9d79fc
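
These digests can be checked against a downloaded copy of the gem. A hedged sketch (a .gem file is a plain tar archive whose members include metadata.gz and data.tar.gz; the extraction step is an assumption about your environment):

require 'digest'

system 'tar -xf gitroom-0.0.1.gem metadata.gz data.tar.gz'
puts Digest::SHA512.file('metadata.gz').hexdigest   # should match the SHA512 metadata.gz value above
puts Digest::SHA1.file('data.tar.gz').hexdigest     # should match the SHA1 data.tar.gz value above
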
data/bin/gitfs
ADDED
@@ -0,0 +1,38 @@
+#!/usr/bin/env ruby -rubygems
+
+require 'gitroom/connect'
+connect
+
+require 'gitroom/mounter'
+require 'rfuse'
+
+if ARGV.length == 0
+  print "\n"
+  print "Usage: [ruby [--debug]] #{$0} mountpoint [mount_options...]\n"
+  print "\n"
+  print " mountpoint must be an existing directory\n"
+  print " mount_option '-h' will list supported options\n"
+  print "\n"
+  print " For verbose debugging output use --debug to ruby\n"
+  print " and '-odebug' as mount_option\n"
+  print "\n"
+  exit(1)
+end
+
+building = GitRoom::Mounter.new
+
+fo = RFuse::FuseDelegator.new building, *ARGV
+
+if fo.mounted?
+  Signal.trap("TERM") { print "Caught TERM\n" ; fo.exit }
+  Signal.trap("INT") { print "Caught INT\n"; fo.exit }
+
+  begin
+    fo.loop
+  rescue
+    print "Error: #{$!}\n"
+  ensure
+    fo.unmount if fo.mounted?
+    print "Unmounted #{ARGV[0]}\n"
+  end
+end
data/bin/gitserve
ADDED
data/lib/gitroom.rb
ADDED
data/lib/gitroom/admin.rb
ADDED
@@ -0,0 +1,33 @@
+require 'gitroom/connect'
+connect
+
+require 'gitroom/models/node'
+require 'gitroom/models/log'
+
+module GitRoom
+  class Admin
+    attr_accessor :list
+
+    def find(regex_string = '')
+      @list = Node.where 'path ~ ?', regex_string
+    end
+
+    def pluck
+      @list.pluck :path
+    end
+
+    def delete
+      @list.each { |x| x.destroy }
+    end
+
+    def logs
+      Log.pluck :data
+    end
+
+    def logs!
+      res = logs
+      Log.destroy_all
+      res
+    end
+  end
+end
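
A minimal usage sketch of GitRoom::Admin. Requiring it runs `connect`, so config/database.yml and config/redis.yml must be present in the working directory; the pattern below is a placeholder:

require 'gitroom/admin'

admin = GitRoom::Admin.new
admin.find '^/tmp/'   # regex match against Node#path (PostgreSQL `~` operator)
admin.pluck           # paths of the matched nodes
admin.delete          # destroys them; children and chunks go too via :dependent => :destroy
admin.logs!           # returns the Log data and clears the table
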
data/lib/gitroom/concerns/node_cached.rb
ADDED
@@ -0,0 +1,108 @@
+require 'gitroom/models/node'
+require 'yaml'
+
+class Integer
+  def minutes
+    self * 60
+  end
+end
+
+module NodeCached
+  CACHE_FILES = 16384
+  MAX_FILE_SIZE = 65536
+  EXPIRE_TIME = 1.minutes
+  READ_SCORE = 2
+  WRITE_SCORE = -7
+
+  # redis.expire key, seconds
+
+  def find_cached_node(path)
+    present = cached_node_present? path
+    if present.nil? || present == 'true'
+      node = Node.where(:path => path).first
+      if node && present.nil?
+        cached_node_present! path
+      elsif !node
+        cached_node_absent! path
+      end
+      node
+    end
+  end
+
+  def read_cached_node(node, size, offset)
+    path = node.path
+    cached_file_rank_change! path, READ_SCORE
+    rank = cached_file_rank path
+    content = cached_file_content path
+    if content
+      print "CACHE HIT :: #{path}\n"
+      res = content[offset...offset + size]
+      if rank < CACHE_FILES
+        cached_file_content_touch! path, EXPIRE_TIME
+      end
+    else
+      print "DATABASE HIT :: #{path}\n"
+      content = node.get_full_content
+      full_size = content.size
+      if rank < CACHE_FILES && full_size < MAX_FILE_SIZE
+        res = content[offset...offset + size]
+        cache_file! path, content
+        cached_file_content_touch! path, EXPIRE_TIME
+      else
+        res = node.get_content offset, size
+      end
+    end
+    res
+  end
+
+  def write_cached_node(node, buf, offset)
+    path = node.path
+    cached_file_rank_change! path, WRITE_SCORE
+    cached_file_clean! path
+    node.set_content offset, buf
+    # read_cached_node node, buf.size, offset
+  end
+
+  def cached_node_present!(path)
+    redis.set "cache::node::#{path}::present?", 'true'
+  end
+
+  def cached_node_absent!(path)
+    redis.set "cache::node::#{path}::present?", 'false'
+    redis.del "cache::file::#{path}::content"
+    redis.zrem "cache::files::ranks", path
+  end
+
+  def cached_node_present?(path)
+    redis.get "cache::node::#{path}::present?"
+  end
+
+  def cached_file_rank_change!(path, incr)
+    redis.zincrby "cache::files::ranks", incr, path
+  end
+
+  def cached_file_rank(path)
+    redis.zrevrank "cache::files::ranks", path
+  end
+
+  def cached_file_content(path)
+    content = redis.get "cache::file::#{path}::content"
+    content && YAML.load(content)
+  end
+
+  def cache_file!(path, content)
+    redis.set "cache::file::#{path}::content", content.to_yaml
+  end
+
+  def cached_file_content_expires(path)
+    redis.ttl "cache::file::#{path}::content"
+  end
+
+  def cached_file_content_touch!(path, time)
+    redis.expire "cache::file::#{path}::content", time
+  end
+
+  def cached_file_clean!(path)
+    redis.del "cache::file::#{path}::content"
+  end
+end
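
A minimal sketch of the caching flow, assuming `connect` has been run (so `redis` and the ActiveRecord connection exist) and that a Node row exists at the hypothetical path '/readme.txt' with content smaller than MAX_FILE_SIZE:

require 'gitroom/connect'
connect
require 'gitroom/concerns/node_cached'

class CacheProbe               # hypothetical helper, not part of the gem
  include NodeCached
end

probe = CacheProbe.new
node  = probe.find_cached_node('/readme.txt')  # sets cache::node::/readme.txt::present?
probe.read_cached_node(node, 64, 0)            # "DATABASE HIT": loads from chunks, caches for 60s
probe.read_cached_node(node, 64, 0)            # "CACHE HIT": served from cache::file::...::content
redis.ttl 'cache::file::/readme.txt::content'  # <= EXPIRE_TIME, refreshed on each cache hit
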
data/lib/gitroom/connect.rb
ADDED
@@ -0,0 +1,23 @@
+class Object
+  attr_accessor :redis_connection
+
+  def redis
+    Object.redis_connection
+  end
+end
+
+def connect
+  require 'yaml'
+  require 'active_record'
+  require 'redis'
+
+  env = ENV['ENVIRONMENT'] || 'development'
+
+  ar_connection = YAML.load_file('config/database.yml')[env]
+
+  ActiveRecord::Base.establish_connection ar_connection
+
+  red_connection = YAML.load_file('config/redis.yml')[env]
+
+  Object.redis_connection = Redis.new red_connection
+end
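
A minimal sketch of the configuration `connect` expects, relative to the process working directory. The adapter and option keys are assumptions for illustration (the `path ~ ?` query in admin.rb suggests PostgreSQL; the redis.yml keys are written as symbols so they line up with redis-rb's option names, which may need adjusting for your redis-rb version):

require 'fileutils'
FileUtils.mkdir_p 'config'

File.write 'config/database.yml', <<~YAML
  development:
    adapter: postgresql
    database: gitroom_development
YAML

File.write 'config/redis.yml', <<~YAML
  development:
    :host: localhost
    :port: 6379
YAML

require 'gitroom/connect'
connect          # ENVIRONMENT defaults to 'development'
redis.ping       # => "PONG" once both connections are up
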
data/lib/gitroom/models/node.rb
ADDED
@@ -0,0 +1,194 @@
+require 'active_record'
+require 'json'
+require 'gitroom/models/chunk'
+
+class Node < ActiveRecord::Base
+  CHUNK_SIZE = 16384
+
+  has_many :nodes, :dependent => :destroy
+  belongs_to :node
+
+  has_many :chunks, :dependent => :destroy
+
+  def stat
+    if directory
+      RFuse::Stat.directory mode, :uid => uid, :gid => gid, :atime => updated_at,
+        :mtime => updated_at, :size => 48
+    else
+      RFuse::Stat.file mode, :uid => uid, :gid => gid, :atime => updated_at,
+        :mtime => updated_at, :size => content_size
+    end
+  end
+
+  def getxattr(name)
+    JSON.parse(xattrs)[name]
+  end
+
+  def setxattr(name, value, flag)
+    json = JSON.parse(xattrs)
+    json[name] = value
+    xattrs = json.to_s
+  end
+
+  def removexattr(name)
+    json = JSON.parse(xattrs)
+    json.delete name
+    xattrs = json.to_s
+  end
+
+  def listxattr
+    JSON.parse(xattrs).keys
+  end
+
+  def get_chunk(number, offset = 0, right = CHUNK_SIZE - 1)
+    chunk = chunks.where(:chunk_number => number).first
+    chunk && chunk.content[offset..right] || ''
+  end
+
+  def get_chunk_cached(chunk, offset = 0, right = CHUNK_SIZE - 1)
+    chunk && chunk.content[offset..right] || ''
+  end
+
+  def set_chunk(number, buf, offset = 0)
+    chunk = chunks.where(:chunk_number => number).first
+    unless chunk
+      chunk = chunks.create! :chunk_number => number, :node_id => id, :content => ""
+    end
+    if chunk.content.size < offset+buf.size
+      chunk.content += "\0" * (offset + buf.size - chunk.content.size)
+    end
+    chunk.content[offset...offset+buf.size] = buf
+    rechunk = Chunk.find chunk.id
+    rechunk.content = chunk.content.to_s
+    rechunk.save!
+  end
+
+  def get_content(offset, size)
+    num_start = offset / CHUNK_SIZE
+    offset_start = offset % CHUNK_SIZE
+    num_end = (offset + size - 1) / CHUNK_SIZE
+    offset_end = (offset + size - 1) % CHUNK_SIZE
+    content = ''
+    cached_chunks = chunks.where('chunk_number >= ? and chunk_number <= ?', num_start, num_end).order('chunk_number asc')
+    # (num_start..num_end).to_a.each do |num|
+    cached_chunks.each do |chunk|
+      num = chunk.chunk_number
+      offset = num == num_start ? offset_start : 0
+      right = num == num_end ? offset_end : (CHUNK_SIZE - 1)
+      content += get_chunk_cached chunk, offset, right
+    end
+    content
+  end
+
+  def get_full_content
+    chunks.order('chunk_number asc').pluck(:content).join
+  end
+
+  def set_content(offset, buf)
+    size = buf.size
+    num_start = offset / CHUNK_SIZE
+    offset_start = offset % CHUNK_SIZE
+    num_end = (offset + size - 1) / CHUNK_SIZE
+    offset_end = (offset + size - 1) % CHUNK_SIZE
+    (num_start..num_end).to_a.each do |num|
+      offset = num == num_start ? offset_start : 0
+      data_offset = num == num_start ? 0 : ((num - num_start) * CHUNK_SIZE - offset_start)
+      data_right = num == num_end ? -1 : (data_offset + CHUNK_SIZE - 1)
+      set_chunk num, buf[data_offset..data_right], offset
+    end
+    buf.size
+  end
+
+  # def get_content(offset, size)
+  #   print 'A'
+  #   num_start = offset / CHUNK_SIZE
+  #   offset_start = offset % CHUNK_SIZE
+  #   num_end = (offset + size - 1) / CHUNK_SIZE
+  #   offset_end = (offset + size - 1) % CHUNK_SIZE
+  #   print 'B'
+  #   selected_chunks = chunks.where('chunk_number >= ? and chunk_number <= ?', num_start, num_end)
+  #   selected_chunks = selected_chunks.pluck(:content)
+  #   print 'C'
+  #   if num_start == num_end || selected_chunks.size == 1
+  #     print 'D0'
+  #     selected_chunks[0] = selected_chunks[offset_start..offset_end]
+  #   else
+  #     print 'D1'
+  #     selected_chunks[0] = selected_chunks[offset_start..-1]
+  #     selected_chunks[-1] = selected_chunks[0..offset_end] if selected_chunks.count == num_end - num_start + 1
+  #   end
+  #   print 'E'
+  #   res = selected_chunks.join
+  #   print "F: #{res}"
+  #   res
+  # end
+
+  # def set_content(offset, buf)
+  #   print "\nset_content #{offset}, #{buf.size}:: \n#{buf}\n"
+  #   Chunk.transaction do
+  #     size = buf.size
+  #     num_start = offset / CHUNK_SIZE
+  #     offset_start = offset % CHUNK_SIZE
+  #     num_end = (offset + size - 1) / CHUNK_SIZE
+  #     offset_end = (offset + size - 1) % CHUNK_SIZE
+  #     chunk_count = chunks.count
+  #     last_chunk = chunks.last
+  #     if chunk_count <= num_end
+  #       if last_chunk && last_chunk.content.size < CHUNK_SIZE
+  #         last_chunk.content += "\0" * (CHUNK_SIZE - last_chunk.content.size)
+  #         rechunk = Chunk.find last_chunk.id
+  #         rechunk.content = last_chunk.content.to_s
+  #         rechunk.save!
+  #       end
+  #       while chunk_count <= num_end
+  #         content = "\0" * (chunk_count == num_end ? offset_end + 1 : CHUNK_SIZE)
+  #         chunks.create! :chunk_number => chunk_count, :node_id => id, :content => content
+  #         chunk_count += 1
+  #       end
+  #     end
+  #     selected_chunks = chunks.order('chunk_number asc').where('chunk_number >= ? and chunk_number <= ?', num_start, num_end)
+  #     if num_start == num_end
+  #       if selected_chunks[0].content.size <= offset_end
+  #         selected_chunks[0].content += "\0" * (offset_end + 1 - selected_chunks[0].content.size)
+  #       end
+  #       selected_chunks[0].content[offset_start..offset_end] = buf
+  #     else
+  #       selected_chunks[0].content[offset_start..-1] = buf[0..CHUNK_SIZE - offset_start - 1]
+  #       selected_chunks[-1].content[0..offset_end] = buf[-1 - offset_end..-1]
+
+  #       selected_chunks[1...-1].each do |chunk|
+  #         num = chunk.chunk_number - num_start
+  #         chunk.content = buf[num*CHUNK_SIZE...(num+1)*CHUNK_SIZE]
+  #       end
+  #     end
+  #     print "\n"
+  #     selected_chunks.each do |chunk|
+  #       rechunk = Chunk.find chunk.id
+  #       rechunk.content = chunk.content.to_s
+  #       print "set_content #{chunk.chunk_number} :: #{chunk.content}\n"
+  #       rechunk.save!
+  #     end
+  #     print "\n"
+  #   end
+  # end
+
+  def content_size
+    if chunks.count == 0
+      0
+    else
+      (chunks.count - 1) * CHUNK_SIZE + chunks.order('chunk_number asc').last.content.size
+    end
+  end
+
+  def truncate_content(offset)
+    Chunk.transaction do
+      num_end = (offset) / CHUNK_SIZE
+      offset_end = (offset) % CHUNK_SIZE
+      selected_chunks = chunks.where('chunk_number > ?', num_end)
+      selected_chunks.each { |chunk| chunk.destroy }
+      last_chunk = chunks.last
+      last_chunk.content = last_chunk.content[0..offset_end]
+      last_chunk.save
+    end
+  end
+end
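
The chunk arithmetic used by get_content/set_content is easiest to follow with concrete numbers; a standalone worked example using the same formulas (no database involved):

CHUNK_SIZE = 16384                               # matches Node::CHUNK_SIZE
offset, size = 10_000, 10_000

num_start    = offset / CHUNK_SIZE               # => 0
offset_start = offset % CHUNK_SIZE               # => 10000
num_end      = (offset + size - 1) / CHUNK_SIZE  # => 1
offset_end   = (offset + size - 1) % CHUNK_SIZE  # => 3615

# So a 10000-byte read starting at offset 10000 touches chunk 0 at
# [10000..16383] (6384 bytes) and chunk 1 at [0..3615] (3616 bytes).
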
data/lib/gitroom/mounter.rb
ADDED
@@ -0,0 +1,232 @@
+require 'git'
+require 'yaml'
+require 'rfuse'
+require 'gitroom/models/node'
+require 'gitroom/models/log'
+require 'test/unit/assertions'
+require 'gitroom/concerns/node_cached'
+
+module GitRoom
+  class Mounter
+    include Test::Unit::Assertions
+    include NodeCached
+
+    def readdir(ctx, path, filler, offset, ffi)
+      find_node path
+      @node.nodes.each do |node|
+        filler.push node.name, node.stat, 0
+      end
+    end
+
+    def getattr(ctx, path)
+      find_node path
+      begin
+        @node.stat
+      rescue
+        print "Unexpected in stat #{path}"
+        enoent(path)
+      end
+    end
+
+    def mkdir(ctx, path, mode)
+      update_node path, mode
+      cached_node_present! path
+    end
+
+    def mknod(ctx, path, mode, major, minor)
+      update_node path, mode, ctx.uid, ctx.gid, false, ''
+      cached_node_present! path
+    end
+
+    def open(ctx, path, ffi)
+    end
+
+    def chmod(ctx, path, mode)
+      find_node path
+      @node.update_attributes! :mode => mode
+    end
+
+    def chown(ctx, path, uid, gid)
+      find_node path
+      @node.update_attributes! :uid => uid, :gid => gid
+    end
+
+    def truncate(ctx, path, offset)
+      find_node path
+      # @node.update_attributes! :content => @node.content[0..offset]
+      @node.truncate_content offset
+    end
+
+    def utime(ctx, path, actime, modtime)
+      find_node path
+      @node.update_attributes! :updated_at => Time.at([actime, modtime].max)
+    end
+
+    def unlink(ctx, path)
+      find_node path
+      @node.destroy
+      cached_node_absent! path
+    end
+
+    def rmdir(ctx, path)
+      find_node path
+      @node.destroy
+      cached_node_absent! path
+    end
+
+    # TODO:: Make symlink and link methods
+
+    # def symlink(ctx, path, as)
+    # end
+
+    # def link(ctx, path, as)
+    # end
+
+    def rename(ctx, path, as)
+      find_node path
+      move_node(@node, as)
+      cached_node_absent! path
+      cached_node_absent! as
+      cached_node_present! as
+    end
+
+    def read(ctx, path, size, offset, fi)
+      find_node path
+      eisdir(path) if @node.directory
+      res = ''
+      begin
+        res = read_cached_node @node, size, offset
+      rescue
+        print "Unexpected in read #{path}: #{$!}\n"
+        enoent(path)
+      end
+      res
+    end
+
+    def write(ctx, path, buf, offset, fi)
+      find_node path
+      eisdir(path) if @node.directory
+      begin
+        write_cached_node @node, buf, offset
+      rescue
+        print "Unexpected in write #{path}: #{$!}\n"
+        enoent(path)
+      end
+      buf.size
+    end
+
+    def setxattr(ctx, path, name, value, size, flags)
+      find_node path
+      @node.setxattr name, value, flags
+    end
+
+    def getxattr(ctx, path, name)
+      find_node path
+      @node.getxattr(name) || ''
+    end
+
+    def listxattr(ctx, path)
+      find_node path
+      @node.listxattr
+    end
+
+    def removexattr(ctx, path, name)
+      find_node path
+      @node.removexattr name
+    end
+
+
+    def statfs(ctx,path)
+      s = RFuse::StatVfs.new
+      s.f_bsize = 1024
+      s.f_frsize = 1024
+      s.f_blocks = 1000000
+      s.f_bfree = 500000
+      s.f_bavail = 990000
+      s.f_files = 10000
+      s.f_ffree = 9900
+      s.f_favail = 9900
+      s.f_fsid = 23423
+      s.f_flag = 0
+      s.f_namemax = 10000
+      return s
+    end
+
+    def ioctl(ctx, path, cmd, arg, ffi, flags, data)
+      # FT: I was not been able to test it.
+      print "*** IOCTL: command: ", cmd, "\n"
+    end
+
+    def poll(ctx, path, ffi, ph, reventsp)
+      print "*** POLL: ", path, "\n"
+      # This is how we notify the caller if something happens:
+      ph.notifyPoll
+      # when the GC harvests the object it calls fuse_pollhandle_destroy
+      # by itself.
+    end
+
+    def init(ctx,rfuseconninfo)
+      print "RFuse TestFS started\n"
+      print "init called\n"
+      print "proto_major:#{rfuseconninfo.proto_major}\n"
+    end
+
+    private
+
+    def find_node(path, ignore = false)
+      @node = nil
+      # @node = Node.where(:path => path).first
+      @node = find_cached_node(path)
+      enoent(path) if @node.nil? && !ignore
+      @node
+    end
+
+    def update_node(path, mode, uid = nil, gid = nil, directory = true, content = nil)
+      nodes = split_dirnode(path)
+      uid ||= nodes[:dirnode].uid
+      gid ||= nodes[:dirnode].gid
+      nodes[:node] ||= if directory
+        Node.create! :name => nodes[:name], :path => path, :directory => true,
+          :node_id => nodes[:dirnode].id, :mode => mode,
+          :uid => uid, :gid => gid
+      else
+        Node.create! :name => nodes[:name], :path => path, :directory => false,
+          :node_id => nodes[:dirnode].id, :mode => mode
+      end
+    end
+
+    def split_dirpath(path)
+      pathname = Pathname.new path
+      { :name => pathname.basename.to_s, :dirname => pathname.dirname.to_s }
+    end
+
+    def split_dirnode(path)
+      names = split_dirpath path
+      node = find_node path, true
+      dirnode = find_node names[:dirname]
+      enotdir(path) unless dirnode.directory
+      { :node => node, :dirnode => dirnode, :name => names[:name], :dirname => names[:dirname] }
+    end
+
+    def move_node(node, path)
+      nodes = split_dirnode(path)
+      nodes[:node] && nodes[:node].destroy
+      node.update_attributes! :path => path, :node_id => nodes[:dirnode].id, :name => nodes[:name]
+    end
+
+    def enoent(path = nil)
+      # print "Raising enoent #{path}\n"
+      raise Errno::ENOENT.new(path)
+    end
+
+    def enotdir(path = nil)
+      # print "Raising enotdir #{path}\n"
+      raise Errno::ENOTDIR.new(path)
+    end
+
+    def eisdir(path = nil)
+      # print "Raising eisdir #{path}\n"
+      raise Errno::EISDIR.new(path)
+    end
+  end
+end
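
A hedged sketch of what these callbacks translate to once the filesystem is mounted (for example via bin/gitfs at the placeholder mountpoint /mnt/gitroom, with a root directory node already present): writing a file through the mount should end up as a Node row plus Chunk rows, which can then be inspected directly.

File.write '/mnt/gitroom/hello.txt', 'hello world'   # mknod + write callbacks

require 'gitroom/connect'
connect
require 'gitroom/models/node'

node = Node.where(:path => '/hello.txt').first   # paths are relative to the mount root
node.content_size       # => 11
node.get_content(0, 5)  # => "hello"
node.chunks.count       # => 1, since 11 bytes fit in one CHUNK_SIZE chunk
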
data/lib/gitroom/server.rb
ADDED
@@ -0,0 +1,32 @@
+require 'git'
+require 'yaml'
+require 'fusefs'
+require 'gitroom/models/node'
+require 'gitroom/models/log'
+
+module GitRoom
+  class Server
+    def self.serve(user)
+      permissions = 'rw'
+      command = ENV['SSH_ORIGINAL_COMMAND']
+      abort unless user and permissions and command
+
+      STDERR.write ENV.to_hash.to_yaml
+
+      valid_actions = ['git-receive-pack', 'git-upload-pack']
+      action = command.split[0]
+      repo = command.split[1]
+      commands = command.split
+      commands[1] = "repos/#{repo}"
+      command = commands.join ' '
+      abort unless valid_actions.include? action
+
+      abort "read denied for #{user}" unless permissions =~ /r/
+      abort "write denied for #{user}" if action == 'git-receive-pack' and permissions !~ /w/
+
+      STDERR.write "user #{user} authorized\n"
+
+      Kernel.exec 'git', 'shell', '-c', command
+    end
+  end
+end
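
A hedged sketch of how GitRoom::Server.serve behaves when driven the way an SSH forced command would drive it (the gems it requires must be installed; the user name and repository are placeholders):

require 'gitroom/server'

ENV['SSH_ORIGINAL_COMMAND'] = "git-upload-pack 'wiki.git'"
GitRoom::Server.serve 'alice'
# writes the environment dump and "user alice authorized" to STDERR, then
# replaces the current process with: git shell -c "git-upload-pack repos/'wiki.git'"
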
metadata
ADDED
@@ -0,0 +1,64 @@
+--- !ruby/object:Gem::Specification
+name: gitroom
+version: !ruby/object:Gem::Version
+  version: 0.0.1
+platform: ruby
+authors:
+- Alexey Fedorov (waterlink)
+autorequire:
+bindir: bin
+cert_chain: []
+
+date: 2013-08-01 00:00:00 Z
+dependencies: []
+
+description: Simplest git server with permissions
+email: waterlink000@gmail.com
+executables:
+- gitserve
+- gitfs
+extensions: []
+
+extra_rdoc_files: []
+
+files:
+- lib/gitroom.rb
+- lib/gitroom/connect.rb
+- lib/gitroom/server.rb
+- lib/gitroom/mounter.rb
+- lib/gitroom/admin.rb
+- lib/gitroom/models/node.rb
+- lib/gitroom/models/log.rb
+- lib/gitroom/models/binary_file.rb
+- lib/gitroom/models/chunk.rb
+- lib/gitroom/concerns/node_cached.rb
+- bin/gitserve
+- bin/gitfs
+homepage: https://rubygems.org/gems/gitroom
+licenses:
+- MIT
+metadata: {}
+
+post_install_message:
+rdoc_options: []
+
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - &id001
+    - ">="
+    - !ruby/object:Gem::Version
+      version: "0"
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - *id001
+requirements: []
+
+rubyforge_project:
+rubygems_version: 2.0.6
+signing_key:
+specification_version: 4
+summary: Git Room!
+test_files: []
+