sonar_connector_filestore 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.document +5 -0
- data/.gitignore +23 -0
- data/LICENSE +20 -0
- data/README.rdoc +19 -0
- data/Rakefile +46 -0
- data/VERSION +1 -0
- data/lib/sonar_connector_filestore.rb +355 -0
- data/spec/sonar_connector_filestore_spec.rb +420 -0
- data/spec/spec.opts +1 -0
- data/spec/spec_helper.rb +14 -0
- metadata +109 -0
data/.document
ADDED
data/.gitignore
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,20 @@
|
|
1
|
+
Copyright (c) 2010 Trampoline Systems Ltd
|
2
|
+
|
3
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
4
|
+
a copy of this software and associated documentation files (the
|
5
|
+
"Software"), to deal in the Software without restriction, including
|
6
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
7
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
8
|
+
permit persons to whom the Software is furnished to do so, subject to
|
9
|
+
the following conditions:
|
10
|
+
|
11
|
+
The above copyright notice and this permission notice shall be
|
12
|
+
included in all copies or substantial portions of the Software.
|
13
|
+
|
14
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
15
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
16
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
17
|
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
18
|
+
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
19
|
+
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
20
|
+
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.rdoc
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
= sonar-connector-filestore
|
2
|
+
|
3
|
+
sonar-connector works with sets of files. it pulls some files from a server and stores them locally on disk. it takes those files and pushes them to a sonar server etc
|
4
|
+
|
5
|
+
sonar-connector-filestore provides some simple mechanisms for connectors to deal reliably with sets of files, establish ownership of files and transfer ownership between connectors
|
6
|
+
|
7
|
+
== Note on Patches/Pull Requests
|
8
|
+
|
9
|
+
* Fork the project.
|
10
|
+
* Make your feature addition or bug fix.
|
11
|
+
* Add tests for it. This is important so I don't break it in a
|
12
|
+
future version unintentionally.
|
13
|
+
* Commit, do not mess with rakefile, version, or history.
|
14
|
+
(if you want to have your own version, that is fine but bump version in a commit by itself I can ignore when I pull)
|
15
|
+
* Send me a pull request. Bonus points for topic branches.
|
16
|
+
|
17
|
+
== Copyright
|
18
|
+
|
19
|
+
Copyright (c) 2010 Trampoline Systems Ltd. See LICENSE for details.
|
data/Rakefile
ADDED
@@ -0,0 +1,46 @@
|
|
1
|
+
require 'rubygems'
|
2
|
+
require 'rake'
|
3
|
+
|
4
|
+
begin
|
5
|
+
require 'jeweler'
|
6
|
+
Jeweler::Tasks.new do |gem|
|
7
|
+
gem.name = "sonar_connector_filestore"
|
8
|
+
gem.summary = %Q{reliable staged processing with the filesystem}
|
9
|
+
gem.description = %Q{support for staged processing using a vanilla filesystem for storage}
|
10
|
+
gem.email = "craig@trampolinesystems.com"
|
11
|
+
gem.homepage = "http://github.com/trampoline/sonar-connector-filestore"
|
12
|
+
gem.authors = ["mccraigmccraig"]
|
13
|
+
gem.add_development_dependency "rspec", ">= 1.2.9"
|
14
|
+
gem.add_development_dependency "rr", ">= 0.10.11"
|
15
|
+
# gem is a Gem::Specification... see http://www.rubygems.org/read/chapter/20 for additional settings
|
16
|
+
end
|
17
|
+
Jeweler::GemcutterTasks.new
|
18
|
+
rescue LoadError
|
19
|
+
puts "Jeweler (or a dependency) not available. Install it with: gem install jeweler"
|
20
|
+
end
|
21
|
+
|
22
|
+
require 'spec/rake/spectask'
|
23
|
+
Spec::Rake::SpecTask.new(:spec) do |spec|
|
24
|
+
spec.libs << 'lib' << 'spec'
|
25
|
+
spec.spec_files = FileList['spec/**/*_spec.rb']
|
26
|
+
end
|
27
|
+
|
28
|
+
Spec::Rake::SpecTask.new(:rcov) do |spec|
|
29
|
+
spec.libs << 'lib' << 'spec'
|
30
|
+
spec.pattern = 'spec/**/*_spec.rb'
|
31
|
+
spec.rcov = true
|
32
|
+
end
|
33
|
+
|
34
|
+
task :spec => :check_dependencies
|
35
|
+
|
36
|
+
task :default => :spec
|
37
|
+
|
38
|
+
require 'rake/rdoctask'
|
39
|
+
Rake::RDocTask.new do |rdoc|
|
40
|
+
version = File.exist?('VERSION') ? File.read('VERSION') : ""
|
41
|
+
|
42
|
+
rdoc.rdoc_dir = 'rdoc'
|
43
|
+
rdoc.title = "sonar-connector-filestore #{version}"
|
44
|
+
rdoc.rdoc_files.include('README*')
|
45
|
+
rdoc.rdoc_files.include('lib/**/*.rb')
|
46
|
+
end
|
data/VERSION
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
0.2.0
|
@@ -0,0 +1,355 @@
|
|
1
|
+
require 'fileutils'
|
2
|
+
require 'uuidtools'
|
3
|
+
require 'logger'
|
4
|
+
require 'set'
|
5
|
+
|
6
|
+
module Sonar
|
7
|
+
module Connector
|
8
|
+
|
9
|
+
# a FileStore has an on-disk directory structure :
|
10
|
+
#
|
11
|
+
# - root, effectively a parent directory
|
12
|
+
# - name : the filestore directory name
|
13
|
+
# - areas : names of acceptable sub-directories in the FileStore directory
|
14
|
+
# so a filestore with (@root=="/foo", @name==:bar, @areas=[:area51, :area52])
|
15
|
+
# would have directories :
|
16
|
+
#
|
17
|
+
# /foo
|
18
|
+
# /foo/bar
|
19
|
+
# /foo/bar/area51
|
20
|
+
# /foo/bar/area52
|
21
|
+
class FileStore
|
22
|
+
class << self
  # Class-wide default logger, used when an instance has no
  # logger of its own (see the instance-level #logger method).
  attr_accessor :logger
end
FileStore.logger = Logger.new($stdout)
FileStore.logger.level = Logger::INFO

attr_reader :root   # parent directory containing the filestore
attr_reader :name   # filestore directory name
attr_reader :areas  # Set of area names (sub-directories of the store)
attr_writer :logger # optional per-instance logger override
|
33
|
+
|
34
|
+
# A filestore name must be a plain directory name: no path
# separators, and not a dot-file.
def self.valid_filestore_name?(f)
  s = f.to_s
  s == File.basename(s) && ordinary_directory_name?(s)
end
|
38
|
+
|
39
|
+
# An area name is valid as long as it does not collide with the
# reserved :tmp staging area (used internally by receive_flip).
def self.valid_area_name?(a)
  !a.to_s.eql?("tmp")
end
|
42
|
+
|
43
|
+
# Names whose basename begins with '.' (including '.' and '..')
# are not "ordinary" and are skipped by the traversal methods.
def self.ordinary_directory_name?(f)
  base = File.basename(f.to_s)
  base !~ /^\./
end
|
46
|
+
|
47
|
+
# True when f names an ordinary (non-dot) directory that actually
# exists on disk.
def self.ordinary_directory?(f)
  path = f.to_s
  ordinary_directory_name?(path) && File.directory?(path)
end
|
50
|
+
|
51
|
+
# Create a FileStore rooted at root/name with the given areas.
#
# root  - existing parent directory (raises when missing)
# name  - plain directory name for the store (validated)
# areas - symbol(s) naming the store's sub-directories; each is
#         validated ("tmp" is reserved) and created on disk
# opts  - :logger => per-instance logger (defaults to class logger)
def initialize(root, name, areas, opts={})
  unless File.directory?(root)
    raise "directory '#{root}' does not exist or is not a directory"
  end
  @root = root

  unless FileStore.valid_filestore_name?(name)
    raise "#{name} is not a valid filestore name"
  end
  @name = name
  FileUtils.mkdir_p(filestore_path)

  @areas = Set.new([*areas])
  @areas.each do |a|
    raise "#{a} is not a valid area name" unless FileStore.valid_area_name?(a)
  end
  @areas.each { |a| FileUtils.mkdir_p(area_path(a)) }

  @logger = opts[:logger]
end
|
65
|
+
|
66
|
+
# The per-instance logger when one was configured, otherwise the
# class-level default logger.
def logger
  @logger || FileStore.logger
end
|
69
|
+
|
70
|
+
# Delete the whole filestore directory tree from disk.
def destroy!
  FileUtils.rm_r(filestore_path)
end
|
73
|
+
|
74
|
+
# Path of the filestore directory: root/name.
def filestore_path
  File.join(root, name.to_s)
end
|
77
|
+
|
78
|
+
# Raise unless area is one of this store's areas. The internal
# :tmp staging area is always allowed.
def check_area(area)
  raise "no such area: #{area}" if !@areas.include?(area) && area!=:tmp
end
|
81
|
+
|
82
|
+
# Path of an area directory inside the filestore (validates area).
def area_path(area)
  check_area(area)
  File.join(filestore_path, area.to_s)
end
|
86
|
+
|
87
|
+
# Path of filename (possibly including sub-directories) inside an area.
def file_path(area, filename)
  File.join(area_path(area), filename)
end
|
90
|
+
|
91
|
+
# marker exception to tell process and process_batch to
|
92
|
+
# leave files in the source area
|
93
|
+
# Marker exception: raising this inside a process/process_batch
# block tells the filestore to leave the files in the source area
# rather than moving or deleting them.
class LeaveInSourceArea < RuntimeError
end
|
95
|
+
|
96
|
+
# process files from source_area. move it to error_area if the block
|
97
|
+
# raises an exception and to success_area if the block completes. if
|
98
|
+
# LeaveInSourceArea is raised, don't do anything with the files
|
99
|
+
# Process each file in source_area with the given block.
#
# Per file: when the block completes the file is moved to
# success_area (or deleted when none is given); when the block
# raises, the file is moved to error_area (or deleted when none is
# given) and the exception is re-raised, aborting the run. Raising
# LeaveInSourceArea keeps the current file where it is and aborts.
def process(source_area, error_area=nil, success_area=nil)
  raise "i need a block" if !block_given?

  area_files(source_area).each do |f|
    begin
      yield f
      success_area ? move(source_area, f, success_area) : delete(source_area, f)
    rescue LeaveInSourceArea=>e
      logger.info("leaving files in #{source_area}")
      raise
    rescue Exception=>e
      # NOTE: deliberately rescues Exception so that even severe
      # errors route the file out of the source area before re-raising.
      logger.warn(FileStore.to_s){[e.class.to_s, e.message, *e.backtrace].join("\n")}
      error_area ? move(source_area, f, error_area) : delete(source_area, f)
      raise
    end
  end
end
|
125
|
+
|
126
|
+
# process a batch of files from source_area. move them to error_area if
|
127
|
+
# the block raises and exception, and to success_area if the block completes,
|
128
|
+
# and leave where they are if LeaveInSourceArea is raised.
|
129
|
+
# returns the number of items processed, 0 if all work is done.
|
130
|
+
# Process up to batch_size files from source_area in a single
# block invocation.
#
# On success the whole batch is moved to success_area (or deleted
# when none is given); when the block raises, the batch is moved to
# error_area (or deleted) and the exception propagates. Raising
# LeaveInSourceArea aborts without touching the files.
# Returns the number of files processed; 0 means no work was left.
def process_batch(batch_size, source_area, error_area=nil, success_area=nil)
  raise "i need a block" if !block_given?

  batch = area_files(source_area, batch_size)
  return 0 if batch.size==0

  begin
    yield batch
    if success_area
      batch.each { |p| move(source_area, p, success_area) }
    else
      batch.each { |p| delete(source_area, p) }
    end
  rescue LeaveInSourceArea=>e
    logger.info("leaving files in #{source_area}")
    raise
  rescue Exception=>e
    # deliberately rescues Exception: even severe errors route the
    # batch out of the source area before re-raising
    logger.warn(FileStore.to_s){[e.class.to_s, e.message, *e.backtrace].join("\n")}
    if error_area
      batch.each { |p| move(source_area, p, error_area) }
    else
      batch.each { |p| delete(source_area, p) }
    end
    raise
  end

  batch.size
end
|
156
|
+
|
157
|
+
# fetch at most max regular file paths from an area
|
158
|
+
# Fetch at most max regular file paths from an area, expressed
# relative to the area's directory. With max nil, fetch all.
def area_files(area, max=nil)
  relative_file_paths(area_path(area), max)
end
|
161
|
+
|
162
|
+
# number of items in an area
|
163
|
+
# Number of top-level entries (files or directories) in an area.
def count(area)
  Dir[File.join(area_path(area), "*")].length
end
|
167
|
+
|
168
|
+
# hash of counts keyed by area
|
169
|
+
# Hash of per-area entry counts, keyed by area name.
def area_count
  @areas.inject({}) do |h, area|
    h.merge(area => count(area))
  end
end
|
172
|
+
|
173
|
+
# disk usage of an area in kb
|
174
|
+
# Approximate disk usage of an area in kb, as reported by du(1).
# (String#to_i stops at the first non-digit, so the path column in
# du's output is ignored either way.)
def size(area)
  du_output = `du -k #{area_path(area)}`
  du_output.gsub(/\W+tmp\W*$/m,'').to_i
end
|
178
|
+
|
179
|
+
# hash of sizes keyed by area
|
180
|
+
# Hash of per-area disk usage in kb, keyed by area name.
def area_size
  @areas.inject({}) do |h, area|
    h.merge(area => size(area))
  end
end
|
183
|
+
|
184
|
+
# iterate over all files in top level of an area, calling a block on each
|
185
|
+
# Yield the name of each regular file or ordinary directory at the
# top level of an area. Dot-entries ('.', '..', hidden dirs) are
# skipped.
def for_each(area)
  ap = area_path(area)
  Dir.foreach(ap) do |f|
    fp = File.join(ap, f)
    yield f if File.file?(fp) || FileStore.ordinary_directory?(fp)
  end
end
|
192
|
+
|
193
|
+
# write a file to an area
|
194
|
+
# Write content to filename (which may include sub-directories)
# inside an area, creating intermediate directories as needed.
def write(area, filename, content)
  ensure_area_directory(area, filename)
  target = file_path(area, filename)
  File.open(target, "w") { |io| io << content }
end
|
198
|
+
|
199
|
+
# read a file from an area
|
200
|
+
# Return the contents of filename within an area.
def read(area, filename)
  File.read(file_path(area, filename))
end
|
203
|
+
|
204
|
+
# remove a file from an area
|
205
|
+
# Remove filename (a file or a directory tree) from an area.
def delete(area, filename)
  target = file_path(area, filename)
  FileUtils.rm_r(target)
end
|
208
|
+
|
209
|
+
# move a file from one area to another
|
210
|
+
# Move filename from one area to another, preserving any relative
# sub-directory structure.
def move(from_area, filename, to_area)
  move_file(area_path(from_area), filename, area_path(to_area))
end
|
213
|
+
|
214
|
+
# remove any empty directories from an area
|
215
|
+
# Remove empty sub-directory trees from an area. The area
# directory itself is preserved.
def scrub!(area)
  scrub_path(area_path(area), false)
end
|
218
|
+
|
219
|
+
# flip files from an area into a sub-directory of an area
|
220
|
+
# in another
|
221
|
+
# filestore, named by the name of this filestore
|
222
|
+
# thus
|
223
|
+
# fs1.flip(:complete, fs2, :working ) moves
|
224
|
+
# fs1/complete/* => fs2/working/fs1/*
|
225
|
+
# if unique_names is false, then unique directories
|
226
|
+
# are constructued in the targetfs to flip to, otherwise
|
227
|
+
# identical names are assumed to be identical files
|
228
|
+
# and will overwrite already present files
|
229
|
+
# Move the contents of an area into to_area of another filestore:
#   fs1.flip(:complete, fs2, :working) moves fs1/complete/* across.
# When unique_names is true, identically named files are assumed
# identical and may overwrite at the destination; when false the
# receiver keeps the flipped files under uniquely named
# sub-directories so nothing collides.
def flip(area, filestore, to_area, unique_names=true)
  ap = area_path(area)

  scrub!(area) # only move what we need to

  # collect all moveable paths
  paths = []
  for_each(area) { |f| paths << File.join(ap, f) }

  filestore.receive_flip(name, to_area, paths, unique_names) if paths.length>0
end
|
241
|
+
|
242
|
+
# receive a flip... move all paths to be flipped
|
243
|
+
# into a temporary directory, and then move that
|
244
|
+
# directory into place in one atomic move operation
|
245
|
+
# Receive a flip from another filestore: first stage all given
# paths inside a uniquely named directory under the :tmp area, then
# move everything found under :tmp into to_area. Because the second
# phase sweeps the whole :tmp area, an interrupted receive_flip is
# recovered by the next call.
def receive_flip(from_filestore_name, to_area, paths, unique_names)
  tmp_area_path = area_path(:tmp)

  # stage incoming paths under a unique directory inside :tmp
  tmp_path = File.join(tmp_area_path, unique_name)
  if paths.length>0
    FileUtils.mkdir_p(tmp_path)
    paths.each { |path| FileUtils.mv(path, tmp_path) }
  end

  # move everything out of :tmp — including leftovers from any
  # previously interrupted receive_flip
  to_path = area_path(to_area)
  Dir.foreach(tmp_area_path) do |path|
    path_1 = File.join(tmp_area_path, path)
    if unique_names
      if FileStore.ordinary_directory?(path_1)
        # names are unique: move the staged contents, not the
        # uuid-named folders themselves
        Dir.foreach(path_1) do |file_path|
          path_2 = File.join(path_1, file_path)
          FileUtils.mv(path_2, to_path, :force=>true) if File.file?(path_2) || FileStore.ordinary_directory?(path_2)
        end
      elsif File.file?(path_1)
        # names are unique, so plain files can be moved as-is too
        FileUtils.mv(path_1, to_path, :force=>true)
      end
    else
      # keep the uuid-named dirs so the receiver sees unique names
      FileUtils.mv(path_1, to_path, :force=>true) if File.file?(path_1) || FileStore.ordinary_directory?(path_1)
    end
  end

  # finally remove any now-empty staging dirs
  scrub!(:tmp)
end
|
286
|
+
|
287
|
+
private
|
288
|
+
|
289
|
+
# Generate a unique String usable as a path component.
#
# Previously returned a UUIDTools::UUID object; callers (e.g.
# receive_flip) pass the result to File.join, which requires a
# string-like value, so return a stdlib SecureRandom uuid String
# instead. Uniqueness is all callers rely on.
def unique_name
  SecureRandom.uuid
end
|
292
|
+
|
293
|
+
# depth first search
|
294
|
+
# Depth-first removal of empty directory trees. When scrub is
# true, dir itself is removed if it is (recursively) empty; the
# top-level call (scrub!) passes false so the area directory
# survives. Returns truthy when dir was removed.
def scrub_path(dir, scrub)
  empty = scrub
  Dir.foreach(dir) do |entry|
    path = File.join(dir, entry)
    if File.directory?(path)
      # always descend first (no short-cut evaluation), so nested
      # empties are cleared even when dir is already known non-empty
      if FileStore.ordinary_directory_name?(entry)
        sub_empty = scrub_path(path, true)
        empty = sub_empty && empty
      end
    else
      empty = false
    end
  end
  FileUtils.rm_rf(dir) if empty
end
|
307
|
+
|
308
|
+
# fetch at most max relative regular file paths from a directory hierarchy
|
309
|
+
# rooted at dir
|
310
|
+
# Fetch at most max regular file paths from the directory hierarchy
# rooted at dir, expressed relative to dir.
def relative_file_paths(dir, max=nil)
  # Regexp.escape prevents regex metacharacters in dir (e.g. '+',
  # '(') from corrupting the prefix match; \A + sub strips exactly
  # one leading occurrence (the old /^.../ + gsub could also match
  # after embedded newlines in pathological filenames).
  prefix = /\A#{Regexp.escape(dir)}#{Regexp.escape(File::SEPARATOR)}/
  file_paths(dir, max).map{|p| p.sub(prefix, '')}
end
|
313
|
+
|
314
|
+
# fetch at most max regular file paths from a directory hierarchy
|
315
|
+
# rooted at dir
|
316
|
+
# Fetch at most max regular file paths from the directory hierarchy
# rooted at dir (depth-first). With max nil, fetch all.
def file_paths(dir, max=nil)
  paths = []
  Dir.foreach(dir) do |f|
    return paths if max && paths.size >= max
    path = File.join(dir, f)
    if File.directory?(path)
      if FileStore.ordinary_directory_name?(f)
        # pass the *remaining* budget down so recursion cannot
        # overshoot max (previously each level reused the full max,
        # so parent accumulation + child results could exceed it)
        remaining = max && (max - paths.size)
        paths += file_paths(path, remaining)
      end
    elsif File.file?(path)
      paths << path
    end
  end
  paths
end
|
329
|
+
|
330
|
+
# move a file named relative to filename dir
|
331
|
+
# to the same filename relative to to_dir
|
332
|
+
# Move filename (relative to from_dir) to the same relative
# location under to_dir, creating intermediate directories in the
# destination as needed.
def move_file(from_dir, filename, to_dir)
  src = File.join(from_dir, filename)
  dst = File.join(to_dir, filename)
  ensure_directory(to_dir, filename)
  FileUtils.mv(src, dst)
end
|
338
|
+
|
339
|
+
# ensure that the directory of a filename exists in the given area
|
340
|
+
# Ensure the directory portion of filename exists within area.
def ensure_area_directory(area, filename)
  ensure_directory(area_path(area), filename)
end
|
344
|
+
|
345
|
+
# given a directory, and a filename relative to it, ensure
|
346
|
+
# that the directory containing the actual file exists
|
347
|
+
# e.g. given dir==/a/b/c and filename==d/e/f.txt
|
348
|
+
# then ensure directory /a/b/c/d/e exists
|
349
|
+
# Given a directory and a filename relative to it, ensure the
# directory that will contain the actual file exists.
# e.g. dir==/a/b/c and filename==d/e/f.txt creates /a/b/c/d/e.
def ensure_directory(dir, filename)
  containing = File.join(dir, File.dirname(filename))
  FileUtils.mkdir_p(File.expand_path(containing))
end
|
353
|
+
end
|
354
|
+
end
|
355
|
+
end
|
@@ -0,0 +1,420 @@
|
|
1
|
+
require File.expand_path(File.dirname(__FILE__) + '/spec_helper')
|
2
|
+
require 'set'
|
3
|
+
require 'fileutils'
|
4
|
+
|
5
|
+
module Sonar
|
6
|
+
module Connector
|
7
|
+
describe "SonarConnectorFilestore" do
|
8
|
+
|
9
|
+
before(:all) do
|
10
|
+
FileStore::logger.level = Logger::FATAL
|
11
|
+
end
|
12
|
+
|
13
|
+
before(:each) do
|
14
|
+
FileUtils.rm_rf(TMP_DIR)
|
15
|
+
FileUtils.mkdir_p(TMP_DIR)
|
16
|
+
end
|
17
|
+
|
18
|
+
after(:each) do
|
19
|
+
end
|
20
|
+
|
21
|
+
def create_testfs(*areas)
|
22
|
+
areas = [:foo, :bar] if areas.empty?
|
23
|
+
FileStore.new(TMP_DIR, :testfs, areas)
|
24
|
+
end
|
25
|
+
|
26
|
+
describe "valid_area_name?" do
|
27
|
+
it "should not permit :tmp as an area name" do
|
28
|
+
FileStore.valid_area_name?(:tmp).should == false
|
29
|
+
end
|
30
|
+
|
31
|
+
it "should permit :foo as an area name" do
|
32
|
+
FileStore.valid_area_name?(:foo).should == true
|
33
|
+
end
|
34
|
+
end
|
35
|
+
|
36
|
+
describe "ordinary_directory_name?" do
|
37
|
+
it "should return false for .." do
|
38
|
+
FileStore.ordinary_directory_name?("..").should == false
|
39
|
+
end
|
40
|
+
|
41
|
+
it "should return false for ." do
|
42
|
+
FileStore.ordinary_directory_name?(".").should == false
|
43
|
+
end
|
44
|
+
|
45
|
+
it "should return false for .foo" do
|
46
|
+
FileStore.ordinary_directory_name?(".foo").should == false
|
47
|
+
end
|
48
|
+
|
49
|
+
it "should return true for foo" do
|
50
|
+
FileStore.ordinary_directory_name?("foo").should == true
|
51
|
+
end
|
52
|
+
|
53
|
+
it "should return false for foo/." do
|
54
|
+
FileStore.ordinary_directory_name?("foo/.").should == false
|
55
|
+
end
|
56
|
+
|
57
|
+
it "should return false for foo/.foo" do
|
58
|
+
FileStore.ordinary_directory_name?("foo/.foo").should == false
|
59
|
+
end
|
60
|
+
end
|
61
|
+
|
62
|
+
describe "valid_filestore_name?" do
|
63
|
+
it "should not permitted sub-directory names" do
|
64
|
+
FileStore.valid_filestore_name?("foo/bar").should == false
|
65
|
+
end
|
66
|
+
|
67
|
+
it "should permit simple filenames" do
|
68
|
+
FileStore.valid_filestore_name?("foo").should == true
|
69
|
+
end
|
70
|
+
|
71
|
+
it "should not permit dot filenames" do
|
72
|
+
FileStore.valid_filestore_name?(".foo").should == false
|
73
|
+
end
|
74
|
+
|
75
|
+
it "should not permit special dirnames" do
|
76
|
+
FileStore.valid_filestore_name?(".").should == false
|
77
|
+
end
|
78
|
+
end
|
79
|
+
|
80
|
+
describe "ordinary_directory?" do
|
81
|
+
it "should return true for regular real directory" do
|
82
|
+
FileStore.ordinary_directory?(TMP_DIR).should == true
|
83
|
+
end
|
84
|
+
|
85
|
+
it "should return false for nested special directory" do
|
86
|
+
FileStore.ordinary_directory?(File.join(TMP_DIR, '.')).should == false
|
87
|
+
end
|
88
|
+
|
89
|
+
it "should return false for non-existent directory" do
|
90
|
+
FileStore.ordinary_directory?("blahblah").should == false
|
91
|
+
end
|
92
|
+
end
|
93
|
+
|
94
|
+
|
95
|
+
it "should initialize with a root, a name and areas, and create directories" do
|
96
|
+
fs=create_testfs
|
97
|
+
fs.root.should == TMP_DIR
|
98
|
+
fs.name.should == :testfs
|
99
|
+
fs.areas.should == [:foo, :bar].to_set
|
100
|
+
File.directory?(File.join(TMP_DIR, "testfs")).should == true
|
101
|
+
File.directory?(File.join(TMP_DIR, "testfs", "foo")).should == true
|
102
|
+
File.directory?(File.join(TMP_DIR, "testfs", "bar")).should == true
|
103
|
+
end
|
104
|
+
|
105
|
+
it "should destroy itself cleanly" do
|
106
|
+
fs=create_testfs
|
107
|
+
fs.destroy!
|
108
|
+
File.exist?(File.join(TMP_DIR, "testfs")).should == false
|
109
|
+
end
|
110
|
+
|
111
|
+
it "should write files to an area" do
|
112
|
+
fs = create_testfs
|
113
|
+
fs.write(:foo, "testfile.txt", "one two three")
|
114
|
+
File.read(File.join(TMP_DIR, "testfs", "foo", "testfile.txt")).should == "one two three"
|
115
|
+
end
|
116
|
+
|
117
|
+
it "should count files in an area" do
|
118
|
+
fs = create_testfs
|
119
|
+
fs.write(:foo, "testfile.txt", "one two three")
|
120
|
+
fs.write(:foo, "testfile2.txt", "one two three")
|
121
|
+
fs.count(:foo).should == 2
|
122
|
+
end
|
123
|
+
|
124
|
+
it "should count files across all areas" do
|
125
|
+
fs = create_testfs
|
126
|
+
fs.write(:foo, "testfile.txt", "one two three")
|
127
|
+
fs.write(:bar, "testfile2.txt", "one two three")
|
128
|
+
fs.area_count.should == {:foo=>1, :bar=>1}
|
129
|
+
end
|
130
|
+
|
131
|
+
it "should give a kb based disk usage for an area" do
|
132
|
+
# don't actually know what disk page size is
|
133
|
+
# so test that each file occupies >0 space, and that two
|
134
|
+
# files the same size occupy twice as much as one
|
135
|
+
fs = create_testfs
|
136
|
+
fs.write(:foo, "testfile.txt", "one two three")
|
137
|
+
sz1 = fs.size(:foo)
|
138
|
+
sz1.should > 0
|
139
|
+
fs.write(:foo, "testfile2.txt", "one two three")
|
140
|
+
fs.size(:foo).should == 2*sz1
|
141
|
+
end
|
142
|
+
|
143
|
+
it "should give kb based disk usage across all areas" do
|
144
|
+
fs = create_testfs
|
145
|
+
fs.write(:foo, "testfile.txt", "one two three")
|
146
|
+
sz1 = fs.size(:foo)
|
147
|
+
sz1.should > 0
|
148
|
+
fs.write(:bar, "testfile.txt", "one two three")
|
149
|
+
fs.write(:bar, "testfile2.txt", "one two three")
|
150
|
+
fs.area_size.should == {:foo=>sz1, :bar=>2*sz1}
|
151
|
+
end
|
152
|
+
|
153
|
+
it "should scrub empty directories from an area" do
|
154
|
+
fs = create_testfs
|
155
|
+
ap = fs.area_path(:foo)
|
156
|
+
FileUtils.mkdir_p(File.join(ap, "bar", "baz"))
|
157
|
+
FileUtils.mkdir_p(File.join(ap, "woo"))
|
158
|
+
FileUtils.mkdir_p(File.join(ap, "waz"))
|
159
|
+
fs.write(:foo, File.join("waz", "testfile.txt"), "one two three")
|
160
|
+
|
161
|
+
fs.scrub!(:foo)
|
162
|
+
|
163
|
+
File.exist?(File.join(ap, "bar")).should == false
|
164
|
+
File.exist?(File.join(ap, "woo")).should == false
|
165
|
+
File.exist?(File.join(ap, "waz", "testfile.txt")).should == true
|
166
|
+
end
|
167
|
+
|
168
|
+
|
169
|
+
it "should iterate over all files in an area" do
|
170
|
+
fs = create_testfs
|
171
|
+
fs.write(:foo, "testfile.txt", "one two three")
|
172
|
+
fs.write(:foo, "testfile2.txt", "four five six")
|
173
|
+
|
174
|
+
texts = Set.new
|
175
|
+
ap = fs.area_path(:foo)
|
176
|
+
fs.for_each(:foo) do |f|
|
177
|
+
texts << File.read(File.join(ap, f))
|
178
|
+
end
|
179
|
+
|
180
|
+
texts.should == ["one two three", "four five six"].to_set
|
181
|
+
end
|
182
|
+
|
183
|
+
it "should ignore . and .. files when iterating" do
|
184
|
+
fs = create_testfs(:foo, :bar, :baz)
|
185
|
+
ap = fs.area_path(:foo)
|
186
|
+
|
187
|
+
# only files "foo" and "bar" exist in area :foo
|
188
|
+
stub(File).file?(File.join(ap,"foo")){true}
|
189
|
+
stub(File).file?(File.join(ap,"bar")){true}
|
190
|
+
stub(File).file?(){false}
|
191
|
+
|
192
|
+
stub(Dir).foreach do |path, proc|
|
193
|
+
[".", "..", "foo", "bar"].each{ |p| proc.call(p)}
|
194
|
+
end
|
195
|
+
|
196
|
+
files = Set.new
|
197
|
+
fs.for_each(:foo){|f| files << f}
|
198
|
+
files.should == ["foo", "bar"].to_set
|
199
|
+
|
200
|
+
end
|
201
|
+
|
202
|
+
describe "process" do
|
203
|
+
before do
|
204
|
+
@fs = create_testfs(:foo, :bar, :baz)
|
205
|
+
@fs.write(:foo, "testfile.txt", "one two three")
|
206
|
+
@fs.write(:foo, "testfile2.txt", "four five six")
|
207
|
+
@fs.write(:foo, "testfile3.txt", "seven eight nine")
|
208
|
+
end
|
209
|
+
|
210
|
+
it "should process all files in an area" do
|
211
|
+
texts = Set.new
|
212
|
+
@fs.process(:foo) do |f|
|
213
|
+
texts << File.read(@fs.file_path(:foo, f))
|
214
|
+
end
|
215
|
+
texts.should == ["one two three", "four five six", "seven eight nine"].to_set
|
216
|
+
@fs.count(:foo).should == 0
|
217
|
+
end
|
218
|
+
|
219
|
+
it "should move failed processings to the error_area" do
|
220
|
+
texts = Set.new
|
221
|
+
lambda {
|
222
|
+
@fs.process(:foo, :bar) do |f|
|
223
|
+
s = File.read(@fs.file_path(:foo, f))
|
224
|
+
raise "five" if s =~ /five/
|
225
|
+
texts << s
|
226
|
+
end
|
227
|
+
}.should raise_error("five")
|
228
|
+
@fs.count(:foo).should == 1
|
229
|
+
@fs.count(:bar).should == 1
|
230
|
+
@fs.read(:bar, "testfile2.txt").should == "four five six"
|
231
|
+
end
|
232
|
+
|
233
|
+
it "should move completed processings to the success_area" do
|
234
|
+
texts = Set.new
|
235
|
+
lambda {
|
236
|
+
@fs.process(:foo, :bar, :baz) do |f|
|
237
|
+
s = File.read(@fs.file_path(:foo, f))
|
238
|
+
raise "five" if s =~ /five/
|
239
|
+
texts << s
|
240
|
+
end
|
241
|
+
}.should raise_error("five")
|
242
|
+
@fs.count(:foo).should == 1
|
243
|
+
@fs.count(:bar).should == 1
|
244
|
+
@fs.count(:baz).should == 1
|
245
|
+
@fs.read(:bar, "testfile2.txt").should == "four five six"
|
246
|
+
end
|
247
|
+
end
|
248
|
+
|
249
|
+
describe "process_batch" do
|
250
|
+
before do
|
251
|
+
@fs = create_testfs(:foo, :bar, :baz)
|
252
|
+
FileUtils.mkdir_p(File.join(@fs.area_path(:foo), "a", "b"))
|
253
|
+
FileUtils.mkdir_p(File.join(@fs.area_path(:foo), "c", "d"))
|
254
|
+
@fs.write(:foo, "a/b/testfile.txt", "one two three")
|
255
|
+
@fs.write(:foo, "a/testfile2.txt", "four five six")
|
256
|
+
@fs.write(:foo, "c/d/testfile3.txt", "seven eight nine")
|
257
|
+
@fs.write(:foo, "c/testfile3.txt", "ten eleven twelve")
|
258
|
+
|
259
|
+
@files = ["a/b/testfile.txt", "a/testfile2.txt", "c/d/testfile3.txt", "c/testfile3.txt"].to_set
|
260
|
+
@processed = Set.new
|
261
|
+
end
|
262
|
+
|
263
|
+
def process_batch(fs, size, source_area, error_area=nil, success_area=nil)
|
264
|
+
fs.process_batch(size, source_area, error_area, success_area) do |batch|
|
265
|
+
@processed += batch
|
266
|
+
batch.each do |f|
|
267
|
+
raise "#{f} not in @files" if !@files.delete?(f)
|
268
|
+
end
|
269
|
+
if block_given?
|
270
|
+
batch.each do |f|
|
271
|
+
yield f
|
272
|
+
end
|
273
|
+
end
|
274
|
+
end
|
275
|
+
end
|
276
|
+
|
277
|
+
def check_files(fs, area, paths, test)
|
278
|
+
paths.each{|f| File.exist?(fs.file_path(area, f)).should == test}
|
279
|
+
end
|
280
|
+
|
281
|
+
it "should process and delete a limited batch of files" do
|
282
|
+
process_batch(@fs, 2, :foo).should == 2
|
283
|
+
|
284
|
+
@processed.size.should == 2
|
285
|
+
@files.size.should == 2
|
286
|
+
check_files(@fs, :foo, @processed, false)
|
287
|
+
check_files(@fs, :foo, @files, true)
|
288
|
+
|
289
|
+
process_batch(@fs, 3, :foo).should == 2
|
290
|
+
|
291
|
+
@processed.size.should == 4
|
292
|
+
@files.size.should == 0
|
293
|
+
check_files(@fs, :foo, @processed, false)
|
294
|
+
|
295
|
+
process_batch(@fs, 2, :foo).should == 0
|
296
|
+
end
|
297
|
+
|
298
|
+
it "should not call the block if there are no files to process" do
|
299
|
+
process_batch(@fs, 4, :foo).should == 4
|
300
|
+
@fs.process_batch(1, :foo) do
|
301
|
+
raise "i should not be called"
|
302
|
+
end
|
303
|
+
end
|
304
|
+
|
305
|
+
it "should move failed batches to error_area if given" do
|
306
|
+
lambda {
|
307
|
+
process_batch(@fs, 2, :foo, :bar){|f| raise "foo"}
|
308
|
+
}.should raise_error("foo")
|
309
|
+
|
310
|
+
@files.size.should == 2
|
311
|
+
check_files(@fs, :foo, @files, true)
|
312
|
+
check_files(@fs, :bar, @processed, true)
|
313
|
+
|
314
|
+
@ok = Set.new
|
315
|
+
process_batch(@fs, 2, :foo, :bar){|f| @ok << f}.should == 2
|
316
|
+
@files.size.should == 0
|
317
|
+
check_files(@fs, :foo, @ok, false)
|
318
|
+
check_files(@fs, :bar, @ok, false)
|
319
|
+
|
320
|
+
process_batch(@fs, 2, :foo, :bar).should == 0
|
321
|
+
end
|
322
|
+
|
323
|
+
it "should move successful batches to success_area if given" do
|
324
|
+
process_batch(@fs, 2, :foo, :bar, :baz).should == 2
|
325
|
+
@files.size.should == 2
|
326
|
+
check_files(@fs, :foo, @files, true)
|
327
|
+
check_files(@fs, :baz, @processed, true)
|
328
|
+
|
329
|
+
process_batch(@fs, 2, :foo, :bar, :baz).should == 2
|
330
|
+
check_files(@fs, :baz, @processed, true)
|
331
|
+
|
332
|
+
process_batch(@fs, 2, :foo, :bar, :baz).should == 0
|
333
|
+
end
|
334
|
+
|
335
|
+
it "should leave files in source area if LeaveInSourceArea is thrown" do
|
336
|
+
lambda {
|
337
|
+
process_batch(@fs, 2, :foo, :bar){|f| raise FileStore::LeaveInSourceArea }
|
338
|
+
}.should raise_error(FileStore::LeaveInSourceArea)
|
339
|
+
check_files(@fs, :foo, @files, true)
|
340
|
+
check_files(@fs, :baz, @processed, false)
|
341
|
+
check_files(@fs, :foo, @processed, true)
|
342
|
+
end
|
343
|
+
|
344
|
+
end
|
345
|
+
|
346
|
+
describe "flip" do
|
347
|
+
before do
|
348
|
+
@testfs = create_testfs(:foo, :bar, :baz)
|
349
|
+
@testfs.write(:foo, "testfile.txt", "one two three")
|
350
|
+
|
351
|
+
@targetfs = FileStore.new(TMP_DIR, :targetfs, [:a, :b])
|
352
|
+
end
|
353
|
+
|
354
|
+
|
355
|
+
it "should flip non-unique names from testfs to uniquely named subdir of targetfs" do
|
356
|
+
stub(@targetfs).unique_name{"some-uuid-string"}
|
357
|
+
|
358
|
+
@testfs.flip(:foo, @targetfs, :a, false)
|
359
|
+
|
360
|
+
File.exists?(File.join(@targetfs.area_path(:a), "some-uuid-string", "testfile.txt")).should == true
|
361
|
+
|
362
|
+
# should recreate area in flipped source, so source is
|
363
|
+
# still valid
|
364
|
+
File.exists?(File.join(@testfs.area_path(:foo)))
|
365
|
+
end
|
366
|
+
|
367
|
+
it "should flip unique names from testfs to targetfs without introducing additional subdirectories" do
|
368
|
+
@testfs.flip(:foo, @targetfs, :a)
|
369
|
+
|
370
|
+
File.exists?(File.join(@targetfs.area_path(:a), "testfile.txt")).should == true
|
371
|
+
|
372
|
+
# should recreate area in flipped source, so source is
|
373
|
+
# still valid
|
374
|
+
File.exists?(File.join(@testfs.area_path(:foo)))
|
375
|
+
end
|
376
|
+
|
377
|
+
end
|
378
|
+
|
379
|
+
describe "area_files" do
|
380
|
+
before do
|
381
|
+
@fs = create_testfs(:foo)
|
382
|
+
FileUtils.mkdir_p(File.join(@fs.area_path(:foo), "a", "b"))
|
383
|
+
FileUtils.mkdir_p(File.join(@fs.area_path(:foo), "c", "d"))
|
384
|
+
@fs.write(:foo, "a/b/testfile.txt", "one two three")
|
385
|
+
@fs.write(:foo, "a/testfile2.txt", "four five six")
|
386
|
+
@fs.write(:foo, "c/d/testfile3.txt", "seven eight nine")
|
387
|
+
@fs.write(:foo, "c/testfile3.txt", "ten eleven twelve")
|
388
|
+
|
389
|
+
@files = ["a/b/testfile.txt", "a/testfile2.txt", "c/d/testfile3.txt", "c/testfile3.txt"].to_set
|
390
|
+
end
|
391
|
+
|
392
|
+
it "should fetch all paths if max not given" do
|
393
|
+
@fs.area_files(:foo).to_set.should == @files
|
394
|
+
end
|
395
|
+
|
396
|
+
it "should fetch a limited number of paths if max given" do
|
397
|
+
fs = @fs.area_files(:foo, 2).to_set
|
398
|
+
fs.size.should == 2
|
399
|
+
(@files - fs).size.should == 2
|
400
|
+
end
|
401
|
+
end
|
402
|
+
|
403
|
+
describe "logger" do
|
404
|
+
it "should write to the class logger if no instance logger given" do
|
405
|
+
fs = create_testfs(:foo)
|
406
|
+
mock(FileStore.logger).debug("foo")
|
407
|
+
fs.logger.debug("foo")
|
408
|
+
end
|
409
|
+
|
410
|
+
it "should write to the instance logger if given" do
|
411
|
+
fs = create_testfs(:foo)
|
412
|
+
mock(logger = Object.new).debug("foo")
|
413
|
+
fs.logger = logger
|
414
|
+
fs.logger.debug("foo")
|
415
|
+
end
|
416
|
+
end
|
417
|
+
|
418
|
+
end
|
419
|
+
end
|
420
|
+
end
|
data/spec/spec.opts
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
--color
|
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
$LOAD_PATH.unshift(File.dirname(__FILE__))
|
2
|
+
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
|
3
|
+
require 'rubygems'
|
4
|
+
require 'sonar_connector_filestore'
|
5
|
+
require 'spec'
|
6
|
+
require 'spec/autorun'
|
7
|
+
require 'rr'
|
8
|
+
require 'fileutils'
|
9
|
+
|
10
|
+
Spec::Runner.configure do |config|
|
11
|
+
config.mock_with RR::Adapters::Rspec
|
12
|
+
end
|
13
|
+
|
14
|
+
TMP_DIR = File.expand_path("../../tmp", __FILE__)
|
metadata
ADDED
@@ -0,0 +1,109 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: sonar_connector_filestore
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
hash: 23
|
5
|
+
prerelease: false
|
6
|
+
segments:
|
7
|
+
- 0
|
8
|
+
- 2
|
9
|
+
- 0
|
10
|
+
version: 0.2.0
|
11
|
+
platform: ruby
|
12
|
+
authors:
|
13
|
+
- mccraigmccraig
|
14
|
+
autorequire:
|
15
|
+
bindir: bin
|
16
|
+
cert_chain: []
|
17
|
+
|
18
|
+
date: 2010-10-25 00:00:00 +01:00
|
19
|
+
default_executable:
|
20
|
+
dependencies:
|
21
|
+
- !ruby/object:Gem::Dependency
|
22
|
+
name: rspec
|
23
|
+
prerelease: false
|
24
|
+
requirement: &id001 !ruby/object:Gem::Requirement
|
25
|
+
none: false
|
26
|
+
requirements:
|
27
|
+
- - ">="
|
28
|
+
- !ruby/object:Gem::Version
|
29
|
+
hash: 13
|
30
|
+
segments:
|
31
|
+
- 1
|
32
|
+
- 2
|
33
|
+
- 9
|
34
|
+
version: 1.2.9
|
35
|
+
type: :development
|
36
|
+
version_requirements: *id001
|
37
|
+
- !ruby/object:Gem::Dependency
|
38
|
+
name: rr
|
39
|
+
prerelease: false
|
40
|
+
requirement: &id002 !ruby/object:Gem::Requirement
|
41
|
+
none: false
|
42
|
+
requirements:
|
43
|
+
- - ">="
|
44
|
+
- !ruby/object:Gem::Version
|
45
|
+
hash: 33
|
46
|
+
segments:
|
47
|
+
- 0
|
48
|
+
- 10
|
49
|
+
- 11
|
50
|
+
version: 0.10.11
|
51
|
+
type: :development
|
52
|
+
version_requirements: *id002
|
53
|
+
description: support for staged processing using a vanilla filesystem for storage
|
54
|
+
email: craig@trampolinesystems.com
|
55
|
+
executables: []
|
56
|
+
|
57
|
+
extensions: []
|
58
|
+
|
59
|
+
extra_rdoc_files:
|
60
|
+
- LICENSE
|
61
|
+
- README.rdoc
|
62
|
+
files:
|
63
|
+
- .document
|
64
|
+
- .gitignore
|
65
|
+
- LICENSE
|
66
|
+
- README.rdoc
|
67
|
+
- Rakefile
|
68
|
+
- VERSION
|
69
|
+
- lib/sonar_connector_filestore.rb
|
70
|
+
- spec/sonar_connector_filestore_spec.rb
|
71
|
+
- spec/spec.opts
|
72
|
+
- spec/spec_helper.rb
|
73
|
+
has_rdoc: true
|
74
|
+
homepage: http://github.com/trampoline/sonar-connector-filestore
|
75
|
+
licenses: []
|
76
|
+
|
77
|
+
post_install_message:
|
78
|
+
rdoc_options:
|
79
|
+
- --charset=UTF-8
|
80
|
+
require_paths:
|
81
|
+
- lib
|
82
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
83
|
+
none: false
|
84
|
+
requirements:
|
85
|
+
- - ">="
|
86
|
+
- !ruby/object:Gem::Version
|
87
|
+
hash: 3
|
88
|
+
segments:
|
89
|
+
- 0
|
90
|
+
version: "0"
|
91
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
92
|
+
none: false
|
93
|
+
requirements:
|
94
|
+
- - ">="
|
95
|
+
- !ruby/object:Gem::Version
|
96
|
+
hash: 3
|
97
|
+
segments:
|
98
|
+
- 0
|
99
|
+
version: "0"
|
100
|
+
requirements: []
|
101
|
+
|
102
|
+
rubyforge_project:
|
103
|
+
rubygems_version: 1.3.7
|
104
|
+
signing_key:
|
105
|
+
specification_version: 3
|
106
|
+
summary: reliable staged processing with the filesystem
|
107
|
+
test_files:
|
108
|
+
- spec/sonar_connector_filestore_spec.rb
|
109
|
+
- spec/spec_helper.rb
|