amione-fakes3 0.1.5.1

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,5 @@
1
+ pkg/*
2
+ *.gem
3
+ .bundle
4
+ tmp
5
+ test_root
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
+ source :rubygems
2
+ gem 'fakes3', :path => '.' # for dev and test, use local fakes3
3
+ # Specify your gem's dependencies in fakes3.gemspec
4
+ gemspec
@@ -0,0 +1,32 @@
1
+ PATH
2
+ remote: .
3
+ specs:
4
+ fakes3 (0.1.5)
5
+ builder
6
+ thor
7
+
8
+ GEM
9
+ remote: http://rubygems.org/
10
+ specs:
11
+ aws-s3 (0.6.2)
12
+ builder
13
+ mime-types
14
+ xml-simple
15
+ builder (3.0.0)
16
+ mime-types (1.18)
17
+ rake (0.9.2.2)
18
+ right_aws (3.0.4)
19
+ right_http_connection (>= 1.2.5)
20
+ right_http_connection (1.3.0)
21
+ thor (0.14.6)
22
+ xml-simple (1.1.1)
23
+
24
+ PLATFORMS
25
+ ruby
26
+
27
+ DEPENDENCIES
28
+ aws-s3
29
+ bundler (>= 1.0.0)
30
+ fakes3!
31
+ rake
32
+ right_aws
@@ -0,0 +1,20 @@
1
+ Copyright (c) 2011,2012 Curtis W Spencer (@jubos) and Spool
2
+
3
+ Permission is hereby granted, free of charge, to any person obtaining
4
+ a copy of this software and associated documentation files (the
5
+ "Software"), to deal in the Software without restriction, including
6
+ without limitation the rights to use, copy, modify, merge, publish,
7
+ distribute, sublicense, and/or sell copies of the Software, and to
8
+ permit persons to whom the Software is furnished to do so, subject to
9
+ the following conditions:
10
+
11
+ The above copyright notice and this permission notice shall be
12
+ included in all copies or substantial portions of the Software.
13
+
14
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -0,0 +1,46 @@
1
+ ## Introduction
2
+ FakeS3 is a lightweight server that responds to the same calls Amazon S3 responds to.
3
+ It is extremely useful for testing S3 in a sandbox environment without actually
4
+ making calls to Amazon, which not only require network, but also cost you precious dollars.
5
+
6
+ The goal of Fake S3 is to minimize runtime dependencies and be more of a
7
+ development tool to test S3 calls in your code rather than a production server
8
+ looking to duplicate S3 functionality. Trying RiakCS, ParkPlace/Boardwalk, or
9
+ Ceph might be a place to start if that is your goal.
10
+
11
+ FakeS3 doesn't support all of the S3 command set, but the basic ones like put, get,
12
+ list, copy, and make bucket are supported. More coming soon.
13
+
14
+ ## Installation
15
+
16
+ gem install fakes3
17
+
18
+ ## Running
19
+
20
+ To run a fakes3 server, you just specify a root and a port.
21
+
22
+ fakes3 -r /mnt/fakes3_root -p 4567
23
+
24
+ ## Connecting to FakeS3
25
+
26
+ Take a look at the test cases to see client example usage. For now, FakeS3 is
27
+ mainly tested with s3cmd, aws-s3 gem, and right_aws. There are plenty more
28
+ libraries out there, and please do mention if other clients work or not.
29
+
30
+ Here is a running list of [supported clients](https://github.com/jubos/fake-s3/wiki/Supported-Clients "Supported Clients")
31
+
32
+ ## Running Tests
33
+
34
+ Start the test server using
35
+
36
+ rake test_server
37
+
38
+ Then in another terminal window run
39
+
40
+ rake test
41
+
42
+ Combining these two steps into a single command is a TODO.
43
+
44
+ ## More Information
45
+
46
+ Check out the [wiki](https://github.com/jubos/fake-s3/wiki)
@@ -0,0 +1,17 @@
1
+ require 'rubygems'
2
+ require 'bundler'
3
+ require 'rake/testtask'
4
+ include Rake::DSL
5
+ Bundler::GemHelper.install_tasks
6
+
7
+ Rake::TestTask.new(:test) do |t|
8
+ t.libs << "."
9
+ t.test_files = FileList['test/*_test.rb']
10
+ end
11
+
12
+ desc "Run the test_server"
13
+ task :test_server do |t|
14
+ system("bundle exec bin/fakes3 --port 10453 --root test_root")
15
+ end
16
+
17
+ task :default => :test
@@ -0,0 +1,4 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ require 'fakes3/cli'
4
+ FakeS3::CLI.start
@@ -0,0 +1,30 @@
1
+ # -*- encoding: utf-8 -*-
2
+ require File.join(File.dirname(__FILE__), 'lib', 'fakes3', 'version')
3
+
4
+ Gem::Specification.new do |s|
5
+ s.name = "amione-fakes3"
6
+ s.version = FakeS3::VERSION
7
+ s.platform = Gem::Platform::RUBY
8
+ s.authors = ["Curtis Spencer"]
9
+ s.email = ["thorin@gmail.com"]
10
+ s.homepage = ""
11
+ s.summary = %q{FakeS3 is a server that simulates S3 commands so you can test your S3 functionality in your projects}
12
+ s.description = %q{Use FakeS3 to test basic S3 functionality without actually connecting to S3}
13
+
14
+ s.rubyforge_project = "fakes3"
15
+
16
+ s.add_development_dependency "bundler", ">= 1.0.0"
17
+ s.add_development_dependency "aws-s3"
18
+ s.add_development_dependency "right_aws"
19
+ s.add_development_dependency "rake"
20
+ #s.add_development_dependency "aws-sdk"
21
+ #s.add_development_dependency "ruby-debug"
22
+ #s.add_development_dependency "ruby-debug19"
23
+ s.add_dependency "thor"
24
+ s.add_dependency "builder"
25
+
26
+ s.files = `git ls-files`.split("\n")
27
+ s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
28
+ s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
29
+ s.require_paths = ["lib"]
30
+ end
@@ -0,0 +1,3 @@
1
+ require 'fakes3/version'
2
+ require 'fakes3/file_store'
3
+ require 'fakes3/server'
@@ -0,0 +1,64 @@
1
+ require 'builder'
2
+ require 'thread'
3
+ require 'fakes3/s3_object'
4
+ require 'fakes3/sorted_object_list'
5
+
6
+ module FakeS3
7
+ class Bucket
8
+ attr_accessor :name,:creation_date,:objects
9
+
10
+ def initialize(name,creation_date,objects)
11
+ @name = name
12
+ @creation_date = creation_date
13
+ @objects = SortedObjectList.new
14
+ objects.each do |obj|
15
+ @objects.add(obj)
16
+ end
17
+ @mutex = Mutex.new
18
+ end
19
+
20
+ def find(object_name)
21
+ @mutex.synchronize do
22
+ @objects.find(object_name)
23
+ end
24
+ end
25
+
26
+ def add(object)
27
+ # Unfortunately have to synchronize here since the our SortedObjectList
28
+ # not thread safe. Probably can get finer granularity if performance is
29
+ # important
30
+ @mutex.synchronize do
31
+ @objects.add(object)
32
+ end
33
+ end
34
+
35
+ def remove(object)
36
+ @mutex.synchronize do
37
+ @objects.remove(object)
38
+ end
39
+ end
40
+
41
+ def query_for_range(options)
42
+ marker = options[:marker]
43
+ prefix = options[:prefix]
44
+ max_keys = options[:max_keys] || 1000
45
+ delimiter = options[:delimiter]
46
+
47
+ match_set = nil
48
+ @mutex.synchronize do
49
+ match_set = @objects.list(options)
50
+ end
51
+
52
+ bq = BucketQuery.new
53
+ bq.bucket = self
54
+ bq.marker = marker
55
+ bq.prefix = prefix
56
+ bq.max_keys = max_keys
57
+ bq.delimiter = delimiter
58
+ bq.matches = match_set.matches
59
+ bq.is_truncated = match_set.is_truncated
60
+ return bq
61
+ end
62
+
63
+ end
64
+ end
@@ -0,0 +1,11 @@
1
+ module FakeS3
2
+ class BucketQuery
3
+ attr_accessor :prefix,:matches,:marker,:max_keys,
4
+ :delimiter,:bucket,:is_truncated
5
+
6
+ # Syntactic sugar
7
+ def is_truncated?
8
+ @is_truncated
9
+ end
10
+ end
11
+ end
@@ -0,0 +1,63 @@
1
+ require 'thor'
2
+ require 'fakes3/server'
3
+ require 'fakes3/version'
4
+
5
+ module FakeS3
6
+ class CLI < Thor
7
+ default_task("server")
8
+
9
+ desc "server", "Run a server on a particular hostname"
10
+ method_option :root, :type => :string, :aliases => '-r', :required => true
11
+ method_option :port, :type => :numeric, :aliases => '-p', :required => true
12
+ method_option :address, :type => :string, :aliases => '-a', :required => false, :desc => "Bind to this address. Defaults to 0.0.0.0"
13
+ method_option :hostname, :type => :string, :aliases => '-h', :desc => "The root name of the host. Defaults to s3.amazonaws.com."
14
+ method_option :limit, :aliases => '-l', :type => :string, :desc => 'Rate limit for serving (ie. 50K, 1.0M)'
15
+ def server
16
+ store = nil
17
+ if options[:root]
18
+ root = File.expand_path(options[:root])
19
+ # TODO Do some sanity checking here
20
+ store = FileStore.new(root)
21
+ end
22
+
23
+ if store.nil?
24
+ puts "You must specify a root to use a file store (the current default)"
25
+ exit(-1)
26
+ end
27
+
28
+ hostname = 's3.amazonaws.com'
29
+ if options[:hostname]
30
+ hostname = options[:hostname]
31
+ # In case the user has put a port on the hostname
32
+ if hostname =~ /:(\d+)/
33
+ hostname = hostname.split(":")[0]
34
+ end
35
+ end
36
+
37
+ if options[:limit]
38
+ begin
39
+ store.rate_limit = options[:limit]
40
+ rescue
41
+ puts $!.message
42
+ exit(-1)
43
+ end
44
+ end
45
+
46
+ address = options[:address] || '0.0.0.0'
47
+
48
+ puts "Loading FakeS3 with #{root} on port #{options[:port]} with hostname #{hostname}"
49
+ server = FakeS3::Server.new(address,options[:port],store,hostname)
50
+ server.serve
51
+ end
52
+
53
+ desc "version", "Report the current fakes3 version"
54
+ def version
55
+ puts <<"EOF"
56
+ ======================
57
+ FakeS3 #{FakeS3::VERSION}
58
+
59
+ Copyright 2012, Curtis Spencer (@jubos)
60
+ EOF
61
+ end
62
+ end
63
+ end
@@ -0,0 +1,46 @@
1
+ module FakeS3
2
+ class FakeS3Exception < RuntimeError
3
+ attr_accessor :resource,:request_id
4
+
5
+ def self.metaclass; class << self; self; end; end
6
+
7
+ def self.traits(*arr)
8
+ return @traits if arr.empty?
9
+ attr_accessor *arr
10
+
11
+ arr.each do |a|
12
+ metaclass.instance_eval do
13
+ define_method( a ) do |val|
14
+ @traits ||= {}
15
+ @traits[a] = val
16
+ end
17
+ end
18
+ end
19
+
20
+ class_eval do
21
+ define_method( :initialize ) do
22
+ self.class.traits.each do |k,v|
23
+ instance_variable_set("@#{k}", v)
24
+ end
25
+ end
26
+ end
27
+ end
28
+
29
+ traits :message,:http_status
30
+
31
+ def code
32
+ self.class.to_s
33
+ end
34
+ end
35
+
36
+ class NoSuchBucket < FakeS3Exception
37
+ message "The bucket you tried to delete is not empty."
38
+ http_status "404"
39
+ end
40
+
41
+ class BucketNotEmpty < FakeS3Exception
42
+ message "The bucket you tried to delete is not empty."
43
+ http_status "409"
44
+ end
45
+
46
+ end
@@ -0,0 +1,201 @@
1
+ require 'fileutils'
2
+ require 'time'
3
+ require 'fakes3/s3_object'
4
+ require 'fakes3/bucket'
5
+ require 'fakes3/rate_limitable_file'
6
+ require 'digest/md5'
7
+ require 'yaml'
8
+
9
+ module FakeS3
10
+ class FileStore
11
+ SHUCK_METADATA_DIR = ".fakes3_metadataFFF"
12
+
13
+ def initialize(root)
14
+ @root = root
15
+ @buckets = []
16
+ @bucket_hash = {}
17
+ Dir[File.join(root,"*")].each do |bucket|
18
+ bucket_name = File.basename(bucket)
19
+ bucket_obj = Bucket.new(bucket_name,Time.now,[])
20
+ @buckets << bucket_obj
21
+ @bucket_hash[bucket_name] = bucket_obj
22
+ end
23
+ end
24
+
25
+ # Pass a rate limit in bytes per second
26
+ def rate_limit=(rate_limit)
27
+ if rate_limit.is_a?(String)
28
+ if rate_limit =~ /^(\d+)$/
29
+ RateLimitableFile.rate_limit = rate_limit.to_i
30
+ elsif rate_limit =~ /^(.*)K$/
31
+ RateLimitableFile.rate_limit = $1.to_f * 1000
32
+ elsif rate_limit =~ /^(.*)M$/
33
+ RateLimitableFile.rate_limit = $1.to_f * 1000000
34
+ elsif rate_limit =~ /^(.*)G$/
35
+ RateLimitableFile.rate_limit = $1.to_f * 1000000000
36
+ else
37
+ raise "Invalid Rate Limit Format: Valid values include (1000,10K,1.1M)"
38
+ end
39
+ else
40
+ RateLimitableFile.rate_limit = nil
41
+ end
42
+ end
43
+
44
+ def buckets
45
+ @buckets
46
+ end
47
+
48
+ def get_bucket_folder(bucket)
49
+ File.join(@root,bucket.name)
50
+ end
51
+
52
+ def get_bucket(bucket)
53
+ @bucket_hash[bucket]
54
+ end
55
+
56
+ def create_bucket(bucket)
57
+ FileUtils.mkdir_p(File.join(@root,bucket))
58
+ bucket_obj = Bucket.new(bucket,Time.now,[])
59
+ if !@bucket_hash[bucket]
60
+ @buckets << bucket_obj
61
+ @bucket_hash[bucket] = bucket_obj
62
+ end
63
+ bucket_obj
64
+ end
65
+
66
+ def delete_bucket(bucket_name)
67
+ bucket = get_bucket(bucket_name)
68
+ raise NoSuchBucket if !bucket
69
+ raise BucketNotEmpty if bucket.objects.count > 0
70
+ FileUtils.rm_r(get_bucket_folder(bucket))
71
+ @bucket_hash.delete(bucket_name)
72
+ end
73
+
74
+ def get_object(bucket,object_name, request)
75
+ begin
76
+ real_obj = S3Object.new
77
+ obj_root = File.join(@root,bucket,object_name,SHUCK_METADATA_DIR)
78
+ metadata = YAML.load(File.open(File.join(obj_root,"metadata"),'rb'))
79
+ real_obj.name = object_name
80
+ real_obj.md5 = metadata[:md5]
81
+ real_obj.content_type = metadata.fetch(:content_type) { "application/octet-stream" }
82
+ #real_obj.io = File.open(File.join(obj_root,"content"),'rb')
83
+ real_obj.io = RateLimitableFile.open(File.join(obj_root,"content"),'rb')
84
+ real_obj.size = metadata.fetch(:size) { 0 }
85
+ real_obj.creation_date = File.ctime(obj_root).iso8601()
86
+ real_obj.modified_date = metadata.fetch(:modified_date) { File.mtime(File.join(obj_root,"content")).iso8601() }
87
+ return real_obj
88
+ rescue
89
+ puts $!
90
+ $!.backtrace.each { |line| puts line }
91
+ return nil
92
+ end
93
+ end
94
+
95
+ def object_metadata(bucket,object)
96
+ end
97
+
98
+ def copy_object(src_bucket_name,src_name,dst_bucket_name,dst_name)
99
+ src_root = File.join(@root,src_bucket_name,src_name,SHUCK_METADATA_DIR)
100
+ src_metadata_filename = File.join(src_root,"metadata")
101
+ src_metadata = YAML.load(File.open(src_metadata_filename,'rb').read)
102
+ src_content_filename = File.join(src_root,"content")
103
+
104
+ dst_filename= File.join(@root,dst_bucket_name,dst_name)
105
+ FileUtils.mkdir_p(dst_filename)
106
+
107
+ metadata_dir = File.join(dst_filename,SHUCK_METADATA_DIR)
108
+ FileUtils.mkdir_p(metadata_dir)
109
+
110
+ content = File.join(metadata_dir,"content")
111
+ metadata = File.join(metadata_dir,"metadata")
112
+
113
+ File.open(content,'wb') do |f|
114
+ File.open(src_content_filename,'rb') do |input|
115
+ f << input.read
116
+ end
117
+ end
118
+
119
+ File.open(metadata,'w') do |f|
120
+ File.open(src_metadata_filename,'r') do |input|
121
+ f << input.read
122
+ end
123
+ end
124
+
125
+ src_bucket = self.get_bucket(src_bucket_name)
126
+ dst_bucket = self.get_bucket(dst_bucket_name)
127
+
128
+ obj = S3Object.new
129
+ obj.name = dst_name
130
+ obj.md5 = src_metadata[:md5]
131
+ obj.content_type = src_metadata[:content_type]
132
+ obj.size = src_metadata[:size]
133
+ obj.modified_date = src_metadata[:modified_date]
134
+
135
+ src_obj = src_bucket.find(src_name)
136
+ dst_bucket.add(obj)
137
+ src_bucket.remove(src_obj)
138
+ return obj
139
+ end
140
+
141
+ def store_object(bucket,object_name,request)
142
+ begin
143
+ filename = File.join(@root,bucket.name,object_name)
144
+ FileUtils.mkdir_p(filename)
145
+
146
+ metadata_dir = File.join(filename,SHUCK_METADATA_DIR)
147
+ FileUtils.mkdir_p(metadata_dir)
148
+
149
+ content = File.join(filename,SHUCK_METADATA_DIR,"content")
150
+ metadata = File.join(filename,SHUCK_METADATA_DIR,"metadata")
151
+
152
+ md5 = Digest::MD5.new
153
+ # TODO put a tmpfile here first and mv it over at the end
154
+
155
+ File.open(content,'wb') do |f|
156
+ request.body do |chunk|
157
+ f << chunk
158
+ md5 << chunk
159
+ end
160
+ end
161
+
162
+ metadata_struct = {}
163
+ metadata_struct[:md5] = md5.hexdigest
164
+ metadata_struct[:content_type] = request.header["content-type"].first
165
+ metadata_struct[:size] = File.size(content)
166
+ metadata_struct[:modified_date] = File.mtime(content).iso8601()
167
+
168
+ File.open(metadata,'w') do |f|
169
+ f << YAML::dump(metadata_struct)
170
+ end
171
+
172
+ obj = S3Object.new
173
+ obj.name = object_name
174
+ obj.md5 = metadata_struct[:md5]
175
+ obj.content_type = metadata_struct[:content_type]
176
+ obj.size = metadata_struct[:size]
177
+ obj.modified_date = metadata_struct[:modified_date]
178
+
179
+ bucket.add(obj)
180
+ return obj
181
+ rescue
182
+ puts $!
183
+ $!.backtrace.each { |line| puts line }
184
+ return nil
185
+ end
186
+ end
187
+
188
+ def delete_object(bucket,object_name,request)
189
+ begin
190
+ filename = File.join(@root,bucket.name,object_name)
191
+ FileUtils.rm_rf(filename)
192
+ object = bucket.find(object_name)
193
+ bucket.remove(object)
194
+ rescue
195
+ puts $!
196
+ $!.backtrace.each { |line| puts line }
197
+ return nil
198
+ end
199
+ end
200
+ end
201
+ end