s3fsr 1.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/bin/s3fsr +26 -0
- data/lib/aws-matt/s3/acl.rb +636 -0
- data/lib/aws-matt/s3/authentication.rb +221 -0
- data/lib/aws-matt/s3/base.rb +236 -0
- data/lib/aws-matt/s3/bittorrent.rb +58 -0
- data/lib/aws-matt/s3/bucket.rb +348 -0
- data/lib/aws-matt/s3/connection.rb +323 -0
- data/lib/aws-matt/s3/error.rb +69 -0
- data/lib/aws-matt/s3/exceptions.rb +133 -0
- data/lib/aws-matt/s3/extensions.rb +324 -0
- data/lib/aws-matt/s3/logging.rb +311 -0
- data/lib/aws-matt/s3/object.rb +619 -0
- data/lib/aws-matt/s3/owner.rb +44 -0
- data/lib/aws-matt/s3/parsing.rb +99 -0
- data/lib/aws-matt/s3/response.rb +180 -0
- data/lib/aws-matt/s3/service.rb +51 -0
- data/lib/aws-matt/s3/version.rb +12 -0
- data/lib/aws-matt/s3.rb +61 -0
- data/lib/s3fsr.rb +296 -0
- metadata +72 -0
@@ -0,0 +1,99 @@
|
|
1
|
+
#:stopdoc:
module AWS
  module S3
    # XML response parsing: wraps a pluggable XmlSimple-compatible backend
    # and typecasts the raw parse tree into plain Ruby values.
    module Parsing
      class << self
        # Installs the parsing backend (XmlSimple or FasterXmlSimple).
        def parser=(parsing_library)
          XmlParser.parsing_library = parsing_library
        end

        # The currently installed parsing backend.
        def parser
          XmlParser.parsing_library
        end
      end

      # Recursively converts the xml_in structure (nested hashes/arrays of
      # strings) into Ruby values.
      module Typecasting
        def typecast(object)
          case object
          when Hash
            typecast_hash(object)
          when Array
            object.map {|element| typecast(element)}
          when String
            # CoercibleString is a project extension (extensions.rb) that
            # coerces e.g. "true"/dates/integers into native types.
            CoercibleString.coerce(object)
          else
            object
          end
        end

        def typecast_hash(hash)
          # '__content__' holds a tag's text (see 'contentkey' option below);
          # NOTE(review): when present, any XML attributes in the hash are dropped.
          if content = hash['__content__']
            typecast(content)
          else
            # underscore is a String extension from extensions.rb
            # (CamelCase -> snake_case keys).
            keys = hash.keys.map {|key| key.underscore}
            values = hash.values.map {|value| typecast(value)}
            keys.inject({}) do |new_hash, key|
              # slice!(0) keeps keys and values paired while consuming values.
              new_hash[key] = values.slice!(0)
              new_hash
            end
          end
        end
      end

      # Parses an XML body into itself (it IS a Hash of the typecast result).
      class XmlParser < Hash
        include Typecasting

        class << self
          attr_accessor :parsing_library
        end

        attr_reader :body, :xml_in, :root

        # An empty/whitespace body yields an empty hash with a nil root.
        def initialize(body)
          @body = body
          unless body.strip.empty?
            parse
            set_root
            typecast_xml_in
          end
        end

        private
          def parse
            @xml_in = self.class.parsing_library.xml_in(body, parsing_options)
          end

          def parsing_options
            {
              # Includes the enclosing tag as the top level key
              'keeproot' => true,
              # Makes tag value available via the '__content__' key
              'contentkey' => '__content__',
              # Always parse tags into a hash, even when there are no attributes
              # (unless there is also no value, in which case it is nil)
              'forcecontent' => true,
              # If a tag is empty, makes its content nil
              'suppressempty' => nil,
              # Force nested elements to be put into an array, even if there is only one of them
              'forcearray' => ['Contents', 'Bucket', 'Grant', 'CommonPrefixes']
            }
          end

          # Root element name, snake_cased (e.g. 'list_bucket_result').
          def set_root
            @root = @xml_in.keys.first.underscore
          end

          # Merges the typecast tree for the root element into self.
          def typecast_xml_in
            typecast_xml = {}
            @xml_in.dup.each do |key, value| # Some typecasting is destructive so we dup
              typecast_xml[key.underscore] = typecast(value)
            end
            # An empty body will try to update with a string so only update if the result is a hash
            update(typecast_xml[root]) if typecast_xml[root].is_a?(Hash)
          end
      end
    end
  end
end
#:startdoc:
|
@@ -0,0 +1,180 @@
|
|
1
|
+
#:stopdoc:
module AWS
  module S3
    class Base
      # Wraps a Net::HTTP response. Subclasses String so the response can be
      # treated directly as its body text.
      class Response < String
        attr_reader :response, :body, :parsed
        def initialize(response)
          @response = response
          @body = response.body.to_s
          super(body)
        end

        # All response headers as a plain hash.
        def headers
          headers = {}
          response.each do |header, value|
            headers[header] = value
          end
          headers
        end
        # memoized is a project extension (extensions.rb) that caches the result.
        memoized :headers

        def [](header)
          headers[header]
        end

        def each(&block)
          headers.each(&block)
        end

        # HTTP status code as an Integer.
        def code
          response.code.to_i
        end

        # Generates success?/redirect?/client_error?/server_error? predicates
        # from the standard HTTP status ranges.
        {:success => 200..299, :redirect => 300..399,
         :client_error => 400..499, :server_error => 500..599}.each do |result, code_range|
          class_eval(<<-EVAL, __FILE__, __LINE__)
            def #{result}?
              return false unless response
              (#{code_range}).include? code
            end
          EVAL
        end

        # True when the body is an S3 <Error> XML document.
        def error?
          !success? && response['content-type'] == 'application/xml' && parsed.root == 'error'
        end

        def error
          Error.new(parsed, self)
        end
        memoized :error

        def parsed()
          # XmlSimple is picky about what kind of object it parses, so we pass in body rather than self
          Parsing::XmlParser.new(body)
        end
        memoized :parsed

        def inspect
          "#<%s:0x%s %s %s>" % [self.class, object_id, response.code, response.message]
        end
      end
    end

    class Bucket
      class Response < Base::Response
        def bucket
          parsed
        end
      end
    end

    class S3Object
      class Response < Base::Response
        # ETag header without its surrounding double quotes.
        def etag
          headers['etag'][1...-1]
        end
      end
    end

    class Service
      class Response < Base::Response
        def empty?
          parsed['buckets'].nil?
        end

        def buckets
          parsed['buckets']['bucket'] || []
        end
      end
    end

    module ACL
      class Policy
        class Response < Base::Response
          alias_method :policy, :parsed
        end
      end
    end

    # Requests whose response code is between 300 and 599 and contain an <Error></Error> in their body
    # are wrapped in an Error::Response. This Error::Response contains an Error object which raises an exception
    # that corresponds to the error in the response body. The exception object contains the ErrorResponse, so
    # in all cases where a request happens, you can rescue ResponseError and have access to the ErrorResponse and
    # its Error object which contains information about the ResponseError.
    #
    #   begin
    #     Bucket.create(..)
    #   rescue ResponseError => exception
    #     exception.response
    #     # => <Error::Response>
    #     exception.response.error
    #     # => <Error>
    #   end
    class Error
      class Response < Base::Response
        def error?
          true
        end

        def inspect
          "#<%s:0x%s %s %s: '%s'>" % [self.class.name, object_id, response.code, error.code, error.message]
        end
      end
    end

    # Guess response class name from current class name. If the guessed response class doesn't exist
    # do the same thing to the current class's parent class, up the inheritance heirarchy until either
    # a response class is found or until we get to the top of the heirarchy in which case we just use
    # the the Base response class.
    #
    # Important: This implemantation assumes that the Base class has a corresponding Base::Response.
    class FindResponseClass #:nodoc:
      class << self
        def for(start)
          new(start).find
        end
      end

      def initialize(start)
        @container = AWS::S3
        @current_class = start
      end

      # Walks up the superclass chain until a nested Response constant exists.
      def find
        self.current_class = current_class.superclass until response_class_found?
        target.const_get(class_to_find)
      end

      private
        attr_reader :container
        attr_accessor :current_class

        def target
          container.const_get(current_name)
        end

        def target?
          container.const_defined?(current_name)
        end

        def response_class_found?
          target? && target.const_defined?(class_to_find)
        end

        def class_to_find
          :Response
        end

        def current_name
          truncate(current_class)
        end

        # Last segment of a fully qualified class name (AWS::S3::Foo -> "Foo").
        def truncate(klass)
          klass.name[/[^:]+$/]
        end
    end
  end
end
#:startdoc:
|
@@ -0,0 +1,51 @@
|
|
1
|
+
module AWS
  module S3
    # The service lets you find out general information about your account, like what buckets you have.
    #
    #   Service.buckets
    #   # => []
    class Service < Base
      # Shared across the whole class hierarchy (class variable); holds the
      # last response of any request made through the library.
      @@response = nil #:nodoc:

      class << self
        # List all your buckets.
        #
        #   Service.buckets
        #   # => []
        #
        # For performance reasons, the bucket list will be cached. If you want avoid all caching, pass the <tt>:reload</tt>
        # as an argument:
        #
        #   Service.buckets(:reload)
        def buckets
          response = get('/')
          if response.empty?
            []
          else
            response.buckets.map {|attributes| Bucket.new(attributes)}
          end
        end
        # memoized is a project extension; the :reload argument documented
        # above is consumed by it, not by the method body.
        memoized :buckets

        # Sometimes methods that make requests to the S3 servers return some object, like a Bucket or an S3Object.
        # Othertimes they return just <tt>true</tt>. Other times they raise an exception that you may want to rescue. Despite all these
        # possible outcomes, every method that makes a request stores its response object for you in Service.response. You can always
        # get to the last request's response via Service.response.
        #
        #   objects = Bucket.objects('jukebox')
        #   Service.response.success?
        #   # => true
        #
        # This is also useful when an error exception is raised in the console which you weren't expecting. You can
        # root around in the response to get more details of what might have gone wrong.
        def response
          @@response
        end

        def response=(response) #:nodoc:
          @@response = response
        end
      end
    end
  end
end
|
data/lib/aws-matt/s3.rb
ADDED
@@ -0,0 +1,61 @@
|
|
1
|
+
require 'base64'
require 'cgi'
require 'uri'
require 'openssl'
require 'digest/sha1'
require 'net/https'
require 'time'
require 'date'
require 'open-uri'

# Make the bundled library requireable relative to this file.
$:.unshift(File.dirname(__FILE__))
# extensions.rb defines require_library_or_gem, used below.
require 's3/extensions'
require_library_or_gem 'builder' unless defined? Builder
require_library_or_gem 'mime/types' unless defined? MIME::Types

require 's3/base'
require 's3/version'
require 's3/parsing'
require 's3/acl'
require 's3/logging'
require 's3/bittorrent'
require 's3/service'
require 's3/owner'
require 's3/bucket'
require 's3/object'
require 's3/error'
require 's3/exceptions'
require 's3/connection'
require 's3/authentication'
require 's3/response'

# Mix the optional feature modules into the core classes now that
# everything is loaded (avoids circular requires).
AWS::S3::Base.class_eval do
  include AWS::S3::Connection::Management
end

AWS::S3::Bucket.class_eval do
  include AWS::S3::Logging::Management
  include AWS::S3::ACL::Bucket
end

AWS::S3::S3Object.class_eval do
  include AWS::S3::ACL::S3Object
  include AWS::S3::BitTorrent
end

require_library_or_gem 'xmlsimple' unless defined? XmlSimple
# If libxml is installed, we use the FasterXmlSimple library, that provides most of the functionality of XmlSimple
# except it uses the xml/libxml library for xml parsing (rather than REXML). If libxml isn't installed, we just fall back on
# XmlSimple.
AWS::S3::Parsing.parser =
  begin
    require_library_or_gem 'xml/libxml'
    # Older version of libxml aren't stable (bus error when requesting attributes that don't exist) so we
    # have to use a version greater than '0.3.8.2'.
    raise LoadError unless XML::Parser::VERSION > '0.3.8.2'
    $:.push(File.join(File.dirname(__FILE__), '..', '..', 'support', 'faster-xml-simple', 'lib'))
    require_library_or_gem 'faster_xml_simple'
    FasterXmlSimple
  rescue LoadError
    XmlSimple
  end
|
data/lib/s3fsr.rb
ADDED
@@ -0,0 +1,296 @@
|
|
1
|
+
#!/usr/bin/env ruby

require 'fusefs'
require File.dirname(__FILE__) + '/aws-matt/s3'

# Suffix the S3 Organizer Firefox extension appends to keys that act as
# directory markers.
S3ORGANIZER_DIR_SUFFIX = '_$folder$'
# Body that s3sync writes into its directory marker objects.
S3SYNC_DIR_CONTENTS = '{E40327BF-517A-46e8-A6C3-AF51BC263F59}'
# ETag (MD5) of S3SYNC_DIR_CONTENTS — lets us recognise s3sync directory
# markers from a bucket listing without fetching each object.
S3SYNC_DIR_ETAG = 'd66759af42f282e1ba19144df2d405d0'
# Byte length of S3SYNC_DIR_CONTENTS (see note in SBaseDir#get_contents).
S3SYNC_DIR_LENGTH = 38
# Credentials come from the environment; the connection is established once
# at load time.
AWS::S3::Base.establish_connection!(:access_key_id => ENV['AWS_ACCESS_KEY_ID'], :secret_access_key => ENV['AWS_SECRET_ACCESS_KEY'])
|
11
|
+
|
12
|
+
# A single S3 object exposed as a regular file node in the FUSE tree.
class SFile
  # parent:: the directory node containing this file (must respond to
  #          +content_deleted+).
  # s3obj:: the AWS::S3::S3Object backing this file.
  def initialize(parent, s3obj)
    @parent = parent
    @s3obj = s3obj
  end

  # Basename of the object key, e.g. "c.txt" for "a/b/c.txt".
  def name
    @s3obj.key.split('/').last
  end

  def is_directory?
    false
  end

  def is_file?
    true
  end

  # Fetches the object body from S3, bypassing aws-s3's value cache.
  def value
    @s3obj.value(:reload)
  end

  # Overwrites the object contents, preserving its existing metadata headers.
  def write(data)
    AWS::S3::S3Object.store(@s3obj.key, data, @s3obj.bucket.name, @s3obj.about.to_headers)
  end

  # Removes the object from S3 and detaches this node from its parent.
  def delete
    AWS::S3::S3Object.delete(@s3obj.key, @s3obj.bucket.name)
    @parent.content_deleted name
  end

  def size
    @s3obj.content_length
  end

  # No-op: plain files have no cached listing to refresh.
  def touch
  end
end
|
42
|
+
|
43
|
+
# Common behaviour for every directory-like node in the tree: lazy child
# listing, child lookup, and create/delete operations against S3.
class SBaseDir
  def is_directory?
    true
  end

  def is_file?
    false
  end

  def can_write_files?
    true
  end

  # Drops the named child from the cached listing after a delete.
  def content_deleted(name)
    get_contents.delete_if { |child| child.name == name }
  end

  # Uploads +content+ under +child_key+ and caches the new file node.
  def create_file(child_key, content)
    AWS::S3::S3Object.store(child_key, content, bucket)
    get_contents << SFile.new(self, AWS::S3::S3Object.find(child_key, bucket))
  end

  # Writes an s3sync-style directory marker object and caches the new node.
  def create_dir(child_key)
    AWS::S3::S3Object.store(child_key, S3SYNC_DIR_CONTENTS, bucket)
    get_contents << SFakeDir.new(self, child_key)
  end

  # NOTE(review): relies on @key and @parent being set by a subclass
  # (SFakeDir does); SBucketDir and SBucketsDir override this method.
  def delete
    AWS::S3::S3Object.delete @key, bucket
    @parent.content_deleted name
  end

  # Child names only, for FUSE directory listings.
  def contents
    get_contents.map { |child| child.name }
  end

  # Looks up a direct child node by name; nil when absent.
  def get(name)
    get_contents.find { |child| child.name == name }
  end

  def size
    0
  end

  # Invalidates the cached listing so the next access reloads from S3.
  def touch
    @data = nil
  end

  private

  # Lazily loads and caches this directory's children, paging through the
  # bucket listing 1000 keys at a time.
  def get_contents
    return @data unless @data.nil?
    puts "Loading '#{name}' from #{bucket}..."
    @data = []
    marker = ''
    loop do
      s3_bucket = AWS::S3::Bucket.find(bucket, :prefix => prefix, :delimiter => '/', :marker => marker, :max_keys => 1000)
      s3_bucket.object_cache.each do |s3_obj|
        # Technically we should use S3SYNC_DIR_LENGTH but aws-s3 decides it
        # needs to issue an HEAD request for every dir for that.
        if s3_obj.etag == S3SYNC_DIR_ETAG or s3_obj.key.end_with? S3ORGANIZER_DIR_SUFFIX
          @data << SFakeDir.new(self, s3_obj.key)
        else
          @data << SFile.new(self, s3_obj)
        end
      end
      # Sub-directories show up as common prefixes; trim the trailing '/'.
      s3_bucket.common_prefix_cache.reject { |cp| cp == '/' }.each do |common_prefix|
        hidden = SFakeDir.new(self, common_prefix[0..-2])
        @data << hidden unless @data.any? { |child| child.name == hidden.name }
      end
      # A page of exactly 1000 objects means there may be more to fetch.
      break unless s3_bucket.object_cache.length > 0 && s3_bucket.object_cache.length % 1000 == 0
      marker = s3_bucket.object_cache.last.key
    end
    puts "done"
    @data
  end
end
|
108
|
+
|
109
|
+
# A directory node backed by a marker key in S3: either an s3sync marker
# object or an S3 Organizer "_$folder$" key.
class SFakeDir < SBaseDir
  # parent:: the containing directory node (supplies +bucket+).
  # key:: the full S3 key of the directory marker.
  def initialize(parent, key)
    @parent = parent
    @key = key
    @data = nil
  end

  # Last path component of the key, with any "_$folder$" suffix removed.
  def name
    strip_dir_suffix @key.split('/')[-1]
  end

  def bucket
    @parent.bucket
  end

  private

  # Listing prefix for this directory's children.
  def prefix
    strip_dir_suffix(@key) + '/'
  end

  # Drops the S3 Organizer suffix. Previously hard-coded as str[0..-10];
  # derive the slice from the suffix constant so the two cannot drift apart.
  def strip_dir_suffix(str)
    str.end_with?(S3ORGANIZER_DIR_SUFFIX) ? str[0...-S3ORGANIZER_DIR_SUFFIX.length] : str
  end
end
|
129
|
+
|
130
|
+
# A node representing one S3 bucket mounted as a directory.
class SBucketDir < SBaseDir
  # parent:: the enclosing SBucketsDir, or nil when this bucket is itself
  #          the filesystem root.
  def initialize(parent, bucket_name)
    @parent = parent # either SBucketsDir or nil
    @bucket_name = bucket_name
  end

  # A bucket can only be removed when it is mounted under a buckets dir.
  def delete
    raise "cannot delete bucket dir #{name}" if @parent.nil?
    AWS::S3::Bucket.delete name
    @parent.content_deleted name
  end

  def bucket
    @bucket_name
  end

  def name
    @bucket_name
  end

  # Paths are already relative to this bucket; the key is used verbatim.
  def path_to_key(child_key)
    child_key
  end

  private

  # List from the top of the bucket.
  def prefix
    ""
  end
end
|
154
|
+
|
155
|
+
# The virtual root directory listing every bucket in the account.
class SBucketsDir < SBaseDir
  def initialize
    @data = nil
  end

  # Files cannot live directly at the buckets level.
  def can_write_files?
    false
  end

  def create_file(child_key, content)
    raise 'cannot create files outside of a bucket'
  end

  # Creating a directory at this level creates a new bucket.
  def create_dir(child_key)
    AWS::S3::Bucket.create(child_key)
    get_contents << SBucketDir.new(self, child_key)
  end

  def delete
    raise 'cannot delete the buckets dir'
  end

  def name
    'buckets'
  end

  # Strips the leading bucket component from a mount-relative path.
  # A path without '/' is a bucket-creation path and is returned unchanged.
  def path_to_key(child_key)
    slash = child_key.index('/')
    slash.nil? ? child_key : child_key[slash + 1..-1]
  end

  private

  # Lazily loads and caches one SBucketDir per bucket in the account.
  def get_contents
    return @data unless @data.nil?
    puts "Loading buckets..."
    @data = AWS::S3::Bucket.list(true).map { |s3_bucket| SBucketDir.new(self, s3_bucket.name) }
    puts "done"
    @data
  end
end
|
192
|
+
|
193
|
+
# FuseFS adapter: translates FUSE filesystem callbacks into operations on
# the S3 node tree rooted at +root+ (an SBucketDir or SBucketsDir).
class S3fsr
  def initialize(root)
    @root = root
  end

  # Directory listing; an unknown path yields an empty listing.
  def contents(path)
    node = get_object(path)
    node.nil? ? "" : node.contents
  end

  def directory?(path)
    node = get_object(path)
    node.nil? ? false : node.is_directory?
  end

  def file?(path)
    node = get_object(path)
    node.nil? ? false : node.is_file?
  end

  def executable?(path)
    false
  end

  def size(path)
    get_object(path).size
  end

  def read_file(path)
    get_object(path).value
  end

  # Existing files may be overwritten; new files need a writable parent dir.
  def can_write?(path)
    node = get_object(path)
    return node.is_file? unless node.nil?
    parent = get_parent_object(path)
    parent.nil? ? false : parent.can_write_files?
  end

  def write_to(path, data)
    node = get_object(path)
    if node
      node.write data
    else
      parent = get_parent_object(path)
      unless parent.nil?
        child_key = @root.path_to_key path[1..-1]
        parent.create_file(child_key, data)
      end
    end
  end

  def can_delete?(path)
    node = get_object(path)
    node.nil? ? false : node.is_file?
  end

  def delete(path)
    get_object(path).delete
  end

  def can_mkdir?(path)
    return false if get_object(path) != nil
    get_parent_object(path).is_directory?
  end

  def mkdir(path)
    child_key = @root.path_to_key path[1..-1]
    get_parent_object(path).create_dir(child_key)
  end

  # Only empty, non-root directories may be removed.
  def can_rmdir?(path)
    return false if path == '/'
    return false unless get_object(path).is_directory?
    get_object(path).contents.length == 0
  end

  def rmdir(path)
    get_object(path).delete
  end

  def touch(path)
    get_object(path).touch
  end

  private

  # Resolves the node for the directory containing +path+.
  def get_parent_object(path)
    # path[1..-1] because '/'.split('/') -> ['']
    get_object('/' + (path[1..-1].split('/')[0..-2].join('/')))
  end

  # Walks the tree from the root one path component at a time. Returns nil
  # when the final component is missing; NOTE(review): a missing *middle*
  # component would make the next +get+ call blow up on nil — callers
  # appear to only pass paths whose directory prefix exists.
  def get_object(path)
    node = @root
    # path[1..-1] because '/'.split('/') -> ['']
    path[1..-1].split('/').each do |part|
      node = node.get(part)
    end
    node
  end
end
|
275
|
+
|
276
|
+
# Transparent logging proxy: forwards every call to the wrapped object and
# prints the call, its result (or the exception) to stdout. Used to trace
# FuseFS callback traffic while debugging.
class MethodLogger
  # Turn this into a blank-slate proxy so calls funnel through
  # method_missing. The original `m =~ /^__/` relied on Object#=~ (always
  # nil for Symbols, and removed in Ruby 3.2), which undefined even
  # __send__/__id__ and breaks outright on modern Ruby; match the name
  # explicitly instead, as the `unless` clearly intended.
  instance_methods.each { |m| undef_method m unless m.to_s.start_with?('__') }

  def initialize(obj)
    @obj = obj
  end

  # Logs the call (write_to payloads are logged by size, read_file results
  # by length, to keep output readable), delegates, and logs the outcome.
  def method_missing(sym, *args, &block)
    begin
      puts "#{sym}(#{args})" unless sym == :respond_to? or sym == :write_to
      puts "#{sym}(#{args[0].length})" if sym == :write_to
      result = @obj.__send__(sym, *args, &block)
      puts " #{result}" unless sym == :respond_to? or sym == :read_file
      puts " #{result.length}" if sym == :read_file
      result
    rescue => e
      puts " #{e.inspect}"
      puts " #{e.backtrace}"
      # Was `raise $?` — the child-process status, nil here, which itself
      # raised TypeError and swallowed the real error. Re-raise the caught
      # exception instead.
      raise
    end
  end
end
|
296
|
+
|