s33r 0.1 → 0.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,19 @@ It incorporates code from the mimetypes library (http://raa.ruby-lang.org/projec
 which is under a compatible license (same as Ruby).

 It is also heavily based on the sample Ruby code provided by Amazon
-(http://developer.amazonwebservices.com/connect/entry.jspa?externalID=135&categoryID=47).
+(http://developer.amazonwebservices.com/connect/entry.jspa?externalID=135&categoryID=47). Here's
+the Amazon licence:

-I picked up a couple of ideas from http://rubyforge.org/projects/rsh3ll/ too.
+***
+This software code is made available "AS IS" without warranties of any
+kind. You may copy, display, modify and redistribute the software
+code either by itself or as incorporated into your code; provided that
+you do not remove any proprietary notices. Your use of this software
+code is at your own risk and you waive any claim against Amazon
+Digital Services, Inc. or its affiliates with respect to your use of
+this software code. (c) 2006 Amazon Digital Services, Inc. or its
+affiliates.
+***
+
+I picked up a couple of ideas from http://rubyforge.org/projects/rsh3ll/ too, e.g.
+swopping out the HMAC library for OpenSSL
data/README.txt CHANGED
@@ -13,4 +13,7 @@ line client which you can use to post a file to S3 and email a link to the file
 big files to people without clogging their email inbox with enormous files. It is also intended as a demonstration
 of how to use the library. Full instructions are included at the top of the file.

-By the way, I've tested this on Linux, but not on Windows or Mac.
+By the way, I've tested this on Linux, but not on Windows or Mac.
+
+Thanks for bug reports to:
+Keaka (http://keakaj.com/products.htm)
@@ -1,5 +1,5 @@
 access_key: 'youraccesskey'
 secret_key: 'yoursecretkey'
 bucket: 'name-of-bucket'
-from_email: 'elliot@example.com'
-to_email: 'elliot@example.com'
+from_email: 'you@example.com'
+to_email: 'you@example.com'
@@ -19,14 +19,13 @@ require 'yaml'
 require 'rubygems'
 require_gem 's33r'
 require 'net/smtp'
+require 'time'
+require 'parsedate'

 filename = ARGV[0]
 to_email = ARGV[1]
+# set to the path to your config. file
 config_file = '/home/ell/.s33r'
-if '/path/to/your/config/file' == config_file
-  puts 'Please set the config_file variable to the path to your config. file'
-  exit
-end

 # load config. file
 options = YAML::load_file(config_file)
@@ -36,12 +35,27 @@ from_email = options['from_email']
 to_email ||= options['to_email']
 bucket = options['bucket']

+# expires can be a date/time string or 'forever';
+# if not set, defaults to 15 minutes
+expires = options['default_expires']
+base_expires = Time.now.to_i
+if 'forever' == expires
+  # 20 years (same as forever in computer terms)
+  expires = base_expires + (60 * 60 * 24 * 365.25 * 20).to_i
+elsif expires.is_a?(String)
+  datetime_parts = ParseDate.parsedate(expires)
+  expires = Time.gm(*datetime_parts).to_i
+elsif !expires
+  # default to 15 minutes from now if not set
+  expires = base_expires + (15*60)
+end
+
 # check for bucket
 if !bucket
   require 'readline'
   bucket = Readline.readline('No bucket found; please enter name for new bucket: ', true)
   client = S3::Client.new(access_key, secret_key)
-  response = client.create_bucket(bucket, client.canned_acl_header('public-read'))
+  response = client.create_bucket(bucket)
   if response.ok?
     puts 'Created new bucket'
   else
@@ -70,27 +84,32 @@ puts str
 start_time_secs = Time.now.to_i

 # a client pointing at the specified bucket
-S3::NamedBucket.new(access_key, secret_key, bucket, :public_contents => true) do |client|
-  response = client.put_file(filename)
-end
-
-time_taken_secs = Time.now.to_i - start_time_secs
-puts "Aah, it appears to have taken %d seconds" % time_taken_secs
-
-# post-put report
-if response.ok?
-  puts "File #{filename} transferred OK"
-  url = File.join("http://", S3::HOST, client.bucket_name, '/') + filename
-  puts "Available at URL:"
-  puts url
-
-  if to_email
-    message = "From:#{from_email}\r\nTo:#{to_email}\r\nSubject:You were sent a file\r\n\r\nFetch it from\n#{url}"
-
-    Net::SMTP.start('localhost') do |smtp|
-      smtp.send_message message, from_email, to_email
-    end
+S3::NamedBucket.new(access_key, secret_key, bucket) do |c|
+  response = c.put_file(filename, filename)
+  url = c.s3_authenticated_url(filename, expires)
+
+  time_taken_secs = Time.now.to_i - start_time_secs
+  puts "Aah, it appears to have taken %d seconds" % time_taken_secs
+
+  # post-put report
+  if response.ok?
+    puts "File #{filename} transferred OK"
+
+    pretty_expires = Time.at(expires).httpdate
+
+    puts "Available at URL:"
+    puts url
+    puts "This link will expire at %s" % pretty_expires
+
+    if to_email
+      message = "From:#{from_email}\r\nTo:#{to_email}\r\nSubject:You were sent a file\r\n\r\nFetch it from\n#{url}\n\n" +
+        "This link will expire at %s" % pretty_expires
+
+      Net::SMTP.start('localhost') do |smtp|
+        smtp.send_message message, from_email, to_email
+      end
+    end
+  else
+    puts 'File transfer failed'
   end
-else
-  puts 'File transfer failed'
 end
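
For illustration only (not part of the package diff), a minimal sketch of what the expiry branches above evaluate to; the variable names are mine, and a date-time string would instead go through ParseDate and Time.gm exactly as in the script:

    require 'time'
    base = Time.now.to_i
    forever_expiry = base + (60 * 60 * 24 * 365.25 * 20).to_i  # 'forever' => roughly 20 years from now
    default_expiry = base + (15 * 60)                          # no default_expires => 15 minutes from now
    puts Time.at(default_expiry).httpdate                      # readable expiry, as used in the email/report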
@@ -1,7 +1,2 @@
-require 's33r/core'
-require 's33r/client'
-require 's33r/list_bucket_result'
-require 's33r/net_http_overrides'
-require 's33r/named_bucket'
-require 's33r/s3_exception'
-require 's33r/external/mimetypes'
+base = File.join(File.dirname(__FILE__), 's33r')
+Dir[File.join(base, "*.rb")].each { |f| require f }
@@ -0,0 +1,141 @@
+# use prefix to limit keys to some subset of all available keys;
+# use delimiter to group keys
+
+require 'date'
+require 'xml/libxml'
+
+module S3
+  class BucketListing
+    attr_reader :listing_xml
+
+    # properties of the bucket are publically read-only;
+    # can only be reset by resetting the listing_xml via listing_xml=
+    # (which parses the bucket listing XML)
+    attr_reader :name, :delimiter, :prefix, :marker, :max_keys, :is_truncated, :contents
+
+    def initialize(bucket_listing_xml)
+      @contents = {}
+      @common_prefixes = {}
+      set_listing_xml(bucket_listing_xml)
+    end
+
+    def set_listing_xml(bucket_listing_xml)
+      # remove the namespace declaration: libxml doesn't like it
+      bucket_listing_xml.gsub!(/ xmlns="http:\/\/s3.amazonaws.com\/doc\/2006-03-01\/"/, '')
+      @listing_xml = bucket_listing_xml
+      parse_listing(bucket_listing_xml)
+    rescue
+      message = "Cannot create bucket listing from supplied XML"
+      message += " (was nil)" if bucket_listing_xml.nil?
+      raise S3Exception::InvalidBucketListing, message
+    end
+
+    def parse_listing(bucket_listing_xml)
+      doc = XML.get_xml_doc(bucket_listing_xml)
+
+      prop_setter = lambda do |prop, path|
+        node = doc.find("//ListBucketResult/#{path}").to_a.first
+        self.send("#{prop}=", node.content) if node
+      end
+
+      # metadata
+      prop_setter.call(:name, 'Name')
+      prop_setter.call(:delimiter, 'Delimiter')
+      prop_setter.call(:prefix, 'Prefix')
+      prop_setter.call(:marker, 'Marker')
+      prop_setter.call(:max_keys, 'MaxKeys')
+      prop_setter.call(:is_truncated, 'IsTruncated')
+
+      # contents
+      doc.find('//Contents').to_a.each do |node|
+        obj = S3Object.new
+        obj.set_from_node(node)
+        @contents[obj.key] = obj
+      end
+    end
+
+    # return an object in this bucket
+    def [](key)
+      @contents[key]
+    end
+
+    # setters
+    private
+    def name=(val)
+      @name = string_prop_normalise(val)
+    end
+
+    def prefix=(val)
+      @prefix = string_prop_normalise(val)
+    end
+
+    def delimiter=(val)
+      @delimiter = string_prop_normalise(val)
+    end
+
+    def marker=(val)
+      @marker = string_prop_normalise(val)
+    end
+
+    def max_keys=(val)
+      @max_keys = val.to_i
+    end
+
+    def is_truncated=(val)
+      @is_truncated = false
+      @is_truncated = true if ('true' == val || true == val || 'True' == val)
+    end
+
+    # normalise string properties:
+    # if value for XML element is nil, set property to empty string
+    def string_prop_normalise(val)
+      val = '' if val.nil?
+      val
+    end
+
+  end
+
+  class S3Object
+    attr_reader :bucket_name
+    attr_accessor :key, :last_modified, :etag, :size, :owner, :storage_class
+
+    # bucket: NamedBucket which is the parent of this object
+    def initialize(named_bucket=nil)
+      @named_bucket = named_bucket
+    end
+
+    # set properties from an XML string
+    # if using this, should be an XML document containing a <Contents> element as root
+    def set_from_xml_string(xml_str)
+      doc = XML.get_xml_doc(xml_str)
+      set_from_node(doc)
+    end
+
+    # set properties from an XML document
+    # XML::Document doc: XML document to parse
+    # options:
+    # :full => true: set all properties (default is to only set those in bucket listing)
+    def set_from_node(doc, options={})
+      @key = doc.xget('Key')
+      @last_modified = DateTime.parse(doc.xget('LastModified'))
+      @etag = doc.xget('ETag').gsub("\"", "")
+      @size = doc.xget('Size').to_i
+      @owner = S3User.new(doc.find('Owner').to_a.first)
+
+      if options[:full]
+        # TODO: if setting from a full object listing (GET on a resource key),
+        # do additional field setting here (e.g. x-amz-meta- headers)
+      end
+    end
+  end
+
+  class S3User
+    attr_accessor :id, :display_name
+
+    # XML::Document owner_xml_doc: <Owner> node from inside ListBucketResult <Contents> element
+    def initialize(owner_xml_doc)
+      @id = owner_xml_doc.xget('//ID')
+      @display_name = owner_xml_doc.xget('//DisplayName')
+    end
+  end
+end
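
For illustration (not from the gem's documentation), a hedged sketch of how the new listing classes might be used; the client, bucket name and key below are hypothetical, and the XML is assumed to be the ListBucketResult body returned by a Client#list_bucket call:

    xml = client.list_bucket('my-bucket', :prefix => 'photos/').body
    listing = S3::BucketListing.new(xml)
    puts listing.name
    puts listing.is_truncated                # true if the listing was cut off at max_keys
    listing.contents.each_value do |obj|     # S3Object instances keyed by resource key
      puts "#{obj.key} (#{obj.size} bytes, modified #{obj.last_modified})"
    end
    first_photo = listing['photos/0001.jpg'] # hash-style lookup; nil if the key is absent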
@@ -1,23 +1,35 @@
 require 'net/https'
 require 'cgi'

-# this is a very thin layer over the S3 API
-# TODO: need to wrap XML returned into object representation
 module S3
   include Net

+  # the client actually performs operations over the network,
+  # using the core to build request headers and content;
+  # only client-specific headers are managed here: other headers
+  # can be handled by the core
+  # TODO: need to wrap XML returned into object representation
+  # TODO: use customisable thread pool for requests
   class Client
     include S3
-    attr_accessor :chunk_size, :default_headers
-
-    def initialize(aws_access_key, aws_secret_access_key)
-      @client = HTTP.new(HOST, PORT)

-      # turn off SSL certificate verification
-      @client.verify_mode = OpenSSL::SSL::VERIFY_NONE
+    attr_accessor :chunk_size, :default_headers

-      # always use SSL
-      @client.use_ssl = true
+    # options: hash of optional client config.
+    # :ssl => false : only use plain HTTP
+    # :dump_requests => true: dump each request's initial line and headers
+    def initialize(aws_access_key, aws_secret_access_key, options={})
+      if false == options[:ssl]
+        @client = HTTP.new(HOST, NON_SSL_PORT)
+        @client.use_ssl = false
+      else
+        @client = HTTP.new(HOST, PORT)
+        # turn off SSL certificate verification
+        @client.verify_mode = OpenSSL::SSL::VERIFY_NONE
+        @client.use_ssl = true
+      end
+
+      @dump_requests = (true == options[:dump_requests])

       # set default chunk size for streaming request body (1 Mb)
       @chunk_size = 1048576
@@ -28,6 +40,13 @@ module S3

       # headers sent with every request made by this client
       @client_headers = {}
+
+      yield self if block_given?
+    end
+
+    # wrapper round embedded client use_ssl accessor
+    def use_ssl?
+      @client.use_ssl
     end

     # send a request over the wire
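
A hedged sketch of constructing the 0.2 client with the new options hash and initialisation block (option and accessor names are taken from the diff; the keys and chunk size are placeholders):

    client = S3::Client.new(access_key, secret_key, :ssl => false, :dump_requests => true) do |c|
      c.chunk_size = 2 * 1048576  # stream request bodies in 2 Mb chunks instead of the 1 Mb default
    end
    puts client.use_ssl?          # => false, via the new wrapper around the embedded Net::HTTP client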
@@ -48,20 +67,24 @@ module S3
         req[key] = value
       end

-      @client.start do
-        if req.request_body_permitted?
-          # for streaming large files
-          if data.respond_to?(:read)
-            req.body_stream = data
-            req['Content-Length'] = data.stat.size.to_s
-            return @client.request(req, nil)
-          # simple text strings etc.
-          else
-            return @client.request(req, data)
-          end
-        else
-          return @client.request(req)
+      if req.request_body_permitted?
+        # for streaming files; NB Content-Length will be set by Net::HTTP
+        # for character-based body content
+        if data.respond_to?(:stat)
+          req.body_stream = data
+          req['Content-Length'] = data.stat.size.to_s
+          data = nil
         end
+      else
+        data = nil
+      end
+
+      if @dump_requests
+        puts req.to_s
+      end
+
+      @client.start do
+        return @client.request(req, data)
       end

     end
@@ -73,7 +96,7 @@ module S3

     # head
     def do_head(path='/', headers={})
-      do_request('HEAD', path, headers)
+      do_request('HEAD', path, nil, headers)
     end

     # post
@@ -86,43 +109,60 @@ module S3
       do_request('PUT', path, data, headers)
     end

+    # delete
+    def do_delete(path, headers={})
+      do_request('DELETE', path, nil, headers)
+    end
+
     # return an instance of an appropriate request class
     def get_requester(method, path)
       raise S3Exception::UnsupportedHTTPMethod, "The #{method} HTTP method is not supported" if !(METHOD_VERBS.include?(method))
       eval("HTTP::" + method[0,1].upcase + method[1..-1].downcase + ".new('#{path}')")
     end

-    # convert a hash of name/value pairs to querystring variables
-    def get_querystring(pairs={})
-      str = ''
-      if pairs.size > 0
-        str += "?" + pairs.map { |key, value| "#{key}=#{CGI::escape(value.to_s)}" }.join('&')
-      end
-      str
-    end
-
     # list all buckets
     def list_all_buckets
       do_get('/')
     end

+    # list entries in a bucket
+    # query_params: hash of options on the bucket listing, passed as querystring parameters to S3
+    # :prefix => 'some_string' : restrict results to keys beginning with 'some_string'
+    # :marker => 'some_string' : restict results to keys occurring lexicographically after 'some_string'
+    # :max_keys => 1000 : return at most this number of keys (maximum possible value is 1000)
+    # :delimiter => 'some_string' :
+    def list_bucket(bucket_name, query_params={})
+      if query_params[:max_keys]
+        max_keys = query_params[:max_keys].to_i
+        raise S3Exception::BucketListingMaxKeysError, "max_keys option to list bucket cannot be > #{BUCKET_LIST_MAX_MAX_KEYS}" \
+          if max_keys > BUCKET_LIST_MAX_MAX_KEYS
+
+        # take out the max_keys parameter and move it to max-keys
+        query_params['max-keys'] = query_params.delete(:max_keys)
+      end
+      do_get("/#{bucket_name}" + generate_querystring(query_params))
+    end
+
     # create a bucket
     def create_bucket(bucket_name, headers={})
-      bucket_name_valid?(bucket_name)
-      bucket_exists?(bucket_name)
       do_put("/#{bucket_name}", nil, headers)
     end

-    # list entries in a bucket
-    def list_bucket(bucket_name)
-      bucket_name_valid?(bucket_name)
-      bucket_exists?(bucket_name)
-      do_get("/#{bucket_name}")
+    # delete a bucket
+    # TODO: enable deletion of keys inside the bucket
+    # TODO: maybe delete keys matching a partial path
+    def delete_bucket(bucket_name, headers={})
+      do_delete("/#{bucket_name}", headers)
+    end
+
+    # return true if bucket exists
+    def bucket_exists?(bucket_name)
+      list_bucket(bucket_name).ok?
     end

-    # put some resource onto S3
-    def put_resource(data, bucket_name, resource_key, headers={})
-      do_put(File.join("/#{bucket_name}", "#{CGI::escape(resource_key)}"), data, headers)
+    # put some generic resource onto S3
+    def put_resource(bucket_name, resource_key, data, headers={})
+      do_put("/#{bucket_name}/" + "#{CGI::escape(resource_key)}", data, headers)
     end

     # put a string onto S3
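
A hedged sketch of the new bucket-level calls (the bucket name and query parameters are placeholders; S3 itself refuses to delete a non-empty bucket, hence the TODO above):

    if client.bucket_exists?('scratch-bucket')
      response = client.list_bucket('scratch-bucket', :prefix => 'tmp/', :max_keys => 100)
      puts response.body                      # raw ListBucketResult XML
      client.delete_bucket('scratch-bucket')  # only succeeds once the bucket is empty
    end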
@@ -132,11 +172,19 @@ module S3
     end

     # put a file onto S3
-    def put_file(filename, bucket_name, resource_key=nil, headers={})
+    # options: to simplify setting of some headers with specific meaning to S3
+    # :render_as_attachment => true: set the Content-Disposition for this file to "attachment" and set
+    # the default filename for saving the file (when accessed by a web browser) to _filename_
+    def put_file(filename, bucket_name, resource_key=nil, headers={}, options={})
       # default to the file path as the resource key if none explicitly set
       if resource_key.nil?
         resource_key = filename
       end
+
+      # set Content-Disposition header
+      if options[:render_as_attachment]
+        headers['Content-Disposition'] = "attachment; filename=#{File.basename(filename)}"
+      end

       # content type is explicitly set in the headers
       if headers[:content_type]
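
And a hedged sketch of the extended put_file call (file names are examples; note that the options hash comes after the headers hash, and :content_type is read from the headers as the context above shows):

    client.put_file('reports/summary.pdf', 'my-bucket', 'summary.pdf',
                    { :content_type => 'application/pdf' },
                    :render_as_attachment => true)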
@@ -153,29 +201,12 @@ module S3
       # the data we want to put (handle to file, so we can stream from it)
       File.open(filename) do |data|
         # send the put request
-        put_resource(data, bucket_name, resource_key, headers)
-      end
-    end
-
-    # guess a file's mime type
-    # NB if the mime_type for a file cannot be guessed, "text/plain" is used
-    def guess_mime_type(filename)
-      mime_type = MIME::Types.type_for(filename)[0]
-      mime_type ||= MIME::Types['text/plain'][0]
-      mime_type
-    end
-
-    # ensure that a bucket_name is well-formed
-    def bucket_name_valid?(bucket_name)
-      if '/' == bucket_name[0,1]
-        raise S3Exception::MalformedBucketName, "Bucket name cannot have a leading slash"
+        put_resource(bucket_name, resource_key, data, headers)
       end
     end
-
-    # TODO: proper check for existence of bucket;
-    # throw error if bucket does not exist (see bucket_name_valid? for example)
-    def bucket_exists?(bucket_name)
-      false
+
+    # TODO: delete resource by bucket and key
+    def delete_resource(bucket_name, resource_key)
     end

     # add any default headers which should be sent with every request from the client;