s33r 0.4.2 → 0.5

Files changed (114)
  1. data/examples/cli/instant_download_server.rb +88 -0
  2. data/examples/cli/s3cli.rb +31 -52
  3. data/examples/cli/simple.rb +16 -6
  4. data/examples/fores33r/app/controllers/browser_controller.rb +12 -10
  5. data/examples/fores33r/app/helpers/application_helper.rb +2 -1
  6. data/examples/fores33r/app/views/browser/_upload.rhtml +1 -1
  7. data/examples/fores33r/app/views/browser/index.rhtml +4 -4
  8. data/examples/fores33r/config/environment.rb +5 -3
  9. data/examples/fores33r/log/development.log +2259 -0
  10. data/examples/fores33r/log/mongrel.log +59 -0
  11. data/examples/s3.yaml +2 -6
  12. data/lib/s33r/bucket.rb +103 -0
  13. data/lib/s33r/bucket_listing.rb +33 -76
  14. data/lib/s33r/client.rb +305 -446
  15. data/lib/s33r/networking.rb +197 -0
  16. data/lib/s33r/s33r_exception.rb +29 -18
  17. data/lib/s33r/s33r_http.rb +36 -18
  18. data/lib/s33r/s3_acl.rb +32 -52
  19. data/lib/s33r/s3_logging.rb +117 -0
  20. data/lib/s33r/s3_obj.rb +124 -69
  21. data/lib/s33r/utility.rb +447 -0
  22. data/test/cases/spec_acl.rb +10 -40
  23. data/test/cases/spec_bucket_listing.rb +12 -32
  24. data/test/cases/spec_logging.rb +47 -0
  25. data/test/cases/spec_networking.rb +11 -0
  26. data/test/cases/spec_s3_object.rb +44 -5
  27. data/test/cases/spec_utility.rb +264 -0
  28. data/test/files/acl.xml +0 -6
  29. data/test/files/config.yaml +5 -0
  30. data/test/files/logging_status_disabled.xml +3 -0
  31. data/test/files/logging_status_enabled.xml +7 -0
  32. data/test/test_setup.rb +7 -2
  33. metadata +16 -94
  34. data/examples/cli/acl_x.rb +0 -41
  35. data/examples/cli/logging_x.rb +0 -20
  36. data/examples/fores33r/README +0 -183
  37. data/html/classes/MIME.html +0 -120
  38. data/html/classes/MIME/InvalidContentType.html +0 -119
  39. data/html/classes/MIME/Type.html +0 -1173
  40. data/html/classes/MIME/Types.html +0 -566
  41. data/html/classes/Net.html +0 -108
  42. data/html/classes/Net/HTTPGenericRequest.html +0 -233
  43. data/html/classes/Net/HTTPResponse.html +0 -271
  44. data/html/classes/S33r.html +0 -986
  45. data/html/classes/S33r/BucketListing.html +0 -434
  46. data/html/classes/S33r/Client.html +0 -1575
  47. data/html/classes/S33r/LoggingResource.html +0 -222
  48. data/html/classes/S33r/NamedBucket.html +0 -693
  49. data/html/classes/S33r/OrderlyXmlMarkup.html +0 -165
  50. data/html/classes/S33r/S33rException.html +0 -124
  51. data/html/classes/S33r/S33rException/BucketListingMaxKeysError.html +0 -111
  52. data/html/classes/S33r/S33rException/BucketNotLogTargetable.html +0 -119
  53. data/html/classes/S33r/S33rException/InvalidBucketListing.html +0 -111
  54. data/html/classes/S33r/S33rException/InvalidPermission.html +0 -111
  55. data/html/classes/S33r/S33rException/InvalidS3GroupType.html +0 -111
  56. data/html/classes/S33r/S33rException/MalformedBucketName.html +0 -111
  57. data/html/classes/S33r/S33rException/MethodNotAvailable.html +0 -111
  58. data/html/classes/S33r/S33rException/MissingBucketName.html +0 -111
  59. data/html/classes/S33r/S33rException/MissingRequiredHeaders.html +0 -111
  60. data/html/classes/S33r/S33rException/MissingResource.html +0 -111
  61. data/html/classes/S33r/S33rException/S3FallenOver.html +0 -111
  62. data/html/classes/S33r/S33rException/TryingToPutEmptyResource.html +0 -117
  63. data/html/classes/S33r/S33rException/UnsupportedCannedACL.html +0 -111
  64. data/html/classes/S33r/S33rException/UnsupportedHTTPMethod.html +0 -111
  65. data/html/classes/S33r/S3ACL.html +0 -125
  66. data/html/classes/S33r/S3ACL/ACLDoc.html +0 -521
  67. data/html/classes/S33r/S3ACL/AmazonCustomer.html +0 -168
  68. data/html/classes/S33r/S3ACL/CanonicalUser.html +0 -212
  69. data/html/classes/S33r/S3ACL/Grant.html +0 -403
  70. data/html/classes/S33r/S3ACL/Grantee.html +0 -239
  71. data/html/classes/S33r/S3ACL/Group.html +0 -178
  72. data/html/classes/S33r/S3Object.html +0 -618
  73. data/html/classes/S33r/Sync.html +0 -152
  74. data/html/classes/XML.html +0 -202
  75. data/html/classes/XML/Document.html +0 -125
  76. data/html/classes/XML/Node.html +0 -124
  77. data/html/created.rid +0 -1
  78. data/html/files/CHANGELOG.html +0 -107
  79. data/html/files/MIT-LICENSE.html +0 -129
  80. data/html/files/README_txt.html +0 -259
  81. data/html/files/lib/s33r/bucket_listing_rb.html +0 -101
  82. data/html/files/lib/s33r/builder_rb.html +0 -108
  83. data/html/files/lib/s33r/client_rb.html +0 -111
  84. data/html/files/lib/s33r/core_rb.html +0 -113
  85. data/html/files/lib/s33r/libxml_extensions_rb.html +0 -101
  86. data/html/files/lib/s33r/libxml_loader_rb.html +0 -109
  87. data/html/files/lib/s33r/logging_rb.html +0 -108
  88. data/html/files/lib/s33r/mimetypes_rb.html +0 -120
  89. data/html/files/lib/s33r/named_bucket_rb.html +0 -101
  90. data/html/files/lib/s33r/s33r_exception_rb.html +0 -101
  91. data/html/files/lib/s33r/s33r_http_rb.html +0 -108
  92. data/html/files/lib/s33r/s3_acl_rb.html +0 -108
  93. data/html/files/lib/s33r/s3_obj_rb.html +0 -108
  94. data/html/files/lib/s33r/sync_rb.html +0 -101
  95. data/html/files/lib/s33r_rb.html +0 -101
  96. data/html/fr_class_index.html +0 -66
  97. data/html/fr_file_index.html +0 -44
  98. data/html/fr_method_index.html +0 -183
  99. data/html/index.html +0 -24
  100. data/html/rdoc-style.css +0 -208
  101. data/lib/s33r/core.rb +0 -296
  102. data/lib/s33r/logging.rb +0 -43
  103. data/lib/s33r/named_bucket.rb +0 -148
  104. data/lib/s33r/sync.rb +0 -13
  105. data/test/cases/spec_all_buckets.rb +0 -28
  106. data/test/cases/spec_client.rb +0 -101
  107. data/test/cases/spec_core.rb +0 -128
  108. data/test/cases/spec_namedbucket.rb +0 -46
  109. data/test/cases/spec_sync.rb +0 -34
  110. data/test/files/all_buckets.xml +0 -21
  111. data/test/files/client_config.yml +0 -5
  112. data/test/files/namedbucket_config.yml +0 -8
  113. data/test/files/namedbucket_config2.yml +0 -8
  114. data/test/test_bucket_setup.rb +0 -41
data/examples/fores33r/log/mongrel.log ADDED
@@ -0,0 +1,59 @@
+ ** Daemonized, any open files are closed. Look at /home/ell/dev/s3/s33r/examples/fores33r/tmp/fores33r.pid and log/mongrel.log for info.
+ ** Starting Mongrel listening at 0.0.0.0:3333
+ ** Starting Rails with development environment...
+ /opt/lampp/lib/ruby/gems/1.8/gems/activesupport-1.3.1/lib/active_support/dependencies.rb:123:in `const_missing': uninitialized constant Client (NameError)
+ from /home/ell/dev/s3/s33r/examples/fores33r/config/environment.rb:62
+ from /opt/lampp/lib/ruby/site_ruby/1.8/rubygems/custom_require.rb:27:in `require'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/rails.rb:161:in `rails'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:112:in `cloaker_'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/configurator.rb:134:in `listener'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:98:in `cloaker_'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/configurator.rb:51:in `initialize'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:85:in `run'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/command.rb:211:in `run'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:231
+ from /opt/lampp/bin/mongrel_rails:18
+ ** Daemonized, any open files are closed. Look at /home/ell/dev/s3/s33r/examples/fores33r/tmp/fores33r.pid and log/mongrel.log for info.
+ ** Starting Mongrel listening at 0.0.0.0:3333
+ ** Starting Rails with development environment...
+ /home/ell/dev/s3/s33r/examples/fores33r/config/environment.rb:62: undefined method `load_config' for S33r::Client:Class (NoMethodError)
+ from /opt/lampp/lib/ruby/site_ruby/1.8/rubygems/custom_require.rb:27:in `require'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/rails.rb:161:in `rails'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:112:in `cloaker_'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/configurator.rb:134:in `listener'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:98:in `cloaker_'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/configurator.rb:51:in `initialize'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:85:in `run'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/lib/mongrel/command.rb:211:in `run'
+ from /opt/lampp/lib/ruby/gems/1.8/gems/mongrel-0.3.13.4/bin/mongrel_rails:231
+ from /opt/lampp/bin/mongrel_rails:18
data/examples/s3.yaml CHANGED
@@ -1,11 +1,7 @@
  # note you can use ERB code in this file
- aws_access_key: 'yourkey'
- aws_secret_access_key: 'yoursecretkey'
+ access: 'yourkey'
+ secret: 'yoursecretkey'
  options:
    default_expires: '12th November 2033'
    use_ssl: true
    dump_requests: false
- default_prefix: 'someprefix'
- default_bucket: 'somebucket'
- from_email: 'elliot@example.com'
- to_email: 'elliot@example.com'
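The renamed keys line up with the options the 0.5 Client takes directly (see data/lib/s33r/client.rb below). As a rough sketch of wiring this file up by hand, assuming nothing about whatever config loader 0.5 itself ships, and using only stdlib YAML and ERB:

    require 'erb'
    require 'yaml'
    require 's33r'

    # Hypothetical loader: run s3.yaml through ERB, then hand the keys to Client.new.
    config = YAML.load(ERB.new(IO.read('s3.yaml')).result)
    opts = config['options'] || {}
    client = S33r::Client.new(:access => config['access'],
                              :secret => config['secret'],
                              :use_ssl => opts['use_ssl'],
                              :dump_requests => opts['dump_requests'])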
data/lib/s33r/bucket.rb ADDED
@@ -0,0 +1,103 @@
+ base = File.dirname(__FILE__)
+ require File.join(base, 'utility')
+
+ # These methods are added to any Client with a bucket
+ # binding set.
+ module S33r
+   class Bucket < Client
+
+     attr_accessor :name
+
+     # +options+:
+     # * <tt>:check => true</tt>: if setting a :bucket option, the default behaviour is not to check
+     #   whether the bucket actually exists on S3. If you pass this option,
+     #   S33r will only set the bucket if it is on S3 and accessible;
+     #   if it isn't, an error is raised (NoSuchBucket).
+     # * <tt>:create => true</tt>: if the bucket doesn't exist, try to create it.
+     #   S33r will check before trying to create the bucket and will just return the
+     #   bucket if it does.
+     def initialize(bucket_name, options={})
+       super(options)
+
+       if options[:create]
+         options[:bucket] = bucket_name
+         raise last_response.s3_error unless do_put(nil, options).ok?
+       end
+       if options[:check]
+         raise InvalidBucket, "Bucket #{name} does not exist" unless bucket_exists?(bucket_name)
+       end
+       @name = bucket_name
+
+       yield self if block_given?
+     end
+
+     # Defaults for every request.
+     def request_defaults
+       super.merge(:bucket => name)
+     end
+
+     def exists?
+       bucket_exists?(name)
+     end
+
+     # Destroy this bucket.
+     #
+     # Pass <tt>:force => true</tt> to delete the content of
+     # the bucket if it exists.
+     def destroy(options={})
+       delete_bucket(name, options)
+     end
+
+     # Delete a key from inside the bucket.
+     def delete(key, options={})
+       options[:key] = key
+       do_delete(options).ok?
+     end
+
+     # List of keys in the bucket.
+     #
+     # +options+ are passed through to Client.listing.
+     #-- TODO: tests
+     def keys(options={})
+       listing(options).keys.sort
+     end
+
+     # Get an object from S3.
+     #
+     # By default, this will load the content of the object.
+     # If you don't want this, pass a :lazy option:
+     #
+     #   bucket['key', :lazy]
+     def [](key, load_object=true)
+       options = {}
+       options[:lazy] = true if :lazy == load_object
+       object(key, options)
+     end
+
+     # +options+ are passed to bucket.get; in addition, you can use:
+     # * <tt>:lazy => true</tt>: this will prevent S33r from loading the
+     #   content of the object from S3, instead just getting the object's
+     #   metadata from the bucket listing.
+     def object(key, options={})
+       obj = listing[key]
+       if obj
+         obj.bucket = self
+         obj.fetch unless options[:lazy]
+       end
+       obj
+     end
+
+     # Can the bucket be used as a log target?
+     def log_receiver?
+       acl.log_targetable?
+     end
+
+     # Change whether the bucket can be used to receive logs.
+     #
+     # :on to make it capable of receiving logs,
+     # :off to disable it as a log target.
+     def log_receiver(state=:on)
+       change_log_target_status(name, state)
+     end
+   end
+ end
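Bucket is a Client bound to a single bucket name, with request_defaults injecting :bucket into every request it sends. A rough usage sketch based only on the methods above; the bucket and key names are placeholders:

    client = S33r::Client.new(:access => 'yourkey', :secret => 'yoursecretkey')

    # :create => true issues the PUT for a missing bucket; :check => true would
    # instead raise if the bucket isn't already on S3.
    bucket = client.create_bucket('example-bucket')

    bucket.keys                       # sorted array of keys in the bucket
    obj  = bucket['some/key']         # fetches the object's content from S3
    meta = bucket['some/key', :lazy]  # skeleton object, metadata only
    bucket.delete('some/key')         # delete a single key
    bucket.destroy(:force => true)    # empty the bucket, then delete it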
data/lib/s33r/bucket_listing.rb CHANGED
@@ -1,71 +1,43 @@
1
1
  base = File.dirname(__FILE__)
2
2
  require File.join(base, 'libxml_loader')
3
- require File.join(base, 's3_acl')
3
+ require File.join(base, 's33r_exception')
4
4
 
5
5
  module S33r
6
- # Object representation of the content of a bucket.
7
- class BucketListing
8
- attr_reader :delimiter, :prefix, :marker, :max_keys, :is_truncated, :common_prefixes
9
-
6
+ # Represents a ListBucketResult
7
+ # (see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/)
8
+ class BucketListing < Hash
10
9
  # Name of the bucket this listing is for.
11
10
  attr_reader :name
12
- # Hash of objects in this bucket, keyed by their S3 keys.
13
- attr_reader :contents
14
- # A NamedBucket instance associated with this listing.
15
- attr_accessor :named_bucket
16
- # The last key listed in this BucketListing.
17
- attr_reader :last_key
18
- # Set to true to show raw parsing errors, instead of the catch all error message
19
- # (useful for debugging).
20
- attr_accessor :raw
11
+
12
+ attr_reader :delimiter, :prefix, :marker, :max_keys, :is_truncated, :common_prefixes, :contents
21
13
 
22
- # Create a new object representing a ListBucketResult.
23
- #
24
- # +bucket_listing_xml+ is a ListBucketResult document, as returned from a GET on a bucket
25
- # (see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/).
26
- #
27
- # +named_bucket+ can be set to an existing NamedBucket instance, so that any objects
28
- # inside this listing can be associated with that instance. This enables objects to be easily deleted
29
- # without having to create a new Client instance.
30
- #
31
- # If +raw+ is set to true, you get ugly parser errors.
32
- def initialize(bucket_listing_xml, named_bucket=nil, raw=false)
33
- @contents = {}
34
- @common_prefixes = {}
35
- # the NamedBucket instance associated with this listing (if any)
36
- @named_bucket = named_bucket
37
- @raw = raw
14
+ # +bucket_listing_xml+ is a ListBucketResult document, as returned from a GET on a bucket.
15
+ # +bucket+ is a Bucket instance; any objects in the listing are assigned to this bucket.
16
+ def initialize(bucket_listing_xml)
17
+ @common_prefixes = []
38
18
  set_listing_xml(bucket_listing_xml)
39
19
  end
40
20
 
41
21
  # Convert a ListBucketResult XML document into an object representation.
42
22
  def set_listing_xml(bucket_listing_xml)
43
- # proc to remove the namespace and parse the listing
44
- work = lambda do |bucket_listing_xml|
45
- # remove the namespace declaration: libxml doesn't like it
46
- bucket_listing_xml = S33r.remove_namespace(bucket_listing_xml)
23
+ begin
47
24
  parse_listing(bucket_listing_xml)
48
- end
49
-
50
- if @raw
51
- work.call(bucket_listing_xml)
52
- else
53
- begin
54
- work.call(bucket_listing_xml)
55
- rescue
56
- message = "Cannot create bucket listing from supplied XML"
57
- message += " (was nil)" if bucket_listing_xml.nil?
58
- raise S33rException::InvalidBucketListing, message
59
- end
25
+ rescue
26
+ message = "Cannot create bucket listing from supplied XML"
27
+ message += " (was nil)" if bucket_listing_xml.nil?
28
+ raise S3Exception::InvalidBucketListing, message
60
29
  end
61
30
  end
62
31
 
63
32
  # Parse raw XML ListBucketResponse from S3 into object instances.
64
33
  # The S3Objects are skeletons, and are not automatically populated
65
34
  # from S3 (their @value attributes are nil). To load the data into
66
- # an object, grab it from the listing and call its load method to
35
+ # an object, grab it from the listing and call the fetch method to
67
36
  # pull the data down from S3.
37
+ #
38
+ #-- TODO: common_prefixes
68
39
  def parse_listing(bucket_listing_xml)
40
+ bucket_listing_xml = S33r.remove_namespace(bucket_listing_xml)
69
41
  doc = XML.get_xml_doc(bucket_listing_xml)
70
42
 
71
43
  prop_setter = lambda do |prop, path|
@@ -84,41 +56,26 @@ module S33r
84
56
  # contents
85
57
  doc.find('//Contents').to_a.each do |node|
86
58
  obj = S3Object.from_xml_node(node)
87
- # Add to the content listing for the bucket
88
- @contents[obj.key] = obj
59
+ # Add to the contents for the bucket
60
+ self[obj.key] = obj
89
61
  end
90
62
  end
91
-
92
- # Get the last key in the contents hash.
93
- def last_key
94
- @contents.keys.last
95
- end
96
-
97
- # Return an object in this bucket by key.
98
- def [](key)
99
- @contents[key]
100
- end
101
-
102
- # Pretty listing of keys in alphabetical order.
103
- def pretty
104
- @contents.keys.sort.each { |k| puts k }
105
- end
106
63
 
107
64
  # Setters which perform some type casts and normalisation.
108
65
  private
109
- def name=(val); @name = string_prop_normalise(val); end
110
- def prefix=(val); @prefix = string_prop_normalise(val); end
111
- def delimiter=(val); @delimiter = string_prop_normalise(val); end
112
- def marker=(val); @marker = string_prop_normalise(val); end
113
- def max_keys=(val); @max_keys = val.to_i; end
114
- def is_truncated=(val); @is_truncated = ('true' == val || true == val || 'True' == val); end
66
+ def name=(val); @name = string_prop_normalise(val); end
67
+ def prefix=(val); @prefix = string_prop_normalise(val); end
68
+ def delimiter=(val); @delimiter = string_prop_normalise(val); end
69
+ def marker=(val); @marker = string_prop_normalise(val); end
70
+ def max_keys=(val); @max_keys = val.to_i; end
71
+ def is_truncated=(val); @is_truncated = ('true' == val || true == val || 'True' == val); end
115
72
 
116
- # normalise string properties:
117
- # if value for XML element is nil, set property to empty string
118
- def string_prop_normalise(val)
119
- val = '' if val.nil?
120
- val
121
- end
73
+ # normalise string properties:
74
+ # if value for XML element is nil, set property to empty string
75
+ def string_prop_normalise(val)
76
+ val = '' if val.nil?
77
+ val
78
+ end
122
79
 
123
80
  end
124
81
  end
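Because BucketListing now subclasses Hash keyed by object key, the dropped contents/last_key/pretty helpers reduce to ordinary Hash calls. A small illustration against a Bucket from above, with made-up keys:

    listing = bucket.listing(:prefix => 'photos/')   # a BucketListing (Hash subclass)

    listing.keys.sort.each { |key| puts key }        # stands in for the old #pretty
    last_key = listing.keys.sort.last                # stands in for the old #last_key
    obj = listing['photos/cat.jpg']                  # skeleton S3Object; obj.fetch pulls the data down
    puts listing.max_keys
    puts listing.is_truncated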
data/lib/s33r/client.rb CHANGED
@@ -1,224 +1,163 @@
1
- require 'net/https'
2
- require 'cgi'
3
- require 'erb'
4
- require 'yaml'
5
1
  base = File.dirname(__FILE__)
2
+ require File.join(base, 'networking')
6
3
  require File.join(base, 's3_acl')
7
- require File.join(base, 's33r_exception')
4
+ require File.join(base, 's3_logging')
5
+ require File.join(base, 'utility')
8
6
 
9
7
  module S33r
10
- include Net
11
-
12
- # The client performs operations over the network,
13
- # using the core to build request headers and content;
14
- # only client-specific headers are managed here: other headers
15
- # can be handled by the core.
16
- #--
17
- # TODO: use customisable thread pool for requests.
18
- # TODO: timeout on requests.
19
- #--
8
+ # Use this class to do operations on the Service, e.g.
9
+ # creating buckets, deleting buckets, listing all buckets,
10
+ # returning a single bucket.
20
11
  class Client
12
+ include Networking
13
+ include S3ACL
14
+ include S3Logging
21
15
  include S33r
22
-
23
- # S3 keys.
24
- attr_accessor :aws_access_key, :aws_secret_access_key
25
16
 
26
- # Size of data chunk to be sent per request when putting data.
27
- attr_accessor :chunk_size
17
+ # Options used to create this Client.
18
+ attr_reader :created_with_options
28
19
 
29
- # Headers which should be sent with every request by default (unless overridden).
30
- attr_accessor :client_headers
20
+ #-- These are used for creating URLs.
31
21
 
32
- # Whether client should use SSL.
22
+ # Use SSL for requests.
33
23
  attr_accessor :use_ssl
34
24
 
35
- # Whether client dumps headers from requests.
36
- attr_accessor :dump_requests
25
+ # Default expiry for authenticated URLs.
26
+ attr_accessor :expires
37
27
 
38
- # Default log bucket location.
39
- attr_accessor :log_bucket
28
+ # Default canned ACL string to apply to all put requests.
29
+ attr_accessor :canned_acl
40
30
 
41
- # The options used to create the client (useful when spawning
42
- # NamedBucket instances from Client instances).
43
- attr_reader :options
44
-
45
- # Configure either an SSL-enabled or plain HTTP client.
46
- # (If using SSL, no verification of server certificate is performed.)
47
- #
48
- # +options+: hash of optional client config.:
49
- # * <tt>:use_ssl => false</tt>: only use plain HTTP for connections
50
- # * <tt>:dump_requests => true</tt>: dump each request's initial line and headers to STDOUT
51
- def initialize(aws_access_key, aws_secret_access_key, options={})
52
- @use_ssl = true
53
- @use_ssl = false if (false == options[:use_ssl])
54
- options[:use_ssl] = @use_ssl
55
-
56
- @dump_requests = (true == options[:dump_requests])
57
-
58
- # set default chunk size for streaming request body
59
- @chunk_size = DEFAULT_CHUNK_SIZE
60
-
61
- @log_bucket = options[:log_bucket]
62
-
63
- # Amazon S3 developer keys
64
- @aws_access_key = aws_access_key
65
- @aws_secret_access_key = aws_secret_access_key
66
-
67
- # headers sent with every request made by this client
68
- @client_headers = {}
69
-
70
- # keep a record of the options used to create this instance
71
- @options = options
72
- end
31
+ # Amazon keys.
32
+ attr_accessor :access, :secret
73
33
 
74
- # Get an HTTP client instance.
75
- #
76
- # NB this has been moved here so that client instances are
77
- # only instantiated when needed (so Client can be used
78
- # as an empty shell when list_buckets is called).
79
- def get_client
80
- if @use_ssl
81
- client = HTTP.new(HOST, PORT)
82
- # turn off SSL certificate verification
83
- client.verify_mode = OpenSSL::SSL::VERIFY_NONE
84
- client.use_ssl = true
85
- else
86
- client = HTTP.new(HOST, NON_SSL_PORT)
87
- client.use_ssl = false
88
- end
89
-
90
- client
34
+ # Get default options passed to every call to do_request.
35
+ def request_defaults
36
+ defaults = {}
37
+ defaults[:use_ssl] = @use_ssl
38
+ defaults[:expires] = @expires
39
+ defaults[:access] = @access
40
+ defaults[:secret] = @secret
41
+ defaults[:canned_acl] = @canned_acl
42
+ defaults
91
43
  end
92
44
 
93
- # Initialise client from YAML configuration file
94
- # (see load_config method for details of acceptable format).
95
- def Client.init(config_file)
96
- aws_access_key, aws_secret_access_key, options = load_config(config_file)
97
- Client.new(aws_access_key, aws_secret_access_key, options)
45
+ # Get the settings for this client.
46
+ def settings
47
+ request_defaults.merge(:dump_requests => dump_requests,
48
+ :chunk_size => chunk_size, :persistent => persistent)
49
+ end
50
+
51
+ # Create a plain Client.
52
+ def initialize(options={})
53
+ set_options(options)
98
54
  end
99
55
 
100
- # Load YAML config. file for a client. The config. file looks like this:
101
- #
102
- # :include: test/files/namedbucket_config.yml
103
- #
104
- # Note that the loader also runs the config. file through ERB, so you can
105
- # add dynamic blocks of ERB (Ruby) code into your files.
106
- #
107
- # The +options+ section contains other settings, either specific to the Client or
108
- # NamedBucket classes, or general application settings.
109
- # The +options+ section can be omitted, but settings for AWS keys are required.
110
- #
111
- # Returns an array <tt>[aws_access_key, aws_secret_access_key, options]</tt>, where +options+
112
- # is a hash.
113
- def Client.load_config(config_file)
114
- config = YAML::load(ERB.new(IO.read(config_file)).result)
115
- aws_access_key = config['aws_access_key']
116
- aws_secret_access_key = config['aws_secret_access_key']
56
+ # Set options for the client.
57
+ #
58
+ # +options+ may include the following which alter how the Client interacts with S3; they also
59
+ # influence URLs you may generate from the Client:
60
+ # * <tt>:access => 'aws access key'</tt> (defaults to nil)
61
+ # * <tt>:secret => 'aws secret access key'</tt> (defaults to nil)
62
+ # * <tt>:use_ssl => false</tt>: to use plain HTTP for requests
63
+ # sent by this bucket (default=true). If a bucket has :use_ssl => true,
64
+ # any URLs you generate from it will be SSL URLs unless you explicitly
65
+ # disable this behaviour (see url for details).
66
+ # * <tt>:expires => <datetime specifier></tt>: set the default value to be passed as the :expires
67
+ # option when generating authenticated URLs. Should be parseable by S33r.parse_expiry.
68
+ # * <tt>:canned_acl => 'public-read'</tt>: set a default canned acl to apply to all put
69
+ # requests.
70
+ #
71
+ # These options change the behaviour of the HTTP client which actually sends the request:
72
+ # * <tt>:chunk_size => Integer</tt>: use a non-standard chunk size;
73
+ # default is to use S33r::DEFAULT_CHUNK_SIZE.
74
+ # * <tt>:persistent => true</tt>: use persistent HTTP connections
75
+ # (default=false).
76
+ # * <tt>:dump_requests => true</tt>: to dump all request headers before the request is sent.
77
+ def set_options(options={})
78
+ # General client options.
79
+ @access = options[:access]
80
+ @secret = options[:secret]
81
+ @use_ssl = true
82
+ @use_ssl = false if (false == options[:use_ssl])
83
+ @expires = options[:expires] || 'never'
84
+ @canned_acl = options[:canned_acl] || nil
117
85
 
118
- options = {}
119
- options = S33r.keys_to_symbols(config['options']) if config['options']
86
+ # Options specific to the mechanics of the HTTP request.
87
+ @dump_requests = options[:dump_requests] || false
88
+ @chunk_size = options[:chunk_size]
89
+ @persistent = options[:persistent] || false
120
90
 
121
- [aws_access_key, aws_secret_access_key, options]
91
+ @created_with_options = options
122
92
  end
123
-
124
- # Send a request over the wire.
125
- #
126
- # This method streams +data+ if it responds to the +stat+ method
127
- # (as files do).
128
- #
129
- # Returns a Net::HTTPResponse instance.
130
- def do_request(method, path, data=nil, headers={})
131
- req = get_requester(method, path)
132
- req.chunk_size = @chunk_size
133
-
134
- # Add the S3 headers which are always required.
135
- headers = add_default_headers(headers)
136
-
137
- # Add any client-specific default headers.
138
- headers = add_client_headers(headers)
139
-
140
- # Generate the S3 authorization header.
141
- headers['Authorization'] = generate_auth_header_value(method, path, headers,
142
- @aws_access_key, @aws_secret_access_key)
143
-
144
- # Insert the headers into the request object.
145
- headers.each do |key, value|
146
- req[key] = value
147
- end
148
-
149
- # Add data to the request as a stream.
150
- if req.request_body_permitted?
151
- # For streaming files; NB Content-Length will be set by Net::HTTP
152
- # for character-based data: this section of code is only used
153
- # when reading directly from a file.
154
- if data.respond_to?(:stat)
155
- req.body_stream = data
156
- req['Content-Length'] = data.stat.size.to_s
157
- data = nil
158
- end
159
- else
160
- data = nil
161
- end
162
-
163
- if @dump_requests
164
- puts req.to_s
165
- end
166
-
167
- # Run the request.
168
- client = get_client
169
- client.start do
170
- response = client.request(req, data)
171
-
172
- # Check the response to see whether S3 is down;
173
- # raises an S3FallenOver error if S3 returns a 500-503 response code
174
- response.check_s3_availability
175
-
176
- response
177
- end
178
- end
179
-
180
- # Return an instance of an appropriate request class.
181
- def get_requester(method, path)
182
- raise S33rException::UnsupportedHTTPMethod, "The #{method} HTTP method is not supported" if !(METHOD_VERBS.include?(method))
183
- eval("HTTP::" + method[0,1].upcase + method[1..-1].downcase + ".new('#{path}')")
184
- end
185
-
93
+
186
94
  # List all buckets.
187
95
  #
188
- # Returns an array of NamedBucket instances; array will be empty if
96
+ # Returns an array of Bucket instances; array will be empty if
189
97
  # the BucketListing parse fails for any reason (i.e. no <Bucket> elements
190
98
  # occur in it).
191
- def list_buckets
192
- bucket_list_xml = do_get('/').body
99
+ #
100
+ # +options+ is passed through to get_bucket, making it possible to detach
101
+ # retrieved buckets from the Client instance, and to pass other options to
102
+ # the bucket.
103
+ def buckets(options={})
104
+ resp = do_get
105
+
106
+ bucket_list_xml = resp.body
193
107
  doc = XML.get_xml_doc(S33r.remove_namespace(bucket_list_xml))
194
108
 
195
- named_buckets = []
109
+ buckets = {}
196
110
 
197
111
  doc.find("//Bucket").to_a.each do |node|
198
112
  bucket_name = node.xget('Name')
199
113
  if bucket_name
200
- # The NamedBucket instances inherit the request dumping behaviour
114
+ # CreationDate is a string in format '2006-10-17T15:14:39.000Z'.
115
+ creation_date = Time.parse(node.xget('CreationDate'))
116
+ # The Bucket instances inherit the request dumping behaviour
201
117
  # of this client.
202
- named_buckets << NamedBucket.new(@aws_access_key, @aws_secret_access_key,
203
- {:default_bucket => bucket_name, :dump_request => self.dump_requests})
118
+ buckets[bucket_name] = get_bucket(bucket_name, options)
204
119
  end
205
120
  end
206
121
 
207
- named_buckets
122
+ buckets
208
123
  end
124
+ alias :list_buckets :buckets
209
125
 
210
- # List just bucket names.
211
- def list
212
- list_buckets.map {|bucket| bucket.name}
126
+ # Just get a sorted array of names of buckets.
127
+ def bucket_names
128
+ buckets.keys.sort
213
129
  end
214
-
130
+
131
+ # Get a Client instance bound to a bucket.
132
+ #
133
+ # +options+:
134
+ # * <tt>:orphan => true</tt>: create the Client in isolation from
135
+ # the Service and don't inherit any of its instance settings.
136
+ #
137
+ # Other options are passed through to Bucket.new.
138
+ def get_bucket(bucket_name, options={})
139
+ orphan = options.delete(:orphan)
140
+ unless orphan
141
+ options.merge!(settings) { |key, old_val, new_val| old_val }
142
+ end
143
+ bucket = Bucket.new(bucket_name, options)
144
+ yield bucket if block_given?
145
+ bucket
146
+ end
147
+
148
+ # Create a new Bucket.
149
+ def create_bucket(name, options={})
150
+ options[:create] = true
151
+ get_bucket(name, options)
152
+ end
153
+
215
154
  # List entries in a bucket.
216
155
  #
217
- # +query_params+: hash of options on the bucket listing request, passed as querystring parameters to S3
156
+ # +options+: hash of options on the bucket listing request, passed as querystring parameters to S3
218
157
  # (see http://docs.amazonwebservices.com/AmazonS3/2006-03-01/).
219
158
  # * <tt>:prefix => 'some_string'</tt>: restrict results to keys beginning with 'some_string'
220
159
  # * <tt>:marker => 'some_string'</tt>: restict results to keys occurring lexicographically after 'some_string'
221
- # * <tt>:max_keys => 1000</tt>: return at most this number of keys (maximum possible value is 1000)
160
+ # * <tt>:max_keys => Integer</tt>: return at most this number of keys (maximum possible value is 1000)
222
161
  # * <tt>:delimiter => 'some_string'</tt>: keys containing the same string between prefix and the delimiter
223
162
  # are rolled up into a CommonPrefixes element inside the response
224
163
  #
@@ -227,319 +166,239 @@ module S33r
227
166
  #
228
167
  # To page through a bucket 10 keys at a time, you can do:
229
168
  #
230
- # resp, listing = list_bucket('mybucket', :max_keys => 10)
231
- # resp, listing = list_bucket('mybucket', :max_keys => 11, :marker => listing.last_key)
232
- # resp, listing = list_bucket('mybucket', :max_keys => 11, :marker => listing.last_key)
169
+ # listing = list_bucket('mybucket', :max_keys => 10)
170
+ # listing = list_bucket('mybucket', :max_keys => 11, :marker => listing.last_key)
171
+ # listing = list_bucket('mybucket', :max_keys => 11, :marker => listing.last_key)
233
172
  # etc.
234
173
  #
235
174
  # Note in the example code, +listing+ is a BucketListing instance; call its contents method
236
175
  # to get a hash of the keys in the bucket, along with associated objects.
237
176
  #
238
- # Returns [raw_response, BucketListing instance].
239
- def list_bucket(bucket_name, query_params={})
240
- if query_params[:max_keys]
241
- max_keys = query_params[:max_keys].to_i
242
- raise S33rException::BucketListingMaxKeysError, "max_keys option to list bucket cannot be > #{BUCKET_LIST_MAX_MAX_KEYS}" \
177
+ # Returns BucketListing instance.
178
+ #-- TODO: testing
179
+ def listing(options={})
180
+ querystring = options[:querystring] || {}
181
+
182
+ # Check :max_keys isn't higher than the maximum allowed by S3.
183
+ if options[:max_keys]
184
+ max_keys = options[:max_keys].to_i
243
185
  if max_keys > BUCKET_LIST_MAX_MAX_KEYS
244
-
245
- # convert :max_keys querystring parameter to 'max-keys' parameter
246
- query_params['max-keys'] = query_params.delete(:max_keys)
186
+ raise S3Exception::BucketListingMaxKeysError, "max_keys option to list bucket cannot be > #{BUCKET_LIST_MAX_MAX_KEYS}"
187
+ end
188
+ querystring['max-keys'] = max_keys
247
189
  end
248
190
 
249
- resp = do_get("/#{bucket_name}" + generate_querystring(query_params))
250
-
251
- [resp, BucketListing.new(resp.body)]
252
- end
253
-
254
- # Create a bucket.
255
- #
256
- # Returns true if response returned a 200 code; false otherwise.
257
- def create_bucket(bucket_name, headers={})
258
- resp = do_put("/#{bucket_name}", nil, headers)
259
- resp.ok?
260
- end
261
-
262
- # Delete a bucket.
263
- #
264
- # +options+ hash can contain the following:
265
- # * <tt>:force => true</tt>: delete all keys within the bucket then delete the bucket itself
266
- #-- TODO: maybe delete keys matching a partial path
267
- #-- TODO: if multiple pages of keys in buckets, need to get them by page.
268
- def delete_bucket(bucket_name, headers={}, options={})
269
- if true == options[:force]
270
- _, bucket_listing = list_bucket(bucket_name)
271
- bucket_listing.contents.each_value do |obj|
272
- delete_resource(bucket_name, obj.key)
273
- end
191
+ ['prefix', 'marker', 'delimiter'].each do |key|
192
+ key_sym = key.to_sym
193
+ querystring[key] = options[key_sym] if options[key_sym]
274
194
  end
275
195
 
276
- do_delete("/#{bucket_name}", headers)
196
+ options[:querystring] = querystring
197
+
198
+ resp = do_get(options)
199
+
200
+ if resp.ok?
201
+ @listing = BucketListing.new(resp.body)
202
+ else
203
+ raise resp.s3_error
204
+ end
277
205
  end
278
-
279
- # Check whether a bucket exists or not.
280
- def bucket_exists?(bucket_name)
281
- resource_exists?(bucket_name)
206
+ alias :objects :listing
207
+
208
+
209
+ # List content of a bucket.
210
+ def list_bucket(bucket_name, options={})
211
+ options[:bucket] = bucket_name
212
+ listing(options)
282
213
  end
283
214
 
284
- # Create a NamedBucket instance.
285
- #
286
- # +options+ is a hash of extra options to use when creating
287
- # the NamedBucket instance (see NamedBucket.initialize);
288
- # specify :parent to use the same options used to create this Client
289
- # instance.
290
- def get_named_bucket(bucket_name, options={}, &block)
291
- options = @options if :parent == options
292
- options[:default_bucket] = bucket_name
293
- named_bucket = NamedBucket.new(@aws_access_key, @aws_secret_access_key, options)
294
- yield named_bucket if block_given?
295
- named_bucket
215
+ # Delete a Bucket.
216
+ #
217
+ # +options+:
218
+ # * <tt>:force => true</tt>: To clear the content of the bucket first.
219
+ def delete_bucket(bucket_name, options={})
220
+ options[:bucket] = bucket_name
221
+ if options[:force]
222
+ listing(options).keys.each { |key| do_delete(options.merge(:key => key)) }
223
+ end
224
+ do_delete(options).ok?
296
225
  end
297
226
 
298
- # Fetch a resource.
299
- #
300
- # Returns a plain response, not an S3Object: if you want an object back,
301
- # use get_object instead.
302
- def get_resource(bucket_name, resource_key, headers={})
303
- do_get("/#{bucket_name}/#{resource_key}", headers)
227
+ # Check whether a bucket exists on S3.
228
+ def bucket_exists?(name, options={})
229
+ options[:bucket] = name
230
+ do_head(options).ok?
304
231
  end
305
232
 
306
- # Check whether a bucket contains a key.
307
- #
308
- # Returns true if resource_key exists inside bucket_name.
309
- def resource_exists?(bucket_name, resource_key=nil)
310
- path = "/#{bucket_name}"
311
- path += "/#{resource_key}" unless resource_key.nil?
312
- do_head(path).ok?
233
+ # Put a "thing" onto S3.
234
+ #
235
+ # +thing+ may be a string, an S3Object, an S3ACL::Policy,
236
+ # a LoggingResource or a file handle.
237
+ #
238
+ # Anything you pass in +options+ will override any values
239
+ # inferred from the +thing+ (e.g. content type, key).
240
+ #
241
+ # +options+:
242
+ # * <tt>:key => 'some-key'</tt> (required unless thing is an S3Object).
243
+ # * <tt>:bucket => 'some-bucket'</tt>
244
+ # * <tt>:content_type => 'text/plain'</tt>
245
+ # * <tt>:render_as_attachment => Boolean</tt>
246
+ # * <tt>:file => true</tt>: thing is a filename, so load it as a file
247
+ # * <tt>:canned_acl => 'public'</tt>: one of S33r::CANNED_ACLS, to set a canned
248
+ # acl on a put.
249
+ #
250
+ #-- TODO: finish documentation for options
251
+ #-- TODO: implement canned_acl
252
+ #-- TODO: pass Policy as an option
253
+ def put(thing, options={}, headers={})
254
+ is_file = options[:file]
255
+
256
+ # thing is a file, so load it.
257
+ if is_file and thing.is_a?(String)
258
+ # Use the filename as the key unless it is set already.
259
+ options[:key] ||= thing
260
+
261
+ # Guess the content type unless it's been set.
262
+ unless options[:content_type]
263
+ mime_type = guess_mime_type(thing)
264
+ content_type = mime_type.simplified
265
+ options[:content_type] = content_type
266
+ end
267
+ elsif thing.is_a?(S3Object)
268
+ options[:key] ||= thing.key
269
+ data = thing.value
270
+ options[:content_type] ||= thing.content_type
271
+ options[:render_as_attachment] ||= thing.render_as_attachment
272
+ headers = metadata_headers(thing.meta)
273
+ elsif thing.is_a?(Policy) || thing.is_a?(LoggingResource)
274
+ data = thing.to_xml
275
+ options[:content_type] = 'text/xml'
276
+ else
277
+ data = thing
278
+ end
279
+
280
+ key = options[:key]
281
+
282
+ # Headers for content type etc.
283
+ headers.merge! content_headers(options[:content_type], key, options[:render_as_attachment])
284
+
285
+ if is_file
286
+ File.open(thing) do |data|
287
+ do_put(data, options, headers).ok?
288
+ end
289
+ else
290
+ do_put(data, options, headers).ok?
291
+ end
313
292
  end
314
293
 
315
- # Fetch an object. Note that this actually pulls down the
316
- # object from S3 and instantiates the S3Object instance with it.
317
- def get_object(bucket_name, resource_key, headers={})
318
- response = get_resource(bucket_name, resource_key, headers)
319
- S3Object.from_response(resource_key, response)
294
+ # Put a file onto S3 (shortcut to put).
295
+ def put_file(filename, options={}, headers={})
296
+ options[:file] = true
297
+ put(filename, options, headers)
320
298
  end
321
299
 
322
- # Fetch the ACL document for a resource.
323
- #
324
- # Raises an exception if there is a problem with the resource
325
- # (e.g. it doesn't exist).
326
- def get_acl(bucket_name, resource_key='')
327
- path = s3_acl_path(bucket_name, resource_key)
328
- response = do_get(path)
329
- if response.ok?
330
- S3ACL::ACLDoc.from_xml(response.body)
300
+ # Get an ACL.
301
+ def get_acl(options={})
302
+ options[:acl] = true
303
+ resp = do_get(options)
304
+ if resp.ok?
305
+ S3ACL::Policy.from_xml(resp.body)
331
306
  else
332
- raise S33rException::MissingResource, "Tried to get an ACL from a non-existent resource [#{path}]"
307
+ nil
333
308
  end
334
309
  end
310
+ alias :acl :get_acl
335
311
 
336
- # Put the ACL document back to a resource.
337
- #
338
- # +acl_doc+ is an S33r::S3ACL::ACLDoc instance.
339
- #
340
- # Returns true if response had a 200 code, false otherwise.
341
- # If you get a 400 Bad Request back, it means a CanonicalUser
342
- # could not be identified from the email address.
343
- def set_acl(acl_doc, bucket_name, resource_key='')
344
- path = s3_acl_path(bucket_name, resource_key)
345
- response = do_put(path, acl_doc.to_xml)
346
- response.ok?
312
+ # Set an ACL.
313
+ def set_acl(policy, options={})
314
+ options[:acl] = true
315
+ put(policy, options)
347
316
  end
317
+ alias :acl= :set_acl
348
318
 
349
- # Set up logging for a bucket and resource key.
350
- #
351
- # +logging_resource+ = a LoggingResource instance.
352
- # +bucket_name+ = a bucket to log.
353
- # +resource_key+ = a resource to log (if empty, logging
354
- # gets added to the bucket).
355
- def set_logging(logging_resource, bucket_name, resource_key='')
356
- path = s3_logging_path(bucket_name, resource_key)
357
- response = do_put(path, logging_resource.to_xml)
319
+ # Is a resource public?
320
+ def public?(options={})
321
+ get_acl(options).public_readable?
358
322
  end
359
323
 
360
- # Make a resource public (i.e. grant READ permissions
361
- # to the AllUsers group type). NB separate method is used
362
- # on buckets, to make all of their content public too.
363
- #
364
- # Returns nil if resource does not exist.
365
- def make_public(bucket_name, resource_key='')
366
- acl = get_acl(bucket_name, resource_key)
367
- if !acl.nil? and acl.add_public_read_grants
368
- set_acl(acl, bucket_name, resource_key)
369
- end
324
+ # Make a resource public
325
+ def make_public
326
+ set_acl(get_acl().add_public_read_grant)
370
327
  end
371
328
 
372
- #-- TODO
329
+ # Make a resource private
373
330
  def make_private
331
+ set_acl(get_acl().remove_public_read_grant)
374
332
  end
375
333
 
376
- # Make a bucket capable of being a target for access logging.
377
- #
378
- # Returns true if the bucket is now a possible log target;
379
- # false otherwise.
380
- #
381
- #-- TODO: tests
382
- def enable_log_target(bucket_name)
383
- acl = get_acl(bucket_name)
384
- if acl.add_log_target_grants
385
- set_acl(acl, bucket_name)
386
- end
387
- acl.log_targetable?
334
+ # Get a URL for a thing.
335
+ def url(options={})
336
+ options = request_defaults.merge(options)
337
+ s3_url(options)
388
338
  end
389
339
 
390
- # Disable permissions for access logging into a bucket.
391
- #
392
- # Returns true if the bucket is no longer log targetable;
393
- # false if it remains a log target.
394
- #
395
- #-- TODO: tests
396
- def disable_log_target(bucket_name)
397
- acl = get_acl(bucket_name)
398
- acl.remove_log_target
399
- set_acl(acl, bucket_name)
400
- !acl.log_targetable?
340
+ # Change the status of a bucket for logging.
341
+ #
342
+ # +logging_on+ = :on to turn logging on (default),
343
+ # :off to turn logging off.
344
+ def change_log_target_status(bucket_name, state=:on)
345
+ logging_on = (:on == state)
346
+ bucket = get_bucket(bucket_name)
347
+ policy = bucket.acl
348
+ logging_on ? policy.add_log_target_grants : policy.remove_log_target_grants
349
+ bucket.acl = policy
350
+ logging_on == policy.log_targetable?
401
351
  end
402
352
 
403
- # Enable logging for a resource (only buckets are supported presently).
404
- #
405
- # +log_prefix+ is the prefix for the logs.
406
- # +bucket_name+ is the bucket to log.
407
- # +log_bucket+ is the bucket to put logs into.
408
- #
409
- # options:
410
- # +:for_key => 'key'+ is the (optional) resource to log in the bucket
411
- # (NB this is not currently supported by S3).
412
- # +:log_prefix => 'prefix'+ is the (optional) log file prefix
413
- # (defaults to bucket_name + '-')
414
- #
415
- #-- TODO: tests
416
- def enable_logging(bucket_name, log_bucket=nil, options={})
417
- # Set to the default log_bucket if not set explicitly.
418
- log_bucket ||= @log_bucket
419
-
420
- resource_key = options[:for_key]
421
- resource_key ||= ''
422
-
423
- log_prefix = options[:prefix]
424
- log_prefix ||= bucket_name + '-'
425
-
426
- log_bucket_acl = get_acl(log_bucket)
427
- if !(log_bucket_acl.log_targetable?)
428
- raise BucketNotLogTargetable, "The bucket #{log_bucket} cannot be specified as a log target"
353
+ # Get the logging status for a resource.
354
+ #
355
+ # +options+:
356
+ # * <tt>:for_bucket => 'bucket'</tt>: get the logging status for a bucket.
357
+ # (Alias for :bucket; if both supplied, :bucket takes preference.)
358
+ def logging(options={})
359
+ options[:logging] = true
360
+ options[:bucket] ||= options[:for_bucket]
361
+ resp = do_get(options)
362
+ if resp.ok?
363
+ LoggingResource.from_xml(resp.body)
364
+ else
365
+ nil
429
366
  end
430
- logging_resource = LoggingResource.new(log_bucket, log_prefix)
431
- set_logging(logging_resource, bucket_name, resource_key)
432
- end
433
-
434
- # Turn off logging of a resource.
435
- #-- TODO
436
- def disable_logging
437
367
  end
438
368
 
439
- # Get the logging status of a resource.
440
- #-- TODO
441
- def get_logging
442
- end
443
-
444
- # Put some generic resource onto S3.
445
- #
446
- # To stream with this method, +data+ should respond to the +stat+
447
- # method; examples of data types which respond to this include File instances.
448
- def put_resource(bucket_name, resource_key, data, headers={})
449
- raise S33r::S33rException::TryingToPutEmptyResource, "No data to put for key '#{resource_key}'" unless data
450
- resp = do_put("/#{bucket_name}/" + "#{CGI::escape(resource_key)}", data, headers)
451
- resp.ok?
452
- end
453
-
454
- # Put a string onto S3.
455
- def put_text(string, bucket_name, resource_key, headers={})
456
- headers["Content-Type"] = "text/plain"
457
- put_resource(bucket_name, resource_key, string, headers)
458
- end
459
-
460
- # Put a file onto S3.
461
- #
462
- # If +resource_key+ is nil, the filename is used as the key instead.
463
- #
464
- # +headers+ sets some headers with the request; useful if you have an odd file type
465
- # not recognised by the mimetypes library, and want to explicitly set the Content-Type header.
466
- #
467
- # +options+ hash simplifies setting some headers with specific meaning to S3:
468
- # * <tt>:render_as_attachment => true</tt>: set the Content-Disposition for this file to "attachment" and set
469
- # the default filename for saving the file (when accessed by a web browser) to +filename+; this
470
- # turns the file into a download when opened in a browser, rather than trying to render it inline.
471
- #
472
- # Note that this method uses a handle to the file, so it can be streamed in chunks to S3.
473
- def put_file(filename, bucket_name, resource_key=nil, headers={}, options={})
474
- # default to the file path as the resource key if none explicitly set
475
- if resource_key.nil?
476
- resource_key = filename
477
- end
369
+ # Enable logging for a bucket.
370
+ #
371
+ # +target_bucket+ is the target for the logs.
372
+ # The bucket you want to log is passed in +options+.
373
+ #
374
+ # +options+
375
+ # * <tt>:bucket => 'bucket'</tt>: bucket to log.
376
+ # * <tt>:for_bucket => 'bucket'</tt>: syntactic sugar; alias for <tt>:bucket</tt>.
377
+ # If :bucket and :for_bucket are provided, :bucket takes preference.
378
+ # * <tt>:prefix => 'some-prefix-'</tt>: specify a prefix for log files;
379
+ # otherwise 'log-<bucket name>-' is used
380
+ def logs_to(target_bucket, options={})
381
+ target_bucket_name = target_bucket.is_a?(Bucket) ? target_bucket.name : target_bucket
382
+ log_prefix = options[:prefix] || "log-#{bucket_name}-"
383
+ options[:bucket] ||= options[:for_bucket]
478
384
 
479
- # set Content-Disposition header
480
- if options[:render_as_attachment]
481
- headers['Content-Disposition'] = "attachment; filename=#{File.basename(filename)}"
482
- end
483
-
484
- # content type is explicitly set in the headers, so apply to request
485
- if headers[:content_type]
486
- # use the first MIME type corresponding to this content type string
487
- # (MIME::Types returns an array of possible MIME types)
488
- mime_type = MIME::Types[headers[:content_type]][0]
489
- else
490
- # we're not going to use this much, just for parsing the content type etc.
491
- mime_type = guess_mime_type(filename)
492
- end
493
- content_type = mime_type.simplified
494
- headers['Content-Type'] = content_type
495
- headers['Content-Transfer-Encoding'] = 'binary' if mime_type.binary?
496
-
497
- # Open the file, and pass the handle to the HTTP client so content
498
- # can be streamed.
499
- File.open(filename) do |data|
500
- # send the put request
501
- put_resource(bucket_name, resource_key, data, headers)
385
+ target_bucket_acl = get_acl(:bucket => target_bucket_name)
386
+ unless target_bucket_acl.log_targetable?
387
+ raise BucketNotLogTargetable, "The bucket #{target_bucket_name} cannot be specified as a log target"
502
388
  end
389
+
390
+ logging_resource = LoggingResource.new(target_bucket_name, log_prefix)
391
+ options[:logging] = true
392
+ put(logging_resource, options)
503
393
  end
504
394
 
505
- # Delete a resource from S3.
395
+ # Turn off logging for a bucket.
506
396
  #
507
- # Note that S3 returns the same response code regardless
508
- # of whether the resource was successfully deleted, or didn't exist
509
- # in the first place.
510
- def delete_resource(bucket_name, resource_key, headers={})
511
- do_delete("/#{bucket_name}/#{resource_key}", headers)
512
- end
513
-
514
- # Add any default headers which should be sent with every request from the client.
515
- #
516
- # +headers+ is a hash of headers already set up. Any headers passed in here
517
- # override the defaults in +client_headers+.
518
- #
519
- # Returns +headers+ with the content of +client_headers+ merged in.
520
- def add_client_headers(headers)
521
- headers.merge!(client_headers) { |key, arg, default| arg }
522
- end
523
-
524
- def do_get(path='/', headers={})
525
- do_request('GET', path, nil, headers)
397
+ # +options+:
398
+ # * <tt>:bucket => 'bucket'</tt>: bucket to turn logging off for.
399
+ def logs_off(options={})
400
+ options[:logging] = true
401
+ put(LoggingResource.new, options)
526
402
  end
527
-
528
- def do_head(path='/', headers={})
529
- do_request('HEAD', path, nil, headers)
530
- end
531
-
532
- def do_post(path='/', data=nil, headers={})
533
- do_request('POST', path, data, headers)
534
- end
535
-
536
- def do_put(path='/', data=nil, headers={})
537
- do_request('PUT', path, data, headers)
538
- end
539
-
540
- def do_delete(path, headers={})
541
- do_request('DELETE', path, nil, headers)
542
- end
543
-
544
403
  end
545
- end
404
+ end
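Taken together, the reworked Client replaces the old positional bucket/key arguments with an options hash on every call. A rough end-to-end sketch of the calls documented above; bucket names, keys and file paths are placeholders, and the snippet assumes the methods behave as their comments describe:

    client = S33r::Client.new(:access => 'yourkey', :secret => 'yoursecretkey',
                              :use_ssl => true, :dump_requests => false)

    client.bucket_names                        # sorted array of bucket names
    client.bucket_exists?('example-bucket')    # HEAD request, true/false

    # put_file guesses the content type and falls back to the filename as the key.
    client.put_file('report.pdf', :bucket => 'example-bucket',
                                  :key => 'reports/report.pdf',
                                  :render_as_attachment => true)

    listing = client.list_bucket('example-bucket', :max_keys => 10, :prefix => 'reports/')
    puts listing.keys.sort

    client.public?(:bucket => 'example-bucket', :key => 'reports/report.pdf')
    client.logs_to('log-bucket', :for_bucket => 'example-bucket', :prefix => 'log-example-')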