edavis10-redmine_s3 0.0.3

Sign up to get free protection for your applications and to get access to all the features.
data/README.rdoc ADDED
@@ -0,0 +1,19 @@
1
+ = S3 plugin for Redmine
2
+
3
+ == Description
4
+ This Redmine[http://www.redmine.org] plugin makes file attachments be stored on "Amazon S3"[http://aws.amazon.com/s3] rather than on the local filesystem.
5
+
6
+ == Installation
7
+ 1. Make sure Redmine is installed and cd into its root directory
8
+ 2. ruby script/plugin install git://github.com/tigrish/redmine_s3.git
9
+ 3. cp vendor/plugins/redmine_s3/config/s3.yml.example config/s3.yml
10
+ 4. Edit config/s3.yml with your favourite editor
11
+ 5. Restart mongrel/upload to production/whatever
12
+
13
+ == Options
14
+ * The bucket specified in s3.yml will be created automatically when the plugin is loaded (this is generally when the server starts).
15
+ * If you have created a CNAME entry for your bucket set the cname_bucket option to true in s3.yml and your files will be served from that domain.
16
+ * After files are uploaded they are made public. This seems acceptable as it is also Redmine's policy for file storage.
17
+
18
+ == Reporting Bugs and Getting Help
19
+ Bugs and feature requests may be filed at http://projects.tigrish.com/projects/redmine-s3/issues
data/Rakefile ADDED
@@ -0,0 +1,38 @@
1
+ #!/usr/bin/env ruby
2
+ require 'redmine_plugin_support'
3
+
4
+ Dir[File.expand_path(File.dirname(__FILE__)) + "/lib/tasks/**/*.rake"].sort.each { |ext| load ext }
5
+
6
+ RedminePluginSupport::Base.setup do |plugin|
7
+ plugin.project_name = 'redmine_s3'
8
+ plugin.default_task = [:test]
9
+ plugin.tasks = [:doc, :release, :clean, :test, :db]
10
+ # TODO: gem not getting this automatically
11
+ plugin.redmine_root = File.expand_path(File.dirname(__FILE__) + '/../../../')
12
+ end
13
+
14
+ begin
15
+ require 'jeweler'
16
+ Jeweler::Tasks.new do |s|
17
+ s.name = "edavis10-redmine_s3"
18
+ s.summary = "Plugin to have Redmine store uploads on S3"
19
+ s.email = "edavis@littlestreamsoftware.com"
20
+ s.homepage = "http://projects.tigrish.com/projects/redmine-s3"
21
+ s.description = "Plugin to have Redmine store uploads on S3"
22
+ s.authors = ["Christopher Dell", "Eric Davis"]
23
+ s.rubyforge_project = "littlestreamsoftware"
24
+ s.files = FileList[
25
+ "[A-Z]*",
26
+ "init.rb",
27
+ "rails/init.rb",
28
+ "{bin,generators,lib,test,app,assets,config,lang}/**/*",
29
+ 'lib/jeweler/templates/.gitignore'
30
+ ]
31
+ end
32
+ Jeweler::GemcutterTasks.new
33
+ Jeweler::RubyforgeTasks.new do |rubyforge|
34
+ rubyforge.doc_task = "rdoc"
35
+ end
36
+ rescue LoadError
37
+ puts "Jeweler, or one of its dependencies, is not available. Install it with: sudo gem install technicalpickles-jeweler -s http://gems.github.com"
38
+ end
data/VERSION ADDED
@@ -0,0 +1 @@
1
+ 0.0.3
@@ -0,0 +1,3 @@
1
+ # English strings go here for Rails i18n
2
+ en:
3
+ my_label: "My label"
@@ -0,0 +1,11 @@
1
+ production:
2
+ access_key_id:
3
+ secret_access_key:
4
+ bucket:
5
+ cname_bucket: false
6
+
7
+ development:
8
+ access_key_id:
9
+ secret_access_key:
10
+ bucket:
11
+ cname_bucket: false
data/init.rb ADDED
@@ -0,0 +1,25 @@
1
+ require 'redmine'
2
+ require 'dispatcher' # Patches to the Redmine core.
3
+
4
+ Dispatcher.to_prepare :redmine_s3 do
5
+ require_dependency 'attachment'
6
+ unless Attachment.included_modules.include? RedmineS3::AttachmentPatch
7
+ Attachment.send(:include, RedmineS3::AttachmentPatch)
8
+ end
9
+
10
+ app_dependency = Redmine::VERSION.to_a.slice(0,3).join('.') > '0.8.4' ? 'application_controller' : 'application'
11
+ require_dependency(app_dependency)
12
+ require_dependency 'attachments_controller'
13
+ unless AttachmentsController.included_modules.include? RedmineS3::AttachmentsControllerPatch
14
+ AttachmentsController.send(:include, RedmineS3::AttachmentsControllerPatch)
15
+ end
16
+
17
+ RedmineS3::Connection.create_bucket
18
+ end
19
+
20
+ Redmine::Plugin.register :redmine_s3_attachments do
21
+ name 'S3'
22
+ author 'Chris Dell'
23
+ description 'Use Amazon S3 as a storage engine for attachments'
24
+ version '0.0.3'
25
+ end
data/lang/en.yml ADDED
@@ -0,0 +1,2 @@
1
+ # English strings go here
2
+ my_label: "My label"
data/lib/S3.rb ADDED
@@ -0,0 +1,861 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ # This software code is made available "AS IS" without warranties of any
4
+ # kind. You may copy, display, modify and redistribute the software
5
+ # code either by itself or as incorporated into your code; provided that
6
+ # you do not remove any proprietary notices. Your use of this software
7
+ # code is at your own risk and you waive any claim against Amazon
8
+ # Digital Services, Inc. or its affiliates with respect to your use of
9
+ # this software code. (c) 2006-2007 Amazon Digital Services, Inc. or its
10
+ # affiliates.
11
+
12
+ require 'base64'
13
+ require 'cgi'
14
+ require 'openssl'
15
+ require 'digest/sha1'
16
+ require 'net/https'
17
+ require 'rexml/document'
18
+ require 'time'
19
+ require 'uri'
20
+
21
+ # this wasn't added until v 1.8.3
22
+ if (RUBY_VERSION < '1.8.3')
23
+ class Net::HTTP::Delete < Net::HTTPRequest
24
+ METHOD = 'DELETE'
25
+ REQUEST_HAS_BODY = false
26
+ RESPONSE_HAS_BODY = true
27
+ end
28
+ end
29
+
30
+ # this module has two big classes: AWSAuthConnection and
31
+ # QueryStringAuthGenerator. both use identical apis, but the first actually
32
+ # performs the operation, while the second simply outputs urls with the
33
+ # appropriate authentication query string parameters, which could be used
34
+ # in another tool (such as your web browser for GETs).
35
+ module S3
36
+ DEFAULT_HOST = 's3.amazonaws.com'
37
+ PORTS_BY_SECURITY = { true => 443, false => 80 }
38
+ METADATA_PREFIX = 'x-amz-meta-'
39
+ AMAZON_HEADER_PREFIX = 'x-amz-'
40
+
41
+ # Location constraint for CreateBucket
42
+ module BucketLocation
43
+ DEFAULT = nil
44
+ EU = 'EU'
45
+ end
46
+
47
+ # builds the canonical string for signing.
48
+ def S3.canonical_string(method, bucket="", path="", path_args={}, headers={}, expires=nil)
49
+ interesting_headers = {}
50
+ headers.each do |key, value|
51
+ lk = key.downcase
52
+ if (lk == 'content-md5' or
53
+ lk == 'content-type' or
54
+ lk == 'date' or
55
+ lk =~ /^#{AMAZON_HEADER_PREFIX}/o)
56
+ interesting_headers[lk] = value.to_s.strip
57
+ end
58
+ end
59
+
60
+ # these fields get empty strings if they don't exist.
61
+ interesting_headers['content-type'] ||= ''
62
+ interesting_headers['content-md5'] ||= ''
63
+
64
+ # just in case someone used this. it's not necessary in this lib.
65
+ if interesting_headers.has_key? 'x-amz-date'
66
+ interesting_headers['date'] = ''
67
+ end
68
+
69
+ # if you're using expires for query string auth, then it trumps date
70
+ # (and x-amz-date)
71
+ if not expires.nil?
72
+ interesting_headers['date'] = expires
73
+ end
74
+
75
+ buf = "#{method}\n"
76
+ interesting_headers.sort { |a, b| a[0] <=> b[0] }.each do |key, value|
77
+ if key =~ /^#{AMAZON_HEADER_PREFIX}/o
78
+ buf << "#{key}:#{value}\n"
79
+ else
80
+ buf << "#{value}\n"
81
+ end
82
+ end
83
+
84
+ # build the path using the bucket and key
85
+ if not bucket.empty?
86
+ buf << "/#{bucket}"
87
+ end
88
+ # append the key (it might be empty string)
89
+ # append a slash regardless
90
+ buf << "/#{path}"
91
+
92
+ # if there is an acl, logging, or torrent parameter
93
+ # add them to the string
94
+ if path_args.has_key?('acl')
95
+ buf << '?acl'
96
+ elsif path_args.has_key?('torrent')
97
+ buf << '?torrent'
98
+ elsif path_args.has_key?('logging')
99
+ buf << '?logging'
100
+ elsif path_args.has_key?('location')
101
+ buf << '?location'
102
+ end
103
+
104
+ return buf
105
+ end
106
+
107
+ # encodes the given string with the aws_secret_access_key, by taking the
108
+ # hmac-sha1 sum, and then base64 encoding it. optionally, it will also
109
+ # url encode the result of that to protect the string if it's going to
110
+ # be used as a query string parameter.
111
+ def S3.encode(aws_secret_access_key, str, urlencode=false)
112
+ digest = OpenSSL::Digest::Digest.new('sha1')
113
+ b64_hmac =
114
+ Base64.encode64(
115
+ OpenSSL::HMAC.digest(digest, aws_secret_access_key, str)).strip
116
+
117
+ if urlencode
118
+ return CGI::escape(b64_hmac)
119
+ else
120
+ return b64_hmac
121
+ end
122
+ end
123
+
124
+ # build the path_argument string
125
+ def S3.path_args_hash_to_string(path_args={})
126
+ arg_string = ''
127
+ path_args.each { |k, v|
128
+ arg_string << (arg_string.empty? ? '?' : '&')
129
+ arg_string << k
130
+ if not v.nil?
131
+ arg_string << "=#{CGI::escape(v)}"
132
+ end
133
+ }
134
+ return arg_string
135
+ end
136
+
137
+ # uses Net::HTTP to interface with S3. note that this interface should only
138
+ # be used for smaller objects, as it does not stream the data. if you were
139
+ # to download a 1gb file, it would require 1gb of memory. also, this class
140
+ # creates a new http connection each time. it would be greatly improved with
141
+ # some connection pooling.
142
+ class AWSAuthConnection
143
+ attr_accessor :calling_format
144
+
145
+ def initialize(aws_access_key_id, aws_secret_access_key, is_secure=true,
146
+ server=DEFAULT_HOST, port=PORTS_BY_SECURITY[is_secure],
147
+ calling_format=CallingFormat::SUBDOMAIN)
148
+ @aws_access_key_id = aws_access_key_id
149
+ @aws_secret_access_key = aws_secret_access_key
150
+ @server = server
151
+ @is_secure = is_secure
152
+ @calling_format = calling_format
153
+ @port = port
154
+ end
155
+
156
+ def create_bucket(bucket, headers={})
157
+ return Response.new(make_request('PUT', bucket, '', {}, headers))
158
+ end
159
+
160
+ def create_located_bucket(bucket, location=BucketLocation::DEFAULT, headers={})
161
+ if (location != BucketLocation::DEFAULT)
162
+ xmlbody = "<CreateBucketConstraint><LocationConstraint>#{location}</LocationConstraint></CreateBucketConstraint>"
163
+ end
164
+ return Response.new(make_request('PUT', bucket, '', {}, headers, xmlbody))
165
+ end
166
+
167
+ def check_bucket_exists(bucket)
168
+ begin
169
+ make_request('HEAD', bucket, '', {}, {})
170
+ return true
171
+ rescue Net::HTTPServerException
172
+ response = $!.response
173
+ return false if (response.code.to_i == 404)
174
+ raise
175
+ end
176
+ end
177
+
178
+ # takes options :prefix, :marker, :max_keys, and :delimiter
179
+ def list_bucket(bucket, options={}, headers={})
180
+ path_args = {}
181
+ options.each { |k, v|
182
+ path_args[k] = v.to_s
183
+ }
184
+
185
+ return ListBucketResponse.new(make_request('GET', bucket, '', path_args, headers))
186
+ end
187
+
188
+ def delete_bucket(bucket, headers={})
189
+ return Response.new(make_request('DELETE', bucket, '', {}, headers))
190
+ end
191
+
192
+ def put(bucket, key, object, headers={})
193
+ object = S3Object.new(object) if not object.instance_of? S3Object
194
+
195
+ return Response.new(
196
+ make_request('PUT', bucket, CGI::escape(key), {}, headers, object.data, object.metadata)
197
+ )
198
+ end
199
+
200
+ def get(bucket, key, headers={})
201
+ return GetResponse.new(make_request('GET', bucket, CGI::escape(key), {}, headers))
202
+ end
203
+
204
+ def delete(bucket, key, headers={})
205
+ return Response.new(make_request('DELETE', bucket, CGI::escape(key), {}, headers))
206
+ end
207
+
208
+ def get_bucket_logging(bucket, headers={})
209
+ return GetResponse.new(make_request('GET', bucket, '', {'logging' => nil}, headers))
210
+ end
211
+
212
+ def put_bucket_logging(bucket, logging_xml_doc, headers={})
213
+ return Response.new(make_request('PUT', bucket, '', {'logging' => nil}, headers, logging_xml_doc))
214
+ end
215
+
216
+ def get_bucket_acl(bucket, headers={})
217
+ return get_acl(bucket, '', headers)
218
+ end
219
+
220
+ # returns an xml document representing the access control list.
221
+ # this could be parsed into an object.
222
+ def get_acl(bucket, key, headers={})
223
+ return GetResponse.new(make_request('GET', bucket, CGI::escape(key), {'acl' => nil}, headers))
224
+ end
225
+
226
+ def put_bucket_acl(bucket, acl_xml_doc, headers={})
227
+ return put_acl(bucket, '', acl_xml_doc, headers)
228
+ end
229
+
230
+ # sets the access control policy for the given resource. acl_xml_doc must
231
+ # be a string in the acl xml format.
232
+ def put_acl(bucket, key, acl_xml_doc, headers={})
233
+ return Response.new(
234
+ make_request('PUT', bucket, CGI::escape(key), {'acl' => nil}, headers, acl_xml_doc, {})
235
+ )
236
+ end
237
+
238
+ def get_bucket_location(bucket)
239
+ return LocationResponse.new(make_request('GET', bucket, '', {'location' => nil}, {}))
240
+ end
241
+
242
+ def list_all_my_buckets(headers={})
243
+ return ListAllMyBucketsResponse.new(make_request('GET', '', '', {}, headers))
244
+ end
245
+
246
+ private
247
+ def make_request(method, bucket='', key='', path_args={}, headers={}, data='', metadata={})
248
+
249
+ # build the domain based on the calling format
250
+ server = ''
251
+ if bucket.empty?
252
+ # for a bucketless request (i.e. list all buckets)
253
+ # revert to regular domain case since this operation
254
+ # does not make sense for vanity domains
255
+ server = @server
256
+ elsif @calling_format == CallingFormat::SUBDOMAIN
257
+ server = "#{bucket}.#{@server}"
258
+ elsif @calling_format == CallingFormat::VANITY
259
+ server = bucket
260
+ else
261
+ server = @server
262
+ end
263
+
264
+ # build the path based on the calling format
265
+ path = ''
266
+ if (not bucket.empty?) and (@calling_format == CallingFormat::PATH)
267
+ path << "/#{bucket}"
268
+ end
269
+ # add the slash after the bucket regardless
270
+ # the key will be appended if it is non-empty
271
+ path << "/#{key}"
272
+
273
+ # build the path_argument string
274
+ # add the ? in all cases since
275
+ # signature and credentials follow path args
276
+ path << S3.path_args_hash_to_string(path_args)
277
+
278
+ while true
279
+ http = Net::HTTP.new(server, @port)
280
+ http.use_ssl = @is_secure
281
+ http.start do
282
+ req = method_to_request_class(method).new(path)
283
+
284
+ set_headers(req, headers)
285
+ set_headers(req, metadata, METADATA_PREFIX)
286
+
287
+ set_aws_auth_header(req, @aws_access_key_id, @aws_secret_access_key, bucket, key, path_args)
288
+ if req.request_body_permitted?
289
+ resp = http.request(req, data)
290
+ else
291
+ resp = http.request(req)
292
+ end
293
+
294
+ case resp.code.to_i
295
+ when 100..299
296
+ return resp
297
+ when 300..399 # redirect
298
+ location = resp['location']
299
+ # handle missing location like a normal http error response
300
+ resp.error! if !location
301
+ uri = URI.parse(resp['location'])
302
+ server = uri.host
303
+ path = uri.request_uri
304
+ # try again...
305
+ else
306
+ resp.error!
307
+ end
308
+ end # http.start
309
+ end # while
310
+ end
311
+
312
+ def method_to_request_class(method)
313
+ case method
314
+ when 'GET'
315
+ return Net::HTTP::Get
316
+ when 'HEAD'
317
+ return Net::HTTP::Head
318
+ when 'PUT'
319
+ return Net::HTTP::Put
320
+ when 'DELETE'
321
+ return Net::HTTP::Delete
322
+ else
323
+ raise "Unsupported method #{method}"
324
+ end
325
+ end
326
+
327
+ # set the Authorization header using AWS signed header authentication
328
+ def set_aws_auth_header(request, aws_access_key_id, aws_secret_access_key, bucket='', key='', path_args={})
329
+ # we want to fix the date here if it's not already been done.
330
+ request['Date'] ||= Time.now.httpdate
331
+
332
+ # ruby will automatically add a random content-type on some verbs, so
333
+ # here we add a dummy one to 'suppress' it. change this logic if having
334
+ # an empty content-type header becomes semantically meaningful for any
335
+ # other verb.
336
+ request['Content-Type'] ||= ''
337
+
338
+ canonical_string =
339
+ S3.canonical_string(request.method, bucket, key, path_args, request.to_hash, nil)
340
+ encoded_canonical = S3.encode(aws_secret_access_key, canonical_string)
341
+
342
+ request['Authorization'] = "AWS #{aws_access_key_id}:#{encoded_canonical}"
343
+ end
344
+
345
+ def set_headers(request, headers, prefix='')
346
+ headers.each do |key, value|
347
+ request[prefix + key] = value
348
+ end
349
+ end
350
+ end
351
+
352
+
353
+ # This interface mirrors the AWSAuthConnection class above, but instead
354
+ # of performing the operations, this class simply returns a url that can
355
+ # be used to perform the operation with the query string authentication
356
+ # parameters set.
357
+ class QueryStringAuthGenerator
358
+ attr_accessor :calling_format
359
+ attr_accessor :expires
360
+ attr_accessor :expires_in
361
+ attr_reader :server
362
+ attr_reader :port
363
+ attr_reader :is_secure
364
+
365
+ # by default, expire in 1 minute
366
+ DEFAULT_EXPIRES_IN = 60
367
+
368
+ def initialize(aws_access_key_id, aws_secret_access_key, is_secure=true,
369
+ server=DEFAULT_HOST, port=PORTS_BY_SECURITY[is_secure],
370
+ format=CallingFormat::SUBDOMAIN)
371
+ @aws_access_key_id = aws_access_key_id
372
+ @aws_secret_access_key = aws_secret_access_key
373
+ @protocol = is_secure ? 'https' : 'http'
374
+ @server = server
375
+ @port = port
376
+ @calling_format = format
377
+ @is_secure = is_secure
378
+ # by default expire
379
+ @expires_in = DEFAULT_EXPIRES_IN
380
+ end
381
+
382
+ # set the expires value to be a fixed time. the argument can
383
+ # be either a Time object or else seconds since epoch.
384
+ def expires=(value)
385
+ @expires = value
386
+ @expires_in = nil
387
+ end
388
+
389
+ # set the expires value to expire at some point in the future
390
+ # relative to when the url is generated. value is in seconds.
391
+ def expires_in=(value)
392
+ @expires_in = value
393
+ @expires = nil
394
+ end
395
+
396
+ def create_bucket(bucket, headers={})
397
+ return generate_url('PUT', bucket, '', {}, headers)
398
+ end
399
+
400
+ # takes options :prefix, :marker, :max_keys, and :delimiter
401
+ def list_bucket(bucket, options={}, headers={})
402
+ path_args = {}
403
+ options.each { |k, v|
404
+ path_args[k] = v.to_s
405
+ }
406
+ return generate_url('GET', bucket, '', path_args, headers)
407
+ end
408
+
409
+ def delete_bucket(bucket, headers={})
410
+ return generate_url('DELETE', bucket, '', {}, headers)
411
+ end
412
+
413
+ # don't really care what object data is. it's just for conformance with the
414
+ # other interface. If this doesn't work, check tcpdump to see if the client is
415
+ # putting a Content-Type header on the wire.
416
+ def put(bucket, key, object=nil, headers={})
417
+ object = S3Object.new(object) if not object.instance_of? S3Object
418
+ return generate_url('PUT', bucket, CGI::escape(key), {}, merge_meta(headers, object))
419
+ end
420
+
421
+ def get(bucket, key, headers={})
422
+ return generate_url('GET', bucket, CGI::escape(key), {}, headers)
423
+ end
424
+
425
+ def delete(bucket, key, headers={})
426
+ return generate_url('DELETE', bucket, CGI::escape(key), {}, headers)
427
+ end
428
+
429
+ def get_bucket_logging(bucket, headers={})
430
+ return generate_url('GET', bucket, '', {'logging' => nil}, headers)
431
+ end
432
+
433
+ def put_bucket_logging(bucket, logging_xml_doc, headers={})
434
+ return generate_url('PUT', bucket, '', {'logging' => nil}, headers)
435
+ end
436
+
437
+ def get_acl(bucket, key='', headers={})
438
+ return generate_url('GET', bucket, CGI::escape(key), {'acl' => nil}, headers)
439
+ end
440
+
441
+ def get_bucket_acl(bucket, headers={})
442
+ return get_acl(bucket, '', headers)
443
+ end
444
+
445
+ # don't really care what acl_xml_doc is.
446
+ # again, check the wire for Content-Type if this fails.
447
+ def put_acl(bucket, key, acl_xml_doc, headers={})
448
+ return generate_url('PUT', bucket, CGI::escape(key), {'acl' => nil}, headers)
449
+ end
450
+
451
+ def put_bucket_acl(bucket, acl_xml_doc, headers={})
452
+ return put_acl(bucket, '', acl_xml_doc, headers)
453
+ end
454
+
455
+ def list_all_my_buckets(headers={})
456
+ return generate_url('GET', '', '', {}, headers)
457
+ end
458
+
459
+
460
+ private
461
+ # generate a url with the appropriate query string authentication
462
+ # parameters set.
463
+ def generate_url(method, bucket="", key="", path_args={}, headers={})
464
+ expires = 0
465
+ if not @expires_in.nil?
466
+ expires = Time.now.to_i + @expires_in
467
+ elsif not @expires.nil?
468
+ expires = @expires
469
+ else
470
+ raise "invalid expires state"
471
+ end
472
+
473
+ canonical_string =
474
+ S3::canonical_string(method, bucket, key, path_args, headers, expires)
475
+ encoded_canonical =
476
+ S3::encode(@aws_secret_access_key, canonical_string)
477
+
478
+ url = CallingFormat.build_url_base(@protocol, @server, @port, bucket, @calling_format)
479
+
480
+ path_args["Signature"] = encoded_canonical.to_s
481
+ path_args["Expires"] = expires.to_s
482
+ path_args["AWSAccessKeyId"] = @aws_access_key_id.to_s
483
+ arg_string = S3.path_args_hash_to_string(path_args)
484
+
485
+ return "#{url}/#{key}#{arg_string}"
486
+ end
487
+
488
+ def merge_meta(headers, object)
489
+ final_headers = headers.clone
490
+ if not object.nil? and not object.metadata.nil?
491
+ object.metadata.each do |k, v|
492
+ final_headers[METADATA_PREFIX + k] = v
493
+ end
494
+ end
495
+ return final_headers
496
+ end
497
+ end
498
+
499
+ class S3Object
500
+ attr_accessor :data
501
+ attr_accessor :metadata
502
+ def initialize(data, metadata={})
503
+ @data, @metadata = data, metadata
504
+ end
505
+ end
506
+
507
+ # class for storing calling format constants
508
+ module CallingFormat
509
+ PATH = 0 # http://s3.amazonaws.com/bucket/key
510
+ SUBDOMAIN = 1 # http://bucket.s3.amazonaws.com/key
511
+ VANITY = 2 # http://<vanity_domain>/key -- vanity_domain resolves to s3.amazonaws.com
512
+
513
+ # build the url based on the calling format, and bucket
514
+ def CallingFormat.build_url_base(protocol, server, port, bucket, format)
515
+ build_url_base = "#{protocol}://"
516
+ if bucket.empty?
517
+ build_url_base << "#{server}:#{port}"
518
+ elsif format == SUBDOMAIN
519
+ build_url_base << "#{bucket}.#{server}:#{port}"
520
+ elsif format == VANITY
521
+ build_url_base << "#{bucket}:#{port}"
522
+ else
523
+ build_url_base << "#{server}:#{port}/#{bucket}"
524
+ end
525
+ return build_url_base
526
+ end
527
+ end
528
+
529
+ class Owner
530
+ attr_accessor :id
531
+ attr_accessor :display_name
532
+ end
533
+
534
+ class ListEntry
535
+ attr_accessor :key
536
+ attr_accessor :last_modified
537
+ attr_accessor :etag
538
+ attr_accessor :size
539
+ attr_accessor :storage_class
540
+ attr_accessor :owner
541
+ end
542
+
543
+ class ListProperties
544
+ attr_accessor :name
545
+ attr_accessor :prefix
546
+ attr_accessor :marker
547
+ attr_accessor :max_keys
548
+ attr_accessor :delimiter
549
+ attr_accessor :is_truncated
550
+ attr_accessor :next_marker
551
+ end
552
+
553
+ class CommonPrefixEntry
554
+ attr_accessor :prefix
555
+ end
556
+
557
+ # Parses the list bucket output into a list of ListEntry objects, and
558
+ # a list of CommonPrefixEntry objects if applicable.
559
+ class ListBucketParser
560
+ attr_reader :properties
561
+ attr_reader :entries
562
+ attr_reader :common_prefixes
563
+
564
+ def initialize
565
+ reset
566
+ end
567
+
568
+ def tag_start(name, attributes)
569
+ if name == 'ListBucketResult'
570
+ @properties = ListProperties.new
571
+ elsif name == 'Contents'
572
+ @curr_entry = ListEntry.new
573
+ elsif name == 'Owner'
574
+ @curr_entry.owner = Owner.new
575
+ elsif name == 'CommonPrefixes'
576
+ @common_prefix_entry = CommonPrefixEntry.new
577
+ end
578
+ end
579
+
580
+ # we have one, add him to the entries list
581
+ def tag_end(name)
582
+ text = @curr_text.strip
583
+ # this prefix is the one we echo back from the request
584
+ if name == 'Name'
585
+ @properties.name = text
586
+ elsif name == 'Prefix' and @is_echoed_prefix
587
+ @properties.prefix = text
588
+ @is_echoed_prefix = nil
589
+ elsif name == 'Marker'
590
+ @properties.marker = text
591
+ elsif name == 'MaxKeys'
592
+ @properties.max_keys = text.to_i
593
+ elsif name == 'Delimiter'
594
+ @properties.delimiter = text
595
+ elsif name == 'IsTruncated'
596
+ @properties.is_truncated = text == 'true'
597
+ elsif name == 'NextMarker'
598
+ @properties.next_marker = text
599
+ elsif name == 'Contents'
600
+ @entries << @curr_entry
601
+ elsif name == 'Key'
602
+ @curr_entry.key = text
603
+ elsif name == 'LastModified'
604
+ @curr_entry.last_modified = text
605
+ elsif name == 'ETag'
606
+ @curr_entry.etag = text
607
+ elsif name == 'Size'
608
+ @curr_entry.size = text.to_i
609
+ elsif name == 'StorageClass'
610
+ @curr_entry.storage_class = text
611
+ elsif name == 'ID'
612
+ @curr_entry.owner.id = text
613
+ elsif name == 'DisplayName'
614
+ @curr_entry.owner.display_name = text
615
+ elsif name == 'CommonPrefixes'
616
+ @common_prefixes << @common_prefix_entry
617
+ elsif name == 'Prefix'
618
+ # this is the common prefix for keys that match up to the delimiter
619
+ @common_prefix_entry.prefix = text
620
+ end
621
+ @curr_text = ''
622
+ end
623
+
624
+ def text(text)
625
+ @curr_text += text
626
+ end
627
+
628
+ def xmldecl(version, encoding, standalone)
629
+ # ignore
630
+ end
631
+
632
+ # get ready for another parse
633
+ def reset
634
+ @is_echoed_prefix = true;
635
+ @entries = []
636
+ @curr_entry = nil
637
+ @common_prefixes = []
638
+ @common_prefix_entry = nil
639
+ @curr_text = ''
640
+ end
641
+ end
642
+
643
+ class ListAllMyBucketsParser
644
+ attr_reader :entries
645
+
646
+ def initialize
647
+ reset
648
+ end
649
+
650
+ def tag_start(name, attributes)
651
+ if name == 'Bucket'
652
+ @curr_bucket = Bucket.new
653
+ end
654
+ end
655
+
656
+ # we have one, add him to the entries list
657
+ def tag_end(name)
658
+ text = @curr_text.strip
659
+ if name == 'Bucket'
660
+ @entries << @curr_bucket
661
+ elsif name == 'Name'
662
+ @curr_bucket.name = text
663
+ elsif name == 'CreationDate'
664
+ @curr_bucket.creation_date = text
665
+ end
666
+ @curr_text = ''
667
+ end
668
+
669
+ def text(text)
670
+ @curr_text += text
671
+ end
672
+
673
+ def xmldecl(version, encoding, standalone)
674
+ # ignore
675
+ end
676
+
677
+ # get ready for another parse
678
+ def reset
679
+ @entries = []
680
+ @owner = nil
681
+ @curr_bucket = nil
682
+ @curr_text = ''
683
+ end
684
+ end
685
+
686
+ class ErrorResponseParser
687
+ attr_reader :code
688
+
689
+ def self.parse(msg)
690
+ parser = ErrorResponseParser.new
691
+ REXML::Document.parse_stream(msg, parser)
692
+ parser.code
693
+ end
694
+
695
+ def initialize
696
+ @state = :init
697
+ @code = nil
698
+ end
699
+
700
+ def tag_start(name, attributes)
701
+ case @state
702
+ when :init
703
+ if name == 'Error'
704
+ @state = :tag_error
705
+ else
706
+ @state = :bad
707
+ end
708
+ when :tag_error
709
+ if name == 'Code'
710
+ @state = :tag_code
711
+ @code = ''
712
+ else
713
+ @state = :bad
714
+ end
715
+ end
716
+ end
717
+
718
+ # we have one, add him to the entries list
719
+ def tag_end(name)
720
+ case @state
721
+ when :tag_code
722
+ @state = :done
723
+ end
724
+ end
725
+
726
+ def text(text)
727
+ @code += text if @state == :tag_code
728
+ end
729
+
730
+ def xmldecl(version, encoding, standalone)
731
+ # ignore
732
+ end
733
+ end
734
+
735
+ class LocationParser
736
+ attr_reader :location
737
+
738
+ def self.parse(msg)
739
+ parser = LocationParser.new
740
+ REXML::Document.parse_stream(msg, parser)
741
+ return parser.location
742
+ end
743
+
744
+ def initialize
745
+ @state = :init
746
+ @location = nil
747
+ end
748
+
749
+ def tag_start(name, attributes)
750
+ if @state == :init
751
+ if name == 'LocationConstraint'
752
+ @state = :tag_locationconstraint
753
+ @location = ''
754
+ else
755
+ @state = :bad
756
+ end
757
+ end
758
+ end
759
+
760
+ # we have one, add him to the entries list
761
+ def tag_end(name)
762
+ case @state
763
+ when :tag_locationconstraint
764
+ @state = :done
765
+ end
766
+ end
767
+
768
+ def text(text)
769
+ @location += text if @state == :tag_locationconstraint
770
+ end
771
+
772
+ def xmldecl(version, encoding, standalone)
773
+ # ignore
774
+ end
775
+ end
776
+
777
+ class Response
778
+ attr_reader :http_response
779
+ def initialize(response)
780
+ @http_response = response
781
+ end
782
+
783
+ def message
784
+ if @http_response.body
785
+ @http_response.body
786
+ else
787
+ "#{@http_response.code} #{@http_response.message}"
788
+ end
789
+ end
790
+ end
791
+
792
+ class Bucket
793
+ attr_accessor :name
794
+ attr_accessor :creation_date
795
+ end
796
+
797
+ class GetResponse < Response
798
+ attr_reader :object
799
+ def initialize(response)
800
+ super(response)
801
+ metadata = get_aws_metadata(response)
802
+ data = response.body
803
+ @object = S3Object.new(data, metadata)
804
+ end
805
+
806
+ # parses the request headers and pulls out the s3 metadata into a hash
807
+ def get_aws_metadata(response)
808
+ metadata = {}
809
+ response.each do |key, value|
810
+ if key =~ /^#{METADATA_PREFIX}(.*)$/oi
811
+ metadata[$1] = value
812
+ end
813
+ end
814
+ return metadata
815
+ end
816
+ end
817
+
818
+ class ListBucketResponse < Response
819
+ attr_reader :properties
820
+ attr_reader :entries
821
+ attr_reader :common_prefix_entries
822
+
823
+ def initialize(response)
824
+ super(response)
825
+ if response.is_a? Net::HTTPSuccess
826
+ parser = ListBucketParser.new
827
+ REXML::Document.parse_stream(response.body, parser)
828
+ @properties = parser.properties
829
+ @entries = parser.entries
830
+ @common_prefix_entries = parser.common_prefixes
831
+ else
832
+ @entries = []
833
+ end
834
+ end
835
+ end
836
+
837
+ class ListAllMyBucketsResponse < Response
838
+ attr_reader :entries
839
+ def initialize(response)
840
+ super(response)
841
+ if response.is_a? Net::HTTPSuccess
842
+ parser = ListAllMyBucketsParser.new
843
+ REXML::Document.parse_stream(response.body, parser)
844
+ @entries = parser.entries
845
+ else
846
+ @entries = []
847
+ end
848
+ end
849
+ end
850
+
851
+ class LocationResponse < Response
852
+ attr_reader :location
853
+
854
+ def initialize(response)
855
+ super(response)
856
+ if response.is_a? Net::HTTPSuccess
857
+ @location = LocationParser.parse(response.body)
858
+ end
859
+ end
860
+ end
861
+ end
@@ -0,0 +1,42 @@
1
+ module RedmineS3
2
+ module AttachmentPatch
3
+ def self.included(base) # :nodoc:
4
+ base.extend(ClassMethods)
5
+ base.send(:include, InstanceMethods)
6
+
7
+ # Same as typing in the class
8
+ base.class_eval do
9
+ unloadable # Send unloadable so it will not be unloaded in development
10
+ attr_accessor :s3_access_key_id, :s3_secret_acces_key, :s3_bucket, :s3_bucket
11
+ after_validation :put_to_s3
12
+ before_destroy :delete_from_s3
13
+ end
14
+ end
15
+
16
+ module ClassMethods
17
+ end
18
+
19
+ module InstanceMethods
20
+ def put_to_s3
21
+ if @temp_file && (@temp_file.size > 0)
22
+ logger.debug("Uploading to #{RedmineS3::Connection.uri}/#{path_to_file}")
23
+ RedmineS3::Connection.put(path_to_file, @temp_file.read)
24
+ RedmineS3::Connection.publicly_readable!(path_to_file)
25
+ md5 = Digest::MD5.new
26
+ self.digest = md5.hexdigest
27
+ end
28
+ @temp_file = nil # so that the model's original after_save block skips writing to the fs
29
+ end
30
+
31
+ def delete_from_s3
32
+ logger.debug("Deleting #{RedmineS3::Connection.uri}/#{path_to_file}")
33
+ RedmineS3::Connection.delete(path_to_file)
34
+ end
35
+
36
+ def path_to_file
37
+ # obscure the filename by using the timestamp for the 'directory'
38
+ disk_filename.split('_').first + '/' + disk_filename
39
+ end
40
+ end
41
+ end
42
+ end
@@ -0,0 +1,27 @@
1
+ module RedmineS3
2
+ module AttachmentsControllerPatch
3
+ def self.included(base) # :nodoc:
4
+ base.extend(ClassMethods)
5
+ base.send(:include, InstanceMethods)
6
+
7
+ # Same as typing in the class
8
+ base.class_eval do
9
+ unloadable # Send unloadable so it will not be unloaded in development
10
+ before_filter :redirect_to_s3, :except => :destroy
11
+ skip_before_filter :file_readable
12
+ end
13
+ end
14
+
15
+ module ClassMethods
16
+ end
17
+
18
+ module InstanceMethods
19
+ def redirect_to_s3
20
+ if @attachment.container.is_a?(Version) || @attachment.container.is_a?(Project)
21
+ @attachment.increment_download
22
+ end
23
+ redirect_to("#{RedmineS3::Connection.uri}/#{@attachment.path_to_file}")
24
+ end
25
+ end
26
+ end
27
+ end
@@ -0,0 +1,60 @@
1
+ require 'S3'
2
module RedmineS3
  # Thin wrapper around S3::AWSAuthConnection holding the plugin's S3
  # connection state, configured from config/s3.yml (keyed by Rails.env).
  class Connection
    # Class-instance variables rather than @@ class variables: they are
    # not shared down any inheritance tree and avoid the classic class-var
    # pitfalls. Behavior is identical for this singleton-style class.
    @access_key_id     = nil
    @secret_access_key = nil
    @bucket            = nil
    @uri               = nil
    @conn              = nil

    class << self
      # Reads config/s3.yml for the current Rails environment and caches
      # the credentials, bucket name and public base URI.
      # NOTE: s3.yml is a trusted, operator-provided config file.
      def load_options
        options = YAML::load(File.open(File.join(Rails.root, 'config', 's3.yml')))
        env_options = options[Rails.env]
        @access_key_id     = env_options['access_key_id']
        @secret_access_key = env_options['secret_access_key']
        @bucket            = env_options['bucket']

        # With a CNAME'd bucket the bucket name is itself the hostname.
        @uri = if env_options['cname_bucket'] == true
                 "http://#{@bucket}"
               else
                 "http://s3.amazonaws.com/#{@bucket}"
               end
      end

      # Builds (and caches) the authenticated S3 connection.
      def establish_connection
        load_options unless @access_key_id && @secret_access_key
        @conn = S3::AWSAuthConnection.new(@access_key_id, @secret_access_key, false)
      end

      # Memoized raw S3 connection.
      def conn
        @conn || establish_connection
      end

      # Bucket name from config, loading options on first use.
      def bucket
        load_options unless @bucket
        @bucket
      end

      # Public base URI that uploaded files are served from.
      def uri
        load_options unless @uri
        @uri
      end

      # Creates the configured bucket; returns the HTTP response message.
      def create_bucket
        conn.create_bucket(bucket).http_response.message
      end

      # Stores +data+ under +filename+ in the bucket.
      def put(filename, data)
        conn.put(bucket, filename, data)
      end

      # Grants the anonymous AllUsers group READ access to +filename+.
      def publicly_readable!(filename)
        acl_xml = conn.get_acl(bucket, filename).object.data
        updated_acl = S3Helper.set_acl_public_read(acl_xml)
        conn.put_acl(bucket, filename, updated_acl).http_response.message
      end

      # Removes +filename+ from the bucket.
      def delete(filename)
        conn.delete(bucket, filename)
      end
    end
  end
end
data/lib/s3_helper.rb ADDED
@@ -0,0 +1,111 @@
1
+ require 'S3'
2
+ require 'rexml/document' # for ACL Document manipulation
3
+
4
# Helpers for manipulating S3 ACL XML documents with REXML.
#
# The three public set_acl_* methods previously duplicated ~20 lines of
# grant-building code each (and the WRITE path carried a stale "READ
# grant" comment); they now share two private helpers. Inputs/outputs are
# unchanged: each takes an ACL XML string and returns the rewritten XML.
module S3Helper

  include REXML

  # Grantee URI identifying the anonymous "all users" group.
  ALL_USERS_URI = 'http://acs.amazonaws.com/groups/global/AllUsers'.freeze
  # XPath locating a Grant given to the AllUsers group.
  ALL_USERS_GRANT_XPATH = "//Grant[descendant::URI[text()='#{ALL_USERS_URI}']]".freeze

  # Returns an HTML link to the public URL for +key+ in +bucket_name+.
  # NOTE(review): relies on link_to and S3::DEFAULT_HOST being available
  # in the including context (a Rails view helper) — confirm at call site.
  def public_link(bucket_name, key='')
    url = File.join('http://', S3::DEFAULT_HOST, bucket_name, key)
    link_to(key, url)
  end

  # sets an ACL to public-read
  def self.set_acl_public_read(acl_doc)
    update_all_users_grants(acl_doc, 'READ')
  end

  # sets an ACL to private (removes the AllUsers grant)
  def self.set_acl_private(acl_doc)
    update_all_users_grants(acl_doc)
  end

  # sets an ACL to public-read-write
  def self.set_acl_public_read_write(acl_doc)
    update_all_users_grants(acl_doc, 'READ', 'WRITE')
  end

  # Shared implementation: parses +acl_doc+, deletes the existing
  # AllUsers grant, then appends one new grant per requested permission.
  # Returns the serialized document.
  def self.update_all_users_grants(acl_doc, *permissions)
    doc = Document.new(acl_doc)

    # get AccessControlList node
    acl_node = XPath.first(doc, '//AccessControlList')

    # delete existing 'AllUsers' Grantee
    acl_node.delete_element(ALL_USERS_GRANT_XPATH)

    permissions.each do |permission|
      acl_node.add_element(build_all_users_grant(permission))
    end

    doc.to_s
  end

  # Builds a <Grant> element giving +permission+ to the AllUsers group.
  def self.build_all_users_grant(permission)
    grant_node = Element.new('Grant')

    grantee = Element.new('Grantee')
    grantee.attributes['xmlns:xsi'] = 'http://www.w3.org/2001/XMLSchema-instance'
    grantee.attributes['xsi:type'] = 'Group'

    uri = Element.new('URI')
    uri << Text.new(ALL_USERS_URI)
    grantee.add_element(uri)
    grant_node.add_element(grantee)

    perm = Element.new('Permission')
    perm << Text.new(permission)
    grant_node.add_element(perm)

    grant_node
  end

  private_class_method :update_all_users_grants, :build_all_users_grant

end
data/rails/init.rb ADDED
@@ -0,0 +1 @@
1
# Boot shim for Rails' vendored-plugin loader: delegates to the plugin's
# top-level init.rb one directory up.
require File.dirname(__FILE__) + "/../init"
@@ -0,0 +1,5 @@
1
# Test bootstrap for the plugin's test suite.
# Load the normal Rails helper from the host Redmine application four
# directories up (standard vendor/plugins layout).
require File.expand_path(File.dirname(__FILE__) + '/../../../../test/test_helper')

# Ensure that we are using the temporary fixture path provided by the
# engines plugin, so plugin fixtures do not clobber core fixtures.
Engines::Testing.set_fixture_path
metadata ADDED
@@ -0,0 +1,69 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: edavis10-redmine_s3
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.3
5
+ platform: ruby
6
+ authors:
7
+ - Christopher Dell
8
+ - Eric Davis
9
+ autorequire:
10
+ bindir: bin
11
+ cert_chain: []
12
+
13
+ date: 2010-02-23 00:00:00 -08:00
14
+ default_executable:
15
+ dependencies: []
16
+
17
+ description: Plugin to have Redmine store uploads on S3
18
+ email: edavis@littlestreamsoftware.com
19
+ executables: []
20
+
21
+ extensions: []
22
+
23
+ extra_rdoc_files:
24
+ - README.rdoc
25
+ files:
26
+ - README.rdoc
27
+ - Rakefile
28
+ - VERSION
29
+ - config/locales/en.yml
30
+ - config/s3.yml.example
31
+ - init.rb
32
+ - lang/en.yml
33
+ - lib/S3.rb
34
+ - lib/redmine_s3/attachment_patch.rb
35
+ - lib/redmine_s3/attachments_controller_patch.rb
36
+ - lib/redmine_s3/connection.rb
37
+ - lib/s3_helper.rb
38
+ - rails/init.rb
39
+ - test/test_helper.rb
40
+ has_rdoc: true
41
+ homepage: http://projects.tigrish.com/projects/redmine-s3
42
+ licenses: []
43
+
44
+ post_install_message:
45
+ rdoc_options:
46
+ - --charset=UTF-8
47
+ require_paths:
48
+ - lib
49
+ required_ruby_version: !ruby/object:Gem::Requirement
50
+ requirements:
51
+ - - ">="
52
+ - !ruby/object:Gem::Version
53
+ version: "0"
54
+ version:
55
+ required_rubygems_version: !ruby/object:Gem::Requirement
56
+ requirements:
57
+ - - ">="
58
+ - !ruby/object:Gem::Version
59
+ version: "0"
60
+ version:
61
+ requirements: []
62
+
63
+ rubyforge_project: littlestreamsoftware
64
+ rubygems_version: 1.3.5
65
+ signing_key:
66
+ specification_version: 3
67
+ summary: Plugin to have Redmine store uploads on S3
68
+ test_files:
69
+ - test/test_helper.rb