condo 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (54)
  1. data/LGPL3-LICENSE +165 -0
  2. data/README.textile +20 -0
  3. data/Rakefile +40 -0
  4. data/app/assets/javascripts/condo.js +7 -0
  5. data/app/assets/javascripts/condo/amazon.js +409 -0
  6. data/app/assets/javascripts/condo/base64.js +192 -0
  7. data/app/assets/javascripts/condo/controller.js +162 -0
  8. data/app/assets/javascripts/condo/google.js +292 -0
  9. data/app/assets/javascripts/condo/rackspace.js +340 -0
  10. data/app/assets/javascripts/condo/spark-md5.js +470 -0
  11. data/app/assets/javascripts/condo/uploader.js +298 -0
  12. data/lib/condo.rb +267 -0
  13. data/lib/condo/configuration.rb +129 -0
  14. data/lib/condo/engine.rb +36 -0
  15. data/lib/condo/errors.rb +9 -0
  16. data/lib/condo/strata/amazon_s3.rb +301 -0
  17. data/lib/condo/strata/google_cloud_storage.rb +306 -0
  18. data/lib/condo/strata/rackspace_cloud_files.rb +223 -0
  19. data/lib/condo/version.rb +3 -0
  20. data/lib/tasks/condo_tasks.rake +4 -0
  21. data/test/condo_test.rb +27 -0
  22. data/test/dummy/README.rdoc +261 -0
  23. data/test/dummy/Rakefile +7 -0
  24. data/test/dummy/app/assets/javascripts/application.js +15 -0
  25. data/test/dummy/app/assets/stylesheets/application.css +13 -0
  26. data/test/dummy/app/controllers/application_controller.rb +3 -0
  27. data/test/dummy/app/helpers/application_helper.rb +2 -0
  28. data/test/dummy/app/views/layouts/application.html.erb +14 -0
  29. data/test/dummy/config.ru +4 -0
  30. data/test/dummy/config/application.rb +59 -0
  31. data/test/dummy/config/boot.rb +10 -0
  32. data/test/dummy/config/database.yml +25 -0
  33. data/test/dummy/config/environment.rb +5 -0
  34. data/test/dummy/config/environments/development.rb +37 -0
  35. data/test/dummy/config/environments/production.rb +67 -0
  36. data/test/dummy/config/environments/test.rb +37 -0
  37. data/test/dummy/config/initializers/backtrace_silencers.rb +7 -0
  38. data/test/dummy/config/initializers/inflections.rb +15 -0
  39. data/test/dummy/config/initializers/mime_types.rb +5 -0
  40. data/test/dummy/config/initializers/secret_token.rb +7 -0
  41. data/test/dummy/config/initializers/session_store.rb +8 -0
  42. data/test/dummy/config/initializers/wrap_parameters.rb +14 -0
  43. data/test/dummy/config/locales/en.yml +5 -0
  44. data/test/dummy/config/routes.rb +58 -0
  45. data/test/dummy/db/test.sqlite3 +0 -0
  46. data/test/dummy/log/test.log +25 -0
  47. data/test/dummy/public/404.html +26 -0
  48. data/test/dummy/public/422.html +26 -0
  49. data/test/dummy/public/500.html +25 -0
  50. data/test/dummy/public/favicon.ico +0 -0
  51. data/test/dummy/script/rails +6 -0
  52. data/test/integration/navigation_test.rb +10 -0
  53. data/test/test_helper.rb +15 -0
  54. metadata +180 -0
data/LGPL3-LICENSE
@@ -0,0 +1,165 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+
+ This version of the GNU Lesser General Public License incorporates
+ the terms and conditions of version 3 of the GNU General Public
+ License, supplemented by the additional permissions listed below.
+
+ 0. Additional Definitions.
+
+ As used herein, "this License" refers to version 3 of the GNU Lesser
+ General Public License, and the "GNU GPL" refers to version 3 of the GNU
+ General Public License.
+
+ "The Library" refers to a covered work governed by this License,
+ other than an Application or a Combined Work as defined below.
+
+ An "Application" is any work that makes use of an interface provided
+ by the Library, but which is not otherwise based on the Library.
+ Defining a subclass of a class defined by the Library is deemed a mode
+ of using an interface provided by the Library.
+
+ A "Combined Work" is a work produced by combining or linking an
+ Application with the Library. The particular version of the Library
+ with which the Combined Work was made is also called the "Linked
+ Version".
+
+ The "Minimal Corresponding Source" for a Combined Work means the
+ Corresponding Source for the Combined Work, excluding any source code
+ for portions of the Combined Work that, considered in isolation, are
+ based on the Application, and not on the Linked Version.
+
+ The "Corresponding Application Code" for a Combined Work means the
+ object code and/or source code for the Application, including any data
+ and utility programs needed for reproducing the Combined Work from the
+ Application, but excluding the System Libraries of the Combined Work.
+
+ 1. Exception to Section 3 of the GNU GPL.
+
+ You may convey a covered work under sections 3 and 4 of this License
+ without being bound by section 3 of the GNU GPL.
+
+ 2. Conveying Modified Versions.
+
+ If you modify a copy of the Library, and, in your modifications, a
+ facility refers to a function or data to be supplied by an Application
+ that uses the facility (other than as an argument passed when the
+ facility is invoked), then you may convey a copy of the modified
+ version:
+
+ a) under this License, provided that you make a good faith effort to
+ ensure that, in the event an Application does not supply the
+ function or data, the facility still operates, and performs
+ whatever part of its purpose remains meaningful, or
+
+ b) under the GNU GPL, with none of the additional permissions of
+ this License applicable to that copy.
+
+ 3. Object Code Incorporating Material from Library Header Files.
+
+ The object code form of an Application may incorporate material from
+ a header file that is part of the Library. You may convey such object
+ code under terms of your choice, provided that, if the incorporated
+ material is not limited to numerical parameters, data structure
+ layouts and accessors, or small macros, inline functions and templates
+ (ten or fewer lines in length), you do both of the following:
+
+ a) Give prominent notice with each copy of the object code that the
+ Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the object code with a copy of the GNU GPL and this license
+ document.
+
+ 4. Combined Works.
+
+ You may convey a Combined Work under terms of your choice that,
+ taken together, effectively do not restrict modification of the
+ portions of the Library contained in the Combined Work and reverse
+ engineering for debugging such modifications, if you also do each of
+ the following:
+
+ a) Give prominent notice with each copy of the Combined Work that
+ the Library is used in it and that the Library and its use are
+ covered by this License.
+
+ b) Accompany the Combined Work with a copy of the GNU GPL and this license
+ document.
+
+ c) For a Combined Work that displays copyright notices during
+ execution, include the copyright notice for the Library among
+ these notices, as well as a reference directing the user to the
+ copies of the GNU GPL and this license document.
+
+ d) Do one of the following:
+
+ 0) Convey the Minimal Corresponding Source under the terms of this
+ License, and the Corresponding Application Code in a form
+ suitable for, and under terms that permit, the user to
+ recombine or relink the Application with a modified version of
+ the Linked Version to produce a modified Combined Work, in the
+ manner specified by section 6 of the GNU GPL for conveying
+ Corresponding Source.
+
+ 1) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (a) uses at run time
+ a copy of the Library already present on the user's computer
+ system, and (b) will operate properly with a modified version
+ of the Library that is interface-compatible with the Linked
+ Version.
+
+ e) Provide Installation Information, but only if you would otherwise
+ be required to provide such information under section 6 of the
+ GNU GPL, and only to the extent that such information is
+ necessary to install and execute a modified version of the
+ Combined Work produced by recombining or relinking the
+ Application with a modified version of the Linked Version. (If
+ you use option 4d0, the Installation Information must accompany
+ the Minimal Corresponding Source and Corresponding Application
+ Code. If you use option 4d1, you must provide the Installation
+ Information in the manner specified by section 6 of the GNU GPL
+ for conveying Corresponding Source.)
+
+ 5. Combined Libraries.
+
+ You may place library facilities that are a work based on the
+ Library side by side in a single library together with other library
+ facilities that are not Applications and are not covered by this
+ License, and convey such a combined library under terms of your
+ choice, if you do both of the following:
+
+ a) Accompany the combined library with a copy of the same work based
+ on the Library, uncombined with any other library facilities,
+ conveyed under the terms of this License.
+
+ b) Give prominent notice with the combined library that part of it
+ is a work based on the Library, and explaining where to find the
+ accompanying uncombined form of the same work.
+
+ 6. Revised Versions of the GNU Lesser General Public License.
+
+ The Free Software Foundation may publish revised and/or new versions
+ of the GNU Lesser General Public License from time to time. Such new
+ versions will be similar in spirit to the present version, but may
+ differ in detail to address new problems or concerns.
+
+ Each version is given a distinguishing version number. If the
+ Library as you received it specifies that a certain numbered version
+ of the GNU Lesser General Public License "or any later version"
+ applies to it, you have the option of following the terms and
+ conditions either of that published version or of any later version
+ published by the Free Software Foundation. If the Library as you
+ received it does not specify a version number of the GNU Lesser
+ General Public License, you may choose any version of the GNU Lesser
+ General Public License ever published by the Free Software Foundation.
+
+ If the Library as you received it specifies that a proxy can decide
+ whether future versions of the GNU Lesser General Public License shall
+ apply, that proxy's public statement of acceptance of any version is
+ permanent authorization for you to choose that version for the
+ Library.
data/README.textile
@@ -0,0 +1,20 @@
+ h1. Condominios
+
+ A Rails plugin that makes direct uploads to multiple cloud storage providers easy.
+ Only supports "XMLHttpRequest Level 2":http://en.wikipedia.org/wiki/XMLHttpRequest capable browsers and cloud providers that have a "RESTful API":http://en.wikipedia.org/wiki/Representational_state_transfer with "CORS":http://en.wikipedia.org/wiki/Cross-origin_resource_sharing support.
+
+ Why compromise?
+
+ Get started now: @gem install condo@
+
+
+ h2. License
+
+ GNU Lesser General Public License v3 (LGPL version 3)
+
+
+ h2. Usage
+
+ Coming Soon!
+
+
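Editor's note: the browser requirement above amounts to XMLHttpRequest Level 2 support, i.e. the ability to send a File/Blob directly to the storage provider's endpoint and observe upload progress. The sketch below illustrates that kind of cross-origin request in general terms; it is not Condo's API, and the signed `url` is assumed to be supplied by the application's back end.

// Minimal sketch (not Condo's API): an XHR Level 2 PUT of a File/Blob
// straight to a cloud provider, with progress reporting. The signed
// `url` and any required headers are assumed to come from the server.
function directUpload(url, blob, onProgress) {
    var xhr = new XMLHttpRequest();

    xhr.upload.addEventListener('progress', function (e) {
        if (e.lengthComputable) {
            onProgress(e.loaded / e.total);   // fraction complete
        }
    });

    xhr.onload = function () {
        console.log('upload finished with status', xhr.status);
    };
    xhr.onerror = function () {
        console.log('upload failed (network or CORS error)');
    };

    xhr.open('PUT', url, true);               // cross-origin; the provider must allow CORS
    xhr.send(blob);                           // Blob/File request bodies are an XHR2 feature
}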
data/Rakefile
@@ -0,0 +1,40 @@
+ #!/usr/bin/env rake
+ begin
+ require 'bundler/setup'
+ rescue LoadError
+ puts 'You must `gem install bundler` and `bundle install` to run rake tasks'
+ end
+ begin
+ require 'rdoc/task'
+ rescue LoadError
+ require 'rdoc/rdoc'
+ require 'rake/rdoctask'
+ RDoc::Task = Rake::RDocTask
+ end
+
+ RDoc::Task.new(:rdoc) do |rdoc|
+ rdoc.rdoc_dir = 'rdoc'
+ rdoc.title = 'Condo'
+ rdoc.options << '--line-numbers'
+ rdoc.rdoc_files.include('README.rdoc')
+ rdoc.rdoc_files.include('lib/**/*.rb')
+ end
+
+ APP_RAKEFILE = File.expand_path("../test/dummy/Rakefile", __FILE__)
+ load 'rails/tasks/engine.rake'
+
+
+
+ Bundler::GemHelper.install_tasks
+
+ require 'rake/testtask'
+
+ Rake::TestTask.new(:test) do |t|
+ t.libs << 'lib'
+ t.libs << 'test'
+ t.pattern = 'test/**/*_test.rb'
+ t.verbose = false
+ end
+
+
+ task :default => :test
data/app/assets/javascripts/condo.js
@@ -0,0 +1,7 @@
+ //= require condo/spark-md5
+ //= require condo/base64
+ //= require condo/uploader
+ //= require condo/amazon
+ //= require condo/rackspace
+ //= require condo/google
+ //= require condo/controller
data/app/assets/javascripts/condo/amazon.js
@@ -0,0 +1,409 @@
+ /**
+ * CoTag Condo Amazon S3 Strategy
+ * Direct to cloud resumable uploads for Amazon S3
+ *
+ * Copyright (c) 2012 CoTag Media.
+ *
+ * @author Stephen von Takach <steve@cotag.me>
+ * @copyright 2012 cotag.me
+ *
+ *
+ * References:
+ * * https://github.com/umdjs/umd
+ * * https://github.com/addyosmani/jquery-plugin-patterns
+ * *
+ *
+ **/
+
+ (function (factory) {
+ if (typeof define === 'function' && define.amd) {
+ // AMD
+ define(['jquery', 'spark-md5', 'base64', 'condo_uploader'], factory);
+ } else {
+ // Browser globals
+ factory(jQuery, window.SparkMD5, window.base64, window.CondoUploader);
+ }
+ }(function ($, MD5, base64, uploads, undefined) {
+ 'use strict';
+
+ //
+ // TODO:: Create an Amazon, google factory etc
+ // We should split all these into different files too (controller and factories separate from directives and views)
+ // So we can have different views for the same controller
+ //
+ uploads.factory('Condo.AmazonS3', ['$rootScope', '$q', function($rootScope, $q) {
+ var PENDING = 0,
+ STARTED = 1,
+ PAUSED = 2,
+ UPLOADING = 3,
+ COMPLETED = 4,
+ ABORTED = 5,
+
+
+
+ hexToBin = function(input) {
+ var result = "";
+
+ if ((input.length % 2) > 0) {
+ input = '0' + input;
+ }
+
+ for (var i = 0, length = input.length; i < length; i += 2) {
+ result += String.fromCharCode(parseInt(input.slice(i, i + 2), 16));
+ }
+
+ return result;
+ },
+
+
+ Amazon = function (api, file) {
+ var self = this,
+ strategy = null,
+ part_size = 5242880, // Multi-part uploads should be bigger than this
+ defaultError = function(reason) {
+ self.pause(reason);
+ },
+
+ restart = function() {
+ strategy = null;
+ },
+
+
+ completeUpload = function() {
+ api.update().then(function(data) {
+ self.state = COMPLETED;
+ }, defaultError);
+ },
+
+
+ //
+ // We need to sign our uploads so amazon can confirm they are valid for us
+ // Part numbers can be any number from 1 to 10,000 - inclusive
+ // TODO:: use http://updates.html5rocks.com/2011/12/Transferable-Objects-Lightning-Fast
+ // where available :)
+ //
+ build_request = function(part_number) {
+ var result = $q.defer(),
+ reader = new FileReader(),
+ fail = function(){
+ result.reject('file read failed');
+ },
+ current_part;
+
+ if (file.size > part_size) { // If the file is bigger than 5MB we expect a chunked upload
+ var endbyte = part_number * part_size;
+ if (endbyte > file.size)
+ endbyte = file.size;
+ current_part = file.slice((part_number - 1) * part_size, endbyte);
+ } else {
+ current_part = file;
+ }
+
+ reader.onload = function(e) {
+ result.resolve({
+ data: current_part,
+ data_id: MD5.hashBinary(e.target.result),
+ part_number: part_number
+ });
+
+
+ if(!$rootScope.$$phase) {
+ $rootScope.$apply(); // This triggers the promise response if required
+ }
+ };
+ reader.onerror = fail;
+ reader.onabort = fail;
+ reader.readAsBinaryString(current_part);
+
+ return result.promise;
+ },
+
+ //
+ // Direct file upload strategy
+ //
+ AmazonDirect = function(data) {
+ //
+ // resume
+ // abort
+ // pause
+ //
+ var $this = this,
+ finalising = false;
+
+ //
+ // Update the parent
+ //
+ self.state = UPLOADING;
+
+
+ //
+ // This will only be called when the upload has finished and we need to inform the application
+ //
+ this.resume = function() {
+ self.state = UPLOADING;
+ completeUpload();
+ }
+
+ this.pause = function() {
+ api.abort();
+
+ if(!finalising) {
+ restart(); // Should occur before events triggered
+ self.progress = 0;
+ }
+ };
+
+
+ //
+ // AJAX for upload goes here
+ //
+ data['data'] = file;
+ api.process_request(data, function(progress) {
+ self.progress = progress;
+ }).then(function(result) {
+ finalising = true;
+ $this.resume(); // Resume informs the application that the upload is complete
+ }, function(reason) {
+ self.progress = 0;
+ defaultError(reason);
+ });
+ }, // END DIRECT
+
+
+ //
+ // Chunked upload strategy--------------------------------------------------
+ //
+ AmazonChunked = function (data, first_chunk) {
+ //
+ // resume
+ // abort
+ // pause
+ //
+ var part_ids = [],
+ last_part = 0,
+
+
+ generatePartManifest = function() {
+ var list = '<CompleteMultipartUpload>';
+
+ for (var i = 0, length = part_ids.length; i < length; i += 1) {
+ list += '<Part><PartNumber>' + (i + 1) + '</PartNumber><ETag>"' + part_ids[i] + '"</ETag></Part>';
+ }
+ list += '</CompleteMultipartUpload>';
+ return list;
+ },
+
+ //
+ // Get the next part signature
+ //
+ next_part = function(part_number) {
+ //
+ // Check if we are past the end of the file
+ //
+ if ((part_number - 1) * part_size < file.size) {
+ build_request(part_number).then(function(result) {
+ if (self.state != UPLOADING)
+ return; // upload was paused or aborted as we were reading the file
+
+ api.edit(part_number, base64.encode(hexToBin(result.data_id))).
+ then(function(data) {
+ set_part(data, result);
+ }, defaultError);
+
+ }, function(reason){
+ self.pause(reason);
+ }); // END BUILD_REQUEST
+
+ } else {
+ //
+ // We're after the final commit
+ //
+ api.edit('finish').
+ then(function(request) {
+ request['data'] = generatePartManifest();
+ api.process_request(request).then(completeUpload, defaultError);
+ }, defaultError);
+ }
+ },
+
+
+ //
+ // Send a part to amazon
+ //
+ set_part = function(request, part_info) {
+ request['data'] = part_info.data;
+ api.process_request(request, function(progress) {
+ self.progress = (part_info.part_number - 1) * part_size + progress;
+ }).then(function(result) {
+ part_ids.push(part_info.data_id); // We need to record the list of part IDs for completion
+ last_part = part_info.part_number;
+ next_part(last_part + 1);
+ }, function(reason) {
+ self.progress = (part_info.part_number - 1) * part_size;
+ defaultError(reason);
+ });
+ };
+
+
+ self.state = UPLOADING;
+
+ this.resume = function() {
+ self.state = UPLOADING;
+ next_part(last_part + 1);
+ };
+
+ this.pause = function() {
+ api.abort();
+ };
+
+
+ //
+ // We need to check if we are grabbing a parts list or creating an upload
+ //
+ api.process_request(data).then(function(response) {
+ if(data.type == 'parts') { // was the original request for a list of parts
+ //
+ // NextPartNumberMarker == the final part in the current request
+ // TODO:: if IsTruncated is set then we need to keep getting parts
+ //
+ response = $(response);
+ var next = parseInt(response.find('NextPartNumberMarker').eq(0).text()),
+ etags = response.find('ETag');
+
+ etags.each(function(index) {
+ part_ids.push($(this).text().replace(/"{1}/gi,'')); // Removes " from strings
+ });
+
+ last_part = next; // So we can resume
+ next_part(next + 1); // As NextPartNumberMarker is just the last part uploaded
+ } else {
+ //
+ // We've created the upload - we need to update the application with the upload id.
+ // This will also return the request for uploading the first part which we've already prepared
+ //
+ api.update({
+ resumable_id: $(response).find('UploadId').eq(0).text(),
+ file_id: base64.encode(hexToBin(first_chunk.data_id)),
+ part: 1
+ }).then(function(data) {
+ set_part(data, first_chunk); // Parts start at 1
+ }, function(reason) {
+ defaultError(reason);
+ restart(); // Easier to start from the beginning
+ });
+ }
+ }, function(reason) {
+ defaultError(reason);
+ restart(); // We need to get a new request signature
+ });
+ }; // END CHUNKED
+
+
+ //
+ // Variables required for all drivers
+ //
+ this.state = PENDING;
+ this.progress = 0;
+ this.message = 'pending';
+ this.name = file.name;
+ this.size = file.size;
+
+
+ //
+ // Support file slicing
+ //
+ if (typeof(file.slice) != 'function')
+ file.slice = file.webkitSlice || file.mozSlice;
+
+
+ this.start = function(){
+ if(strategy == null) { // We need to create the upload
+ //
+ // Update part size if required
+ //
+ if((part_size * 9999) < file.size) {
+ part_size = file.size / 9999;
+ if(part_size > (5 * 1024 * 1024 * 1024)) { // 5GB limit on part sizes
+ this.abort('file too big');
+ return;
+ }
+ }
+
+ this.message = null;
+ this.state = STARTED;
+ strategy = {}; // This function shouldn't be called twice so we need a state (TODO:: fix this)
+
+ build_request(1).then(function(result) {
+ if (self.state != STARTED)
+ return; // upload was paused or aborted as we were reading the file
+
+ api.create({file_id: base64.encode(hexToBin(result.data_id))}).
+ then(function(data) {
+ if(data.type == 'direct_upload') {
+ strategy = new AmazonDirect(data);
+ } else {
+ strategy = new AmazonChunked(data, result);
+ }
+ }, defaultError);
+
+ }, function(reason){
+ self.pause(reason);
+ }); // END BUILD_REQUEST
+
+
+ } else if (this.state == PAUSED) { // We need to resume the upload if it is paused
+ this.message = null;
+ strategy.resume();
+ }
+ };
+
+ this.pause = function(reason) {
+ if(strategy != null && this.state == UPLOADING) { // Check if the upload is uploading
+ this.state = PAUSED;
+ strategy.pause();
+ } else if (this.state <= STARTED) {
+ this.state = PAUSED;
+ restart();
+ }
+ if(this.state == PAUSED)
+ this.message = reason;
+ };
+
+ this.abort = function(reason) {
+ if(strategy != null && this.state < COMPLETED) { // Check the upload has not finished
+ var old_state = this.state;
+
+ this.state = ABORTED;
+ api.abort();
+
+
+ //
+ // As we may not have successfully deleted the upload
+ // or we aborted before we received a response from create
+ //
+ restart(); // nullifies strategy
+
+
+ //
+ // if we have an upload_id then we should destroy the upload
+ // we won't worry if this fails as it should be automatically cleaned up by the back end
+ //
+ if(old_state > STARTED) {
+ api.destroy();
+ }
+
+ this.message = reason;
+ }
+ };
+ }; // END AMAZON
+
+
+ return {
+ new_upload: function(api, file) {
+ return new Amazon(api, file);
+ }
+ };
+
+ }]);
+
+ }));
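Editor's note: the sizing rules in start() above are easy to miss: parts default to 5,242,880 bytes (5 MiB), the part size is grown whenever the file would not fit into the roughly 10,000 parts S3 allows (the code uses 9999), and a single part may not exceed 5 GB. The sketch below restates that arithmetic, plus the hex-MD5-to-base64 conversion performed by hexToBin and base64.encode; it is a standalone illustration under those assumptions, not part of the gem, and the sample file size is made up.

// Sketch of the part-sizing rules used by the Amazon strategy above.
var MIN_PART = 5242880,                  // 5 MiB default part size
    MAX_PARTS = 9999,                    // the code keeps part counts below 10,000
    MAX_PART = 5 * 1024 * 1024 * 1024;   // 5 GB per-part ceiling

function choosePartSize(fileSize) {
    var size = MIN_PART;
    if (size * MAX_PARTS < fileSize) {   // file too large for 9999 default-size parts
        size = fileSize / MAX_PARTS;     // grow the part size (the original leaves this value unrounded)
    }
    if (size > MAX_PART) {
        return null;                     // mirrors this.abort('file too big')
    }
    return size;
}

// Content-MD5 style value: hex digest -> raw bytes -> base64,
// matching base64.encode(hexToBin(data_id)) in the code above.
function md5HexToBase64(hexDigest) {
    var bin = '';
    if (hexDigest.length % 2) { hexDigest = '0' + hexDigest; }
    for (var i = 0; i < hexDigest.length; i += 2) {
        bin += String.fromCharCode(parseInt(hexDigest.slice(i, i + 2), 16));
    }
    return window.btoa(bin);             // assumes a browser btoa; the gem ships its own base64.js
}

console.log(choosePartSize(100 * 1024 * 1024 * 1024));  // e.g. a hypothetical 100 GB file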