condo 1.0.6 → 2.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.textile +19 -32
- data/lib/condo.rb +124 -127
- data/lib/condo/configuration.rb +41 -76
- data/lib/condo/engine.rb +32 -39
- data/lib/condo/errors.rb +6 -8
- data/lib/condo/strata/amazon_s3.rb +246 -294
- data/lib/condo/strata/google_cloud_storage.rb +238 -272
- data/lib/condo/strata/open_stack_swift.rb +251 -0
- data/lib/condo/version.rb +1 -1
- metadata +31 -96
- data/app/assets/javascripts/condo.js +0 -9
- data/app/assets/javascripts/condo/amazon.js +0 -403
- data/app/assets/javascripts/condo/condo.js +0 -184
- data/app/assets/javascripts/condo/config.js +0 -69
- data/app/assets/javascripts/condo/google.js +0 -338
- data/app/assets/javascripts/condo/md5/hash.worker.emulator.js +0 -23
- data/app/assets/javascripts/condo/md5/hash.worker.js +0 -11
- data/app/assets/javascripts/condo/md5/hasher.js +0 -119
- data/app/assets/javascripts/condo/md5/spark-md5.js +0 -599
- data/app/assets/javascripts/condo/rackspace.js +0 -326
- data/app/assets/javascripts/condo/services/abstract-md5.js.erb +0 -86
- data/app/assets/javascripts/condo/services/base64.js +0 -184
- data/app/assets/javascripts/condo/services/broadcaster.js +0 -26
- data/app/assets/javascripts/condo/services/uploader.js +0 -302
- data/app/assets/javascripts/core/core.js +0 -4
- data/app/assets/javascripts/core/services/1-safe-apply.js +0 -17
- data/app/assets/javascripts/core/services/2-messaging.js +0 -171
- data/lib/condo/strata/rackspace_cloud_files.rb +0 -245
- data/test/condo_test.rb +0 -27
- data/test/dummy/README.rdoc +0 -261
- data/test/dummy/Rakefile +0 -7
- data/test/dummy/app/assets/javascripts/application.js +0 -15
- data/test/dummy/app/assets/stylesheets/application.css +0 -13
- data/test/dummy/app/controllers/application_controller.rb +0 -3
- data/test/dummy/app/helpers/application_helper.rb +0 -2
- data/test/dummy/app/views/layouts/application.html.erb +0 -14
- data/test/dummy/config.ru +0 -4
- data/test/dummy/config/application.rb +0 -59
- data/test/dummy/config/boot.rb +0 -10
- data/test/dummy/config/database.yml +0 -25
- data/test/dummy/config/environment.rb +0 -5
- data/test/dummy/config/environments/development.rb +0 -37
- data/test/dummy/config/environments/production.rb +0 -67
- data/test/dummy/config/environments/test.rb +0 -37
- data/test/dummy/config/initializers/backtrace_silencers.rb +0 -7
- data/test/dummy/config/initializers/inflections.rb +0 -15
- data/test/dummy/config/initializers/mime_types.rb +0 -5
- data/test/dummy/config/initializers/secret_token.rb +0 -7
- data/test/dummy/config/initializers/session_store.rb +0 -8
- data/test/dummy/config/initializers/wrap_parameters.rb +0 -14
- data/test/dummy/config/locales/en.yml +0 -5
- data/test/dummy/config/routes.rb +0 -58
- data/test/dummy/public/404.html +0 -26
- data/test/dummy/public/422.html +0 -26
- data/test/dummy/public/500.html +0 -25
- data/test/dummy/public/favicon.ico +0 -0
- data/test/dummy/script/rails +0 -6
- data/test/integration/navigation_test.rb +0 -10
- data/test/test_helper.rb +0 -15
--- data/app/assets/javascripts/condo/config.js
+++ /dev/null
@@ -1,69 +0,0 @@
-/**
- * CoTag Condo
- * Direct to cloud resumable uploads
- *
- * Copyright (c) 2012 CoTag Media.
- *
- * @author Stephen von Takach <steve@cotag.me>
- * @copyright 2012 cotag.me
- *
- *
- * References:
- * * http://docs.angularjs.org/api/AUTO.$provide
- * * http://jsfiddle.net/pkozlowski_opensource/PxdSP/14/
- *
- **/
-
-
-(function (angular, undefined) {
-    'use strict';
-
-
-    //
-    // Create a provider for defining the configuration
-    //
-    angular.module('Condo').
-        provider('Condo.Config', function() {
-
-            //
-            // Controller options
-            //
-            this.endpoint = '/uploads'; // Default endpoint path
-            this.autostart = true; // Start uploading as soon as the file is added?
-            this.ignore_errors = true; // Continue to autostart after an error?
-            this.parallelism = 1; // number of autostarted uploads at once
-            this.size_limit = undefined; // defaults to unlimited
-            this.file_checker = function(file) { // client side filtering of files
-                return true;
-            };
-
-            //
-            // Directive options (specifically for the condo default interface)
-            //
-            this.delegate = undefined; // defaults to the condo interface container
-            this.drop_targets = undefined; // defaults to the condo interface container
-            this.hover_class = 'drag-hover'; // for styling the interface
-            this.supress_notifications = false; // this prevents js alerts about warnings and errors if you are observing these yourself (Condo.Broadcast)
-
-
-
-            this.$get = function() {
-                var self = this;
-
-                return {
-                    endpoint: self.endpoint,
-                    autostart: self.autostart,
-                    ignore_errors: self.ignore_errors,
-                    parallelism: self.parallelism,
-                    file_checker: self.file_checker,
-                    size_limit: self.size_limit,
-
-                    delegate: self.delegate,
-                    drop_targets: self.drop_targets,
-                    hover_class: self.hover_class,
-                    supress_notifications: self.supress_notifications
-                };
-            };
-        });
-
-})(angular);
--- data/app/assets/javascripts/condo/google.js
+++ /dev/null
@@ -1,338 +0,0 @@
-/**
- * CoTag Condo Google Strategy
- * Direct to cloud resumable uploads for Google Cloud Storage
- *
- * Copyright (c) 2012 CoTag Media.
- *
- * @author Stephen von Takach <steve@cotag.me>
- * @copyright 2012 cotag.me
- *
- *
- * References:
- * *
- *
- **/
-
-
-(function(angular, base64, undefined) {
-    'use strict';
-
-    angular.module('Condo').
-
-        factory('Condo.Google', ['$q', 'Condo.Md5', function($q, md5) {
-            var PENDING = 0,
-                STARTED = 1,
-                PAUSED = 2,
-                UPLOADING = 3,
-                COMPLETED = 4,
-                ABORTED = 5,
-
-
-
-                hexToBin = function(input) {
-                    var result = "", i, length;
-
-                    if ((input.length % 2) > 0) {
-                        input = '0' + input;
-                    }
-
-                    for (i = 0, length = input.length; i < length; i += 2) {
-                        result += String.fromCharCode(parseInt(input.slice(i, i + 2), 16));
-                    }
-
-                    return result;
-                },
-
-
-                GoogleCloudStorage = function (api, file) {
-                    var self = this,
-                        strategy = null,
-                        pausing = false,
-                        defaultError = function(reason) {
-                            self.error = !pausing;
-                            pausing = false;
-                            self.pause(reason);
-                        },
-
-                        restart = function() {
-                            strategy = null;
-                        },
-
-
-                        completeUpload = function() {
-                            api.update().then(function() {
-                                self.progress = self.size; // Update to 100%
-                                self.state = COMPLETED;
-                            }, defaultError);
-                        },
-
-
-                        //
-                        // We need to sign our uploads so Google can confirm they are valid for us
-                        //
-                        build_request = function(chunk) {
-                            return md5.hash(chunk).then(function(val) {
-                                return {
-                                    data: chunk,
-                                    data_id: base64.encode(hexToBin(val))
-                                };
-                            }, function(reason){
-                                return $q.reject(reason);
-                            });
-                        },
-
-                        //
-                        // Direct file upload strategy
-                        //
-                        GoogleDirect = function(data) {
-                            //
-                            // resume
-                            // abort
-                            // pause
-                            //
-                            var $this = this,
-                                finalising = false;
-
-                            //
-                            // Update the parent
-                            //
-                            self.state = UPLOADING;
-
-
-                            //
-                            // This will only be called when the upload has finished and we need to inform the application
-                            //
-                            this.resume = function() {
-                                self.state = UPLOADING;
-                                completeUpload();
-                            };
-
-                            this.pause = function() {
-                                api.abort();
-
-                                if(!finalising) {
-                                    restart(); // Should occur before events triggered
-                                    self.progress = 0;
-                                }
-                            };
-
-
-                            //
-                            // AJAX for upload goes here
-                            //
-                            data['data'] = file;
-                            api.process_request(data, function(progress) {
-                                self.progress = progress;
-                            }).then(function() {
-                                finalising = true;
-                                $this.resume(); // Resume informs the application that the upload is complete
-                            }, function(reason) {
-                                self.progress = 0;
-                                defaultError(reason);
-                            });
-                        }, // END DIRECT
-
-
-                        //
-                        // Resumable upload strategy--------------------------------------------------
-                        //
-                        GoogleResumable = function (data, file_hash, finalising) {
-                            var getQueryParams = function(qs) {
-                                    qs = qs.split("+").join(" ");
-
-                                    var params = {}, tokens,
-                                        re = /[?&]?([^=]+)=([^&]*)/g;
-
-                                    while (tokens = re.exec(qs)) { // NOTE:: we expect the assignment here
-                                        params[decodeURIComponent(tokens[1])] = decodeURIComponent(tokens[2]);
-                                    }
-
-                                    return params;
-                                },
-
-
-                                resume_upload = function(request, file_hash, range_start) {
-                                    request.data = file_hash.data;
-                                    api.process_request(request, function(progress) {
-                                        self.progress = range_start + progress;
-                                    }).then(function(result) {
-                                        finalising = true;
-                                        completeUpload();
-                                    }, function(reason) {
-                                        defaultError(reason);
-                                    });
-                                };
-
-
-
-
-                            self.state = UPLOADING;
-
-                            this.resume = function() {
-                                self.state = UPLOADING;
-                                if (finalising == true) {
-                                    completeUpload();
-                                } else {
-                                    api.create({file_id: file_hash.data_id}).
-                                        then(function(data) {
-                                            if(data.type == 'direct_upload') {
-                                                strategy = new GoogleDirect(data);
-                                            } else {
-                                                strategy = new GoogleResumable(data, file_hash);
-                                            }
-                                        }, defaultError);
-                                }
-                            };
-
-                            this.pause = function() {
-                                api.abort();
-                            };
-
-
-
-                            api.process_request(data).then(function(response) {
-                                //
-                                // Check if we were grabbing a parts list or creating an upload
-                                //
-                                if(data.type == 'status') { // the request was for the byte we are up to
-                                    // Get the byte we were up to here and update the application
-                                    var range_start = parseInt(response[1].getResponseHeader('Range').split('-')[1], 10) + 1;
-
-                                    build_request(file.slice(range_start)).then(function(result) {
-                                        if (self.state != UPLOADING) {
-                                            return; // upload was paused or aborted as we were reading the file
-                                        }
-
-                                        api.edit(range_start, result.data_id).
-                                            then(function(data) {
-                                                resume_upload(data, result, range_start);
-                                            }, defaultError);
-
-                                    }, defaultError); // END BUILD_REQUEST
-                                } else {
-                                    //
-                                    // We've created the upload - we need to update our application with the upload id.
-                                    // This will also return the request for uploading the file which we've already prepared
-                                    //
-                                    api.update({
-                                        resumable_id: getQueryParams(response[1].getResponseHeader('Location').split('?')[1]).upload_id, // grab the upload_id from the Location header
-                                        file_id: file_hash.data_id,
-                                        part: 0 // part for google === the byte we are up to
-                                    }).then(function(data) {
-                                        resume_upload(data, file_hash, 0); // As this is the first upload attempt we want to upload from byte 0
-                                    }, function(reason) {
-                                        defaultError(reason);
-                                        restart(); // Easier to start from the beginning
-                                    });
-                                }
-                            }, function(reason) {
-                                defaultError(reason);
-                                restart(); // We need to get a new request signature
-                            });
-                        }; // END RESUMABLE
-
-
-                    //
-                    // Variables required for all drivers
-                    //
-                    this.state = PENDING;
-                    this.progress = 0;
-                    this.message = 'pending';
-                    this.name = file.name;
-                    this.size = file.size;
-                    this.error = false;
-
-
-                    //
-                    // Support file slicing
-                    //
-                    if (typeof(file.slice) != 'function') {
-                        file.slice = file.webkitSlice || file.mozSlice;
-                    }
-
-
-                    this.start = function(){
-                        if(strategy == null) { // We need to create the upload
-
-                            this.error = false;
-                            pausing = false;
-                            this.message = null;
-                            this.state = STARTED;
-                            strategy = {}; // This function shouldn't be called twice so we need a state
-
-                            build_request(file).then(function(result) {
-                                if (self.state != STARTED) { return; } // upload was paused or aborted as we were reading the file
-
-                                api.create({file_id: result.data_id}).
-                                    then(function(data) {
-                                        if(data.type == 'direct_upload') {
-                                            strategy = new GoogleDirect(data);
-                                        } else {
-                                            strategy = new GoogleResumable(data, result);
-                                        }
-                                    }, defaultError);
-
-                            }, defaultError); // END BUILD_REQUEST
-
-
-                        } else if (this.state == PAUSED) { // We need to resume the upload if it is paused
-                            this.error = false;
-                            pausing = false;
-                            this.message = null;
-                            strategy.resume();
-                        }
-                    };
-
-                    this.pause = function(reason) {
-                        if(strategy != null && this.state == UPLOADING) { // Check if the upload is uploading
-                            this.state = PAUSED;
-                            pausing = true;
-                            strategy.pause();
-                        } else if (this.state <= STARTED) {
-                            this.state = PAUSED;
-                            restart();
-                        }
-                        if(this.state == PAUSED) { this.message = reason; }
-                    };
-
-                    this.abort = function(reason) {
-                        if(strategy != null && this.state < COMPLETED) { // Check the upload has not finished
-                            var old_state = this.state;
-
-                            this.state = ABORTED;
-                            api.abort();
-
-
-                            //
-                            // As we may not have successfully deleted the upload
-                            // or we aborted before we received a response from create
-                            //
-                            restart(); // nullifies strategy
-
-
-                            //
-                            // if we have an upload_id then we should destroy the upload
-                            // we won't worry if this fails as it should be automatically cleaned up by the back end
-                            //
-                            if(old_state > STARTED) {
-                                api.destroy();
-                            }
-
-                            this.message = reason;
-                        }
-                    };
-                }; // END GOOGLE
-
-
-            return {
-                new_upload: function(api, file) {
-                    return new GoogleCloudStorage(api, file);
-                }
-            };
-        }]).
-
-        config(['Condo.ApiProvider', function (ApiProvider) {
-            ApiProvider.register('GoogleCloudStorage', 'Condo.Google');
-        }]);
-
-})(angular, window.base64);
--- data/app/assets/javascripts/condo/md5/hash.worker.emulator.js
+++ /dev/null
@@ -1,23 +0,0 @@
-//= require condo/md5/spark-md5
-//= require condo/md5/hasher
-
-
-function CondoHashWorkerEmulator(callback) {
-
-    // Create an API that looks like postMessage
-    this.postMessage = function (data, portArray) {
-        hasher.hash(data); // Clone the data if required JSON.parse(JSON.stringify(message)); // - Don't think it is required
-    }
-
-
-    this.terminate = function () {
-        // No special clean-up needed.
-    }
-
-    function messageEvtEmulator(rawMessage) {
-        callback({ data: rawMessage });
-    }
-
-    // Create an instance of downloader.
-    var hasher = new CondoMD5Hasher(messageEvtEmulator, false);
-}
--- data/app/assets/javascripts/condo/md5/hasher.js
+++ /dev/null
@@ -1,119 +0,0 @@
-
-
-var CondoMD5Hasher = (function(global) {
-
-    var part_size = 1048576, // This is the amount of the file we read into memory as we are building the hash (1mb)
-        async = true,
-        newReader = !!navigator.userAgent.toLowerCase().match(/opera/); // Opera bug (opera can only use a reader once)
-
-
-    //
-    // Firefox does not have FileReader in webworkers? WTF
-    // See issue #2
-    //
-    if (!!!global.FileReader) {
-        async = false;
-        global.FileReader = global.FileReaderSync;
-    }
-
-    //
-    // Some browsers have a vendor prefix on slice
-    //
-    if (!!!Blob.prototype.slice) {
-        Blob.prototype.slice = Blob.prototype.webkitSlice || Blob.prototype.mozSlice;
-    }
-
-    return function(callback, inWorker) {
-
-
-        //
-        // Opera claims to have async readers in webworkers however it is actually just
-        // a synchronous reader in disguise
-        //
-        if (newReader && inWorker) {
-            async = false;
-            global.FileReader = global.FileReaderSync || global.FileReader;
-        }
-
-
-        //
-        // responds with: {success: true|false, result: <Object>}
-        //
-        this.hash = function(blob) {
-
-            var current_part,
-                md5 = new global.SparkMD5.ArrayBuffer(),
-                part_number = 0,
-                length = Math.ceil(blob.size / part_size),
-                reader,
-
-                fail = function() {
-                    callback({
-                        success: false,
-                        result: 'file read failed'
-                    });
-                },
-
-                hashData = function(e) {
-                    md5.append(e.target.result);
-                    if(part_number * part_size >= blob.size) {
-                        callback({
-                            success: true,
-                            result: md5.end()
-                        });
-                    } else {
-                        if (newReader) configureReader();
-                        processPart();
-                    }
-                },
-
-                processPart = function() {
-                    var endbyte = 0;
-
-                    part_number += 1;
-
-                    if (blob.size > part_size) { // If blob bigger then part_size we will slice it up
-                        endbyte = part_number * part_size;
-                        if (endbyte > blob.size)
-                            endbyte = blob.size;
-
-                        current_part = blob.slice((part_number - 1) * part_size, endbyte);
-                    } else {
-                        current_part = blob;
-                    }
-
-                    if(async)
-                        reader.readAsArrayBuffer(current_part);
-                    else {
-                        setTimeout(function() {
-                            try {
-                                hashData({
-                                    target: {
-                                        result: reader.readAsArrayBuffer(current_part)
-                                    }
-                                });
-                            } catch (e) {
-                                fail();
-                            }
-                        }, 0);
-                    }
-
-                },
-
-                configureReader = function() {
-                    reader = new global.FileReader();
-                    if(async) {
-                        reader.onload = hashData;
-                        reader.onerror = fail;
-                        reader.onabort = fail;
-                    }
-                };
-
-
-            configureReader();
-            processPart();
-        };
-    };
-
-})(this);
-