dbox 0.6.15 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/History.txt CHANGED
@@ -1,3 +1,8 @@
+ == 0.7.0 / 2012-11-05
+ * Major Enhancements
+ * Changed to be case insensitive, like Dropbox.
+ * Removed support for concurrent uploads/downloads, as it was often encountering throttling from Dropbox APIs.
+
  == 0.6.15 / 2012-07-26
  * Bug Fixes
  * Fixed issue with gem in 0.6.14.
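
Since Dropbox itself treats paths case-insensitively, 0.7.0 makes dbox treat two paths that differ only in case as the same file. A minimal sketch of the comparison rule, mirroring the `case_insensitive_equal` helper this diff adds to `lib/dbox/utils.rb` (`same_file?` is a hypothetical name used here for illustration only):

```ruby
# Sketch of the case-insensitivity rule adopted in 0.7.0; mirrors the
# case_insensitive_equal helper added to lib/dbox/utils.rb in this diff.
def same_file?(a, b)
  a && b && a.downcase == b.downcase
end

same_file?("Photos/IMG_001.jpg", "photos/img_001.JPG")  # => true
same_file?("notes.txt", "notes.md")                     # => false
```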
data/README.md CHANGED
@@ -263,12 +263,3 @@ $ export DROPBOX_AUTH_SECRET=pqej9rmnj0i1gcxr4
  > File.read("#{ENV['HOME']}/Dropbox/Public/hello.txt")
  => "Oh, Hello"
  ```
-
- Advanced
- --------
-
- To speed up your syncs, you can manually set the number concurrent dropbox operations to execute. (The default is 2.)
-
- ```sh
- $ export DROPBOX_CONCURRENCY=5
- ```
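
Note for anyone upgrading: with concurrency support removed in 0.7.0, the `DROPBOX_CONCURRENCY` variable documented in the deleted README section above no longer has any effect; the `lib/dbox/syncer.rb` changes later in this diff remove the code that read it.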
data/Rakefile CHANGED
@@ -18,7 +18,7 @@ Jeweler::Tasks.new do |gem|
  gem.add_dependency "oauth", ">= 0.4.5"
  gem.add_dependency "json", ">= 1.5.3"
  gem.add_dependency "sqlite3", ">= 1.3.3"
- gem.add_dependency "activesupport", ">= 3.0.1"
+ gem.add_dependency "insensitive_hash", ">= 0.3.0"
  end
  Jeweler::RubygemsDotOrgTasks.new
 
data/TODO.txt CHANGED
@@ -1,3 +1,8 @@
+ * Upgrade to newest Dropbox Ruby SDK.
+ * Look into using the new /chunked_upload support instead of custom streaming upload.
+ * Look into using /delta api.
+ * Try to get rid of extra updated dirs on pull after push (have push update all the mtimes and such that it affects, so the pull is clean). -- This will clean up the tests more than anything.
+ * Ensure Dropbox API best practices are being followed as closely as possible (https://www.dropbox.com/developers/reference/bestpractice).
  * Look into occasional hanging on pull operations.
  * Look into memory leak when syncing a large amount of files.
  * Look into 10000 file limit on metadata operations.
data/VERSION CHANGED
@@ -1 +1 @@
- 0.6.15
+ 0.7.0
data/dbox.gemspec CHANGED
@@ -5,11 +5,11 @@
 
  Gem::Specification.new do |s|
  s.name = "dbox"
- s.version = "0.6.15"
+ s.version = "0.7.0"
 
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Ken Pratt"]
- s.date = "2012-07-26"
+ s.date = "2012-11-06"
  s.description = "An easy-to-use Dropbox client with fine-grained control over syncs."
  s.email = "ken@kenpratt.net"
  s.executables = ["dbox"]
@@ -32,7 +32,6 @@ Gem::Specification.new do |s|
  "lib/dbox/database.rb",
  "lib/dbox/db.rb",
  "lib/dbox/loggable.rb",
- "lib/dbox/parallel_tasks.rb",
  "lib/dbox/syncer.rb",
  "lib/dbox/utils.rb",
  "sample_polling_script.rb",
@@ -54,7 +53,7 @@ Gem::Specification.new do |s|
  s.homepage = "http://github.com/kenpratt/dbox"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
- s.rubygems_version = "1.8.16"
+ s.rubygems_version = "1.8.24"
  s.summary = "Dropbox made easy."
 
  if s.respond_to? :specification_version then
@@ -65,20 +64,20 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency(%q<oauth>, [">= 0.4.5"])
  s.add_runtime_dependency(%q<json>, [">= 1.5.3"])
  s.add_runtime_dependency(%q<sqlite3>, [">= 1.3.3"])
- s.add_runtime_dependency(%q<activesupport>, [">= 3.0.1"])
+ s.add_runtime_dependency(%q<insensitive_hash>, [">= 0.3.0"])
  else
  s.add_dependency(%q<multipart-post>, [">= 1.1.2"])
  s.add_dependency(%q<oauth>, [">= 0.4.5"])
  s.add_dependency(%q<json>, [">= 1.5.3"])
  s.add_dependency(%q<sqlite3>, [">= 1.3.3"])
- s.add_dependency(%q<activesupport>, [">= 3.0.1"])
+ s.add_dependency(%q<insensitive_hash>, [">= 0.3.0"])
  end
  else
  s.add_dependency(%q<multipart-post>, [">= 1.1.2"])
  s.add_dependency(%q<oauth>, [">= 0.4.5"])
  s.add_dependency(%q<json>, [">= 1.5.3"])
  s.add_dependency(%q<sqlite3>, [">= 1.3.3"])
- s.add_dependency(%q<activesupport>, [">= 3.0.1"])
+ s.add_dependency(%q<insensitive_hash>, [">= 0.3.0"])
  end
  end
 
data/lib/dbox.rb CHANGED
@@ -9,14 +9,13 @@ require "yaml"
  require "logger"
  require "cgi"
  require "sqlite3"
- require "active_support/core_ext/hash/indifferent_access"
+ require "insensitive_hash/minimal"
 
  require "dbox/loggable"
  require "dbox/utils"
  require "dbox/api"
  require "dbox/database"
  require "dbox/db"
- require "dbox/parallel_tasks"
  require "dbox/syncer"
 
  module Dbox
data/lib/dbox/api.rb CHANGED
@@ -89,7 +89,7 @@ module Dbox
  def handle_response(path, res, &else_proc)
  case res
  when Hash
- HashWithIndifferentAccess.new(res)
+ InsensitiveHash[res]
  when String
  res
  when Net::HTTPNotFound
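
A rough sketch of what this swap changes in practice, assuming the `insensitive_hash` gem's documented semantics (keys are both string/symbol-indifferent and case-insensitive), which is the behavior dbox needs now that Dropbox metadata is handled case-insensitively:

```ruby
require "insensitive_hash/minimal"

# API metadata wrapped the way handle_response now wraps it;
# the hash values here are illustrative only.
res  = { "path" => "/Public/Hello.txt", "is_dir" => false }
meta = InsensitiveHash[res]

meta["path"]   # => "/Public/Hello.txt"
meta[:path]    # => "/Public/Hello.txt"  (string/symbol indifferent, as before)
meta[:PATH]    # => "/Public/Hello.txt"  (case-insensitive, which is new)
```

ActiveSupport's `HashWithIndifferentAccess` provided only the string/symbol bridging, which is why it could be dropped.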
data/lib/dbox/database.rb CHANGED
@@ -55,20 +55,21 @@ module Dbox
  @db.execute_batch(%{
  CREATE TABLE IF NOT EXISTS metadata (
  id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
- remote_path varchar(255) NOT NULL,
+ remote_path text COLLATE NOCASE UNIQUE NOT NULL,
  version integer NOT NULL
  );
  CREATE TABLE IF NOT EXISTS entries (
  id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
- path varchar(255) UNIQUE NOT NULL,
+ path text COLLATE NOCASE UNIQUE NOT NULL,
  is_dir boolean NOT NULL,
  parent_id integer REFERENCES entries(id) ON DELETE CASCADE,
- local_hash varchar(255),
- remote_hash varchar(255),
+ local_hash text,
+ remote_hash text,
  modified datetime,
- revision varchar(255)
+ revision text
  );
  CREATE INDEX IF NOT EXISTS entry_parent_ids ON entries(parent_id);
+ CREATE INDEX IF NOT EXISTS entry_path ON entries(path);
  })
  end
 
@@ -82,7 +83,7 @@ module Dbox
  ALTER TABLE metadata RENAME TO metadata_old;
  CREATE TABLE metadata (
  id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
- remote_path varchar(255) NOT NULL,
+ remote_path text NOT NULL,
  version integer NOT NULL
  );
  INSERT INTO metadata SELECT id, remote_path, version FROM metadata_old;
@@ -121,12 +122,12 @@ module Dbox
  ALTER TABLE entries RENAME TO entries_old;
  CREATE TABLE entries (
  id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
- path varchar(255) UNIQUE NOT NULL,
+ path text UNIQUE NOT NULL,
  is_dir boolean NOT NULL,
  parent_id integer REFERENCES entries(id) ON DELETE CASCADE,
- hash varchar(255),
+ hash text,
  modified datetime,
- revision varchar(255)
+ revision text
  );
  INSERT INTO entries SELECT id, path, is_dir, parent_id, hash, modified, null FROM entries_old;
  })
@@ -153,13 +154,13 @@ module Dbox
  ALTER TABLE entries RENAME TO entries_old;
  CREATE TABLE entries (
  id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
- path varchar(255) UNIQUE NOT NULL,
+ path text UNIQUE NOT NULL,
  is_dir boolean NOT NULL,
  parent_id integer REFERENCES entries(id) ON DELETE CASCADE,
- local_hash varchar(255),
- remote_hash varchar(255),
+ local_hash text,
+ remote_hash text,
  modified datetime,
- revision varchar(255)
+ revision text
  );
  INSERT INTO entries SELECT id, path, is_dir, parent_id, null, hash, modified, revision FROM entries_old;
  })
@@ -181,6 +182,50 @@ module Dbox
  COMMIT;
  })
  end
+
+ if metadata[:version] < 5
+ log.info "Migrating to database schema v5"
+
+ # make path be case insensitive
+ @db.execute_batch(%{
+ BEGIN TRANSACTION;
+
+ -- migrate metadata table
+ ALTER TABLE metadata RENAME TO metadata_old;
+ CREATE TABLE IF NOT EXISTS metadata (
+ id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+ remote_path text COLLATE NOCASE UNIQUE NOT NULL,
+ version integer NOT NULL
+ );
+ INSERT INTO metadata SELECT id, remote_path, version FROM metadata_old;
+ DROP TABLE metadata_old;
+
+ -- migrate entries table
+ ALTER TABLE entries RENAME TO entries_old;
+ CREATE TABLE entries (
+ id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
+ path text COLLATE NOCASE UNIQUE NOT NULL,
+ is_dir boolean NOT NULL,
+ parent_id integer REFERENCES entries(id) ON DELETE CASCADE,
+ local_hash text,
+ remote_hash text,
+ modified datetime,
+ revision text
+ );
+ INSERT INTO entries SELECT id, path, is_dir, parent_id, local_hash, remote_hash, modified, revision FROM entries_old;
+ DROP TABLE entries_old;
+
+ -- recreate indexes
+ DROP INDEX IF EXISTS entry_parent_ids;
+ DROP INDEX IF EXISTS entry_path;
+ CREATE INDEX entry_parent_ids ON entries(parent_id);
+ CREATE INDEX entry_path ON entries(path);
+
+ -- update version
+ UPDATE metadata SET version = 5;
+ COMMIT;
+ })
+ end
  end
 
  METADATA_COLS = [ :remote_path, :version ] # don't need to return id
@@ -189,7 +234,7 @@ module Dbox
  def bootstrap(remote_path)
  @db.execute(%{
  INSERT INTO metadata (remote_path, version) VALUES (?, ?);
- }, remote_path, 4)
+ }, remote_path, 5)
  @db.execute(%{
  INSERT INTO entries (path, is_dir) VALUES (?, ?)
  }, "", 1)
@@ -341,6 +386,8 @@ module Dbox
  h = make_fields(entry_cols, res)
  h[:is_dir] = (h[:is_dir] == 1)
  h[:modified] = Time.at(h[:modified]) if h[:modified]
+ h[:local_path] = relative_to_local_path(h[:path])
+ h[:remote_path] = relative_to_remote_path(h[:path])
  h
  else
  nil
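
The `COLLATE NOCASE` columns above push the same rule down into SQLite: lookups and the `UNIQUE` constraints on paths now ignore case (ASCII letters only, per SQLite's built-in NOCASE collation). A runnable sketch of that behavior with the sqlite3 gem; the table and values are illustrative, not dbox's actual data:

```ruby
require "sqlite3"

db = SQLite3::Database.new(":memory:")
db.execute("CREATE TABLE entries (path text COLLATE NOCASE UNIQUE NOT NULL)")
db.execute("INSERT INTO entries (path) VALUES (?)", "Foo.txt")

# comparisons on the NOCASE column ignore case
db.execute("SELECT path FROM entries WHERE path = ?", "foo.txt")
# => [["Foo.txt"]]

# and the UNIQUE constraint rejects a duplicate that differs only in case
begin
  db.execute("INSERT INTO entries (path) VALUES (?)", "FOO.TXT")
rescue SQLite3::ConstraintException
  # duplicate path caught case-insensitively
end
```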
data/lib/dbox/syncer.rb CHANGED
@@ -1,6 +1,5 @@
  module Dbox
  class Syncer
- DEFAULT_CONCURRENCY = 2
  MIN_BYTES_TO_STREAM_DOWNLOAD = 1024 * 100 # 100kB
 
  include Loggable
@@ -36,11 +35,6 @@ module Dbox
  @@_api ||= API.connect
  end
 
- def self.concurrency
- n = ENV["DROPBOX_CONCURRENCY"].to_i
- n > 0 ? n : DEFAULT_CONCURRENCY
- end
-
  class Operation
  include Loggable
  include Utils
@@ -53,11 +47,7 @@ module Dbox
  end
 
  def api
- Thread.current[:api] || @api
- end
-
- def clone_api_into_current_thread
- Thread.current[:api] = api.clone()
+ @api
  end
 
  def metadata
@@ -78,7 +68,7 @@ module Dbox
 
  def current_dir_entries_as_hash(dir)
  if dir[:id]
- out = {}
+ out = InsensitiveHash.new
  database.contents(dir[:id]).each {|e| out[e[:path]] = e }
  out
  else
@@ -99,17 +89,20 @@ module Dbox
  end
 
  def saving_parent_timestamp(entry, &proc)
- local_path = relative_to_local_path(entry[:path])
- parent = File.dirname(local_path)
+ parent = File.dirname(entry[:local_path])
  saving_timestamp(parent, &proc)
  end
 
  def update_file_timestamp(entry)
- File.utime(Time.now, entry[:modified], relative_to_local_path(entry[:path]))
+ begin
+ File.utime(Time.now, entry[:modified], entry[:local_path])
+ rescue Errno::ENOENT
+ nil
+ end
  end
 
  def gather_remote_info(entry)
- res = api.metadata(relative_to_remote_path(entry[:path]), entry[:remote_hash])
+ res = api.metadata(entry[:remote_path], entry[:remote_hash])
  case res
  when Hash
  out = process_basic_remote_props(res)
@@ -133,6 +126,8 @@ module Dbox
  def process_basic_remote_props(res)
  out = {}
  out[:path] = remote_to_relative_path(res[:path])
+ out[:local_path] = relative_to_local_path(out[:path])
+ out[:remote_path] = relative_to_remote_path(out[:path])
  out[:modified] = parse_time(res[:modified])
  out[:is_dir] = res[:is_dir]
  out[:remote_hash] = res[:hash] if res[:hash]
@@ -188,62 +183,57 @@ module Dbox
  parent_ids_of_failed_entries = []
  changelist = { :created => [], :deleted => [], :updated => [], :failed => [] }
 
- # spin up a parallel task queue
- ptasks = ParallelTasks.new(Syncer.concurrency) { clone_api_into_current_thread() }
- ptasks.start
-
  changes.each do |op, c|
  case op
  when :create
  c[:parent_id] ||= lookup_id_by_path(c[:parent_path])
  if c[:is_dir]
- # directory creation cannot go in a thread, since later
- # operations might depend on the directory being there
+ # create the local directory
  create_dir(c)
  database.add_entry(c[:path], true, c[:parent_id], c[:modified], c[:revision], c[:remote_hash], nil)
  changelist[:created] << c[:path]
  else
- ptasks.add do
- begin
- res = create_file(c)
- local_hash = calculate_hash(relative_to_local_path(c[:path]))
- database.add_entry(c[:path], false, c[:parent_id], c[:modified], c[:revision], c[:remote_hash], local_hash)
- changelist[:created] << c[:path]
- if res.kind_of?(Array) && res[0] == :conflict
- changelist[:conflicts] ||= []
- changelist[:conflicts] << res[1]
- end
- rescue Exception => e
- log.error "Error while downloading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
- parent_ids_of_failed_entries << c[:parent_id]
- changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
+ # download the new file
+ begin
+ res = create_file(c)
+ local_hash = calculate_hash(c[:local_path])
+ database.add_entry(c[:path], false, c[:parent_id], c[:modified], c[:revision], c[:remote_hash], local_hash)
+ changelist[:created] << c[:path]
+ if res.kind_of?(Array) && res[0] == :conflict
+ changelist[:conflicts] ||= []
+ changelist[:conflicts] << res[1]
  end
+ rescue Exception => e
+ log.error "Error while downloading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
+ parent_ids_of_failed_entries << c[:parent_id]
+ changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
  end
  end
  when :update
  if c[:is_dir]
+ # update the local directory
  update_dir(c)
  database.update_entry_by_path(c[:path], :modified => c[:modified], :revision => c[:revision], :remote_hash => c[:remote_hash])
  changelist[:updated] << c[:path]
  else
- ptasks.add do
- begin
- res = update_file(c)
- local_hash = calculate_hash(relative_to_local_path(c[:path]))
- database.update_entry_by_path(c[:path], :modified => c[:modified], :revision => c[:revision], :remote_hash => c[:remote_hash], :local_hash => local_hash)
- changelist[:updated] << c[:path]
- if res.kind_of?(Array) && res[0] == :conflict
- changelist[:conflicts] ||= []
- changelist[:conflicts] << res[1]
- end
- rescue Exception => e
- log.error "Error while downloading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
- parent_ids_of_failed_entries << c[:parent_id]
- changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
+ # download updates to the file
+ begin
+ res = update_file(c)
+ local_hash = calculate_hash(c[:local_path])
+ database.update_entry_by_path(c[:path], :modified => c[:modified], :revision => c[:revision], :remote_hash => c[:remote_hash], :local_hash => local_hash)
+ changelist[:updated] << c[:path]
+ if res.kind_of?(Array) && res[0] == :conflict
+ changelist[:conflicts] ||= []
+ changelist[:conflicts] << res[1]
  end
+ rescue Exception => e
+ log.error "Error while downloading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
+ parent_ids_of_failed_entries << c[:parent_id]
+ changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
  end
  end
  when :delete
+ # delete the local directory/file
  c[:is_dir] ? delete_dir(c) : delete_file(c)
  database.delete_entry_by_path(c[:path])
  changelist[:deleted] << c[:path]
@@ -255,9 +245,6 @@ module Dbox
  end
  end
 
- # wait for operations to finish
- ptasks.finish
-
  # clear hashes on any dirs with children that failed so that
  # they are processed again on next pull
  parent_ids_of_failed_entries.uniq.each do |id|
@@ -320,24 +307,20 @@ module Dbox
  end
 
  # add any deletions
- out += (existing_entries.keys.sort - found_paths.sort).map do |p|
+ out += case_insensitive_difference(existing_entries.keys, found_paths).map do |p|
  [:delete, existing_entries[p]]
  end
  end
 
  # recursively process new & existing subdirectories in parallel
- threads = recur_dirs.map do |operation, dir|
- Thread.new do
- begin
- clone_api_into_current_thread()
- Thread.current[:out] = calculate_changes(dir, operation)
- rescue Exception => e
- log.error "Error while caclulating changes for #{operation} on #{dir[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
- Thread.current[:out] = [[:failed, dir.merge({ :operation => operation, :error => e })]]
- end
+ recur_dirs.each do |operation, dir|
+ begin
+ out += calculate_changes(dir, operation)
+ rescue Exception => e
+ log.error "Error while caclulating changes for #{operation} on #{dir[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
+ out += [[:failed, dir.merge({ :operation => operation, :error => e })]]
  end
  end
- threads.each {|t| t.join; out += t[:out] }
 
  out
  end
@@ -351,7 +334,7 @@ module Dbox
  end
 
  def create_dir(dir)
- local_path = relative_to_local_path(dir[:path])
+ local_path = dir[:local_path]
  log.info "Creating #{local_path}"
  saving_parent_timestamp(dir) do
  FileUtils.mkdir_p(local_path)
@@ -364,7 +347,7 @@ module Dbox
  end
 
  def delete_dir(dir)
- local_path = relative_to_local_path(dir[:path])
+ local_path = dir[:local_path]
  log.info "Deleting #{local_path}"
  saving_parent_timestamp(dir) do
  FileUtils.rm_r(local_path)
@@ -382,7 +365,7 @@ module Dbox
  end
 
  def delete_file(file)
- local_path = relative_to_local_path(file[:path])
+ local_path = file[:local_path]
  log.info "Deleting file: #{local_path}"
  saving_parent_timestamp(file) do
  FileUtils.rm_rf(local_path)
@@ -390,8 +373,8 @@ module Dbox
  end
 
  def download_file(file)
- local_path = relative_to_local_path(file[:path])
- remote_path = relative_to_remote_path(file[:path])
+ local_path = file[:local_path]
+ remote_path = file[:remote_path]
 
  # check to ensure we aren't overwriting an untracked file or a
  # file with local modifications
@@ -449,41 +432,34 @@ module Dbox
  log.debug "Executing changes:\n" + changes.map {|c| c.inspect }.join("\n")
  changelist = { :created => [], :deleted => [], :updated => [], :failed => [] }
 
- # spin up a parallel task queue
- ptasks = ParallelTasks.new(Syncer.concurrency) { clone_api_into_current_thread() }
- ptasks.start
-
  changes.each do |op, c|
  case op
  when :create
  c[:parent_id] ||= lookup_id_by_path(c[:parent_path])
 
  if c[:is_dir]
- # directory creation cannot go in a thread, since later
- # operations might depend on the directory being there
+ # create the remote directiory
  create_dir(c)
  database.add_entry(c[:path], true, c[:parent_id], nil, nil, nil, nil)
  force_metadata_update_from_server(c)
  changelist[:created] << c[:path]
  else
- # spin up a thread to upload the file
- ptasks.add do
- begin
- local_hash = calculate_hash(relative_to_local_path(c[:path]))
- res = upload_file(c)
- database.add_entry(c[:path], false, c[:parent_id], nil, nil, nil, local_hash)
- if c[:path] == res[:path]
- force_metadata_update_from_server(c)
- changelist[:created] << c[:path]
- else
- log.warn "#{c[:path]} had a conflict and was renamed to #{res[:path]} on the server"
- changelist[:conflicts] ||= []
- changelist[:conflicts] << { :original => c[:path], :renamed => res[:path] }
- end
- rescue Exception => e
- log.error "Error while uploading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
- changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
+ # upload a new file
+ begin
+ local_hash = calculate_hash(c[:local_path])
+ res = upload_file(c)
+ database.add_entry(c[:path], false, c[:parent_id], nil, nil, nil, local_hash)
+ if case_insensitive_equal(c[:path], res[:path])
+ force_metadata_update_from_server(c)
+ changelist[:created] << c[:path]
+ else
+ log.warn "#{c[:path]} had a conflict and was renamed to #{res[:path]} on the server"
+ changelist[:conflicts] ||= []
+ changelist[:conflicts] << { :original => c[:path], :renamed => res[:path] }
  end
+ rescue Exception => e
+ log.error "Error while uploading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
+ changelist[:failed] << { :operation => :create, :path => c[:path], :error => e }
  end
  end
  when :update
@@ -494,46 +470,41 @@ module Dbox
 
  # only update files -- nothing to do to update a dir
  if !c[:is_dir]
-
- # spin up a thread to upload the file
- ptasks.add do
- begin
- local_hash = calculate_hash(relative_to_local_path(c[:path]))
- res = upload_file(c)
- database.update_entry_by_path(c[:path], :local_hash => local_hash)
- if c[:path] == res[:path]
- force_metadata_update_from_server(c)
- changelist[:updated] << c[:path]
- else
- log.warn "#{c[:path]} had a conflict and was renamed to #{res[:path]} on the server"
- changelist[:conflicts] ||= []
- changelist[:conflicts] << { :original => c[:path], :renamed => res[:path] }
- end
- rescue Exception => e
- log.error "Error while uploading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
- changelist[:failed] << { :operation => :update, :path => c[:path], :error => e }
+ # upload changes to a file
+ begin
+ local_hash = calculate_hash(c[:local_path])
+ res = upload_file(c)
+ database.update_entry_by_path(c[:path], :local_hash => local_hash)
+ if case_insensitive_equal(c[:path], res[:path])
+ force_metadata_update_from_server(c)
+ changelist[:updated] << c[:path]
+ else
+ log.warn "#{c[:path]} had a conflict and was renamed to #{res[:path]} on the server"
+ changelist[:conflicts] ||= []
+ changelist[:conflicts] << { :original => c[:path], :renamed => res[:path] }
  end
+ rescue Exception => e
+ log.error "Error while uploading #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
+ changelist[:failed] << { :operation => :update, :path => c[:path], :error => e }
  end
  end
  when :delete
- # spin up a thread to delete the file/dir
- ptasks.add do
+ # delete a remote file/directory
+ begin
  begin
- begin
- if c[:is_dir]
- delete_dir(c)
- else
- delete_file(c)
- end
- rescue Dbox::RemoteMissing
- # safe to delete even if remote is already gone
+ if c[:is_dir]
+ delete_dir(c)
+ else
+ delete_file(c)
  end
- database.delete_entry_by_path(c[:path])
- changelist[:deleted] << c[:path]
- rescue Exception => e
- log.error "Error while deleting #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
- changelist[:failed] << { :operation => :delete, :path => c[:path], :error => e }
+ rescue Dbox::RemoteMissing
+ # safe to delete even if remote is already gone
  end
+ database.delete_entry_by_path(c[:path])
+ changelist[:deleted] << c[:path]
+ rescue Exception => e
+ log.error "Error while deleting #{c[:path]}: #{e.inspect}\n#{e.backtrace.join("\n")}"
+ changelist[:failed] << { :operation => :delete, :path => c[:path], :error => e }
  end
  when :failed
  changelist[:failed] << { :operation => c[:operation], :path => c[:path], :error => c[:error] }
@@ -542,9 +513,6 @@ module Dbox
  end
  end
 
- # wait for operations to finish
- ptasks.finish
-
  # sort & return output
  sort_changelist(changelist)
  end
@@ -559,7 +527,17 @@ module Dbox
  child_paths = list_contents(dir).sort
 
  child_paths.each do |p|
- c = { :path => p, :modified => mtime(p), :is_dir => is_dir(p), :parent_path => dir[:path], :local_hash => calculate_hash(relative_to_local_path(p)) }
+ local_path = relative_to_local_path(p)
+ remote_path = relative_to_remote_path(p)
+ c = {
+ :path => p,
+ :local_path => local_path,
+ :remote_path => remote_path,
+ :modified => mtime(local_path),
+ :is_dir => is_dir(local_path),
+ :parent_path => dir[:path],
+ :local_hash => calculate_hash(local_path)
+ }
  if entry = existing_entries[p]
  c[:id] = entry[:id]
  recur_dirs << c if c[:is_dir] # queue dir for later
@@ -572,7 +550,7 @@ module Dbox
  end
 
  # add any deletions
- out += (existing_entries.keys.sort - child_paths).map do |p|
+ out += case_insensitive_difference(existing_entries.keys, child_paths).map do |p|
  [:delete, existing_entries[p]]
  end
 
@@ -585,11 +563,11 @@ module Dbox
  end
 
  def mtime(path)
- File.mtime(relative_to_local_path(path))
+ File.mtime(path)
  end
 
  def is_dir(path)
- File.directory?(relative_to_local_path(path))
+ File.directory?(path)
  end
 
  def modified?(entry, res)
@@ -607,30 +585,30 @@ module Dbox
  end
 
  def list_contents(dir)
- local_path = relative_to_local_path(dir[:path])
+ local_path = dir[:local_path]
  paths = Dir.entries(local_path).reject {|s| s == "." || s == ".." || s.start_with?(".") }
  paths.map {|p| local_to_relative_path(File.join(local_path, p)) }
  end
 
  def create_dir(dir)
- remote_path = relative_to_remote_path(dir[:path])
+ remote_path = dir[:remote_path]
  log.info "Creating #{remote_path}"
  api.create_dir(remote_path)
  end
 
  def delete_dir(dir)
- remote_path = relative_to_remote_path(dir[:path])
+ remote_path = dir[:remote_path]
  api.delete_dir(remote_path)
  end
 
  def delete_file(file)
- remote_path = relative_to_remote_path(file[:path])
+ remote_path = file[:remote_path]
  api.delete_file(remote_path)
  end
 
  def upload_file(file)
- local_path = relative_to_local_path(file[:path])
- remote_path = relative_to_remote_path(file[:path])
+ local_path = file[:local_path]
+ remote_path = file[:remote_path]
  db_entry = database.find_by_path(file[:path])
  last_revision = db_entry ? db_entry[:revision] : nil
  res = api.put_file(remote_path, local_path, last_revision)
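
Two changes run through the syncer diff above: every `ptasks.add { ... }` block now executes inline, so uploads, downloads, and deletes happen one at a time; and the upload conflict check compares paths with `case_insensitive_equal` instead of `==`, presumably because Dropbox can echo back a path whose casing differs from what was sent without that being a real conflict. A small sketch of the latter (the paths are made up for illustration):

```ruby
# Sketch: why the conflict check moved from == to a case-insensitive compare.
sent     = "Docs/Report.txt"
returned = "docs/report.txt"    # server may echo different casing

sent == returned                    # => false -- would log a bogus conflict
sent.downcase == returned.downcase  # => true  -- correctly the same path
```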
data/lib/dbox/utils.rb CHANGED
@@ -25,8 +25,8 @@ module Dbox
 
  # assumes local_path is defined
  def local_to_relative_path(path)
- if path.include?(local_path)
- path.sub(local_path, "").sub(/^\//, "")
+ if path =~ /^#{local_path}\/?(.*)$/i
+ $1
  else
  raise BadPath, "Not a local path: #{path}"
  end
@@ -34,8 +34,8 @@ module Dbox
 
  # assumes remote_path is defined
  def remote_to_relative_path(path)
- if path.include?(remote_path)
- path.sub(remote_path, "").sub(/^\//, "")
+ if path =~ /^#{remote_path}\/?(.*)$/i
+ $1
  else
  raise BadPath, "Not a remote path: #{path}"
  end
@@ -44,9 +44,9 @@ module Dbox
  # assumes local_path is defined
  def relative_to_local_path(path)
  if path && path.length > 0
- File.join(local_path, path)
+ case_insensitive_join(local_path, path)
  else
- local_path
+ case_insensitive_resolve(local_path)
  end
  end
 
@@ -59,6 +59,37 @@ module Dbox
  end
  end
 
+ def case_insensitive_resolve(path)
+ if File.exists?(path)
+ path
+ else
+ matches = Dir.glob(path, File::FNM_CASEFOLD)
+ case matches.size
+ when 0 then path
+ when 1 then matches.first
+ else raise(RuntimeError, "Oops, you have multiple files with the same case. Please delete one of them, as Dropbox is case insensitive. (#{matches.join(', ')})")
+ end
+ end
+ end
+
+ def case_insensitive_join(path, *rest)
+ if rest.length == 0
+ case_insensitive_resolve(path)
+ else
+ rest = rest.map {|s| s.split(File::SEPARATOR) }.flatten
+ case_insensitive_join(File.join(case_insensitive_resolve(path), rest[0]), *rest[1..-1])
+ end
+ end
+
+ def case_insensitive_difference(a, b)
+ b = b.map(&:downcase).sort
+ a.reject {|s| b.include?(s.downcase) }
+ end
+
+ def case_insensitive_equal(a, b)
+ a && b && a.downcase == b.downcase
+ end
+
  def calculate_hash(filepath)
  begin
  Digest::MD5.file(filepath).to_s
@@ -71,7 +102,7 @@ module Dbox
 
  def find_nonconflicting_path(filepath)
  proposed = filepath
- while File.exists?(proposed)
+ while File.exists?(case_insensitive_resolve(proposed))
  dir, p = File.split(proposed)
  p = p.sub(/^(.*?)( \((\d+)\))?(\..*?)?$/) { "#{$1} (#{$3 ? $3.to_i + 1 : 1})#{$4}" }
  proposed = File.join(dir, p)
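
A usage sketch of the four helpers added above. The two pure ones are copied verbatim from the diff and runnable as-is; `case_insensitive_resolve` and `case_insensitive_join` also touch the filesystem (a `Dir.glob` with `File::FNM_CASEFOLD`), so their behavior is only described in comments, with illustrative paths:

```ruby
# Copied verbatim from the diff above -- pure string helpers.
def case_insensitive_difference(a, b)
  b = b.map(&:downcase).sort
  a.reject {|s| b.include?(s.downcase) }
end

def case_insensitive_equal(a, b)
  a && b && a.downcase == b.downcase
end

case_insensitive_equal("hello.txt", "HELLO.txt")            # => true
case_insensitive_difference(["a.txt", "B.txt"], ["A.TXT"])  # => ["B.txt"]

# The filesystem-aware helpers map a path of any casing onto what is
# actually on disk. Given a real file at Docs/Report.txt on a
# case-sensitive filesystem:
#   case_insensitive_resolve("docs")             # => "Docs" (one FNM_CASEFOLD match)
#   case_insensitive_join("docs", "REPORT.txt")  # => "Docs/Report.txt"
```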
data/spec/dbox_spec.rb CHANGED
@@ -460,6 +460,134 @@ describe Dbox do
  Dbox.sync(@local).should eql(:pull => { :created => ["au_revoir.txt", "farewell.txt", "hello (1).txt"], :deleted => [], :updated => ["", "goodbye.txt"], :failed => [] },
  :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
  end
+
+ it "should be able to handle a file that has changed case" do
+ Dbox.create(@remote, @local)
+ make_file "#{@local}/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ rename_file "#{@local}/hello.txt", "#{@local}/HELLO.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ end
+
+ it "should be able to handle a file that has changed case remotely" do
+ Dbox.create(@remote, @local)
+ @alternate = "#{ALTERNATE_LOCAL_TEST_PATH}/#{@name}"
+ Dbox.clone(@remote, @alternate)
+ make_file "#{@local}/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => ["hello.txt"], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ rename_file "#{@local}/hello.txt", "#{@local}/HELLO.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ end
+
+ it "should be able to handle a folder that has changed case" do
+ Dbox.create(@remote, @local)
+ mkdir "#{@local}/foo"
+ make_file "#{@local}/foo/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ rename_file "#{@local}/foo", "#{@local}/FOO"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => ["", "foo"], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ make_file "#{@local}/FOO/hello2.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["FOO/hello2.txt"], :deleted => [], :updated => [], :failed => [] })
+ end
+
+ it "should be able to handle a folder that has changed case remotely" do
+ Dbox.create(@remote, @local)
+ @alternate = "#{ALTERNATE_LOCAL_TEST_PATH}/#{@name}"
+ Dbox.clone(@remote, @alternate)
+ mkdir "#{@local}/foo"
+ make_file "#{@local}/foo/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ rename_file "#{@local}/foo", "#{@local}/FOO"
+ make_file "#{@local}/FOO/hello2.txt"
+ make_file "#{@alternate}/foo/hello3.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => ["", "foo"], :failed => [] },
+ :push => { :created => ["FOO/hello2.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => ["foo/hello2.txt"], :deleted => [], :updated => ["foo"], :failed => [] },
+ :push => { :created => ["foo/hello3.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@local).should eql(:pull => { :created => ["foo/hello3.txt"], :deleted => [], :updated => ["foo"], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ end
+
+ it "should be able to handle creating a new file of a different case from a deleted file" do
+ Dbox.create(@remote, @local)
+ mkdir "#{@local}/foo"
+ make_file "#{@local}/foo/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ rm_rf "#{@local}/foo"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => ["", "foo"], :failed => [] },
+ :push => { :created => [], :deleted => ["foo"], :updated => [], :failed => [] })
+ mkdir "#{@local}/FOO"
+ make_file "#{@local}/FOO/HELLO.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => ["FOO", "FOO/HELLO.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => ["", "FOO"], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ end
+
+ it "should be able to handle creating a new file of a different case from a deleted file remotely" do
+ Dbox.create(@remote, @local)
+ @alternate = "#{ALTERNATE_LOCAL_TEST_PATH}/#{@name}"
+ Dbox.clone(@remote, @alternate)
+
+ mkdir "#{@local}/foo"
+ make_file "#{@local}/foo/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ rm_rf "#{@alternate}/foo"
+ Dbox.sync(@alternate).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => [], :deleted => ["foo"], :updated => [], :failed => [] })
+ mkdir "#{@alternate}/FOO"
+ make_file "#{@alternate}/FOO/HELLO.txt"
+ make_file "#{@alternate}/FOO/HELLO2.txt"
+ Dbox.sync(@alternate).should eql(:pull => { :created => [], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => ["FOO", "FOO/HELLO.txt", "FOO/HELLO2.txt"], :deleted => [], :updated => [], :failed => [] })
+
+ rename_file "#{@alternate}/FOO", "#{@alternate}/Foo"
+ make_file "#{@alternate}/Foo/Hello3.txt"
+ Dbox.sync(@alternate).should eql(:pull => { :created => [], :deleted => [], :updated => ["", "FOO"], :failed => [] },
+ :push => { :created => ["Foo/Hello3.txt"], :deleted => [], :updated => [], :failed => [] })
+
+ Dbox.sync(@local).should eql(:pull => { :created => ["foo/HELLO2.txt", "foo/Hello3.txt"], :deleted => [], :updated => ["", "FOO", "foo/HELLO.txt"], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ end
+
+ it "should be able to handle nested directories with case changes" do
+ Dbox.create(@remote, @local)
+ @alternate = "#{ALTERNATE_LOCAL_TEST_PATH}/#{@name}"
+ Dbox.clone(@remote, @alternate)
+
+ mkdir "#{@local}/foo"
+ make_file "#{@local}/foo/hello.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => [], :failed => [] },
+ :push => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => ["foo", "foo/hello.txt"], :deleted => [], :updated => [""], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+
+ rename_file "#{@local}/foo", "#{@local}/FOO"
+ mkdir "#{@local}/FOO/BAR"
+ make_file "#{@local}/FOO/BAR/hello2.txt"
+ Dbox.sync(@local).should eql(:pull => { :created => [], :deleted => [], :updated => ["", "foo"], :failed => [] },
+ :push => { :created => ["FOO/BAR", "FOO/BAR/hello2.txt"], :deleted => [], :updated => [], :failed => [] })
+ Dbox.sync(@alternate).should eql(:pull => { :created => ["FOO/BAR/hello2.txt", "foo/BAR"], :deleted => [], :updated => ["foo"], :failed => [] },
+ :push => { :created => [], :deleted => [], :updated => [], :failed => [] })
+ end
  end
 
  describe "#move" do
data/spec/spec_helper.rb CHANGED
@@ -40,6 +40,11 @@ def make_file(filepath, size_in_kb=1)
  `dd if=/dev/urandom of="#{filepath.gsub('"','\"')}" bs=1024 count=#{size_in_kb} 1>/dev/null 2>/dev/null`
  end
 
+ def rename_file(oldpath, newpath)
+ FileUtils.mv oldpath, "#{oldpath}-tmp"
+ FileUtils.mv "#{oldpath}-tmp", newpath
+ end
+
  RSpec::Matchers.define :exist do
  match do |actual|
  File.exists?(actual) == true
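
Why the new `rename_file` helper above bounces through a temporary name rather than calling `FileUtils.mv` directly: `FileUtils.mv` raises `ArgumentError` ("same file") when source and destination resolve to the same file, and on a case-insensitive filesystem (such as a default macOS volume, where these specs exercise case changes) `hello.txt` and `HELLO.txt` are the same file. This rationale is inferred from the code; the diff itself does not state it.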
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: dbox
  version: !ruby/object:Gem::Version
- hash: 25
+ hash: 3
  prerelease:
  segments:
  - 0
- - 6
- - 15
- version: 0.6.15
+ - 7
+ - 0
+ version: 0.7.0
  platform: ruby
  authors:
  - Ken Pratt
@@ -15,7 +15,7 @@ autorequire:
  bindir: bin
  cert_chain: []
 
- date: 2012-07-26 00:00:00 Z
+ date: 2012-11-06 00:00:00 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: multipart-post
@@ -82,19 +82,19 @@ dependencies:
  type: :runtime
  version_requirements: *id004
  - !ruby/object:Gem::Dependency
- name: activesupport
+ name: insensitive_hash
  prerelease: false
  requirement: &id005 !ruby/object:Gem::Requirement
  none: false
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- hash: 5
+ hash: 19
  segments:
+ - 0
  - 3
  - 0
- - 1
- version: 3.0.1
+ version: 0.3.0
  type: :runtime
  version_requirements: *id005
  description: An easy-to-use Dropbox client with fine-grained control over syncs.
@@ -121,7 +121,6 @@ files:
  - lib/dbox/database.rb
  - lib/dbox/db.rb
  - lib/dbox/loggable.rb
- - lib/dbox/parallel_tasks.rb
  - lib/dbox/syncer.rb
  - lib/dbox/utils.rb
  - sample_polling_script.rb
@@ -168,7 +167,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  requirements: []
 
  rubyforge_project:
- rubygems_version: 1.8.16
+ rubygems_version: 1.8.24
  signing_key:
  specification_version: 3
  summary: Dropbox made easy.
data/lib/dbox/parallel_tasks.rb DELETED
@@ -1,80 +0,0 @@
- require "thread"
-
- #
- # Usage:
- #
- # puts "Creating task queue with 5 concurrent workers"
- # tasks = ParallelTasks.new(5) { puts "Worker thread starting up" }
- #
- # puts "Starting workers"
- # tasks.start
- #
- # puts "Making some work"
- # 20.times do
- # tasks.add do
- # x = rand(5)
- # puts "Sleeping for #{x}s"
- # sleep x
- # end
- # end
- #
- # puts "Waiting for workers to finish"
- # tasks.finish
- #
- # puts "Done"
- #
- class ParallelTasks
- def initialize(num_workers, &initialization_proc)
- @num_workers = num_workers
- @initialization_proc = initialization_proc
- @workers = []
- @work_queue = Queue.new
- @semaphore = Mutex.new
- @done_making_tasks = false
- end
-
- def start
- @num_workers.times do
- @workers << Thread.new do
- @initialization_proc.call if @initialization_proc
- done = false
- while !done
- task = nil
- @semaphore.synchronize do
- unless @work_queue.empty?
- task = @work_queue.pop()
- else
- if @done_making_tasks
- done = true
- else
- sleep 0.1
- end
- end
- end
- if task
- begin
- task.call
- rescue Exception => e
- log.error e.inspect
- end
- end
- end
- end
- end
- end
-
- def add(&proc)
- @work_queue << proc
- end
-
- def finish
- @done_making_tasks = true
- begin
- @workers.each {|t| t.join }
- rescue Exception => e
- log.error "Error waiting for workers to complete tasks: #{e.inspect}"
- @workers.each {|t| t.kill }
- raise e
- end
- end
- end