dbox 0.4.4 → 0.5.0
Sign up to get free protection for your applications and to get access to all the features.
- data/README.md +1 -1
- data/Rakefile +2 -0
- data/TODO.txt +5 -2
- data/VERSION +1 -1
- data/dbox.gemspec +13 -3
- data/lib/dbox.rb +31 -11
- data/lib/dbox/api.rb +15 -5
- data/lib/dbox/database.rb +216 -0
- data/lib/dbox/db.rb +17 -3
- data/lib/dbox/parallel_tasks.rb +68 -0
- data/lib/dbox/syncer.rb +595 -0
- data/sample_polling_script.rb +45 -0
- data/spec/dbox_spec.rb +82 -30
- data/spec/spec_helper.rb +4 -7
- data/vendor/dropbox-client-ruby/README +2 -0
- data/vendor/dropbox-client-ruby/lib/dropbox.rb +36 -3
- metadata +42 -6
data/README.md
CHANGED
@@ -3,7 +3,7 @@ dbox
|
|
3
3
|
|
4
4
|
An easy way to push and pull your Dropbox folders, with fine-grained control over what folder you are syncing, where you are syncing it to, and when you are doing it.
|
5
5
|
|
6
|
-
**IMPORTANT:** This is **not** an automated Dropbox client. It will exit after successfully pushing/pulling, so if you want regular updates, you can run it in cron, a loop, etc.
|
6
|
+
**IMPORTANT:** This is **not** an automated Dropbox client. It will exit after successfully pushing/pulling, so if you want regular updates, you can run it in cron, a loop, etc. If you do want to run it in a loop, take a look at [sample_polling_script.rb](http://github.com/kenpratt/dbox/blob/master/sample_polling_script.rb).
|
7
7
|
|
8
8
|
|
9
9
|
Installation
|
data/Rakefile
CHANGED
@@ -17,6 +17,8 @@ Jeweler::Tasks.new do |gem|
|
|
17
17
|
gem.add_dependency "multipart-post", ">= 1.1.2"
|
18
18
|
gem.add_dependency "oauth", ">= 0.4.5"
|
19
19
|
gem.add_dependency "json", ">= 1.5.3"
|
20
|
+
gem.add_dependency "sqlite3", ">= 1.3.3"
|
21
|
+
gem.add_dependency "activesupport", ">= 3.0.1"
|
20
22
|
end
|
21
23
|
Jeweler::RubygemsDotOrgTasks.new
|
22
24
|
|
data/TODO.txt
CHANGED
@@ -1,3 +1,6 @@
|
|
1
|
-
*
|
1
|
+
* Refactor threading in change detection in pull to use ParallelTasks?
|
2
|
+
* Saving SQLite db changes dir timestamp -- try to preserve it?
|
2
3
|
* Add a "sync" command that pushes and pulls in one go
|
3
|
-
*
|
4
|
+
* See if prepared statements speed up operations on large repos much
|
5
|
+
* Look down directory tree until you hit a .dbox.sqlite3 file so you can use commands from anywhere inside a tree (like git)
|
6
|
+
* Add support for partial push/pull (subtree push/pull)?
|
data/VERSION
CHANGED
@@ -1 +1 @@
|
|
1
|
-
0.4.4
|
1
|
+
0.5.0
|
data/dbox.gemspec
CHANGED
@@ -5,11 +5,11 @@
|
|
5
5
|
|
6
6
|
Gem::Specification.new do |s|
|
7
7
|
s.name = %q{dbox}
|
8
|
-
s.version = "0.4.4"
|
8
|
+
s.version = "0.5.0"
|
9
9
|
|
10
10
|
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
|
11
11
|
s.authors = [%q{Ken Pratt}]
|
12
|
-
s.date = %q{2011-
|
12
|
+
s.date = %q{2011-09-19}
|
13
13
|
s.description = %q{An easy-to-use Dropbox client with fine-grained control over syncs.}
|
14
14
|
s.email = %q{ken@kenpratt.net}
|
15
15
|
s.executables = [%q{dbox}]
|
@@ -28,8 +28,12 @@ Gem::Specification.new do |s|
|
|
28
28
|
"dbox.gemspec",
|
29
29
|
"lib/dbox.rb",
|
30
30
|
"lib/dbox/api.rb",
|
31
|
+
"lib/dbox/database.rb",
|
31
32
|
"lib/dbox/db.rb",
|
32
33
|
"lib/dbox/loggable.rb",
|
34
|
+
"lib/dbox/parallel_tasks.rb",
|
35
|
+
"lib/dbox/syncer.rb",
|
36
|
+
"sample_polling_script.rb",
|
33
37
|
"spec/dbox_spec.rb",
|
34
38
|
"spec/spec_helper.rb",
|
35
39
|
"vendor/dropbox-client-ruby/LICENSE",
|
@@ -45,7 +49,7 @@ Gem::Specification.new do |s|
|
|
45
49
|
s.homepage = %q{http://github.com/kenpratt/dbox}
|
46
50
|
s.licenses = [%q{MIT}]
|
47
51
|
s.require_paths = [%q{lib}]
|
48
|
-
s.rubygems_version = %q{1.8.
|
52
|
+
s.rubygems_version = %q{1.8.5}
|
49
53
|
s.summary = %q{Dropbox made easy.}
|
50
54
|
|
51
55
|
if s.respond_to? :specification_version then
|
@@ -55,15 +59,21 @@ Gem::Specification.new do |s|
|
|
55
59
|
s.add_runtime_dependency(%q<multipart-post>, [">= 1.1.2"])
|
56
60
|
s.add_runtime_dependency(%q<oauth>, [">= 0.4.5"])
|
57
61
|
s.add_runtime_dependency(%q<json>, [">= 1.5.3"])
|
62
|
+
s.add_runtime_dependency(%q<sqlite3>, [">= 1.3.3"])
|
63
|
+
s.add_runtime_dependency(%q<activesupport>, [">= 3.0.1"])
|
58
64
|
else
|
59
65
|
s.add_dependency(%q<multipart-post>, [">= 1.1.2"])
|
60
66
|
s.add_dependency(%q<oauth>, [">= 0.4.5"])
|
61
67
|
s.add_dependency(%q<json>, [">= 1.5.3"])
|
68
|
+
s.add_dependency(%q<sqlite3>, [">= 1.3.3"])
|
69
|
+
s.add_dependency(%q<activesupport>, [">= 3.0.1"])
|
62
70
|
end
|
63
71
|
else
|
64
72
|
s.add_dependency(%q<multipart-post>, [">= 1.1.2"])
|
65
73
|
s.add_dependency(%q<oauth>, [">= 0.4.5"])
|
66
74
|
s.add_dependency(%q<json>, [">= 1.5.3"])
|
75
|
+
s.add_dependency(%q<sqlite3>, [">= 1.3.3"])
|
76
|
+
s.add_dependency(%q<activesupport>, [">= 3.0.1"])
|
67
77
|
end
|
68
78
|
end
|
69
79
|
|
data/lib/dbox.rb
CHANGED
@@ -8,10 +8,15 @@ require "time"
|
|
8
8
|
require "yaml"
|
9
9
|
require "logger"
|
10
10
|
require "cgi"
|
11
|
+
require "sqlite3"
|
12
|
+
require "active_support/core_ext/hash/indifferent_access"
|
11
13
|
|
12
14
|
require "dbox/loggable"
|
13
15
|
require "dbox/api"
|
16
|
+
require "dbox/database"
|
14
17
|
require "dbox/db"
|
18
|
+
require "dbox/parallel_tasks"
|
19
|
+
require "dbox/syncer"
|
15
20
|
|
16
21
|
module Dbox
|
17
22
|
def self.authorize
|
@@ -19,41 +24,47 @@ module Dbox
|
|
19
24
|
end
|
20
25
|
|
21
26
|
def self.create(remote_path, local_path)
|
27
|
+
log.debug "Creating (remote: #{remote_path}, local: #{local_path})"
|
22
28
|
remote_path = clean_remote_path(remote_path)
|
23
29
|
local_path = clean_local_path(local_path)
|
24
|
-
|
30
|
+
migrate_dbfile(local_path)
|
31
|
+
Dbox::Syncer.create(remote_path, local_path)
|
25
32
|
end
|
26
33
|
|
27
34
|
def self.clone(remote_path, local_path)
|
35
|
+
log.debug "Cloning (remote: #{remote_path}, local: #{local_path})"
|
28
36
|
remote_path = clean_remote_path(remote_path)
|
29
37
|
local_path = clean_local_path(local_path)
|
30
|
-
|
38
|
+
migrate_dbfile(local_path)
|
39
|
+
Dbox::Syncer.clone(remote_path, local_path)
|
31
40
|
end
|
32
41
|
|
33
42
|
def self.pull(local_path)
|
43
|
+
log.debug "Pulling (local: #{local_path})"
|
34
44
|
local_path = clean_local_path(local_path)
|
35
|
-
|
45
|
+
migrate_dbfile(local_path)
|
46
|
+
Dbox::Syncer.pull(local_path)
|
36
47
|
end
|
37
48
|
|
38
49
|
def self.push(local_path)
|
50
|
+
log.debug "Pushing (local: #{local_path})"
|
39
51
|
local_path = clean_local_path(local_path)
|
40
|
-
|
52
|
+
migrate_dbfile(local_path)
|
53
|
+
Dbox::Syncer.push(local_path)
|
41
54
|
end
|
42
55
|
|
43
56
|
def self.move(new_remote_path, local_path)
|
57
|
+
log.debug "Moving (new remote: #{new_remote_path}, local: #{local_path})"
|
44
58
|
new_remote_path = clean_remote_path(new_remote_path)
|
45
59
|
local_path = clean_local_path(local_path)
|
46
|
-
|
60
|
+
migrate_dbfile(local_path)
|
61
|
+
Dbox::Syncer.move(new_remote_path, local_path)
|
47
62
|
end
|
48
63
|
|
49
64
|
def self.exists?(local_path)
|
50
65
|
local_path = clean_local_path(local_path)
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
def self.corrupt?(local_path)
|
55
|
-
local_path = clean_local_path(local_path)
|
56
|
-
Dbox::DB.corrupt?(local_path)
|
66
|
+
migrate_dbfile(local_path)
|
67
|
+
Dbox::Database.exists?(local_path)
|
57
68
|
end
|
58
69
|
|
59
70
|
private
|
@@ -68,4 +79,13 @@ module Dbox
|
|
68
79
|
raise(ArgumentError, "Missing local path") unless path
|
69
80
|
File.expand_path(path)
|
70
81
|
end
|
82
|
+
|
83
|
+
def self.migrate_dbfile(path)
|
84
|
+
if Dbox::DB.exists?(path)
|
85
|
+
log.warn "Old database file format found -- migrating to new database format"
|
86
|
+
Dbox::Database.migrate_from_old_db_format(Dbox::DB.load(path))
|
87
|
+
Dbox::DB.destroy!(path)
|
88
|
+
log.warn "Migration complete"
|
89
|
+
end
|
90
|
+
end
|
71
91
|
end
|
data/lib/dbox/api.rb
CHANGED
@@ -30,12 +30,18 @@ module Dbox
|
|
30
30
|
api
|
31
31
|
end
|
32
32
|
|
33
|
+
attr_reader :client
|
34
|
+
|
33
35
|
# IMPORTANT: API.new is private. Please use API.authorize or API.connect as the entry point.
|
34
36
|
private_class_method :new
|
35
37
|
def initialize
|
36
38
|
@conf = self.class.conf
|
37
39
|
end
|
38
40
|
|
41
|
+
def initialize_copy(other)
|
42
|
+
@client = other.client.clone()
|
43
|
+
end
|
44
|
+
|
39
45
|
def connect
|
40
46
|
auth_key = ENV["DROPBOX_AUTH_KEY"]
|
41
47
|
auth_secret = ENV["DROPBOX_AUTH_SECRET"]
|
@@ -52,13 +58,17 @@ module Dbox
|
|
52
58
|
res = yield
|
53
59
|
case res
|
54
60
|
when Hash
|
55
|
-
res
|
61
|
+
HashWithIndifferentAccess.new(res)
|
56
62
|
when String
|
57
63
|
res
|
58
64
|
when Net::HTTPNotFound
|
59
65
|
raise RemoteMissing, "#{path} does not exist on Dropbox"
|
60
66
|
when Net::HTTPForbidden
|
61
67
|
raise RequestDenied, "Operation on #{path} denied"
|
68
|
+
when Net::HTTPNotModified
|
69
|
+
:not_modified
|
70
|
+
when true
|
71
|
+
true
|
62
72
|
else
|
63
73
|
raise RuntimeError, "Unexpected result: #{res.inspect}"
|
64
74
|
end
|
@@ -68,10 +78,10 @@ module Dbox
|
|
68
78
|
end
|
69
79
|
end
|
70
80
|
|
71
|
-
def metadata(path = "/")
|
81
|
+
def metadata(path = "/", hash = nil)
|
72
82
|
log.debug "Fetching metadata for #{path}"
|
73
83
|
run(path) do
|
74
|
-
res = @client.metadata(@conf["root"], escape_path(path))
|
84
|
+
res = @client.metadata(@conf["root"], escape_path(path), 10000, hash)
|
75
85
|
log.debug res.inspect
|
76
86
|
res
|
77
87
|
end
|
@@ -96,10 +106,10 @@ module Dbox
|
|
96
106
|
end
|
97
107
|
end
|
98
108
|
|
99
|
-
def get_file(path)
|
109
|
+
def get_file(path, output_file_obj)
|
100
110
|
log.info "Downloading #{path}"
|
101
111
|
run(path) do
|
102
|
-
@client.get_file(@conf["root"], escape_path(path))
|
112
|
+
@client.get_file(@conf["root"], escape_path(path), output_file_obj)
|
103
113
|
end
|
104
114
|
end
|
105
115
|
|
data/lib/dbox/database.rb
ADDED
@@ -0,0 +1,216 @@
|
|
1
|
+
module Dbox
|
2
|
+
class DatabaseError < RuntimeError; end
|
3
|
+
|
4
|
+
class Database
|
5
|
+
include Loggable
|
6
|
+
|
7
|
+
DB_FILENAME = ".dbox.sqlite3"
|
8
|
+
|
9
|
+
def self.create(remote_path, local_path)
|
10
|
+
db = new(local_path)
|
11
|
+
if db.bootstrapped?
|
12
|
+
raise DatabaseError, "Database already initialized -- please use 'dbox pull' or 'dbox push'."
|
13
|
+
end
|
14
|
+
db.bootstrap(remote_path, local_path)
|
15
|
+
db
|
16
|
+
end
|
17
|
+
|
18
|
+
def self.load(local_path)
|
19
|
+
db = new(local_path)
|
20
|
+
unless db.bootstrapped?
|
21
|
+
raise DatabaseError, "Database not initialized -- please run 'dbox create' or 'dbox clone'."
|
22
|
+
end
|
23
|
+
db
|
24
|
+
end
|
25
|
+
|
26
|
+
def self.exists?(local_path)
|
27
|
+
File.exists?(File.join(local_path, DB_FILENAME))
|
28
|
+
end
|
29
|
+
|
30
|
+
def self.migrate_from_old_db_format(old_db)
|
31
|
+
new_db = create(old_db.remote_path, old_db.local_path)
|
32
|
+
new_db.delete_entry_by_path("") # clear out root record
|
33
|
+
new_db.migrate_entry_from_old_db_format(old_db.root)
|
34
|
+
end
|
35
|
+
|
36
|
+
# IMPORTANT: Database.new is private. Please use Database.create
|
37
|
+
# or Database.load as the entry point.
|
38
|
+
private_class_method :new
|
39
|
+
def initialize(local_path)
|
40
|
+
FileUtils.mkdir_p(local_path)
|
41
|
+
@db = SQLite3::Database.new(File.join(local_path, DB_FILENAME))
|
42
|
+
@db.trace {|sql| log.debug sql.strip }
|
43
|
+
@db.execute("PRAGMA foreign_keys = ON;")
|
44
|
+
ensure_schema_exists
|
45
|
+
end
|
46
|
+
|
47
|
+
def ensure_schema_exists
|
48
|
+
@db.execute_batch(%{
|
49
|
+
CREATE TABLE IF NOT EXISTS metadata (
|
50
|
+
id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
51
|
+
local_path varchar(255) NOT NULL,
|
52
|
+
remote_path varchar(255) NOT NULL,
|
53
|
+
version integer NOT NULL
|
54
|
+
);
|
55
|
+
CREATE TABLE IF NOT EXISTS entries (
|
56
|
+
id integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
57
|
+
path varchar(255) UNIQUE NOT NULL,
|
58
|
+
is_dir boolean NOT NULL,
|
59
|
+
parent_id integer REFERENCES entries(id) ON DELETE CASCADE,
|
60
|
+
hash varchar(255),
|
61
|
+
modified datetime,
|
62
|
+
revision integer
|
63
|
+
);
|
64
|
+
CREATE INDEX IF NOT EXISTS entry_parent_ids ON entries(parent_id);
|
65
|
+
})
|
66
|
+
end
|
67
|
+
|
68
|
+
METADATA_COLS = [ :local_path, :remote_path, :version ] # don't need to return id
|
69
|
+
ENTRY_COLS = [ :id, :path, :is_dir, :parent_id, :hash, :modified, :revision ]
|
70
|
+
|
71
|
+
def bootstrap(remote_path, local_path)
|
72
|
+
@db.execute(%{
|
73
|
+
INSERT INTO metadata (local_path, remote_path, version) VALUES (?, ?, ?);
|
74
|
+
}, local_path, remote_path, 1)
|
75
|
+
@db.execute(%{
|
76
|
+
INSERT INTO entries (path, is_dir) VALUES (?, ?)
|
77
|
+
}, "", 1)
|
78
|
+
end
|
79
|
+
|
80
|
+
def bootstrapped?
|
81
|
+
n = @db.get_first_value(%{
|
82
|
+
SELECT count(id) FROM metadata LIMIT 1;
|
83
|
+
})
|
84
|
+
n && n > 0
|
85
|
+
end
|
86
|
+
|
87
|
+
def metadata
|
88
|
+
cols = METADATA_COLS
|
89
|
+
res = @db.get_first_row(%{
|
90
|
+
SELECT #{cols.join(',')} FROM metadata LIMIT 1;
|
91
|
+
})
|
92
|
+
make_fields(cols, res) if res
|
93
|
+
end
|
94
|
+
|
95
|
+
def update_metadata(fields)
|
96
|
+
set_str = fields.keys.map {|k| "#{k}=?" }.join(",")
|
97
|
+
@db.execute(%{
|
98
|
+
UPDATE metadata SET #{set_str};
|
99
|
+
}, *fields.values)
|
100
|
+
end
|
101
|
+
|
102
|
+
def root_dir
|
103
|
+
find_entry("WHERE parent_id is NULL")
|
104
|
+
end
|
105
|
+
|
106
|
+
def find_by_path(path)
|
107
|
+
raise(ArgumentError, "path cannot be null") unless path
|
108
|
+
find_entry("WHERE path=?", path)
|
109
|
+
end
|
110
|
+
|
111
|
+
def contents(dir_id)
|
112
|
+
raise(ArgumentError, "dir_id cannot be null") unless dir_id
|
113
|
+
find_entries("WHERE parent_id=?", dir_id)
|
114
|
+
end
|
115
|
+
|
116
|
+
def subdirs(dir_id)
|
117
|
+
raise(ArgumentError, "dir_id cannot be null") unless dir_id
|
118
|
+
find_entries("WHERE parent_id=? AND is_dir=1", dir_id)
|
119
|
+
end
|
120
|
+
|
121
|
+
def add_entry(path, is_dir, parent_id, modified, revision, hash)
|
122
|
+
insert_entry(:path => path, :is_dir => is_dir, :parent_id => parent_id, :modified => modified, :revision => revision, :hash => hash)
|
123
|
+
end
|
124
|
+
|
125
|
+
def update_entry_by_path(path, fields)
|
126
|
+
raise(ArgumentError, "path cannot be null") unless path
|
127
|
+
update_entry(["WHERE path=?", path], fields)
|
128
|
+
end
|
129
|
+
|
130
|
+
def delete_entry_by_path(path)
|
131
|
+
raise(ArgumentError, "path cannot be null") unless path
|
132
|
+
delete_entry("WHERE path=?", path)
|
133
|
+
end
|
134
|
+
|
135
|
+
def migrate_entry_from_old_db_format(entry, parent = nil)
|
136
|
+
# insert entry into sqlite db
|
137
|
+
add_entry(entry.path, entry.dir?, (parent ? parent[:id] : nil), entry.modified_at, entry.revision, nil)
|
138
|
+
|
139
|
+
# recur on children
|
140
|
+
if entry.dir?
|
141
|
+
new_parent = find_by_path(entry.path)
|
142
|
+
entry.contents.each {|child_path, child| migrate_entry_from_old_db_format(child, new_parent) }
|
143
|
+
end
|
144
|
+
end
|
145
|
+
|
146
|
+
private
|
147
|
+
|
148
|
+
def find_entry(conditions = "", *args)
|
149
|
+
res = @db.get_first_row(%{
|
150
|
+
SELECT #{ENTRY_COLS.join(",")} FROM entries #{conditions} LIMIT 1;
|
151
|
+
}, *args)
|
152
|
+
entry_res_to_fields(res)
|
153
|
+
end
|
154
|
+
|
155
|
+
def find_entries(conditions = "", *args)
|
156
|
+
out = []
|
157
|
+
@db.execute(%{
|
158
|
+
SELECT #{ENTRY_COLS.join(",")} FROM entries #{conditions} ORDER BY path ASC;
|
159
|
+
}, *args) do |res|
|
160
|
+
out << entry_res_to_fields(res)
|
161
|
+
end
|
162
|
+
out
|
163
|
+
end
|
164
|
+
|
165
|
+
def insert_entry(fields)
|
166
|
+
log.debug "Inserting entry: #{fields.inspect}"
|
167
|
+
h = fields.clone
|
168
|
+
h[:modified] = h[:modified].to_i if h[:modified]
|
169
|
+
h[:is_dir] = (h[:is_dir] ? 1 : 0) unless h[:is_dir].nil?
|
170
|
+
@db.execute(%{
|
171
|
+
INSERT INTO entries (#{h.keys.join(",")})
|
172
|
+
VALUES (#{(["?"] * h.size).join(",")});
|
173
|
+
}, *h.values)
|
174
|
+
end
|
175
|
+
|
176
|
+
def update_entry(where_clause, fields)
|
177
|
+
log.debug "Updating entry: #{where_clause}, #{fields.inspect}"
|
178
|
+
h = fields.clone
|
179
|
+
h[:modified] = h[:modified].to_i if h[:modified]
|
180
|
+
conditions, *args = *where_clause
|
181
|
+
set_str = h.keys.map {|k| "#{k}=?" }.join(",")
|
182
|
+
@db.execute(%{
|
183
|
+
UPDATE entries SET #{set_str} #{conditions};
|
184
|
+
}, *(h.values + args))
|
185
|
+
end
|
186
|
+
|
187
|
+
def delete_entry(conditions = "", *args)
|
188
|
+
@db.execute(%{
|
189
|
+
DELETE FROM entries #{conditions};
|
190
|
+
}, *args)
|
191
|
+
end
|
192
|
+
|
193
|
+
def entry_res_to_fields(res)
|
194
|
+
if res
|
195
|
+
h = make_fields(ENTRY_COLS, res)
|
196
|
+
h[:is_dir] = (h[:is_dir] == 1)
|
197
|
+
h[:modified] = Time.at(h[:modified]) if h[:modified]
|
198
|
+
h.delete(:hash) unless h[:is_dir]
|
199
|
+
h
|
200
|
+
else
|
201
|
+
nil
|
202
|
+
end
|
203
|
+
end
|
204
|
+
|
205
|
+
def make_fields(keys, vals)
|
206
|
+
if keys && vals
|
207
|
+
raise ArgumentError.new("Can't make a fields hash with #{keys.size} keys and #{vals.size} vals") unless keys.size == vals.size
|
208
|
+
out = {}
|
209
|
+
keys.each_with_index {|k, i| out[k] = vals[i] }
|
210
|
+
out
|
211
|
+
else
|
212
|
+
nil
|
213
|
+
end
|
214
|
+
end
|
215
|
+
end
|
216
|
+
end
|