rhosync 2.1.10 → 2.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG +9 -0
- data/Rakefile +4 -4
- data/lib/rhosync/api/push_deletes.rb +4 -1
- data/lib/rhosync/api/push_objects.rb +4 -1
- data/lib/rhosync/client.rb +2 -0
- data/lib/rhosync/client_sync.rb +10 -2
- data/lib/rhosync/document.rb +14 -0
- data/lib/rhosync/model.rb +2 -2
- data/lib/rhosync/read_state.rb +2 -0
- data/lib/rhosync/server.rb +1 -1
- data/lib/rhosync/source.rb +34 -7
- data/lib/rhosync/source_adapter.rb +5 -9
- data/lib/rhosync/source_sync.rb +56 -24
- data/lib/rhosync/store.rb +60 -6
- data/lib/rhosync/version.rb +1 -1
- data/spec/api/get_client_params_spec.rb +1 -1
- data/spec/api/get_source_params_spec.rb +2 -1
- data/spec/api/rhosync_api_spec.rb +3 -2
- data/spec/client_sync_spec.rb +18 -0
- data/spec/model_spec.rb +2 -2
- data/spec/server/server_spec.rb +14 -0
- data/spec/source_sync_spec.rb +73 -1
- data/spec/store_spec.rb +35 -0
- data/tasks/redis.rake +2 -2
- metadata +34 -33
data/CHANGELOG
CHANGED
@@ -1,3 +1,12 @@
+## 2.1.11 (not released)
+* #17526603 - implement clientreset support for specified sources
+* #18356697 - store lock is never released (support request #1466)
+* use redis 2.2.12 by default
+* #18672811 - edge case produces race condition which leads to corruption of Store data
+* #18508155 - on failed syncs allow the user to retry it up to pre-defined number of times
+* #18888077 - implement Redis transactions optimization for push_objects and push_deletes
+* #19254217 - hard dependency on sinatra 1.2.7 (to be fixed in 2.1.12)
+
 ## 2.1.10
 * #16001227 - raise exceptions on c2dm errors
 * #1018 - delete read state for user as well
data/Rakefile
CHANGED
@@ -1,4 +1,6 @@
 require 'yaml'
+load 'tasks/redis.rake'
+
 $:.unshift File.join(File.dirname(__FILE__),'lib')
 require 'rhosync'
 
@@ -64,6 +66,7 @@ begin
   gemspec.files = FileList["[A-Z]*", "{bench,bin,generators,lib,spec,tasks}/**/*"]
 
   # TODO: Due to https://www.pivotaltracker.com/story/show/3417862, we can't use JSON 1.4.3
+  gemspec.add_dependency "sinatra", "= 1.2.7"
   gemspec.add_dependency "json", "~>1.4.2"
   gemspec.add_dependency "sqlite3-ruby", "~>1.2.5"
   gemspec.add_dependency "rubyzip", "~>0.9.4"
@@ -71,7 +74,6 @@ begin
   gemspec.add_dependency "redis", "~>2.1.1"
   gemspec.add_dependency "resque", "~>1.14.0"
   gemspec.add_dependency "rest-client", "~>1.6.1"
-  gemspec.add_dependency "sinatra", "~>1.2"
   gemspec.add_dependency "templater", "~>1.0.0"
   gemspec.add_dependency "rake", "~>0.9.2"
   gemspec.add_development_dependency "log4r", "~>1.1.7"
@@ -108,6 +110,4 @@ end
 def ask(msg)
   print msg
   STDIN.gets.chomp
-end
-
-load 'tasks/redis.rake'
+end
data/lib/rhosync/api/push_deletes.rb
CHANGED
@@ -1,6 +1,9 @@
 Server.api :push_deletes do |params,user|
   source = Source.load(params[:source_id],{:app_id=>APP_NAME,:user_id=>params[:user_id]})
   source_sync = SourceSync.new(source)
-
+  timeout = params[:timeout] || 10
+  raise_on_expire = params[:raise_on_expire] || false
+  rebuild_md = params[:rebuild_md].nil? ? true : params[:rebuild_md]
+  source_sync.push_deletes(params[:objects],timeout,raise_on_expire,rebuild_md)
   'done'
 end
data/lib/rhosync/api/push_objects.rb
CHANGED
@@ -1,6 +1,9 @@
 Server.api :push_objects do |params,user|
   source = Source.load(params[:source_id],{:app_id=>APP_NAME,:user_id=>params[:user_id]})
   source_sync = SourceSync.new(source)
-
+  timeout = params[:timeout] || 10
+  raise_on_expire = params[:raise_on_expire] || false
+  rebuild_md = params[:rebuild_md].nil? ? true : params[:rebuild_md]
+  source_sync.push_objects(params[:objects],timeout,raise_on_expire,rebuild_md)
   'done'
 end
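
Both API handlers now thread the optional timeout, raise_on_expire and rebuild_md parameters through to SourceSync. A minimal sketch of the underlying call, assuming an already-registered SampleAdapter source and test user (the names, app id and object data below are illustrative, not taken from this diff):

# Illustrative only: push a small update without rebuilding the whole :md document.
source = Source.load('SampleAdapter', {:app_id => 'application', :user_id => 'testuser'})
source_sync = SourceSync.new(source)
# timeout 10s, raise_on_expire false, rebuild_md false to take the new
# per-attribute Redis set path instead of rewriting the whole :md doc.
source_sync.push_objects({'42' => {'price' => '99.99'}}, 10, false, false)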
data/lib/rhosync/client.rb
CHANGED
@@ -8,6 +8,7 @@ module Rhosync
     field :phone_id,:string
     field :user_id,:string
     field :app_id,:string
+    field :last_sync,:datetime
     attr_accessor :source_name
     validates_presence_of :app_id, :user_id
 
@@ -29,6 +30,7 @@ module Rhosync
     end
 
     def self.load(id,params)
+      params.merge!(:last_sync => Time.now)
       validate_attributes(params)
       super(id,params)
     end
data/lib/rhosync/client_sync.rb
CHANGED
@@ -204,8 +204,16 @@ module Rhosync
 
     class << self
       # Resets the store for a given app,client
-
-
+      # Resets the store for a given app,client
+      def reset(client, params=nil)
+        return unless client
+        if params == nil or params[:sources] == nil
+          client.flash_data('*')
+        else
+          params[:sources].each do |source|
+            client.flash_source_data('*', source['name'])
+          end
+        end
       end
 
       def search_all(client,params=nil)
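
ClientSync.reset now takes an optional params hash so that only the listed sources are flushed. A sketch of both call styles, assuming an existing client object (the adapter name is illustrative):

# Illustrative usage: no params keeps the old behaviour and flushes every client doc.
ClientSync.reset(client)
# With :sources, only the named adapters' client documents are flushed.
ClientSync.reset(client, :sources => [{'name' => 'SimpleAdapter'}])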
data/lib/rhosync/document.rb
CHANGED
@@ -21,10 +21,24 @@ module Document
     Store.delete_data(docname(doctype),data)
   end
 
+  def update_objects(doctype,updates)
+    Store.update_objects(docname(doctype),updates)
+  end
+
+  def remove_objects(doctype,deletes)
+    Store.delete_objects(docname(doctype),deletes)
+  end
+
   def flash_data(doctype)
     Store.flash_data(docname(doctype))
   end
 
+  def flash_source_data(doctype, from_source)
+    self.source_name=from_source
+    docnamestr = docname('') + doctype
+    Store.flash_data(docnamestr)
+  end
+
   def rename(srcdoctype,dstdoctype)
     Store.rename(docname(srcdoctype),docname(dstdoctype))
   end
data/lib/rhosync/model.rb
CHANGED
@@ -33,14 +33,14 @@ module Rhosync
     # specified amount.
     def increment!(name,amount=1)
       raise ArgumentError, "Only integer fields can be incremented." unless self.class.fields.include?({:name => name.to_s, :type => :integer})
-      redis.
+      redis.incrby(field_key(name), amount)
     end
 
     # Decrement the specified integer field by 1 or the
     # specified amount.
     def decrement!(name,amount=1)
       raise ArgumentError, "Only integer fields can be decremented." unless self.class.fields.include?({:name => name.to_s, :type => :integer})
-      redis.
+      redis.decrby(field_key(name), amount)
     end
 
     def next_id #:nodoc:
data/lib/rhosync/read_state.rb
CHANGED
@@ -1,6 +1,7 @@
 module Rhosync
   class ReadState < Model
     field :refresh_time, :integer
+    field :retry_counter, :integer
 
     def self.create(fields)
       fields[:id] = get_id(fields)
@@ -8,6 +9,7 @@ module Rhosync
       fields.delete(:user_id)
       fields.delete(:source_name)
       fields[:refresh_time] ||= Time.now.to_i
+      fields[:retry_counter] ||= 0
       super(fields,{})
     end
 
data/lib/rhosync/server.rb
CHANGED
data/lib/rhosync/source.rb
CHANGED
@@ -90,7 +90,7 @@ module Rhosync
 
     # source fields
     define_fields([:id, :rho__id, :name, :url, :login, :password, :callback_url, :partition_type, :sync_type,
-      :queue, :query_queue, :cud_queue, :belongs_to, :has_many], [:source_id, :priority])
+      :queue, :query_queue, :cud_queue, :belongs_to, :has_many], [:source_id, :priority, :retry_limit])
 
     def initialize(fields)
       self.name = fields['name'] || fields[:name]
@@ -112,6 +112,7 @@ module Rhosync
       fields[:rho__id] = fields[:name]
       fields[:belongs_to] = fields[:belongs_to].to_json if fields[:belongs_to]
       fields[:schema] = fields[:schema].to_json if fields[:schema]
+      fields[:retry_limit] = fields[:retry_limit] ? fields[:retry_limit] : 0
     end
 
     def self.create(fields,params)
@@ -262,15 +263,41 @@ module Rhosync
       self.poll_interval == 0 or
         (self.poll_interval != -1 and self.read_state.refresh_time <= Time.now.to_i)
     end
-
+
     def if_need_refresh(client_id=nil,params=nil)
-      need_refresh =
-      check = check_refresh_time
-      s.read_state.refresh_time = Time.now.to_i + s.poll_interval if check
-      check
-    end
+      need_refresh = check_refresh_time
       yield client_id,params if need_refresh
     end
+
+    def update_refresh_time(query_failure = false)
+      if self.poll_interval == 0
+        self.read_state.refresh_time = Time.now.to_i + self.poll_interval
+        return
+      end
+
+      allowed_update = true
+      # reset number of retries on succesfull query
+      # or if last refresh was more than 'poll_interval' time ago
+      if not query_failure or (Time.now.to_i - self.read_state.refresh_time >= self.poll_interval)
+        self.read_state.retry_counter = 0
+      end
+
+      # do not reset the refresh time on failure
+      # if retry limit is not reached
+      if query_failure
+        if self.read_state.retry_counter < self.retry_limit
+          allowed_update = false
+          self.read_state.increment!(:retry_counter)
+        # we have reached the limit - update the refresh time
+        # and reset the counter
+        else
+          self.read_state.retry_counter = 0
+        end
+      end
+      if allowed_update
+        self.read_state.refresh_time = Time.now.to_i + self.poll_interval
+      end
+    end
 
     private
     def poll_interval_key
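
The new retry_limit field works together with update_refresh_time: failed queries increment retry_counter and leave refresh_time untouched until the limit is reached. A sketch of the interaction, with an illustrative adapter name, app id, user and limit:

# Illustrative only: tolerate two consecutive failed queries before rescheduling the poll.
source = Source.load('SampleAdapter', {:app_id => 'application', :user_id => 'testuser'})
source.retry_limit = 2
source.update_refresh_time(true)   # failure: retry_counter is incremented, refresh_time unchanged
source.update_refresh_time(false)  # success: retry_counter reset, next poll scheduled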
data/lib/rhosync/source_adapter.rb
CHANGED
@@ -41,20 +41,16 @@ module Rhosync
 
     def sync
       if @result and @result.empty?
-        @source.
-
-        s.put_value(:md_size,0)
-      end
+        @source.flash_data(:md)
+        @source.put_value(:md_size,0)
       else
         if @result
           Store.put_data(@tmp_docname,@result)
           @stash_size += @result.size
         end
-        @source.
-
-
-        s.put_value(:md_size,@stash_size)
-      end
+        @source.flash_data(:md)
+        Store.rename(@tmp_docname,@source.docname(:md))
+        @source.put_value(:md_size,@stash_size)
       end
     end
 
data/lib/rhosync/source_sync.rb
CHANGED
@@ -59,12 +59,18 @@ module Rhosync
     end
 
     def do_query(params=nil)
-      @source.
-
-
-
-
-
+      @source.lock(:md) do
+        @source.if_need_refresh do
+          Stats::Record.update("source:query:#{@source.name}") do
+            if _auth_op('login')
+              result = self.read(nil,params)
+              _auth_op('logoff')
+            end
+            # update refresh time
+            query_failure = Store.get_keys(@source.docname(:errors)).size > 0
+            @source.update_refresh_time(query_failure)
+          end
+        end
       end
     end
 
@@ -75,31 +81,57 @@ module Rhosync
         @source.app_id,@source.user_id,client_id,params)
     end
 
-    def push_objects(objects,timeout=10,raise_on_expire=false)
+    def push_objects(objects,timeout=10,raise_on_expire=false,rebuild_md=true)
       @source.lock(:md,timeout,raise_on_expire) do |s|
-
-
-
-
-
-
-
-
+        diff_count = 0
+        # in case of rebuild_md
+        # we clean-up and rebuild the whole :md doc
+        # on every request
+        if(rebuild_md)
+          doc = @source.get_data(:md)
+          orig_doc_size = doc.size
+          objects.each do |id,obj|
+            doc[id] ||= {}
+            doc[id].merge!(obj)
+          end
+          diff_count = doc.size - orig_doc_size
+          @source.put_data(:md,doc)
+        else
+          # if rebuild_md == false
+          # we only operate on specific set values
+          # which brings a big optimization
+          # in case of small transactions
+          diff_count = @source.update_objects(:md, objects)
+        end
+
         @source.update_count(:md_size,diff_count)
       end
     end
 
-    def push_deletes(objects,timeout=10,raise_on_expire=false)
+    def push_deletes(objects,timeout=10,raise_on_expire=false,rebuild_md=true)
       @source.lock(:md,timeout,raise_on_expire) do |s|
-
-
-
-        doc
-
-
-
+        diff_count = 0
+        if(rebuild_md)
+          # in case of rebuild_md
+          # we clean-up and rebuild the whole :md doc
+          # on every request
+          doc = @source.get_data(:md)
+          orig_doc_size = doc.size
+          objects.each do |id|
+            doc.delete(id)
+          end
+          diff_count = doc.size - orig_doc_size
+          @source.put_data(:md,doc)
+        else
+          # if rebuild_md == false
+          # we only operate on specific set values
+          # which brings a big optimization
+          # in case of small transactions
+          diff_count = -@source.remove_objects(:md, objects)
+        end
+
         @source.update_count(:md_size,diff_count)
-      end
+      end
     end
 
     private
data/lib/rhosync/store.rb
CHANGED
@@ -46,12 +46,66 @@ module Rhosync
     end
     true
   end
-
+
+  # updates objects for a given doctype, source, user
+  # create new objects if necessary
+  def update_objects(dockey, data={})
+    return 0 unless dockey and data
+
+    new_object_count = 0
+    doc = get_data(dockey)
+    @@db.pipelined do
+      data.each do |key,value|
+        is_create = doc[key].nil?
+        new_object_count += 1 if is_create
+        value.each do |attrib,value|
+          next if _is_reserved?(attrib, value)
+
+          new_element = setelement(key,attrib,value)
+          element_exists = is_create ? false : doc[key].has_key?(attrib)
+          if element_exists
+            existing_element = setelement(key,attrib,doc[key][attrib])
+            if existing_element != new_element
+              @@db.srem(dockey, existing_element)
+              @@db.sadd(dockey, new_element)
+            end
+          else
+            @@db.sadd(dockey, new_element)
+          end
+        end
+      end
+    end
+    new_object_count
+  end
+
+  # Removes objects from a given doctype,source,user
+  def delete_objects(dockey,data=[])
+    return 0 unless dockey and data
+
+    deleted_object_count = 0
+    doc = get_data(dockey)
+    @@db.pipelined do
+      data.each do |id|
+        if doc[id]
+          doc[id].each do |name,value|
+            @@db.srem(dockey, setelement(id,name,value))
+          end
+          deleted_object_count += 1
+        end
+        doc.delete(id)
+      end
+    end
+    deleted_object_count
+  end
+
   # Adds a simple key/value pair
   def put_value(dockey,value)
     if dockey
-
-
+      if value
+        @@db.set(dockey,value.to_s)
+      else
+        @@db.del(dockey)
+      end
     end
   end
 
@@ -145,7 +199,7 @@ module Rhosync
     def lock(dockey,timeout=0,raise_on_expire=false)
       m_lock = get_lock(dockey,timeout,raise_on_expire)
       res = yield
-      release_lock(dockey,m_lock)
+      release_lock(dockey,m_lock,raise_on_expire)
       res
     end
 
@@ -187,8 +241,8 @@ module Rhosync
     # Time.now.to_i+timeout+1
     # end
 
-    def release_lock(dockey,lock)
-      @@db.del(_lock_key(dockey)) if (lock >= Time.now.to_i)
+    def release_lock(dockey,lock,raise_on_expire=false)
+      @@db.del(_lock_key(dockey)) if raise_on_expire or Rhosync.raise_on_expired_lock or (lock >= Time.now.to_i)
    end
 
    # Create a copy of srckey in dstkey
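
release_lock now also clears the lock key when raise_on_expire (or the global Rhosync.raise_on_expired_lock) is set, so a lock taken with raise_on_expire no longer lingers after the block finishes. A sketch of the locking helper, assuming it is invoked as a class method and using an illustrative document key:

# Illustrative only: the lock key is removed on exit even with raise_on_expire set.
Store.lock('source:application:testuser:SampleAdapter:md', 10, true) do
  # critical section operating on the :md document
end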
data/lib/rhosync/version.rb
CHANGED
data/spec/api/get_client_params_spec.rb
CHANGED
@@ -6,7 +6,7 @@ describe "RhosyncApiGetClientParams" do
   it "should list client attributes" do
     post "/api/get_client_params", {:api_token => @api_token, :client_id =>@c.id}
     res = JSON.parse(last_response.body)
-    res.delete_if { |attrib| attrib['name'] == 'rho__id' }
+    res.delete_if { |attrib| attrib['name'] == 'rho__id' || attrib['name'] == 'last_sync'}
     res.sort{|x,y| x['name']<=>y['name']}.should == [
       {"name"=>"device_type", "value"=>"Apple", "type"=>"string"},
       {"name"=>"device_pin", "value"=>"abcd", "type"=>"string"},
data/spec/api/get_source_params_spec.rb
CHANGED
@@ -15,7 +15,8 @@ describe "RhosyncApiGetSourceParams" do
       {"name"=>"password", "value"=>"testpass", "type"=>"string"},
       {"name"=>"priority", "value"=>3, "type"=>"integer"},
       {"name"=>"callback_url", "value"=>nil, "type"=>"string"},
-      {"name"=>"poll_interval", "value"=>300, "type"=>"integer"},
+      {"name"=>"poll_interval", "value"=>300, "type"=>"integer"},
+      {"name"=>"retry_limit", "value"=>0, "type"=>"integer"},
       {"name"=>"partition_type", "value"=>"user", "type"=>"string"},
       {"name"=>"sync_type", "value"=>"incremental", "type"=>"string"},
       {"name"=>"belongs_to", "type"=>"string", "value"=>nil},
data/spec/api/rhosync_api_spec.rb
CHANGED
@@ -121,7 +121,7 @@ describe "RhosyncApi" do
 
   it "should list client attributes using direct api call" do
     res = RhosyncApi::get_client_params('',@api_token,@c.id)
-    res.delete_if { |attrib| attrib['name'] == 'rho__id' }
+    res.delete_if { |attrib| attrib['name'] == 'rho__id' || attrib['name'] == 'last_sync'}
     res.sort{|x,y| x['name']<=>y['name']}.should == [
       {"name"=>"device_type", "value"=>"Apple", "type"=>"string"},
       {"name"=>"device_pin", "value"=>"abcd", "type"=>"string"},
@@ -164,7 +164,8 @@ describe "RhosyncApi" do
       {"name"=>"password", "value"=>"testpass", "type"=>"string"},
       {"name"=>"priority", "value"=>3, "type"=>"integer"},
       {"name"=>"callback_url", "value"=>nil, "type"=>"string"},
-      {"name"=>"poll_interval", "value"=>300, "type"=>"integer"},
+      {"name"=>"poll_interval", "value"=>300, "type"=>"integer"},
+      {"name"=>"retry_limit", "type"=>"integer", "value"=>0},
       {"name"=>"partition_type", "value"=>"user", "type"=>"string"},
       {"name"=>"sync_type", "value"=>"incremental", "type"=>"string"},
       {"name"=>"belongs_to", "type"=>"string", "value"=>nil},
data/spec/client_sync_spec.rb
CHANGED
@@ -150,6 +150,24 @@ describe "ClientSync" do
     verify_result(@c.docname(:cd) => {})
     Client.load(@c.id,{:source_name => @s.name}).should_not be_nil
   end
+
+  it "should handle reset on individual source adapters" do
+    @c.source_name = 'SampleAdapter'
+    set_state(@c.docname(:cd) => @data)
+    verify_result(@c.docname(:cd) => @data)
+
+    @c.source_name = 'SimpleAdapter'
+    set_state(@c.docname(:cd) => @data)
+    verify_result(@c.docname(:cd) => @data)
+
+    sources = [{'name'=>'SimpleAdapter'}]
+    ClientSync.reset(@c, {:sources => sources})
+
+    @c.source_name = 'SampleAdapter'
+    verify_result(@c.docname(:cd) => @data)
+    @c.source_name = 'SimpleAdapter'
+    verify_result(@c.docname(:cd) => {})
+  end
 end
 
 describe "search" do
data/spec/model_spec.rb
CHANGED
@@ -207,12 +207,12 @@ describe Rhosync::Model do
   end
 
   it "should send INCR when #increment! is called on an integer" do
-    @redisMock.should_receive(:
+    @redisMock.should_receive(:incrby).with("test_increments:1:foo", 1)
     @x.increment!(:foo)
   end
 
   it "should send DECR when #decrement! is called on an integer" do
-    @redisMock.should_receive(:
+    @redisMock.should_receive(:decrby).with("test_increments:1:foo", 1)
     @x.decrement!(:foo)
   end
 
data/spec/server/server_spec.rb
CHANGED
@@ -177,6 +177,20 @@ describe "Server" do
     verify_result(@c.docname(:cd) => {})
   end
 
+  it "should respond to clientreset with individual adapters" do
+    @c.source_name = 'SimpleAdapter'
+    set_state(@c.docname(:cd) => @data)
+    @c.source_name = 'SampleAdapter'
+    set_state(@c.docname(:cd) => @data)
+    sources = [{'name' => 'SimpleAdapter'}]
+    get "/application/clientreset", :client_id => @c.id,:version => ClientSync::VERSION, :sources => sources
+    JSON.parse(last_response.body).should == @source_config
+    @c.source_name = 'SampleAdapter'
+    verify_result(@c.docname(:cd) => @data)
+    @c.source_name = 'SimpleAdapter'
+    verify_result(@c.docname(:cd) => {})
+  end
+
   it "should switch client user if client user_id doesn't match session user" do
     set_test_data('test_db_storage',@data)
     get "/application",:client_id => @c.id,:source_name => @s.name,:version => ClientSync::VERSION
data/spec/source_sync_spec.rb
CHANGED
@@ -208,6 +208,77 @@ describe "SourceSync" do
   it "should do search with exception raised" do
     verify_read_operation_with_error('search')
   end
+
+  it "should do query with exception raised and update refresh time only after retries limit is exceeded" do
+    @s.retry_limit = 1
+    msg = "Error during query"
+    set_test_data('test_db_storage',{},msg,"query error")
+    res = @ss.do_query
+    verify_result(@s.docname(:md) => {},
+      @s.docname(:errors) => {'query-error'=>{'message'=>msg}})
+    # 1) if retry_limit is set to N - then, first N retries should not update refresh_time
+    @s.read_state.retry_counter.should == 1
+    @s.read_state.refresh_time.should <= Time.now.to_i
+
+    # try once more and fail again
+    set_test_data('test_db_storage',{},msg,"query error")
+    res = @ss.do_query
+    verify_result(@s.docname(:md) => {},
+      @s.docname(:errors) => {'query-error'=>{'message'=>msg}})
+
+    # 2) if retry_limit is set to N and number of retries exceeded it - update refresh_time
+    @s.read_state.retry_counter.should == 0
+    @s.read_state.refresh_time.should > Time.now.to_i
+  end
+
+  it "should do query with exception raised and restore state with succesfull retry" do
+    @s.retry_limit = 1
+    msg = "Error during query"
+    set_test_data('test_db_storage',{},msg,"query error")
+    res = @ss.do_query
+    verify_result(@s.docname(:md) => {},
+      @s.docname(:errors) => {'query-error'=>{'message'=>msg}})
+    # 1) if retry_limit is set to N - then, first N retries should not update refresh_time
+    @s.read_state.retry_counter.should == 1
+    @s.read_state.refresh_time.should <= Time.now.to_i
+
+    # try once more (with success)
+    expected = {'1'=>@product1,'2'=>@product2}
+    set_test_data('test_db_storage',expected)
+    @ss.do_query
+    verify_result(@s.docname(:md) => expected,
+      @s.docname(:errors) => {})
+    @s.read_state.retry_counter.should == 0
+    @s.read_state.refresh_time.should > Time.now.to_i
+  end
+
+  it "should do query with exception raised and update refresh time if retry_limit is 0" do
+    @s.retry_limit = 0
+    msg = "Error during query"
+    set_test_data('test_db_storage',{},msg,"query error")
+    res = @ss.do_query
+    verify_result(@s.docname(:md) => {},
+      @s.docname(:errors) => {'query-error'=>{'message'=>msg}})
+    # if poll_interval is set to 0 - refresh time should be updated
+    @s.read_state.retry_counter.should == 0
+    @s.read_state.refresh_time.should > Time.now.to_i
+  end
+
+  it "should do query with exception raised and update refresh time if poll_interval == 0" do
+    @s.retry_limit = 1
+    @s.poll_interval = 0
+    msg = "Error during query"
+    set_test_data('test_db_storage',{},msg,"query error")
+    prev_refresh_time = @s.read_state.refresh_time
+    # make sure refresh time is expired
+    sleep(1)
+    res = @ss.do_query
+    verify_result(@s.docname(:md) => {},
+      @s.docname(:errors) => {'query-error'=>{'message'=>msg}})
+    # if poll_interval is set to 0 - refresh time should be updated
+    @s.read_state.retry_counter.should == 0
+    @s.read_state.refresh_time.should > prev_refresh_time
+  end
 end
 
 describe "app-level partitioning" do
@@ -221,7 +292,8 @@ describe "SourceSync" do
     verify_result("source:#{@test_app_name}:__shared__:#{@s_fields[:name]}:md" => expected)
     Store.db.keys("read_state:#{@test_app_name}:__shared__*").sort.should ==
       [ "read_state:#{@test_app_name}:__shared__:SampleAdapter:refresh_time",
-        "read_state:#{@test_app_name}:__shared__:SampleAdapter:
+        "read_state:#{@test_app_name}:__shared__:SampleAdapter:retry_counter",
+        "read_state:#{@test_app_name}:__shared__:SampleAdapter:rho__id"].sort
   end
 end
 
data/spec/store_spec.rb
CHANGED
@@ -51,6 +51,41 @@ describe "Store" do
     Store.get_data('mydata').should == data
   end
 
+  it "should update_objects with simple data and one changed attribute" do
+    data = { '1' => { 'hello' => 'world', "attr1" => 'value1' } }
+    update_data = { '1' => {'attr1' => 'value2'}}
+    Store.put_data('mydata', data)
+    Store.get_data('mydata').should == data
+    Store.update_objects('mydata', update_data)
+    data['1'].merge!(update_data['1'])
+    Store.get_data('mydata').should == data
+  end
+
+  it "should update_objects with simple data and verify that srem and sadd is called only on affected fields" do
+    data = { '1' => { 'hello' => 'world', "attr1" => 'value1' } }
+    update_data = { '1' => {'attr1' => 'value2', 'new_attr' => 'new_val', 'hello' => 'world'},
+      '2' => {'whole_new_object' => 'new_value' } }
+    Store.put_data('mydata', data)
+    Store.db.should_receive(:srem).exactly(1).times
+    Store.db.should_receive(:sadd).exactly(3).times
+    Store.update_objects('mydata', update_data)
+  end
+
+  it "should delete_objects with simple data" do
+    data = { '1' => { 'hello' => 'world', "attr1" => 'value1' } }
+    Store.put_data('mydata', data)
+    Store.delete_objects('mydata', ['1'])
+    Store.get_data('mydata').should == {}
+  end
+
+  it "should delete_objects with simple data and verify that srem is called only on affected fields" do
+    data = { '1' => { 'hello' => 'world', "attr1" => 'value1' } }
+    Store.put_data('mydata', data)
+    Store.db.should_receive(:srem).exactly(2).times
+    Store.db.should_receive(:sadd).exactly(0).times
+    Store.delete_objects('mydata', ['1'])
+  end
+
   it "should add simple array data to new set" do
     @data = ['1','2','3']
     Store.put_data(@s.docname(:md),@data).should == true
data/tasks/redis.rake
CHANGED
@@ -7,7 +7,7 @@ def windows?
 end
 
 if windows?
-  $redis_ver = "redis-2.2.
+  $redis_ver = "redis-2.2.12"
   $redis_zip = "C:/#{$redis_ver}.zip"
   $redis_dest = "C:/"
 end
@@ -161,7 +161,7 @@ namespace :redis do
     else
       sh 'rm -rf /tmp/redis/' if File.exists?("#{RedisRunner.redisdir}")
       sh 'git clone git://github.com/antirez/redis.git /tmp/redis -n'
-      sh "cd #{RedisRunner.redisdir} && git reset --hard && git checkout 2.2.
+      sh "cd #{RedisRunner.redisdir} && git reset --hard && git checkout 2.2.12"
     end
   end
 
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: rhosync
 version: !ruby/object:Gem::Version
-  hash:
+  hash: 29
   prerelease:
   segments:
   - 2
   - 1
-  -
-  version: 2.1.
+  - 11
+  version: 2.1.11
 platform: ruby
 authors:
 - Rhomobile
@@ -15,12 +15,28 @@ autorequire:
 bindir: bin
 cert_chain: []
 
-date: 2011-
+date: 2011-10-04 00:00:00 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: sinatra
   prerelease: false
   requirement: &id001 !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - "="
+      - !ruby/object:Gem::Version
+        hash: 17
+        segments:
+        - 1
+        - 2
+        - 7
+        version: 1.2.7
+  type: :runtime
+  version_requirements: *id001
+- !ruby/object:Gem::Dependency
+  name: json
+  prerelease: false
+  requirement: &id002 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -32,11 +48,11 @@ dependencies:
         - 2
         version: 1.4.2
   type: :runtime
-  version_requirements: *
+  version_requirements: *id002
 - !ruby/object:Gem::Dependency
   name: sqlite3-ruby
   prerelease: false
-  requirement: &
+  requirement: &id003 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -48,11 +64,11 @@ dependencies:
         - 5
         version: 1.2.5
   type: :runtime
-  version_requirements: *
+  version_requirements: *id003
 - !ruby/object:Gem::Dependency
   name: rubyzip
   prerelease: false
-  requirement: &
+  requirement: &id004 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -64,11 +80,11 @@ dependencies:
         - 4
         version: 0.9.4
   type: :runtime
-  version_requirements: *
+  version_requirements: *id004
 - !ruby/object:Gem::Dependency
   name: uuidtools
   prerelease: false
-  requirement: &
+  requirement: &id005 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ">="
@@ -80,11 +96,11 @@ dependencies:
         - 1
         version: 2.1.1
   type: :runtime
-  version_requirements: *
+  version_requirements: *id005
 - !ruby/object:Gem::Dependency
   name: redis
   prerelease: false
-  requirement: &
+  requirement: &id006 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -96,11 +112,11 @@ dependencies:
         - 1
         version: 2.1.1
   type: :runtime
-  version_requirements: *
+  version_requirements: *id006
 - !ruby/object:Gem::Dependency
   name: resque
   prerelease: false
-  requirement: &
+  requirement: &id007 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -112,11 +128,11 @@ dependencies:
         - 0
         version: 1.14.0
   type: :runtime
-  version_requirements: *
+  version_requirements: *id007
 - !ruby/object:Gem::Dependency
   name: rest-client
   prerelease: false
-  requirement: &
+  requirement: &id008 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ~>
@@ -128,21 +144,6 @@ dependencies:
         - 1
         version: 1.6.1
   type: :runtime
-  version_requirements: *id007
-- !ruby/object:Gem::Dependency
-  name: sinatra
-  prerelease: false
-  requirement: &id008 !ruby/object:Gem::Requirement
-    none: false
-    requirements:
-    - - ~>
-      - !ruby/object:Gem::Version
-        hash: 11
-        segments:
-        - 1
-        - 2
-        version: "1.2"
-  type: :runtime
   version_requirements: *id008
 - !ruby/object:Gem::Dependency
   name: templater
@@ -569,7 +570,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 requirements: []
 
 rubyforge_project:
-rubygems_version: 1.8.
+rubygems_version: 1.8.10
 signing_key:
 specification_version: 3
 summary: RhoSync Synchronization Framework