dm-rinda-adapter 0.1.0

data/Manifest ADDED
@@ -0,0 +1,16 @@
+ Manifest
+ Rakefile
+ lib/rinda-patch.rb
+ lib/rinda_adapter.rb
+ spec/legacy/README
+ spec/legacy/adapter_shared_spec.rb
+ spec/legacy/spec_helper.rb
+ spec/lib/adapter_helpers.rb
+ spec/lib/collection_helpers.rb
+ spec/lib/counter_adapter.rb
+ spec/lib/pending_helpers.rb
+ spec/lib/rspec_immediate_feedback_formatter.rb
+ spec/rcov.opts
+ spec/rinda-adapter_spec.rb
+ spec/spec.opts
+ spec/spec_helper.rb
data/Rakefile ADDED
@@ -0,0 +1,47 @@
+ require 'rubygems'
+ require 'rake'
+ require 'echoe'
+
+ def with_gem(gemname, &blk)
+   begin
+     require gemname
+     blk.call
+   rescue LoadError => e
+     puts "Failed to load gem #{gemname} because #{e}."
+   end
+ end
+
+ Echoe.new('dm-rinda-adapter', '0.1.0') do |p|
+   p.description = "A datamapper adapter to connect to a rinda tuplespace"
+   p.url = "http://github.com/sfeu/dm-rinda-adapter"
+   p.author = "Sebastian Feuerstack"
+   p.email = "sebastian @nospam@ feuerstack.de"
+   p.ignore_pattern = ["tmp/*", "script/*", "#*.*#"]
+   p.development_dependencies = []
+   p.need_tar_gz = false
+   p.need_tgz = false
+ end
+
+ with_gem 'spec/rake/spectask' do
+
+   desc 'Run all specs'
+   Spec::Rake::SpecTask.new(:spec) do |t|
+     t.spec_opts << '--options' << 'spec/spec.opts' if File.exists?('spec/spec.opts')
+     t.libs << 'lib'
+     t.spec_files = FileList['spec/**_spec.rb']
+   end
+
+   desc 'Default: Run Specs'
+   task :default => :spec
+
+   desc 'Run all tests'
+   task :test => :spec
+
+ end
+
+ with_gem 'yard' do
+   desc "Generate Yardoc"
+   YARD::Rake::YardocTask.new do |t|
+     t.files = ['lib/**/*.rb', 'README.markdown']
+   end
+ end
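
The Manifest ships a spec/rcov.opts file, but this Rakefile defines no coverage task. A minimal sketch of one, guarded by the same with_gem helper, follows (an assumption on my part: it presumes RSpec 1.x's Spec::Rake::SpecTask with its rcov support plus the rcov gem are installed, and the :rcov task name is hypothetical):

  with_gem 'spec/rake/spectask' do
    desc 'Run all specs under rcov'
    Spec::Rake::SpecTask.new(:rcov) do |t|
      t.libs << 'lib'
      t.spec_files = FileList['spec/**_spec.rb']
      t.rcov = true
      # reuse the rcov options shipped with the gem, if present
      t.rcov_opts = File.read('spec/rcov.opts').split(/\s+/) if File.exists?('spec/rcov.opts')
    end
  end
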
data/dm-rinda-adapter.gemspec ADDED
@@ -0,0 +1,29 @@
+ # -*- encoding: utf-8 -*-
+
+ Gem::Specification.new do |s|
+   s.name = %q{dm-rinda-adapter}
+   s.version = "0.1.0"
+
+   s.required_rubygems_version = Gem::Requirement.new(">= 1.2") if s.respond_to? :required_rubygems_version=
+   s.authors = [%q{Sebastian Feuerstack}]
+   s.date = %q{2011-06-30}
+   s.description = %q{A datamapper adapter to connect to a rinda tuplespace}
+   s.email = %q{sebastian @nospam@ feuerstack.de}
+   s.extra_rdoc_files = [%q{lib/rinda-patch.rb}, %q{lib/rinda_adapter.rb}]
+   s.files = [%q{Manifest}, %q{Rakefile}, %q{lib/rinda-patch.rb}, %q{lib/rinda_adapter.rb}, %q{spec/legacy/README}, %q{spec/legacy/adapter_shared_spec.rb}, %q{spec/legacy/spec_helper.rb}, %q{spec/lib/adapter_helpers.rb}, %q{spec/lib/collection_helpers.rb}, %q{spec/lib/counter_adapter.rb}, %q{spec/lib/pending_helpers.rb}, %q{spec/lib/rspec_immediate_feedback_formatter.rb}, %q{spec/rcov.opts}, %q{spec/rinda-adapter_spec.rb}, %q{spec/spec.opts}, %q{spec/spec_helper.rb}, %q{dm-rinda-adapter.gemspec}]
+   s.homepage = %q{http://github.com/sfeu/dm-rinda-adapter}
+   s.rdoc_options = [%q{--line-numbers}, %q{--inline-source}, %q{--title}, %q{Dm-rinda-adapter}, %q{--main}, %q{README.rdoc}]
+   s.require_paths = [%q{lib}]
+   s.rubyforge_project = %q{dm-rinda-adapter}
+   s.rubygems_version = %q{1.8.5}
+   s.summary = %q{A datamapper adapter to connect to a rinda tuplespace}
+
+   if s.respond_to? :specification_version then
+     s.specification_version = 3
+
+     if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
+     else
+     end
+   else
+   end
+ end
data/lib/rinda-patch.rb ADDED
@@ -0,0 +1,25 @@
+ # Dirty monkey patch of Rinda::TupleSpace to hand out a unique, increasing ID
+ # for written tuples; a primitive increment is used because of the
+ # double-nested TupleBag storage with symbols.
+ module Rinda
+   class TupleSpace
+
+     def initialize(period=60)
+       super()
+       @bag = TupleBag.new
+       @read_waiter = TupleBag.new
+       @take_waiter = TupleBag.new
+       @notify_waiter = TupleBag.new
+       @period = period
+       @keeper = nil
+       @id = 0
+     end
+
+     # Writes the tuple with an auto-incremented "id" field and returns that id.
+     def writeID(tuple, sec=nil)
+       synchronize do
+         @id = @id + 1
+         tuple["id"] = @id
+         write(tuple, sec)
+         @id
+       end
+     end
+   end
+ end
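
A short usage sketch of the patched TupleSpace with a local, in-process space (the "_model_" key mirrors what the adapter writes; the "status" field is hypothetical):

  require 'rinda/tuplespace'
  require 'rinda-patch'

  ts = Rinda::TupleSpace.new
  id = ts.writeID("_model_" => "tasks", "status" => "ready")   # => 1 for a fresh space
  ts.read("_model_" => "tasks", "id" => id, "status" => nil)   # nil fields act as wildcards
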
data/lib/rinda_adapter.rb ADDED
@@ -0,0 +1,448 @@
+ require 'dm-core'
+ gem 'dm-core', '>=0.10.0'
+ require "rinda/tuplespace"
+ require 'monitor'
+ require 'rinda-patch'
+
+ module DataMapper
+
+   class Repository
+     def notify(action, query, callback, model, dm_query, time)
+       adapter.notify(action, query, callback, model, dm_query, time)
+     end
+
+     def wait(action, query, callback, model, dm_query, time)
+       adapter.wait(action, query, callback, model, dm_query, time)
+     end
+   end
+
+   module Model
+     def notify(action, query, callback, time = nil)
+       q = scoped_query(query)
+       q.repository.notify(action, query, callback, self, q, time)
+     end
+
+     def wait(action, query, callback, time = nil)
+       q = scoped_query(query)
+       q.repository.wait(action, query, callback, self, q, time)
+     end
+   end
+
+   module Adapters
+
+     # monkey patching in the new notification methods
+     class AbstractAdapter
+       def notify(action, query, callback, model, dm_query, time)
+         raise NotImplementedError, "#{self.class}#notify not implemented"
+       end
+
+       def wait(action, query, callback, model, dm_query, time)
+         raise NotImplementedError, "#{self.class}#wait not implemented"
+       end
+     end # class AbstractAdapter
+
+     # An adapter that stores and queries DataMapper resources in a Rinda
+     # tuplespace, either a local in-process TupleSpace or a remote one reached
+     # over DRb. Each record is written as a hash tuple that carries its model's
+     # storage name under the "_model_" key, and the patched TupleSpace#writeID
+     # assigns serial keys. Beyond the usual CRUD operations, the adapter exposes
+     # notify and wait so that callers can react to changes in the tuplespace.
+     class RindaAdapter < AbstractAdapter
+       # include MonitorMixin
+
+       # Used by DataMapper to put records into a data-store: "INSERT" in SQL-speak.
+       # It takes an array of the resources (model instances) to be saved. Resources
+       # each have a key that can be used to quickly look them up later without
+       # searching, if the adapter supports it.
+       #
+       # @param [Enumerable(Resource)] resources
+       #   The set of resources (model instances)
+       #
+       # @api semipublic
+       def create(resources)
+         name = self.name
+         # DataMapper.logger << "create #{resources.first.model}"
+
+         resources.each do |resource|
+           model  = resource.model
+           serial = model.serial(name)
+
+           # DataMapper.logger << "res #{resource.inspect}"
+           # initialize_serial(resource, rand(2**32))
+           # DataMapper.logger << "att #{resource.attributes(:field).inspect}"
+
+           saveblock = {}
+
+           resource.attributes.each do |key, value|
+             # DataMapper.logger << "before convert #{resource.model.properties[key].type}"
+             saveblock[key.to_s] = convert_to_ts(resource.model.properties[key].type, value)
+           end
+
+           # model = resource.model
+           # attributes = resource.dirty_attributes
+           # model.properties_with_subclasses(name).each do |property|
+           #   next unless attributes.key?(property)
+           #   value = attributes[property]
+           #   saveblock[property.field.to_s] = convert_to_ts(property.type, value)
+           # end
+
+           # add the model name so it is included in the tuple
+           saveblock["_model_"] = resources.first.model.storage_name(name).to_s
+
+           DataMapper.logger << "write #{saveblock.inspect}"
+           @monitor.synchronize do
+             if serial
+               id = @ts.writeID saveblock
+               serial.set!(resource, id)
+             else
+               @ts.write saveblock
+             end
+             # @ts.write saveblock
+             # initialize_serial(resource, id)
+           end
+         end
+       end
+
+       # Looks up one record or a collection of records from the data-store:
+       # "SELECT" in SQL.
+       #
+       # @param [Query] query
+       #   The query to be used to search for the resources
+       #
+       # @return [Array]
+       #   An Array of Hashes containing the key-value pairs for
+       #   each record
+       #
+       # @api semipublic
+       def read(query)
+         # DataMapper.logger << "query #{query.model.to_s}"
+         # DataMapper.logger << "query #{query.fields.inspect}"
+         queryblock = generate_query_with_conditions(query)
+         DataMapper.logger << "ts query #{queryblock.inspect}"
+         result = @ts.read_all(queryblock)
+
+         DataMapper.logger << "result #{result.inspect}"
+
+         # discriminator values are stored as strings; turn them back into classes
+         query.fields.each do |property|
+           if (property.type == DataMapper::Types::Discriminator)
+             key = property.name.to_s
+             result.each do |entry|
+               entry[key] = eval(entry[key].to_s)
+             end
+           end
+         end
+         # DataMapper.logger << "result after transformation of discriminators #{result.inspect}"
+
+         query.filter_records(result)
+       end
+
+       # Used by DataMapper to update the attributes on existing records in a
+       # data-store: "UPDATE" in SQL-speak. It takes a hash of the attributes
+       # to update with, as well as a collection object that specifies which
+       # resources should be updated.
+       #
+       # @param [Hash] attributes
+       #   A set of key-value pairs of the attributes to update the resources with.
+       # @param [DataMapper::Collection] collection
+       #   The collection of resources to update.
+       #
+       # @api semipublic
+       def update(attributes, collection)
+         DataMapper.logger << "update attributes: #{attributes.inspect} collection: #{collection.inspect}"
+         query = collection.query
+
+         query = generate_query_with_conditions(query)
+         # generate_query(collection.model)
+
+         records_to_delete = []
+         @monitor.synchronize do
+           result = @ts.read_all(query)
+
+           records_to_delete = collection.query.filter_records(result)
+
+           records_to_delete.each do |record|
+             result = @ts.take(record)
+             saveblock = {}
+             attributes.each do |key, value|
+               # DataMapper.logger << "key: #{key.name} value: #{value}"
+               saveblock[key.name.to_s] = convert_to_ts(key.type, value)
+             end
+             new = result.merge saveblock
+             @ts.write(new)
+
+             DataMapper.logger << "replaced: #{result.inspect} with: #{new.inspect}"
+           end
+         end # @monitor.synchronize
+
+         return records_to_delete.size
+       end
+
+       # Destroys all the records matching the given query. "DELETE" in SQL.
+       #
+       # @param [DataMapper::Collection] collection
+       #   The collection of resources to delete.
+       #
+       # @return [Integer]
+       #   The number of records that were deleted.
+       #
+       # @api semipublic
+       def delete(collection)
+         # DataMapper.logger << "delete #{collection.model.to_s}"
+         query = generate_query(collection.model)
+
+         result = @ts.read_all(query)
+
+         records_to_delete = collection.query.filter_records(result)
+         # DataMapper.logger << "entries to delete #{records_to_delete.inspect}"
+
+         records_to_delete.each do |record|
+           result = @ts.take(record)
+         end
+         records_to_delete.size
+       end
+
+       # Waits (in a background thread) until a tuple matching the model and the
+       # given conditions can be read from the tuplespace, then loads the matching
+       # resource and hands it to the callback. Returns the waiting thread.
+       def wait(action, query, callback, model, dm_query, time = 10000)
+         query = generate_query(model).merge create_conditions(query)
+
+         x = Thread.start do
+           begin
+             t = @ts.read query, (time / 1000)
+           end until t and check_descendents(model, t) # quick patch that belongs in the tuplespace
+
+           repository = dm_query.repository
+           model = dm_query.model
+           identity_fields = model.key(repository.name).map(&:name)
+
+           retrieve = identity_fields.map { |field| t[field.to_s] }
+
+           resource = model.get(*retrieve)
+           callback.call resource
+         end
+         x
+       end
+
+       # Registers a tuplespace observer for the given action and conditions and
+       # invokes the callback with the affected resource whenever it fires.
+       # Returns the observing thread.
+       def notify(action, query, callback, model, dm_query, time = nil)
+         x = Thread.start do
+           observer = notifyInternal(model, action, query, time)
+           DataMapper.logger << "waiting on #{model.to_s} model for new #{action} changes with a state change to #{query.inspect}"
+
+           observer.each do |e, t|
+             @monitor.synchronize {
+               DataMapper.logger << "TRIGGERED on #{model.to_s} model for new #{action} changes with a state change to #{query.inspect}"
+
+               if check_descendents(model, t) # quick patch that belongs in the tuplespace
+                 DataMapper.logger << "#{e} change detected for #{t.inspect}"
+                 resource = nil
+
+                 repository = dm_query.repository
+                 model = dm_query.model
+                 identity_fields = model.key(repository.name).map(&:name)
+
+                 DataMapper.logger << "rep: #{repository.name} model: #{model} identifier key: #{identity_fields.inspect}"
+
+                 retrieve = identity_fields.map { |field| t[field.to_s] }
+
+                 resource = model.get(*retrieve)
+                 DataMapper.logger << "found resource #{resource.inspect}"
+
+                 callback.call resource
+               end
+             }
+           end
+         end
+         return x
+       end
+
+       private
+
+       # Stringifies condition values so they can be matched against stored
+       # tuples; Regexp values are kept as-is so Rinda can match against them.
+       def create_conditions(conditions)
+         newconditions = {}
+         # newconditions["classtype"] = resource.attributes[:classtype].to_s
+         conditions.each do |key, value|
+           if value.is_a? Regexp
+             newconditions[key.to_s] = value
+           else
+             newconditions[key.to_s] = value.to_s
+           end
+         end
+         newconditions
+       end
+
+       # Returns a TupleSpace observer that waits for the given action, based on
+       # a hash of conditions.
+       def notifyInternal(model, action, conditions, time = nil)
+         query = generate_query(model)
+         DataMapper.logger << "notify query generated #{query.inspect}"
+
+         newconditions = create_conditions(conditions)
+
+         query = query.merge newconditions
+         DataMapper.logger << "notify query after merge of conditions #{query.inspect}"
+
+         if (time)
+           @ts.notify action, query, (time / 1000)
+         else
+           @ts.notify action, query
+         end
+       end
+
+       # Returns true when the tuple belongs to the model or one of its
+       # descendants (used when inheritance stores a "classtype" discriminator).
+       def check_descendents(model, result)
+         if (result["classtype"].nil?) # in case there is no inheritance relationship
+           return true
+         end
+         descendents = model.descendants.to_ary
+
+         # transform the array into a hash for quicker lookup
+         desc_lookup = Hash[*descendents.collect { |v|
+           [v.to_s, v.to_s]
+         }.flatten]
+         # p "identified following descendents #{desc_lookup.inspect}"
+         return desc_lookup[result["classtype"]]
+       end
+
+       # Builds a tuple template for the model: its storage name plus a nil
+       # (wildcard) entry for every property.
+       def generate_query(model)
+         queryblock = {}
+         queryblock["_model_"] = model.storage_name(name).to_s
+         model.properties.each do |property|
+           queryblock[property.name.to_s] = nil
+         end
+         queryblock
+       end
+
+       # Builds a tuple template for the query's model and fields and then fills
+       # in the equality conditions of the query.
+       def generate_query_with_conditions(query)
+         model = query.model
+
+         queryblock = {}
+         queryblock["_model_"] = model.storage_name(name).to_s
+
+         query.fields.each do |property|
+           queryblock[property.field.to_s] = nil
+         end
+
+         # DataMapper.logger << "Conditions #{query.conditions.inspect}"
+
+         conditions_statement(query.conditions, queryblock)
+       end
+
+       def comparison_statement(comparison, queryblock, negate = false)
+         value = comparison.value
+
+         if comparison.slug == :eql and not comparison.relationship?
+           # DataMapper.logger << "comparison with eql #{comparison.inspect}"
+           if not negate
+             subject = comparison.subject
+             column_name = subject.field
+             queryblock[column_name] = value
+           end
+
+           # elsif comparison.relationship?
+           #   if value.respond_to?(:query) && value.respond_to?(:loaded?) && !value.loaded?
+           #     return subquery(value.query, subject, qualify)
+           #   else
+           #     return conditions_statement(comparison.foreign_key_mapping, queryblock)
+           #   end
+         end
+         return queryblock
+       end
+
+       def conditions_statement(conditions, queryblock, negate = false)
+         case conditions
+         when Query::Conditions::NotOperation then negate_operation(conditions.operand, queryblock, negate)
+         when Query::Conditions::AbstractOperation then operation_statement(conditions, queryblock, negate)
+         when Query::Conditions::AbstractComparison then comparison_statement(conditions, queryblock, negate)
+         when Array
+           statement, bind_values = conditions # handle raw conditions
+           [ "(#{statement})", bind_values ].compact
+         else
+           return queryblock
+         end
+       end
+
+       # @api private
+       def operation_statement(operation, queryblock, negate = false)
+         operation.each do |operand|
+           # DataMapper.logger << "operation #{operand.inspect}"
+           queryblock = conditions_statement(operand, queryblock, negate)
+         end
+         return queryblock
+       end
+
+       # @api private
+       def negate_operation(operand, queryblock, negate)
+         if negate
+           return conditions_statement(operand, queryblock, false)
+         else
+           return conditions_statement(operand, queryblock, true)
+         end
+       end
+
+       # Make a new instance of the adapter. The adapter talks to a Rinda
+       # tuplespace: pass a TupleSpace object in the :local option to use an
+       # in-process space, or pass :host and :port to connect to a remote
+       # space over DRb. The tuplespace may be shared between several
+       # incarnations of this adapter.
+       #
+       # @param [String, Symbol] name
+       #   The name of the Repository using this adapter.
+       # @param [Hash] options
+       #   The options to set up the adapter (:local, or :host and :port).
+       #
+       # @api semipublic
+       def initialize(name, options = {})
+         super
+         @records = {} # leftover from the in-memory adapter template; not used here
+         if (@options[:local])
+           @ts = @options[:local]
+         else
+           @ts = DRbObject.new_with_uri("druby://#{@options[:host]}:#{@options[:port]}")
+         end
+         @monitor = Monitor.new
+       end
+
+       # Discriminator values (classes) are stored as strings in the tuplespace.
+       def convert_to_ts(key, value)
+         # DataMapper.logger << "key1 #{key.inspect} convert #{value.inspect} class #{value.class}"
+         if (key == DataMapper::Types::Discriminator)
+           return value.to_s
+         else
+           return value
+         end
+       end
+
+     end # class RindaAdapter
+
+     const_added(:RindaAdapter)
+   end # module Adapters
+ end # module DataMapper
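
A usage sketch of the adapter. This is an assumption-laden example, not part of the gem: the :rinda adapter symbol follows dm-core's usual ClassName-to-symbol convention, and the Task model, its fields, and the DRb port are hypothetical.

  require 'dm-core'
  require 'rinda_adapter'

  # either connect to a remote tuplespace served over DRb ...
  DataMapper.setup(:default, :adapter => :rinda, :host => 'localhost', :port => 12345)
  # ... or hand in a local, in-process TupleSpace
  # DataMapper.setup(:default, :adapter => :rinda, :local => Rinda::TupleSpace.new)

  class Task
    include DataMapper::Resource
    property :id,     Serial
    property :status, String
  end
  DataMapper.finalize if DataMapper.respond_to?(:finalize) # dm-core 1.x only

  Task.create(:status => 'new')   # written as a tuple tagged with "_model_" => "tasks"
  Task.all(:status => 'new')      # read via read_all plus in-memory filtering

  # react to future writes that match the given conditions
  Task.notify('write', { :status => 'done' }, lambda { |task| puts "finished: #{task.id}" })
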
data/spec/legacy/README ADDED
@@ -0,0 +1,6 @@
+ This directory contains the old specs, written for 0.1.0. They are retained
+ here while the specs are separated into spec/{public,semipublic}.
+
+ Examples marked as @done indicate those for which there are equivalents in
+ spec/{public,semipublic}. Once all specs are moved (or rewritten, as
+ appropriate), the legacy specs will be removed.