dm-persevere-adapter 0.52.1 → 0.60.0

@@ -0,0 +1,97 @@
+ module DataMapper
+   module Resource
+ 
+     def get_new_objects
+       new_parents = parent_resources.select{|p| p.new? }
+       new_children = child_collections.collect{ |collection| collection.select{|c| c.new? }}.flatten
+       new_children_of_new_parents = new_parents.map{ |np| np.__send__(:child_collections).collect{ |n| select{ |p| p.new? }}}.flatten
+       new_parents_of_new_children = new_children.map{ |nc| nc.__send__(:parent_resources).select{|p| p.new? }}.flatten
+       [ new_parents, new_children, new_children_of_new_parents, new_parents_of_new_children, self.new? ? self : [] ].flatten.uniq
+     end
+ 
+     def create_hook
+       op = original_attributes.dup
+       _create
+       @_original_attributes = op.dup
+     end
+ 
+     alias _old_update _update
+     def _update
+       if repository.adapter.is_a?(DataMapper::Adapters::PersevereAdapter)
+         # remove from the identity map
+         remove_from_identity_map
+ 
+         repository.update(dirty_attributes, collection_for_self)
+ 
+         # remove the cached key in case it is updated
+         remove_instance_variable(:@_key)
+ 
+         add_to_identity_map
+ 
+         true
+       else
+         _old_update
+       end
+     end
+ 
+     alias _old_save _save
+     def _save(safe)
+       objects = get_new_objects
+       objects.each do |obj|
+         obj.__send__(:save_self, safe)
+       end
+       _old_save(safe)
+     end
+ 
+     ##
+     # Convert a DataMapper Resource to a JSON.
+     #
+     # @param [Query] query
+     #   The DataMapper query object passed in
+     #
+     # @api semipublic
+     def to_json_hash(include_relationships=true)
+       json_rsrc = Hash.new
+       relations = self.model.relationships.keys
+ 
+       if include_relationships
+         self.model.relationships.each do |nom, relation|
+           value = relation.get!(self)
+           parent = relation.parent_model
+           child = relation.child_model
+ 
+           unless value.nil?
+             json_rsrc[nom] = case relation
+             # KEEP THIS CASE AT THE TOP BECAUSE IT IS_A OneToMany ARGH
+             when DataMapper::Associations::ManyToMany::Relationship then
+               [value].flatten.map{ |v| { "$ref" => "../#{v.model.storage_name}/#{v.key.first}" } }
+             when DataMapper::Associations::ManyToOne::Relationship then
+               { "$ref" => "../#{value.model.storage_name}/#{value.key.first}" }
+             when DataMapper::Associations::OneToMany::Relationship then
+               value.map{ |v| { "$ref" => "../#{v.model.storage_name}/#{v.key.first}" } }
+             when DataMapper::Associations::OneToOne::Relationship then
+               { "$ref" => "../#{value.model.storage_name}/#{value.first.key.first}" }
+             end
+           end
+         end
+       end
+ 
+       attributes(:property).each do |property, value|
+         next if value.nil? || (value.is_a?(Array) && value.empty?) || relations.include?(property.name.to_s)
+ 
+         json_rsrc[property.field] = case value
+         when DateTime then value.new_offset(0).strftime("%Y-%m-%dT%H:%M:%SZ")
+         when Date then value.to_s
+         when Time then value.strftime("%H:%M:%S")
+         when Float then value.to_f
+         when BigDecimal then value.to_f
+         when Integer then value.to_i
+         else # when String, TrueClass, FalseClass then
+           self[property.name]
+         end
+       end
+ 
+       json_rsrc
+     end
+   end
+ end
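
Note: a minimal usage sketch of the new Resource#to_json_hash. The Author/Book models and values below are hypothetical, not part of the gem. Plain attributes are keyed by field name, associations become Persevere-style "$ref" hashes, and passing false skips the relationship keys, which is what the adapter does on create.

    # Hypothetical models for illustration only
    class Author
      include DataMapper::Resource
      property :id,   Serial
      property :name, String
      has n, :books
    end

    class Book
      include DataMapper::Resource
      property :id,    Serial
      property :title, String
      belongs_to :author
    end

    book = Book.get(7)
    book.to_json_hash
    # => roughly { "title" => "Moby-Dick", :author => { "$ref" => "../author/1" } }
    book.to_json_hash(false)
    # => roughly { "title" => "Moby-Dick" }  (relationship keys omitted)
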
@@ -9,7 +9,6 @@
  #
  require 'net/http'
  require 'uri'
-
  require 'rubygems'
  require 'json'
 
@@ -3,16 +3,21 @@ require 'dm-core'
  require 'dm-aggregates'
  require 'dm-types'
  require 'extlib'
- # require 'json'
  require 'bigdecimal'
+ require 'digest/md5'
 
- require 'model_json_support'
- require 'persevere'
+ # Things we add or override in DataMapper
+ require 'dm/associations/many_to_many'
+ require 'dm/model'
+ require 'dm/property'
+ require 'dm/query'
+ require 'dm/resource'
 
- require 'types/property'
- require 'types/json_reference'
- require 'types/json_reference_collection'
+ require 'persevere'
 
+ #
+ # Override BigDecimal to_json because it's ugly and doesn't work for us
+ #
  class BigDecimal
    alias to_json_old to_json
 
@@ -31,13 +36,12 @@ module DataMapper
 
        connect if @persevere.nil?
        resources = Array.new
-       json_query = make_json_query(query)
+       json_query = query.to_json_query
        path = "/#{query.model.storage_name}/#{json_query}"
-
+
        response = @persevere.retrieve(path)
 
        if response.code == "200"
-         # results = JSON.parse(response.body)
          results = [response.body]
          results.each do |row_of_results|
            row = query.fields.zip([row_of_results].flatten).map do |field, value|
@@ -127,12 +131,18 @@ module DataMapper
      def create_model_storage(model)
        name = self.name
        properties = model.properties_with_subclasses(name)
-
+
        return false if storage_exists?(model.storage_name(name))
        return false if properties.empty?
 
-       schema_hash = model.to_json_schema_compatible_hash()
-
+       # Make sure storage for referenced objects exists
+       model.relationships.each_pair do |n, r|
+         if ! storage_exists?(r.child_model.storage_name)
+           put_schema({'id' => r.child_model.storage_name, 'properties' => {}})
+         end
+       end
+       schema_hash = model.to_json_schema_hash()
+
        return true unless put_schema(schema_hash) == false
        false
      end
@@ -147,14 +157,16 @@ module DataMapper
      def upgrade_model_storage(model)
        name = self.name
        properties = model.properties_with_subclasses(name)
-
+
+       DataMapper.logger.debug("Upgrading #{model.name}")
+
        if success = create_model_storage(model)
          return properties
        end
 
-       new_schema_hash = model.to_json_schema_compatible_hash()
+       new_schema_hash = model.to_json_schema_hash()
        current_schema_hash = get_schema(new_schema_hash['id'])[0]
-       # Diff of what is there and what will be added.
+       # TODO: Diff of what is there and what will be added.
 
        new_properties = properties.map do |property|
          prop_name = property.name.to_s
@@ -178,7 +190,7 @@ module DataMapper
      # @api semipublic
      def destroy_model_storage(model)
        return true unless storage_exists?(model.storage_name(name))
-       schema_hash = model.to_json_schema_compatible_hash()
+       schema_hash = model.to_json_schema_hash()
        return true unless delete_schema(schema_hash) == false
        false
      end
@@ -207,20 +219,20 @@ module DataMapper
        scale = Property::DEFAULT_SCALE_BIGDECIMAL
 
        @type_map ||= {
-         Types::Serial => { :primitive => 'integer' },
-         Types::Boolean => { :primitive => 'boolean' },
-         Integer => { :primitive => 'integer'},
-         String => { :primitive => 'string'},
-         Class => { :primitive => 'string'},
-         BigDecimal => { :primitive => 'number'},
-         Float => { :primitive => 'number'},
-         DateTime => { :primitive => 'string', :format => 'date-time'},
-         Date => { :primitive => 'string', :format => 'date'},
-         Time => { :primitive => 'string', :format => 'time'},
-         TrueClass => { :primitive => 'boolean'},
-         Types::Text => { :primitive => 'string'},
+         Types::Serial => { :primitive => 'integer' },
+         Types::Boolean => { :primitive => 'boolean' },
+         Integer => { :primitive => 'integer'},
+         String => { :primitive => 'string'},
+         Class => { :primitive => 'string'},
+         BigDecimal => { :primitive => 'number'},
+         Float => { :primitive => 'number'},
+         DateTime => { :primitive => 'string', :format => 'date-time'},
+         Date => { :primitive => 'string', :format => 'date'},
+         Time => { :primitive => 'string', :format => 'time'},
+         TrueClass => { :primitive => 'boolean'},
+         Types::Text => { :primitive => 'string'},
          DataMapper::Types::Object => { :primitive => 'string'},
-         DataMapper::Types::URI => { :primitive => 'string', :format => 'uri'}
+         DataMapper::Types::URI => { :primitive => 'string', :format => 'uri'}
        }.freeze
      end
    end
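
Note: this type map drives the JSON Schema that to_json_schema_hash (the rename of to_json_schema_compatible_hash above) sends to Persevere. As a rough, illustrative sketch only, with made-up property names and no guarantee about the exact output shape, a model with a String name and a DateTime created_at would be described along these lines:

    # Illustrative only; not the literal output of to_json_schema_hash
    {
      'id'         => 'book',
      'properties' => {
        'name'       => { 'type' => 'string' },
        'created_at' => { 'type' => 'string', 'format' => 'date-time' }
      }
    }
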
@@ -246,18 +258,20 @@ module DataMapper
      def create(resources)
        connect if @persevere.nil?
        created = 0
-       resources.each do |resource|
+
+       check_schemas
+
+       resources.each do |resource|
          serial = resource.model.serial(self.name)
          path = "/#{resource.model.storage_name}/"
-         payload = make_json_compatible_hash(resource)
-
-         payload.delete(:id)
-
+         # Invoke to_json_hash with a boolean to indicate this is a create
+         # We might want to make this a post-to_json_hash cleanup instead
+         payload = resource.to_json_hash(false)
+         # scrub_data(payload)
          response = @persevere.create(path, payload)
 
          # Check the response, this needs to be more robust and raise
          # exceptions when there's a problem
-
          if response.code == "201"# good:
            rsrc_hash = JSON.parse(response.body)
            # Typecast attributes, DM expects them properly cast
@@ -270,7 +284,7 @@ module DataMapper
              when Time then rsrc_hash[prop.field.to_s] = value.getlocal
              end
            end
-
+
            serial.set!(resource, rsrc_hash["id"]) unless serial.nil?
 
            created += 1
@@ -303,7 +317,9 @@ module DataMapper
      def update(attributes, query)
        connect if @persevere.nil?
        updated = 0
-
+
+       check_schemas
+
        if ! query.is_a?(DataMapper::Query)
          resources = [query].flatten
        else
@@ -313,9 +329,8 @@ module DataMapper
        resources.each do |resource|
          tblname = resource.model.storage_name
          path = "/#{tblname}/#{resource.key.first}"
-
-         payload = make_json_compatible_hash(resource)
-
+         payload = resource.to_json_hash()
+         # scrub_data(payload)
          result = @persevere.update(path, payload)
 
          if result.code == "200"
@@ -362,29 +377,58 @@ module DataMapper
      def read_many(query)
        connect if @persevere.nil?
 
-       resources = Array.new
-       json_query, headers = make_json_query(query)
+       # check_schemas
 
+       resources = Array.new
        tblname = query.model.storage_name
+
+       json_query, headers = query.to_json_query
+
        path = "/#{tblname}/#{json_query}"
-       # puts path
+       DataMapper.logger.debug("--> PATH/QUERY: #{path}")
+
        response = @persevere.retrieve(path, headers)
-
+
        if response.code.match(/20?/)
          results = JSON.parse(response.body)
          results.each do |rsrc_hash|
            # Typecast attributes, DM expects them properly cast
            query.fields.each do |prop|
-             value = rsrc_hash[prop.field.to_s]
-             if prop.field == 'id'
-               rsrc_hash[prop.field.to_s] = prop.typecast(value.to_s.match(/(#{tblname})?\/?([a-zA-Z0-9_-]+$)/)[2])
-             else
-               rsrc_hash[prop.field.to_s] = prop.typecast(value) unless value.nil?
+             object_reference = false
+             pname = prop.field.to_s
+             if pname[-3,3] == "_id"
+               pname = pname[0..-4]
+               object_reference = true
+             end
+             value = rsrc_hash[pname]
+             # debugger
+             # Dereference references
+             unless value.nil?
+               if value.is_a?(Hash)
+                 if value.has_key?("$ref")
+                   value = value["$ref"].split("/")[-1]
+                 else
+                   # value = value["id"].split("/")[-1]
+                 end
+               elsif value.is_a?(Array)
+                 value = value.map do |v|
+                   if v.has_key?("$ref")
+                     v = v["$ref"].split("/")[-1]
+                   else
+                     # v = v["id"].split("/")[-1]
+                   end
+                 end
+               end
+               if prop.field == 'id'
+                 rsrc_hash[pname] = prop.typecast(value.to_s.match(/(#{tblname})?\/?([a-zA-Z0-9_-]+$)/)[2])
+               else
+                 rsrc_hash[pname] = prop.typecast(value)
+               end
              end
              # Shift date/time objects to the correct timezone because persevere is UTC
              case prop
-             when DateTime then rsrc_hash[prop.field.to_s] = value.new_offset(Rational(Time.now.getlocal.gmt_offset/3600, 24))
-             when Time then rsrc_hash[prop.field.to_s] = value.getlocal
+             when DateTime then rsrc_hash[pname] = value.new_offset(Rational(Time.now.getlocal.gmt_offset/3600, 24))
+             when Time then rsrc_hash[pname] = value.getlocal
              end
            end
          end
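
Note: the new branch above unwraps Persevere JSON references before typecasting; a small illustration of that transformation (values made up):

    value = { "$ref" => "../book/42" }
    value["$ref"].split("/")[-1]   # => "42", which prop.typecast then converts
    # An Array of references is mapped element by element the same way.
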
@@ -394,11 +438,14 @@ module DataMapper
 
        # This won't work if the RegExp is nested more then 1 layer deep.
        if query.conditions.class == DataMapper::Query::Conditions::AndOperation
-         regexp_conds = query.conditions.operands.select{ |obj| obj.is_a?(DataMapper::Query::Conditions::RegexpComparison) ||
-           (obj.is_a?(DataMapper::Query::Conditions::NotOperation) && obj.operand.is_a?(DataMapper::Query::Conditions::RegexpComparison))}
+         regexp_conds = query.conditions.operands.select do |obj|
+           obj.is_a?(DataMapper::Query::Conditions::RegexpComparison) ||
+             ( obj.is_a?(DataMapper::Query::Conditions::NotOperation) && obj.operand.is_a?(DataMapper::Query::Conditions::RegexpComparison) )
+         end
          regexp_conds.each{|cond| resources = resources.select{|resource| cond.matches?(resource)} }
 
        end
+
        # query.match_records(resources)
        resources
      end
@@ -419,6 +466,8 @@ module DataMapper
        connect if @persevere.nil?
 
        deleted = 0
+
+       # check_schemas
 
        if ! query.is_a?(DataMapper::Query)
          resources = [query].flatten
@@ -447,6 +496,9 @@ module DataMapper
      def get_schema(name = nil, project = nil)
        path = nil
        single = false
+
+       # check_schemas
+
        if name.nil? & project.nil?
          path = "/Class/"
        elsif project.nil?
@@ -462,12 +514,16 @@ module DataMapper
          schemas.each do |schema|
            schema['properties']['id'] = { 'type' => "serial", 'index' => true }
          end
+         # save_schemas
+
          return name.nil? ? schemas : schemas[0..0]
        else
          return false
        end
      end
 
+     ##
+     #
      def put_schema(schema_hash, project = nil)
        path = "/Class/"
        if ! project.nil?
@@ -476,23 +532,34 @@ module DataMapper
              schema_hash['id'] = "#{project}/#{schema_hash['id']}"
            end
          else
-           puts "You need an id key/value in the hash"
+           DataMapper.logger.error("You need an id key/value in the hash")
          end
        end
-       schema_hash['properties'].delete('id') if schema_hash['properties'].has_key?('id')
+
+       # check_schemas
+
+       scrub_schema(schema_hash['properties'])
        schema_hash['extends'] = { "$ref" => "/Class/Versioned" } if @options[:versioned]
+
        result = @persevere.create(path, schema_hash)
        if result.code == '201'
+         # save_schemas
+
          return JSON.parse(result.body)
        else
          return false
        end
      end
 
+     ##
+     #
      def update_schema(schema_hash, project = nil)
        id = schema_hash['id']
        payload = schema_hash.reject{|key,value| key.to_sym.eql?(:id) }
-       payload['properties'].delete('id') if payload['properties'].has_key?('id')
+       scrub_schema(payload['properties'])
+       payload['extends'] = { "$ref" => "/Class/Versioned" } if @options[:versioned]
+
+       # check_schemas
 
        if project.nil?
          path = "/Class/#{id}"
@@ -503,12 +570,15 @@ module DataMapper
        result = @persevere.update(path, payload)
 
        if result.code == '200'
+         # save_schemas
          return result.body
        else
          return false
        end
      end
 
+     ##
+     #
      def delete_schema(schema_hash, project = nil)
        if ! project.nil?
          if schema_hash.has_key?("id")
@@ -516,13 +586,17 @@ module DataMapper
              schema_hash['id'] = "#{project}/#{schema_hash['id']}"
            end
          else
-           puts "You need an id key/value in the hash"
+           DataMapper.logger.error("You need an id key/value in the hash")
          end
        end
+
+       # check_schemas
+
        path = "/Class/#{schema_hash['id']}"
        result = @persevere.delete(path)
 
        if result.code == "204"
+         # save_schemas
          return true
        else
          return false
@@ -546,7 +620,6 @@ module DataMapper
      #   the adapter
      #
      # @api semipublic
-
      def initialize(name, uri_or_options)
        super
 
@@ -565,13 +638,18 @@ module DataMapper
 
        @resource_naming_convention = NamingConventions::Resource::Underscored
        @identity_maps = {}
-       @classes = []
        @persevere = nil
        @prepped = false
-
+       @schema_backups = Array.new
+       @last_backup = nil
+
        connect
      end
 
+     private
+
+     ##
+     #
      def connect
        if ! @prepped
          uri = URI::HTTP.build(@options).to_s
@@ -580,27 +658,71 @@ module DataMapper
        end
      end
 
-     def prep_persvr
+     def scrub_data(json_hash)
+       items = [DataMapper::Model.descendants.map{|c| "#{c.name.downcase}_id"}].flatten
+       items.each { |item| json_hash.delete(item) if json_hash.has_key?(item) }
+       json_hash.reject! { |k,v| v.nil? }
+       json_hash
+     end
+
+     ##
+     #
+     def scrub_schema(json_hash)
+       items = [DataMapper::Model.descendants.map{|c| "#{c.name.downcase}_id"}, 'id'].flatten
+       items.each { |item| json_hash.delete(item) if json_hash.has_key?(item) }
+       json_hash
+     end
+
+     def check_schemas
+       schemas = @persevere.retrieve("/Class").body
+       md5 = Digest::MD5.hexdigest(schemas)
+
+       if ! @last_backup.nil?
+         if @last_backup[:hash] != md5
+           DataMapper.logger.debug("Schemas changed, do you know why? (#{md5} :: #{@last_backup[:hash]})")
+           @schema_backups.each do |sb|
+             if sb[:hash] == md5
+               DataMapper.logger.debug("Schemas reverted to #{sb.inspect}")
+             end
+           end
+         end
+       end
+     end
+
+     def save_schemas
+       schemas = @persevere.retrieve("/Class").body
+       md5 = Digest::MD5.hexdigest(schemas)
+       @last_backup = { :hash => md5, :schemas => schemas, :timestamp => Time.now }
+       @schema_backups << @last_backup
+       # Dump to filesystem
+     end
+
+     def get_classes
        # Because this is an AbstractAdapter and not a
        # DataObjectAdapter, we can't assume there are any schemas
        # present, so we retrieve the ones that exist and keep them up
        # to date
+       classes = Array.new
        result = @persevere.retrieve('/Class[=id]')
        if result.code == "200"
          hresult = JSON.parse(result.body)
          hresult.each do |cname|
            junk,name = cname.split("/")
-           @classes << name
+           classes << name
          end
-         @prepped = true
        else
-         puts "Error retrieving existing tables: ", result
+         DataMapper.logger.error("Error retrieving existing tables: #{result}")
        end
-
+       classes
+     end
+
+     ##
+     #
+     def prep_persvr
        #
        # If the user specified a versioned datastore load the versioning REST code
        #
-       if ! @classes.include?("Versioned") && @options[:versioned]
+       unless get_classes.include?("Versioned") && @options[:versioned]
          versioned_class = <<-EOF
            {
              id: "Versioned",
@@ -651,176 +773,10 @@ module DataMapper
            response = @persevere.persevere.send_request('POST', URI.encode('/Class/'), versioned_class, { 'Content-Type' => 'application/javascript' } )
          rescue Timeout::Error, Errno::EINVAL, Errno::ECONNRESET, EOFError,
            Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError => e
-           puts "Persevere Create Failed: #{e}, Trying again."
+           DataMapper.logger.error("Persevere Create Failed: #{e}, Trying again.")
          end
        end
      end
-
-     ##
-     # Convert a DataMapper Resource to a JSON.
-     #
-     # @param [Query] query
-     #   The DataMapper query object passed in
-     #
-     # @api semipublic
-     def make_json_compatible_hash(resource)
-       json_rsrc = Hash.new
-       resource.attributes(:property).each do |property, value|
-
-         next if value.nil? || (value.is_a?(Array) && value.empty?)
-
-         if property.type == DataMapper::Types::JsonReference ||
-            property.type == DataMapper::Types::JsonReferenceCollection
-           json_rsrc[property.field] = property.value(value)
-         else
-           json_rsrc[property.field] = case value
-           when DateTime then value.new_offset(0).strftime("%Y-%m-%dT%H:%M:%SZ")
-           when Date then value.to_s
-           when Time then value.strftime("%H:%M:%S")
-           when Float then value.to_f
-           when BigDecimal then value.to_f
-           when Integer then value.to_i
-           else resource[property.name]
-           end
-         end
-       end
-
-       json_rsrc
-     end
-
-     ##
-     # Convert a DataMapper Query to a JSON Query.
-     #
-     # @param [Query] query
-     #   The DataMapper query object passed in
-     #
-     # @api semipublic
-     def make_json_query(query)
-       def process_in(value, candidate_set)
-         result_string = Array.new
-         candidate_set.to_a.each do |candidate|
-           result_string << "#{value}=#{candidate}"
-         end
-         if result_string.length > 0
-           "(#{result_string.join("|")})"
-         else
-           "#{value}=''"
-         end
-       end
-
-       def munge_condition(condition)
-         cond = condition.loaded_value
-
-         cond = "\"#{cond}\"" if cond.is_a?(String)
-         cond = "date(%10.f)" % (Time.parse(cond.to_s).to_f * 1000) if cond.is_a?(DateTime)
-         cond = 'undefined' if cond.nil?
-         return cond
-       end
-
-       def process_condition(condition)
-         case condition
-         # Persevere 1.0 regular expressions are disable for security so we pass them back for DataMapper query filtering
-         # without regular expressions, the like operator is inordinately challenging hence we pass it back
-         # when :regexp then "RegExp(\"#{condition.value.source}\").test(#{condition.subject.name})"
-         when DataMapper::Query::Conditions::RegexpComparison then []
-         when DataMapper::Query::Conditions::LikeComparison then "#{condition.subject.field}='#{condition.loaded_value.gsub('%', '*')}'"
-         when DataMapper::Query::Conditions::AndOperation then
-           inside = condition.operands.map { |op| process_condition(op) }.flatten
-           inside.empty? ? [] : "(#{inside.join("&")})"
-         when DataMapper::Query::Conditions::OrOperation then "(#{condition.operands.map { |op| process_condition(op) }.join("|")})"
-         when DataMapper::Query::Conditions::NotOperation then
-           inside = process_condition(condition.operand)
-           inside.empty? ? [] : "!(%s)" % inside
-         when DataMapper::Query::Conditions::InclusionComparison then process_in(condition.subject.name, condition.value)
-         when DataMapper::Query::Conditions::EqualToComparison then
-           "#{condition.subject.field}=#{munge_condition(condition)}"
-         when DataMapper::Query::Conditions::GreaterThanComparison then
-           "#{condition.subject.field}>#{munge_condition(condition)}"
-         when DataMapper::Query::Conditions::LessThanComparison then
-           "#{condition.subject.field}<#{munge_condition(condition)}"
-         when DataMapper::Query::Conditions::GreaterThanOrEqualToComparison then
-           "#{condition.subject.field}>=#{munge_condition(condition)}"
-         when DataMapper::Query::Conditions::LessThanOrEqualToComparison then
-           "#{condition.subject.field}<=#{munge_condition(condition)}"
-         when DataMapper::Query::Conditions::NullOperation then []
-         when Array then
-           old_statement, bind_values = condition
-           statement = old_statement.dup
-           bind_values.each{ |bind_value| statement.sub!('?', bind_value.to_s) }
-           statement.gsub(' ', '')
-         else condition.to_s.gsub(' ', '')
-         end
-       end
-
-       json_query = ""
-       query_terms = Array.new
-       order_operations = Array.new
-       field_ops = Array.new
-       fields = Array.new
-       headers = Hash.new
-
-       query_terms << process_condition(query.conditions)
-
-       if query_terms.flatten.length != 0
-         json_query += "[?#{query_terms.join("][?")}]"
-       end
-
-       query.fields.each do |field|
-         if field.respond_to?(:operator)
-           field_ops << case field.operator
-           when :count then
-             if field.target.is_a?(DataMapper::Property)
-               "[?#{field.target.field}!=undefined].length"
-             else # field.target is all.
-               ".length"
-             end
-           when :min
-             if field.target.type == DateTime || field.target.type == Time || field.target.type == Date
-               "[=#{field.target.field}]"
-             else
-               ".min(?#{field.target.field})"
-             end
-           when :max
-             if field.target.type == DateTime || field.target.type == Time || field.target.type == Date
-               "[=#{field.target.field}]"
-             else
-               ".max(?#{field.target.field})"
-             end
-           when :sum
-             ".sum(?#{field.target.field})"
-           when :avg
-             "[=#{field.target.field}]"
-           end
-         else
-           fields << "'#{field.field}':#{field.field}"
-         end
-       end
-
-       json_query += field_ops.join("")
-
-       if query.order && query.order.any?
-         query.order.map do |direction|
-           order_operations << case direction.operator
-           when :asc then "[\/#{direction.target.field}]"
-           when :desc then "[\\#{direction.target.field}]"
-           end
-         end
-       end
-
-       json_query += order_operations.join("")
-
-       json_query += "[={" + fields.join(',') + "}]" unless fields.empty?
-
-       offset = query.offset.to_i
-       limit = query.limit.nil? ? nil : query.limit.to_i + offset - 1
-
-       if offset != 0 || !limit.nil?
-         headers.merge!({"Range", "items=#{offset}-#{limit}"})
-       end
-       # puts "#{query.inspect}"
-       # puts json_query, headers
-       return json_query, headers
-     end
    end # class PersevereAdapter
    const_added(:PersevereAdapter)
  end # module Adapters
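
Note: for context, a minimal setup sketch for this adapter. The host and port are illustrative, and the 'persevere' adapter name is assumed from const_added(:PersevereAdapter) above.

    require 'rubygems'
    require 'dm-core'
    require 'dm-persevere-adapter'

    DataMapper.setup(:default,
                     :adapter => 'persevere',
                     :host    => 'localhost',
                     :port    => 8080)
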