parse-stack 1.6.0 → 1.6.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 521333b9d84cd7319911a0e9b368fbfa6133e98e
4
- data.tar.gz: 543f3204ab4f4a9df14cbe88aa71be7bd0c69c47
3
+ metadata.gz: e6f2fcd731c3e5c535c2fcd5b6ad7de0411ee3b8
4
+ data.tar.gz: d43b5c15e4ca7ff3528d53c09d9f333ea5cca2cd
5
5
  SHA512:
6
- metadata.gz: 08b3de4ccf9cd5fdaa9045676b1147600bd3698d0cb99bc526d5172defbedc93e039d994f28350c291ad384470b96830d0e5186dbc5b6710255a95445af57a3b
7
- data.tar.gz: 206b597203c7df406463f4a778934c7d79cf3ac76a950803c843bd5fd8b9c5020bdda7b6cd69938c1c738034973114759307b25c6ce1f451e998ac3c15b0797a
6
+ metadata.gz: 7521b1712daddb91201f06062a327443f65467c4098441b68e06de9a53b862c5e2fe36d5f66abb68d974aae098b5a9218315e9de0ab494b47032ad02a81c0a94
7
+ data.tar.gz: 75e194bbed8d4aebbb8c1ab2fd08f1d4b6c1b1c58191df19bcaf356bfceac761c40874321049e2a83b622b02b68cdf1b167652cac9f796c3f2a1a59aca062297
data/Changes.md CHANGED
@@ -1,5 +1,13 @@
1
1
  ## Parse-Stack Changelog
2
2
 
3
+ ### 1.6.1
4
+ - NEW: Batch requests are now parallelized.
5
+ - `skip` in queries no longer capped to 10,000.
6
+ - `limit` in queries no longer capped at 1000.
7
+ - `all()` queries can now return as many results as possible.
8
+ - NEW: `each()` method on Parse::Object subclasses to iterate
9
+ over all records in the collection.
10
+
3
11
  ### 1.6.0
4
12
  - NEW: Auto generate models based on your remote schema.
5
13
  - The default server url is now 'http://localhost:1337/parse'.
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- parse-stack (1.6.0)
4
+ parse-stack (1.6.1)
5
5
  active_model_serializers (>= 0.9, < 1)
6
6
  activemodel (>= 4.2.1, < 6)
7
7
  activesupport (>= 4.2.1, < 6)
@@ -27,11 +27,10 @@ GEM
27
27
  erubis (~> 2.7.0)
28
28
  rails-dom-testing (~> 2.0)
29
29
  rails-html-sanitizer (~> 1.0, >= 1.0.2)
30
- active_model_serializers (0.10.2)
30
+ active_model_serializers (0.10.3)
31
31
  actionpack (>= 4.1, < 6)
32
32
  activemodel (>= 4.1, < 6)
33
- jsonapi (~> 0.1.1.beta2)
34
- railties (>= 4.1, < 6)
33
+ jsonapi (= 0.1.1.beta2)
35
34
  activemodel (5.0.0.1)
36
35
  activesupport (= 5.0.0.1)
37
36
  activesupport (5.0.0.1)
@@ -53,11 +52,9 @@ GEM
53
52
  faraday_middleware (0.10.1)
54
53
  faraday (>= 0.7.4, < 1.0)
55
54
  i18n (0.7.0)
56
- jsonapi (0.1.1.beta6)
57
- jsonapi-parser (= 0.1.1.beta3)
58
- jsonapi-renderer (= 0.1.1.beta1)
59
- jsonapi-parser (0.1.1.beta3)
60
- jsonapi-renderer (0.1.1.beta1)
55
+ json (1.8.3)
56
+ jsonapi (0.1.1.beta2)
57
+ json (~> 1.8)
61
58
  loofah (2.0.3)
62
59
  nokogiri (>= 1.5.9)
63
60
  method_source (0.8.2)
@@ -85,15 +82,8 @@ GEM
85
82
  nokogiri (~> 1.6.0)
86
83
  rails-html-sanitizer (1.0.3)
87
84
  loofah (~> 2.0)
88
- railties (5.0.0.1)
89
- actionpack (= 5.0.0.1)
90
- activesupport (= 5.0.0.1)
91
- method_source
92
- rake (>= 0.8.7)
93
- thor (>= 0.18.1, < 2.0)
94
85
  rake (11.3.0)
95
86
  slop (3.6.0)
96
- thor (0.19.1)
97
87
  thread_safe (0.3.5)
98
88
  tzinfo (1.2.2)
99
89
  thread_safe (~> 0.1)
@@ -36,7 +36,7 @@ module Parse
36
36
  # @see Array.destroy
37
37
  class BatchOperation
38
38
  include Enumerable
39
-
39
+
40
40
  # @!attribute requests
41
41
  # @return [Array] the set of requests in this batch.
42
42
 
@@ -130,10 +130,8 @@ module Parse
130
130
  def submit(segment = 50)
131
131
  @responses = []
132
132
  @requests.uniq!(&:signature)
133
- @requests.each_slice(segment) do |slice|
134
- @responses << client.batch_request( BatchOperation.new(slice) )
135
- #throttle
136
- # sleep (slice.count.to_f / MAX_REQ_SEC.to_f )
133
+ @responses = @requests.each_slice(segment).to_a.threaded_map(2) do |slice|
134
+ client.batch_request( BatchOperation.new(slice) )
137
135
  end
138
136
  @responses.flatten!
139
137
  #puts "Requests: #{@requests.count} == Response: #{@responses.count}"
@@ -148,7 +148,7 @@ module Parse
148
148
  @store.delete "mk:#{url.to_s}" # master key cache-key
149
149
  @store.delete @cache_key # final key
150
150
  end
151
- rescue Errno::EINVAL, Redis::CannotConnectError => e
151
+ rescue Errno::EINVAL, Redis::CannotConnectError, Redis::TimeoutError => e
152
152
  # if the cache store fails to connect, catch the exception but proceed
153
153
  # with the regular request, but turn off caching for this request. It is possible
154
154
  # that the cache connection resumes at a later point, so this is temporary.
@@ -158,10 +158,15 @@ module Parse
158
158
 
159
159
  @app.call(env).on_complete do |response_env|
160
160
  # Only cache GET requests with valid HTTP status codes whose content-length
161
- # is greater than 20. Otherwise they could be errors, successes and empty result sets.
161
+ # is between 20 bytes and 1MB. Otherwise they could be errors, successes and empty result sets.
162
+
162
163
  if @enabled && method == :get && CACHEABLE_HTTP_CODES.include?(response_env.status) &&
163
- response_env.present? && response_env.response_headers[CONTENT_LENGTH_KEY].to_i > 20
164
+ response_env.present? && response_env.response_headers[CONTENT_LENGTH_KEY].to_i.between?(20,1_000_000)
165
+ begin
164
166
  @store.store(@cache_key, response_env, expires: @expires) # ||= response_env.body
167
+ rescue => e
168
+ puts "[Parse::Cache] Store Error: #{e}"
169
+ end
165
170
  end # if
166
171
  # do something with the response
167
172
  # response_env[:response_headers].merge!(...)
@@ -169,6 +169,54 @@ module Parse
169
169
  obj
170
170
  end
171
171
 
172
+ # This method allows you to efficiently iterate over all the records in the collection
173
+ # (lower memory cost) at a minor cost of performance. This method utilizes
174
+ # the `created_at` field of Parse records to order and iterate over all records,
175
+ # therefore you should not use this method if you want to perform a query
176
+ # with constraints against `created_at` field or need specific type of ordering.
177
+ # @param constraints [Hash] a set of query constraints.
178
+ # @yield a block which will iterate through each matching record.
179
+ # @example
180
+ #
181
+ # post = Post.first
182
+ # # iterate over all comments matching conditions
183
+ # Comments.each( post: post) do |comment|
184
+ # # ...
185
+ # end
186
+ # @return [Parse::Object] the last Parse::Object record processed.
187
+ def each(constraints = {}, **opts, &block)
188
+ #anchor_date = opts[:anchor_date] || Parse::Date.now
189
+ batch_size = 250
190
+ start_cursor = first( order: :created_at.asc, keys: :created_at )
191
+ constraints.merge! cache: false, limit: batch_size, order: :created_at.asc
192
+ all_query = query(constraints)
193
+ cursor = start_cursor
194
+ # the exclusion set is a set of ids not to include the next query.
195
+ exclusion_set = []
196
+ loop do
197
+ _q = query(constraints.dup)
198
+ _q.where(:created_at.on_or_after => cursor.created_at)
199
+ # set of ids not to include in the next query. non-performant, but accurate.
200
+ _q.where(:id.nin => exclusion_set) unless exclusion_set.empty?
201
+ results = _q.results # get results
202
+
203
+ break cursor if results.empty? # break if no results
204
+ results.each(&block)
205
+ next_cursor = results.last
206
+ # break if we got less than the maximum requested
207
+ break next_cursor if results.count < batch_size
208
+ # break if the next object is the same as the current object.
209
+ break next_cursor if cursor.id == next_cursor.id
210
+ # The exclusion set is used in the case where multiple records have the exact
211
+ # same created_at date (down to the microsecond). This prevents getting the same
212
+ # record in the next query request.
213
+ exclusion_set = results.select { |r| r.created_at == next_cursor.created_at }.map(&:id)
214
+ results = nil
215
+ cursor = next_cursor
216
+ end
217
+
218
+ end
219
+
172
220
  # Auto save all objects matching the query constraints. This method is
173
221
  # meant to be used with a block. Any objects that are modified in the block
174
222
  # will be batched for a save operation. This uses the `updated_at` field to
@@ -185,7 +233,7 @@ module Parse
185
233
  # @return [Boolean] whether there were any errors.
186
234
  def save_all(constraints = {})
187
235
  force = false
188
-
236
+ batch_size = 250
189
237
  iterator_block = nil
190
238
  if block_given?
191
239
  iterator_block = Proc.new
@@ -202,7 +250,7 @@ module Parse
202
250
  constraints.merge! :updated_at.on_or_before => anchor_date
203
251
  constraints.merge! cache: false
204
252
  # oldest first, so we create a reduction-cycle
205
- constraints.merge! order: :updated_at.asc, limit: 100
253
+ constraints.merge! order: :updated_at.asc, limit: batch_size
206
254
  update_query = query(constraints)
207
255
  #puts "Setting Anchor Date: #{anchor_date}"
208
256
  cursor = nil
@@ -231,6 +279,7 @@ module Parse
231
279
  # cursor_item = results.max_by(&updated_comparison_block).updated_at
232
280
  # puts "[Parse::SaveAll] Updated #{results.count} records updated <= #{cursor.updated_at}"
233
281
 
282
+ break if results.count < batch_size # we didn't hit a cap on results.
234
283
  if cursor.is_a?(Parse::Object)
235
284
  update_query.where :updated_at.gte => cursor.updated_at
236
285
 
data/lib/parse/query.rb CHANGED
@@ -386,16 +386,14 @@ module Parse
386
386
  self #chaining
387
387
  end #order
388
388
 
389
- # Use with limit to paginate through results. Default is 0 with
390
- # maximum value being 10,000.
389
+ # Use with limit to paginate through results. Default is 0.
391
390
  # @example
392
391
  # # get the next 3 songs after the first 10
393
392
  # Song.all :limit => 3, :skip => 10
394
- # @param count [Integer] The number of records to skip.
393
+ # @param amount [Integer] The number of records to skip.
395
394
  # @return [self]
396
- def skip(count)
397
- # min <= count <= max
398
- @skip = [ 0, count.to_i, 10_000].sort[1]
395
+ def skip(amount)
396
+ @skip = [0,amount.to_i].max
399
397
  @results = nil
400
398
  self #chaining
401
399
  end
@@ -403,21 +401,21 @@ module Parse
403
401
  # Limit the number of objects returned by the query. The default is 100, with
404
402
  # Parse allowing a maximum of 1000. The framework also allows a value of
405
403
  # `:max`. Utilizing this will have the framework continually intelligently
406
- # utilize `:skip` to continue to paginate through results until an empty
407
- # result set is received or the `:skip` limit is reached (10,000). When
408
- # utilizing `all()`, `:max` is the default option for `:limit`.
404
+ # utilize `:skip` to continue to paginate through results until no more results
405
+ # match the query criteria. When utilizing `all()`, `:max` is the default
406
+ # option for `:limit`.
409
407
  # @example
410
408
  # Song.all :limit => 1 # same as Song.first
411
- # Song.all :limit => 1000 # maximum allowed by Parse
412
- # Song.all :limit => :max # up to 11,000 records (theoretical).
409
+ # Song.all :limit => 2025 # large limits supported.
410
+ # Song.all :limit => :max # as many records as possible.
413
411
  # @param count [Integer,Symbol] The number of records to return. You may pass :max
414
412
  to get as many as 11_000 records with the aid of skipping.
415
413
  # @return [self]
416
414
  def limit(count)
417
- if count == :max
418
- @limit = 11_000
419
- elsif count.is_a?(Numeric)
420
- @limit = [ 0, count.to_i, 11_000].sort[1]
415
+ if count.is_a?(Numeric)
416
+ @limit = [ 0, count.to_i ].max
417
+ elsif count == :max
418
+ @limit = :max
421
419
  else
422
420
  @limit = nil
423
421
  end
@@ -643,32 +641,51 @@ module Parse
643
641
  # max_results is used to iterate through as many API requests as possible using
644
642
  # :skip and :limit parameter.
645
643
  # @!visibility private
646
- def max_results(raw: false)
644
+ def max_results(raw: false, on_batch: nil, discard_results: false)
647
645
  compiled_query = compile
648
- query_limit = compiled_query[:limit] ||= 1_000
649
- query_skip = compiled_query[:skip] ||= 0
650
- compiled_query[:limit] = 1_000
651
- iterations = (query_limit/1000.0).ceil
646
+ batch_size = 1_000
652
647
  results = []
648
+ # determine if there is a user provided hard limit
649
+ _limit = (@limit.is_a?(Numeric) && @limit > 0) ? @limit : nil
650
+ compiled_query[:skip] ||= 0
651
+
652
+ loop do
653
+ # always reset the batch size
654
+ compiled_query[:limit] = batch_size
655
+
656
+ # if a hard limit was set by the user, then if the remaining amount
657
+ # is less than the batch size, set the new limit to the remaining amount.
658
+ unless _limit.nil?
659
+ compiled_query[:limit] = _limit if _limit < batch_size
660
+ end
653
661
 
654
- iterations.times do |idx|
655
662
  response = fetch!( compiled_query )
656
663
  break if response.error? || response.results.empty?
657
664
 
658
665
  items = response.results
659
666
  items = decode(items) unless raw
660
-
667
+ # if a block is provided, we do not keep the results after processing.
661
668
  if block_given?
662
669
  items.each(&Proc.new)
663
670
  else
664
- results += items
671
+ # concat results unless discard_results is true
672
+ results += items unless discard_results
665
673
  end
674
+
675
+ on_batch.call(items) if on_batch.present?
666
676
  # if we get less than the maximum set of results, most likely the next
667
677
  # query will return empty results - no need to perform it.
668
678
  break if items.count < compiled_query[:limit]
679
+
680
+ # if we have a set limit, then subtract from the total amount the user requested
681
+ # from the total in the current result set. Break if we've reached our limit.
682
+ unless _limit.nil?
683
+ _limit -= items.count
684
+ break if _limit < 1
685
+ end
686
+
669
687
  # add to the skip count for the next iteration
670
- compiled_query[:skip] += 1_000
671
- break if compiled_query[:skip] > 10_000
688
+ compiled_query[:skip] += batch_size
672
689
  end
673
690
  results
674
691
  end
@@ -722,14 +739,14 @@ module Parse
722
739
  # @return [Array<Parse::Object>] if raw is set to false, a list of matching Parse::Object subclasses.
723
740
  def results(raw: false)
724
741
  if @results.nil?
725
- if @limit.nil? || @limit.to_i <= 1_000
742
+ if block_given?
743
+ max_results(raw: raw, &Proc.new)
744
+ elsif @limit.is_a?(Numeric)
726
745
  response = fetch!( compile )
727
746
  return [] if response.error?
728
747
  items = raw ? response.results : decode(response.results)
729
748
  return items.each(&Proc.new) if block_given?
730
749
  @results = items
731
- elsif block_given?
732
- return max_results(raw: raw, &Proc.new)
733
750
  else
734
751
  @results = max_results(raw: raw)
735
752
  end
@@ -780,7 +797,6 @@ module Parse
780
797
  def compile(encode: true, includeClassName: false)
781
798
  run_callbacks :prepare do
782
799
  q = {} #query
783
- q[:limit] = 11_000 if @limit == :max
784
800
  q[:limit] = @limit if @limit.is_a?(Numeric) && @limit > 0
785
801
  q[:skip] = @skip if @skip > 0
786
802
 
@@ -6,6 +6,6 @@ module Parse
6
6
  # The Parse Server SDK for Ruby
7
7
  module Stack
8
8
  # The current version.
9
- VERSION = "1.6.0"
9
+ VERSION = "1.6.1"
10
10
  end
11
11
  end
@@ -93,7 +93,7 @@ module Parse
93
93
  classNames = routes[trigger].keys.dup
94
94
  if include_wildcard && classNames.include?('*') #then create the list for all classes
95
95
  classNames.delete '*' #delete the wildcard before we expand it
96
- classNames = classNames + Parse.registered_classes - ['_Session']
96
+ classNames = classNames + Parse.registered_classes
97
97
  classNames.uniq!
98
98
  end
99
99
 
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: parse-stack
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.6.0
4
+ version: 1.6.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Anthony Persaud
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2016-11-24 00:00:00.000000000 Z
11
+ date: 2016-12-01 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: activemodel
@@ -274,7 +274,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
274
274
  version: '0'
275
275
  requirements: []
276
276
  rubyforge_project:
277
- rubygems_version: 2.6.6
277
+ rubygems_version: 2.5.1
278
278
  signing_key:
279
279
  specification_version: 4
280
280
  summary: Parse-Server Ruby Client and Relational Mapper