esse 0.3.4 → 0.4.0.rc1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: e0d63306d9028972cd58998169513b15c761f1ecd6fe67d2d6d04a6eb995cdae
- data.tar.gz: f4877f14109d22c40dbec51eef17edc86d60231d488929cc773749deeb8cb6b4
+ metadata.gz: f6023e2f4fe6b5d8e45dd09e7c98bb250d8862ea3adc769f119011efc07191c5
+ data.tar.gz: 852a46c60435d7c0848254d6b79a4a29ee89efe9e0700975d7d651660ad874d8
  SHA512:
- metadata.gz: ee0b45292911de3db131af146d212e36f94ad8782021c89e20da74813135ebff6c586996bd33e355bb6af0e190db6a3be34eba1aebb4fa51f75abb4adf4cf1a1
- data.tar.gz: 70c69e8941c07ec18f3d14164ef211308ea72752bc22e1fa6e20dc0578d6c43aa549e681cca1177b148c6e37b9d2420f21f5a9ba9da4414a70a6814196a3d2e4
+ metadata.gz: 40b5c6d0ed7ff8ce793cd8be088c01f9377c62cf58366b201c8ee988f04e9f2f3e53118a48c165857394dc100c26fd06b300d653f2afdead076430d19210c554
+ data.tar.gz: f3b4f4acadea569ef1108c1d3d3a2745c965177c29f307e5076357d89aec1f050f778933a350ccb75c0128a295d0f1349899452ea041551fae8ae574fb75793d
@@ -95,6 +95,13 @@ module Esse
  end
  print_message(stats.join(', ') + '.')
  end
+
+ def elasticsearch_reindex(event)
+ print_message '[%<runtime>s] Reindex from %<from>s to %<to>s successfully completed',
+ from: colorize(event[:request].dig(:body, :source, :index), :bold),
+ to: colorize(event[:request].dig(:body, :dest, :index), :bold),
+ runtime: formatted_runtime(event[:runtime])
+ end
  end
  end
  end
@@ -16,14 +16,18 @@ module Esse
  DESC
  option :suffix, type: :string, default: nil, aliases: '-s', desc: 'Suffix to append to index name'
  option :import, type: :boolean, default: true, desc: 'Import documents before point alias to the new index'
+ option :reindex, type: :boolean, default: false, desc: 'Use _reindex API to import documents from the old index to the new index'
  option :optimize, type: :boolean, default: true, desc: 'Optimize index before import documents by disabling refresh_interval and setting number_of_replicas to 0'
+ option :settings, type: :hash, default: nil, desc: 'List of settings to pass to the index class. Example: --settings=refresh_interval:1s,number_of_replicas:0'
  def reset(*index_classes)
  require_relative 'index/reset'
- Reset.new(indices: index_classes, **options.to_h.transform_keys(&:to_sym)).run
+ opts = HashUtils.deep_transform_keys(options.to_h, &:to_sym)
+ if opts[:import] && opts[:reindex]
+ raise ArgumentError, 'You cannot use --import and --reindex together'
+ end
+ Reset.new(indices: index_classes, **opts).run
  end

- # @TODO Add reindex task to create a new index and import documents from the old index using _reindex API
-
  desc 'create *INDEX_CLASSES', 'Creates indices for the given classes'
  long_desc <<-DESC
  Creates index and applies mapping and settings for the given classes.
@@ -33,9 +37,11 @@ module Esse
  DESC
  option :suffix, type: :string, default: nil, aliases: '-s', desc: 'Suffix to append to index name'
  option :alias, type: :boolean, default: false, aliases: '-a', desc: 'Update alias after create index'
+ option :settings, type: :hash, default: nil, desc: 'List of settings to pass to the index class. Example: --settings=index.refresh_interval:-1,index.number_of_replicas:0'
  def create(*index_classes)
  require_relative 'index/create'
- Create.new(indices: index_classes, **options.to_h.transform_keys(&:to_sym)).run
+ opts = HashUtils.deep_transform_keys(options.to_h, &:to_sym)
+ Create.new(indices: index_classes, **opts).run
  end

  desc 'delete *INDEX_CLASSES', 'Deletes indices for the given classes'
@@ -58,9 +64,11 @@ module Esse
  desc 'update_settings *INDEX_CLASS', 'Closes the index for read/write operations, updates the index settings, and open it again'
  option :suffix, type: :string, default: nil, aliases: '-s', desc: 'Suffix to append to index name'
  option :type, type: :string, default: nil, aliases: '-t', desc: 'Document Type to update mapping for'
+ option :settings, type: :hash, default: nil, desc: 'List of settings to pass to the index class. Example: --settings=index.refresh_interval:-1,index.number_of_replicas:0'
  def update_settings(*index_classes)
  require_relative 'index/update_settings'
- UpdateSettings.new(indices: index_classes, **options.to_h.transform_keys(&:to_sym)).run
+ opts = HashUtils.deep_transform_keys(options.to_h, &:to_sym)
+ UpdateSettings.new(indices: index_classes, **opts).run
  end

  desc 'update_mapping *INDEX_CLASS', 'Create or update a mapping'
@@ -89,18 +97,17 @@ module Esse
  option :suffix, type: :string, default: nil, aliases: '-s', desc: 'Suffix to append to index name'
  option :context, type: :hash, default: {}, required: true, desc: 'List of options to pass to the index class'
  option :repo, type: :string, default: nil, alias: '-r', desc: 'Repository to use for import'
- option :eager_include_document_attributes, type: :string, default: nil, desc: 'Comma separated list of lazy document attributes to include to the bulk index request'
- option :lazy_update_document_attributes, type: :string, default: nil, desc: 'Comma separated list of lazy document attributes to bulk update after the bulk index request'
+ option :preload_lazy_attributes, type: :string, default: nil, desc: 'Comma separated list of lazy document attributes to preload using search API before the bulk import. Or pass `true` to preload all lazy attributes'
+ option :eager_load_lazy_attributes, type: :string, default: nil, desc: 'Comma separated list of lazy document attributes to include to the bulk index request. Or pass `true` to include all lazy attributes'
+ option :update_lazy_attributes, type: :string, default: nil, desc: 'Comma separated list of lazy document attributes to bulk update after the bulk index request. Or pass `true` to include all lazy attributes'
+
  def import(*index_classes)
  require_relative 'index/import'
  opts = HashUtils.deep_transform_keys(options.to_h, &:to_sym)
- opts.delete(:lazy_update_document_attributes) if opts[:lazy_update_document_attributes] == 'false'
- opts.delete(:eager_include_document_attributes) if opts[:eager_include_document_attributes] == 'false'
- if (val = opts[:eager_include_document_attributes])
- opts[:eager_include_document_attributes] = (val == 'true') ? true : val.split(',')
- end
- if (val = opts[:lazy_update_document_attributes])
- opts[:lazy_update_document_attributes] = (val == 'true') ? true : val.split(',')
+ %i[preload_lazy_attributes eager_load_lazy_attributes update_lazy_attributes].each do |key|
+ if (val = opts.delete(key)) && val != 'false'
+ opts[key] = (val == 'true') ? true : val.split(',')
+ end
  end
  Import.new(indices: index_classes, **opts).run
  end
@@ -13,8 +13,10 @@ Esse.configure do |config|

  # Global index settings
  # cluster.settings = {
- # number_of_shards: 5,
- # number_of_replicas: 0,
+ # index: {
+ # number_of_shards: 5,
+ # number_of_replicas: 0,
+ # }
  # }

  # Global index mappings
data/lib/esse/config.rb CHANGED
@@ -10,8 +10,11 @@ module Esse
  # cluster.index_prefix = 'backend'
  # cluster.client = Elasticsearch::Client.new
  # cluster.settings = {
- # number_of_shards: 2,
- # number_of_replicas: 0
+ # index: {
+ # number_of_shards: 2,
+ # number_of_replicas: 1
+ # },
+ # analysis: { ... }
  # }
  # cluster.mappings = {
  # dynamic_templates: [...]
data/lib/esse/core.rb CHANGED
@@ -6,6 +6,7 @@ module Esse
  require_relative 'primitives'
  require_relative 'collection'
  require_relative 'document'
+ require_relative 'document_for_partial_update'
  require_relative 'document_lazy_attribute'
  require_relative 'lazy_document_header'
  require_relative 'hash_document'
@@ -91,4 +92,10 @@ module Esse

  !!(object.is_a?(Esse::Document) && object.id)
  end
+
+ def self.document_match_with_header?(document, id, routing, type)
+ id && id.to_s == document.id.to_s &&
+ routing == document.routing &&
+ (LazyDocumentHeader::ACCEPTABLE_DOC_TYPES.include?(document.type) && LazyDocumentHeader::ACCEPTABLE_DOC_TYPES.include?(type) || document.type == type)
+ end
  end
data/lib/esse/document.rb CHANGED
@@ -2,11 +2,13 @@

  module Esse
  class Document
+ MUTATIONS_FALLBACK = {}.freeze
+
  attr_reader :object, :options

  def initialize(object, **options)
  @object = object
- @options = options
+ @options = options.freeze
  end

  # @return [String, Number] the document ID
@@ -84,11 +86,16 @@ module Esse
  id.nil?
  end

- def ==(other)
- other.is_a?(self.class) && (
- id == other.id && type == other.type && routing == other.routing && meta == other.meta && source == other.source
- )
+ def eql?(other, match_lazy_doc_header: false)
+ if match_lazy_doc_header
+ other.eql?(self)
+ else
+ other.is_a?(Esse::Document) && (
+ id.to_s == other.id.to_s && type == other.type && routing == other.routing && meta == other.meta
+ )
+ end
  end
+ alias_method :==, :eql?

  def doc_header
  { _id: id }.tap do |h|
@@ -97,10 +104,17 @@ module Esse
  end
  end

+ def document_for_partial_update(source)
+ DocumentForPartialUpdate.new(self, source: source)
+ end
+
  def inspect
  attributes = %i[id routing source].map do |attr|
  value = send(attr)
- "#{attr}: #{value.inspect}" if value
+ next unless value
+ "#{attr}: #{value.inspect}"
+ rescue
+ nil
  end.compact.join(', ')
  attributes << " mutations: #{@__mutations__.inspect}" if @__mutations__
  "#<#{self.class.name || 'Esse::Document'} #{attributes}>"
@@ -112,7 +126,9 @@
  instance_variable_set(:@__mutated_source__, nil)
  end

- protected
+ def mutations
+ @__mutations__ || MUTATIONS_FALLBACK
+ end

  def mutated_source
  return source unless @__mutations__
@@ -0,0 +1,16 @@
+ # frozen_string_literal: true
+
+ module Esse
+ class DocumentForPartialUpdate < Esse::Document
+ extend Forwardable
+
+ def_delegators :object, :id, :type, :routing, :options
+
+ attr_reader :source
+
+ def initialize(lazy_header, source:)
+ @source = source
+ super(lazy_header)
+ end
+ end
+ end
data/lib/esse/events.rb CHANGED
@@ -56,5 +56,7 @@ module Esse
  register_event 'elasticsearch.exist'
  register_event 'elasticsearch.count'
  register_event 'elasticsearch.get'
+ register_event 'elasticsearch.reindex'
+ register_event 'elasticsearch.update_by_query'
  end
  end
@@ -1,27 +1,35 @@
  module Esse
  module Import
  class Bulk
- def initialize(type: nil, index: nil, delete: nil, create: nil, update: nil)
- @index = Array(index).select(&method(:valid_doc?)).reject(&:ignore_on_index?).map do |doc|
+ def self.build_from_documents(type: nil, index: nil, delete: nil, create: nil, update: nil)
+ index = Array(index).select(&Esse.method(:document?)).reject(&:ignore_on_index?).map do |doc|
  value = doc.to_bulk
  value[:_type] ||= type if type
- { index: value }
+ value
  end
- @create = Array(create).select(&method(:valid_doc?)).reject(&:ignore_on_index?).map do |doc|
+ create = Array(create).select(&Esse.method(:document?)).reject(&:ignore_on_index?).map do |doc|
  value = doc.to_bulk
  value[:_type] ||= type if type
- { create: value }
+ value
  end
- @update = Array(update).select(&method(:valid_doc?)).reject(&:ignore_on_index?).map do |doc|
+ update = Array(update).select(&Esse.method(:document?)).reject(&:ignore_on_index?).map do |doc|
  value = doc.to_bulk(operation: :update)
  value[:_type] ||= type if type
- { update: value }
+ value
  end
- @delete = Array(delete).select(&method(:valid_doc?)).reject(&:ignore_on_delete?).map do |doc|
+ delete = Array(delete).select(&Esse.method(:document?)).reject(&:ignore_on_delete?).map do |doc|
  value = doc.to_bulk(data: false)
  value[:_type] ||= type if type
- { delete: value }
+ value
  end
+ new(index: index, delete: delete, create: create, update: update)
+ end
+
+ def initialize(index: nil, delete: nil, create: nil, update: nil)
+ @index = Esse::ArrayUtils.wrap(index).map { |payload| { index: payload } }
+ @create = Esse::ArrayUtils.wrap(create).map { |payload| { create: payload } }
+ @update = Esse::ArrayUtils.wrap(update).map { |payload| { update: payload } }
+ @delete = Esse::ArrayUtils.wrap(delete).map { |payload| { delete: payload } }
  end

  # Return an array of RequestBody instances
@@ -68,10 +76,6 @@ module Esse

  private

- def valid_doc?(doc)
- Esse.document?(doc)
- end
-
  def optimistic_request
  request = Import::RequestBodyAsJson.new
  request.create = @create
@@ -114,7 +114,7 @@
  def update(doc = nil, suffix: nil, **options)
  if document?(doc)
  options[:id] = doc.id
- options[:body] = { doc: doc.source }
+ options[:body] = { doc: doc.mutated_source }
  options[:type] = doc.type if doc.type?
  options[:routing] = doc.routing if doc.routing?
  end
@@ -140,7 +140,7 @@
  def index(doc = nil, suffix: nil, **options)
  if document?(doc)
  options[:id] = doc.id
- options[:body] = doc.source
+ options[:body] = doc.mutated_source
  options[:type] = doc.type if doc.type?
  options[:routing] = doc.routing if doc.routing?
  end
@@ -171,13 +171,53 @@
  }.merge(options)
  cluster.may_update_type!(definition)

+ to_index = []
+ to_create = []
+ to_update = []
+ to_delete = []
+ Esse::ArrayUtils.wrap(index).each do |doc|
+ if doc.is_a?(Hash)
+ to_index << doc
+ elsif Esse.document?(doc) && !doc.ignore_on_index?
+ hash = doc.to_bulk
+ hash[:_type] ||= type if type
+ to_index << hash
+ end
+ end
+ Esse::ArrayUtils.wrap(create).each do |doc|
+ if doc.is_a?(Hash)
+ to_create << doc
+ elsif Esse.document?(doc) && !doc.ignore_on_index?
+ hash = doc.to_bulk
+ hash[:_type] ||= type if type
+ to_create << hash
+ end
+ end
+ Esse::ArrayUtils.wrap(update).each do |doc|
+ if doc.is_a?(Hash)
+ to_update << doc
+ elsif Esse.document?(doc) && !doc.ignore_on_index?
+ hash = doc.to_bulk(operation: :update)
+ hash[:_type] ||= type if type
+ to_update << hash
+ end
+ end
+ Esse::ArrayUtils.wrap(delete).each do |doc|
+ if doc.is_a?(Hash)
+ to_delete << doc
+ elsif Esse.document?(doc) && !doc.ignore_on_delete?
+ hash = doc.to_bulk(data: false)
+ hash[:_type] ||= type if type
+ to_delete << hash
+ end
+ end
+
  # @TODO Wrap the return in a some other Stats object with more information
  Esse::Import::Bulk.new(
- **definition.slice(:type),
- create: create,
- delete: delete,
- index: index,
- update: update,
+ create: to_create,
+ delete: to_delete,
+ index: to_index,
+ update: to_update,
  ).each_request do |request_body|
  cluster.api.bulk(**definition, body: request_body.body) do |event_payload|
  event_payload[:body_stats] = request_body.stats
@@ -198,36 +238,61 @@ module Esse
  # @option [String, nil] :suffix The index suffix. Defaults to the nil.
  # @option [Hash] :context The collection context. This value will be passed as argument to the collection
  # May be SQL condition or any other filter you have defined on the collection.
+ # @option [Boolean, Array<String>] :eager_load_lazy_attributes A list of lazy document attributes to include to the bulk index request.
+ # Or pass `true` to include all lazy attributes.
+ # @option [Boolean, Array<String>] :update_lazy_attributes A list of lazy document attributes to bulk update each after the bulk import.
+ # Or pass `true` to update all lazy attributes.
+ # @option [Boolean, Array<String>] :preload_lazy_attributes A list of lazy document attributes to preload using search API before the bulk import.
+ # Or pass `true` to preload all lazy attributes.
  # @return [Numeric] The number of documents imported
- def import(*repo_types, context: {}, eager_include_document_attributes: false, lazy_update_document_attributes: false, suffix: nil, **options)
+ def import(*repo_types, context: {}, eager_load_lazy_attributes: false, update_lazy_attributes: false, preload_lazy_attributes: false, suffix: nil, **options)
  repo_types = repo_hash.keys if repo_types.empty?
  count = 0

+ if options.key?(:eager_include_document_attributes)
+ warn 'The `eager_include_document_attributes` option is deprecated. Use `eager_load_lazy_attributes` instead.'
+ eager_load_lazy_attributes = options.delete(:eager_include_document_attributes)
+ end
+ if options.key?(:lazy_update_document_attributes)
+ warn 'The `lazy_update_document_attributes` option is deprecated. Use `update_lazy_attributes` instead.'
+ update_lazy_attributes = options.delete(:lazy_update_document_attributes)
+ end
+
  repo_hash.slice(*repo_types).each do |repo_name, repo|
- doc_attrs = {eager: [], lazy: []}
- doc_attrs[:eager] = repo.lazy_document_attribute_names(eager_include_document_attributes)
- doc_attrs[:lazy] = repo.lazy_document_attribute_names(lazy_update_document_attributes)
- doc_attrs[:lazy] -= doc_attrs[:eager]
+ # Elasticsearch 6.x and older have multiple types per index.
+ # This gem supports multiple types per index for backward compatibility, but we recommend to update
+ # your elasticsearch to a at least 7.x version and use a single type per index.
+ #
+ # Note that the repository name will be used as the document type.
+ # mapping_default_type
+ bulk_kwargs = { suffix: suffix, type: repo_name, **options }
+ cluster.may_update_type!(bulk_kwargs)

  context ||= {}
- context[:lazy_attributes] = doc_attrs[:eager] if doc_attrs[:eager].any?
+ context[:eager_load_lazy_attributes] = eager_load_lazy_attributes
+ context[:preload_lazy_attributes] = preload_lazy_attributes
  repo.each_serialized_batch(**context) do |batch|
- # Elasticsearch 6.x and older have multiple types per index.
- # This gem supports multiple types per index for backward compatibility, but we recommend to update
- # your elasticsearch to a at least 7.x version and use a single type per index.
- #
- # Note that the repository name will be used as the document type.
- # mapping_default_type
- kwargs = { suffix: suffix, type: repo_name, **options }
- cluster.may_update_type!(kwargs)
-
- bulk(**kwargs, index: batch)
-
- doc_attrs[:lazy].each do |attr_name|
- partial_docs = repo.documents_for_lazy_attribute(attr_name, batch.reject(&:ignore_on_index?))
- next if partial_docs.empty?
-
- bulk(**kwargs, update: partial_docs)
+ bulk(**bulk_kwargs, index: batch)
+
+ if update_lazy_attributes != false
+ attrs = repo.lazy_document_attribute_names(update_lazy_attributes)
+ attrs -= repo.lazy_document_attribute_names(eager_load_lazy_attributes)
+ update_attrs = attrs.each_with_object(Hash.new { |h, k| h[k] = {} }) do |attr_name, memo|
+ filtered_docs = batch.reject do |doc|
+ doc.ignore_on_index? || doc.mutations.key?(attr_name)
+ end
+ next if filtered_docs.empty?
+
+ repo.retrieve_lazy_attribute_values(attr_name, filtered_docs).each do |doc, value|
+ memo[doc.doc_header][attr_name] = value
+ end
+ end
+ if update_attrs.any?
+ bulk_update = update_attrs.map do |header, values|
+ header.merge(data: {doc: values})
+ end
+ bulk(**bulk_kwargs, update: bulk_update)
+ end
  end

  count += batch.size
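
For reference, a hedged sketch of calling import with the renamed keywords (UsersIndex and the attribute names are illustrative, not part of this release):

    UsersIndex.import(
      eager_load_lazy_attributes: %w[total_orders], # resolved while each batch is serialized
      update_lazy_attributes: %w[city_names],       # sent as a follow-up bulk update per batch
      preload_lazy_attributes: false                # or a list/true to fetch values from the live index first
    )
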
@@ -236,6 +301,20 @@
  count
  end

+ # Update documents by query
+ #
+ # @param options [Hash] Hash of parameters that will be passed along to elasticsearch request
+ # @option [String, nil] :suffix The index suffix. Defaults to the nil.
+ #
+ # @return [Hash] The elasticsearch response hash
+ def update_by_query(suffix: nil, **options)
+ definition = {
+ index: index_name(suffix: suffix),
+ }.merge(options)
+ cluster.may_update_type!(definition)
+ cluster.api.update_by_query(**definition)
+ end
+
  protected

  def document?(doc)
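
A hedged usage sketch of the new index-level helper (index class, script, and query are illustrative):

    UsersIndex.update_by_query(
      suffix: '2024_08_26',
      body: {
        script: { source: 'ctx._source.active = true' },
        query: { term: { country_id: 42 } }
      }
    )
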
@@ -26,10 +26,10 @@
  #
  # @see http://www.elasticsearch.org/blog/changing-mapping-with-zero-downtime/
  # @see Esse::Transport#create_index
- def create_index(suffix: nil, body: nil, **options)
+ def create_index(suffix: nil, body: nil, settings: nil, **options)
  options = CREATE_INDEX_RESERVED_KEYWORDS.merge(options)
  name = build_real_index_name(suffix)
- definition = body || [settings_hash, mappings_hash].reduce(&:merge)
+ definition = body || [settings_hash(settings: settings), mappings_hash].reduce(&:merge)

  if options.delete(:alias) && name != index_name
  definition[:aliases] = { index_name => {} }
@@ -48,34 +48,40 @@
  # @return [Hash] the elasticsearch response
  #
  # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-open-close.html
- def reset_index(suffix: index_suffix, optimize: true, import: true, reindex: false, **options)
+ def reset_index(suffix: index_suffix, settings: nil, optimize: true, import: true, reindex: false, refresh: nil, **options)
  cluster.throw_error_when_readonly!

  suffix ||= Esse.timestamp
  suffix = Esse.timestamp while index_exist?(suffix: suffix)

- if optimize
- definition = [settings_hash, mappings_hash].reduce(&:merge)
+ if optimize && import
+ definition = [settings_hash(settings: settings), mappings_hash].reduce(&:merge)
  number_of_replicas = definition.dig(Esse::SETTING_ROOT_KEY, :index, :number_of_replicas)
  refresh_interval = definition.dig(Esse::SETTING_ROOT_KEY, :index, :refresh_interval)
  new_number_of_replicas = ((definition[Esse::SETTING_ROOT_KEY] ||= {})[:index] ||= {})[:number_of_replicas] = 0
  new_refresh_interval = ((definition[Esse::SETTING_ROOT_KEY] ||= {})[:index] ||= {})[:refresh_interval] = '-1'
  create_index(**options, suffix: suffix, alias: false, body: definition)
  else
- create_index(**options, suffix: suffix, alias: false)
+ create_index(**options, suffix: suffix, alias: false, settings: settings)
  end

  if index_exist? && aliases.none?
  cluster.api.delete_index(index: index_name)
  end
  if import
- import(**options, suffix: suffix)
- elsif reindex && (_from = indices_pointing_to_alias).any?
- # @TODO: Reindex using the reindex API
+ import_kwargs = import.is_a?(Hash) ? import : {}
+ import_kwargs[:refresh] ||= refresh if refresh
+ import(**options, **import_kwargs, suffix: suffix)
+ elsif reindex && (source_indexes = indices_pointing_to_alias).any?
+ reindex_kwargs = reindex.is_a?(Hash) ? reindex : {}
+ reindex_kwargs[:wait_for_completion] = true unless reindex_kwargs.key?(:wait_for_completion)
+ source_indexes.each do |from|
+ cluster.api.reindex(**options, body: { source: { index: from }, dest: { index: index_name(suffix: suffix) } }, refresh: refresh)
+ end
  end

- if optimize && number_of_replicas != new_number_of_replicas || refresh_interval != new_refresh_interval
- update_settings(suffix: suffix)
+ if optimize && import && number_of_replicas != new_number_of_replicas || refresh_interval != new_refresh_interval
+ update_settings(suffix: suffix, settings: settings)
  refresh(suffix: suffix)
  end

@@ -152,16 +158,17 @@
  #
  # @param :suffix [String, nil] :suffix The index suffix
  # @see Esse::Transport#update_settings
- def update_settings(suffix: nil, **options)
+ def update_settings(suffix: nil, settings: nil, **options)
  response = nil

- settings = HashUtils.deep_transform_keys(settings_hash.fetch(Esse::SETTING_ROOT_KEY), &:to_s)
+ settings = HashUtils.deep_transform_keys(settings_hash(settings: settings).fetch(Esse::SETTING_ROOT_KEY), &:to_sym)
  if options[:body]
- settings = settings.merge(HashUtils.deep_transform_keys(options.delete(:body), &:to_s))
+ body = HashUtils.deep_transform_keys(options.delete(:body), &:to_sym)
+ settings = HashUtils.deep_merge(settings, body)
  end
- settings.delete('number_of_shards') # Can't change number of shards for an index
- settings['index']&.delete('number_of_shards')
- analysis = settings.delete('analysis')
+ settings.delete(:number_of_shards) # Can't change number of shards for an index
+ settings[:index]&.delete(:number_of_shards)
+ analysis = settings.delete(:analysis)

  if settings.any?
  response = cluster.api.update_settings(index: index_name(suffix: suffix), body: settings, **options)
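
A hedged sketch of the new keyword arguments on reset_index and update_settings (class name and values are illustrative):

    UsersIndex.reset_index(
      import: false,
      reindex: true,                               # copy documents from the currently aliased indices via _reindex
      settings: { 'index.number_of_replicas': 0 }  # merged on top of the class-level settings
    )
    UsersIndex.update_settings(settings: { 'index.refresh_interval': '1s' })
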
@@ -24,7 +24,7 @@
  # @return [Enumerator] All serialized entries
  def documents(repo_name = nil, **kwargs)
  Enumerator.new do |yielder|
- each_serialized_batch(repo_name, **kwargs) do |documents, **_collection_kargs|
+ each_serialized_batch(repo_name, **kwargs) do |documents|
  documents.each { |document| yielder.yield(document) }
  end
  end
@@ -4,9 +4,28 @@ module Esse
  # https://github.com/elastic/elasticsearch-ruby/blob/master/elasticsearch-api/lib/elasticsearch/api/actions/indices/put_settings.rb
  class Index
  module ClassMethods
- def settings_hash
+ # Elasticsearch supports passing index.* related settings directly in the body of the request.
+ # We are moving it to the index key to make it more explicit and to be the source-of-truth when merging settings.
+ # So the settings `{ number_of_shards: 1 }` will be transformed to `{ index: { number_of_shards: 1 } }`
+ INDEX_SIMPLIFIED_SETTINGS = %i[
+ number_of_shards
+ number_of_replicas
+ refresh_interval
+ ].freeze
+
+ def settings_hash(settings: nil)
  hash = setting.body
- { Esse::SETTING_ROOT_KEY => (hash.key?(Esse::SETTING_ROOT_KEY) ? hash[Esse::SETTING_ROOT_KEY] : hash) }
+ values = (hash.key?(Esse::SETTING_ROOT_KEY) ? hash[Esse::SETTING_ROOT_KEY] : hash)
+ values = HashUtils.explode_keys(values)
+ if settings.is_a?(Hash)
+ values = HashUtils.deep_merge(values, HashUtils.explode_keys(settings))
+ end
+ INDEX_SIMPLIFIED_SETTINGS.each do |key|
+ next unless values.key?(key)
+
+ (values[:index] ||= {}).merge!(key => values.delete(key))
+ end
+ { Esse::SETTING_ROOT_KEY => values }
  end

  # Define /_settings definition by each index.
@@ -18,7 +37,7 @@
  #
  # class UserIndex < Esse::Index
  # settings {
- # number_of_replicas: 4,
+ # index: { number_of_replicas: 4 }
  # }
  # end
  #
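
Based on the comment above, a rough sketch of the normalization, assuming a hypothetical UsersIndex that declares no other settings:

    UsersIndex.settings_hash(settings: { number_of_shards: 1, 'index.refresh_interval': '30s' })
    # => { settings: { index: { refresh_interval: '30s', number_of_shards: 1 } } }
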
@@ -2,11 +2,19 @@

  module Esse
  class LazyDocumentHeader
+ ACCEPTABLE_CLASSES = [Esse::LazyDocumentHeader, Esse::Document].freeze
+ ACCEPTABLE_DOC_TYPES = [nil, '_doc', 'doc'].freeze
+
  def self.coerce_each(values)
+ values = Esse::ArrayUtils.wrap(values)
+ return values if values.all? do |value|
+ ACCEPTABLE_CLASSES.any? { |klass| value.is_a?(klass) }
+ end
+
  arr = []
- Esse::ArrayUtils.wrap(values).map do |value|
+ values.flatten.map do |value|
  instance = coerce(value)
- arr << instance if instance&.valid?
+ arr << instance if instance && !instance.id.nil?
  end
  arr
  end
@@ -17,57 +25,61 @@
  if value.is_a?(Esse::LazyDocumentHeader)
  value
  elsif value.is_a?(Esse::Document)
- new(value.doc_header)
+ value
  elsif value.is_a?(Hash)
  resp = value.transform_keys do |key|
  case key
  when :_id, :id, '_id', 'id'
- :_id
+ :id
  when :_routing, :routing, '_routing', 'routing'
  :routing
  when :_type, :type, '_type', 'type'
- :_type
+ :type
  else
  key.to_sym
  end
  end
- new(resp)
+ resp[:id] ||= nil
+ new(**resp)
  elsif String === value || Integer === value
- new(_id: value)
+ new(id: value)
  end
  end

- def initialize(attributes)
- @attributes = attributes
- end
+ attr_reader :id, :type, :routing, :options

- def valid?
- !@attributes[:_id].nil?
+ def initialize(id:, type: nil, routing: nil, **extra_attributes)
+ @id = id
+ @type = type
+ @routing = routing
+ @options = extra_attributes.freeze
  end

  def to_h
- @attributes
- end
-
- def id
- @attributes.fetch(:_id)
- end
-
- def type
- @attributes[:_type]
+ options.merge(_id: id).tap do |hash|
+ hash[:_type] = type if type
+ hash[:routing] = routing if routing
+ end
  end

- def routing
- @attributes[:routing]
+ def document_for_partial_update(source)
+ Esse::DocumentForPartialUpdate.new(self, source: source)
  end

- def to_doc(source = {})
- HashDocument.new(source.merge(@attributes))
+ def doc_header
+ { _id: id }.tap do |hash|
+ hash[:_type] = type if type
+ hash[:routing] = routing if routing
+ end
  end

- def eql?(other)
- self.class == other.class && @attributes == other.instance_variable_get(:@attributes)
+ def eql?(other, **)
+ ACCEPTABLE_CLASSES.any? { |klass| other.is_a?(klass) } &&
+ id.to_s == other.id.to_s &&
+ routing == other.routing &&
+ ((ACCEPTABLE_DOC_TYPES.include?(type) && ACCEPTABLE_DOC_TYPES.include?(other.type)) || type == other.type)
  end
  alias_method :==, :eql?
  end
  end
+
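
A brief, hedged sketch of the coercion behaviour after this change (values are illustrative):

    Esse::LazyDocumentHeader.coerce({ id: 1, routing: 'us', admin: true })
    # => header with id 1 and routing "us"; { admin: true } is kept in #options
    Esse::LazyDocumentHeader.coerce_each([1, '2', { _id: 3 }])
    # => three headers; values that coerce without an id are dropped
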
@@ -47,5 +47,15 @@ module Esse
  end
  end
  end
+
+ def explode_keys(hash, separator = '.')
+ hash.each_with_object({}) do |(key, value), result|
+ is_symbol = key.is_a?(Symbol)
+ keys = key.to_s.split(separator)
+ last_key = keys.pop
+ current = keys.reduce(result) { |memo, k| memo[is_symbol ? k.to_sym : k] ||= {} }
+ current[is_symbol ? last_key.to_sym : last_key] = value
+ end
+ end
  end
  end
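
For illustration, the new helper expands dotted keys while preserving whether keys were symbols or strings (assuming it is exposed like the other HashUtils helpers):

    Esse::HashUtils.explode_keys({ 'index.refresh_interval': '1s' })
    # => { index: { refresh_interval: '1s' } }
    Esse::HashUtils.explode_keys({ 'index.number_of_replicas' => 0 })
    # => { 'index' => { 'number_of_replicas' => 0 } }
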
@@ -16,7 +16,7 @@ module Esse

  def documents_for_lazy_attribute(name, ids_or_doc_headers)
  retrieve_lazy_attribute_values(name, ids_or_doc_headers).map do |doc_header, datum|
- doc_header.to_doc(name => datum)
+ doc_header.document_for_partial_update(name => datum)
  end
  end

@@ -36,11 +36,10 @@ module Esse
  return [] unless result.is_a?(Hash)

  result.each_with_object({}) do |(key, value), memo|
- if key.is_a?(LazyDocumentHeader) && (doc = docs.find { |d| d == key || d.id == key.id })
- memo[doc] = value
- elsif (doc = docs.find { |d| d.id == key })
- memo[doc] = value
- end
+ val = docs.find { |doc| doc.eql?(key, match_lazy_doc_header: true) || doc.id == key }
+ next unless val
+
+ memo[val] = value
  end
  end
  end
@@ -69,27 +69,65 @@ module Esse
  @collection_proc = collection_klass || block
  end

+ # Expose the collection class to let external plugins and extensions access it.
+ # @return [Class, nil] The collection class
+ # IDEA: When collection is defined as a block, it should setup a class with the block content.
+ def collection_class
+ return unless @collection_proc.is_a?(Class)
+
+ @collection_proc
+ end
+
  # Wrap collection data into serialized batches
  #
  # @param [Hash] kwargs The context
  # @return [Enumerator] The enumerator
  # @yield [Array, **context] serialized collection and the optional context from the collection
- def each_serialized_batch(lazy_attributes: false, **kwargs)
+ def each_serialized_batch(eager_load_lazy_attributes: false, preload_lazy_attributes: false, **kwargs)
+ if kwargs.key?(:lazy_attributes)
+ warn 'The `lazy_attributes` option is deprecated. Use `eager_load_lazy_attributes` instead.'
+ eager_load_lazy_attributes = kwargs.delete(:lazy_attributes)
+ end
+
+ lazy_attrs_to_eager_load = lazy_document_attribute_names(eager_load_lazy_attributes)
+ lazy_attrs_to_search_preload = lazy_document_attribute_names(preload_lazy_attributes)
+ lazy_attrs_to_search_preload -= lazy_attrs_to_eager_load
+
  each_batch(**kwargs) do |*args|
  batch, collection_context = args
  collection_context ||= {}
  entries = [*batch].map { |entry| serialize(entry, **collection_context) }.compact
- if lazy_attributes
- attrs = lazy_attributes.is_a?(Array) ? lazy_attributes : lazy_document_attribute_names(lazy_attributes)
- attrs.each do |attr_name|
- retrieve_lazy_attribute_values(attr_name, entries).each do |doc_header, value|
- doc = entries.find { |d| doc_header.id.to_s == d.id.to_s && doc_header.type == d.type && doc_header.routing == d.routing }
- doc&.mutate(attr_name) { value }
+ lazy_attrs_to_eager_load.each do |attr_name|
+ retrieve_lazy_attribute_values(attr_name, entries).each do |doc_header, value|
+ doc = entries.find { |d| d.eql?(doc_header, match_lazy_doc_header: true) }
+ doc&.mutate(attr_name) { value }
+ end
+ end
+
+ if lazy_attrs_to_search_preload.any?
+ entries.group_by(&:routing).each do |routing, docs|
+ search_request = {
+ query: { ids: { values: docs.map(&:id) } },
+ size: docs.size,
+ _source: lazy_attrs_to_search_preload
+ }
+ search_request[:routing] = routing if routing
+ index.search(**search_request).response.hits.each do |hit|
+ header = [hit['_id'], hit['_routing'], hit['_type']]
+ next if header[0].nil?
+
+ hit.dig('_source')&.each do |attr_name, attr_value|
+ real_attr_name = lazy_document_attribute_names(attr_name).first
+ next if real_attr_name.nil?
+
+ doc = entries.find { |d| Esse.document_match_with_header?(d, *header) }
+ doc&.mutate(real_attr_name) { attr_value }
+ end
  end
  end
  end

- yield entries, **kwargs
+ yield entries
  end
  end

@@ -101,7 +139,7 @@ module Esse
  # @return [Enumerator] All serialized entries
  def documents(**kwargs)
  Enumerator.new do |yielder|
- each_serialized_batch(**kwargs) do |docs, **_collection_kargs|
+ each_serialized_batch(**kwargs) do |docs|
  docs.each { |document| yielder.yield(document) }
  end
  end
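
A hedged sketch of iterating serialized batches with the renamed option (repository lookup and attribute name are hypothetical):

    UsersIndex.repo(:user).each_serialized_batch(eager_load_lazy_attributes: %w[total_orders]) do |docs|
      docs.each { |doc| puts doc.id } # the block now receives only the serialized documents
    end
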
@@ -185,6 +185,83 @@ module Esse
  payload[:response] = coerce_exception { client.indices.put_settings(**opts) }
  end
  end
+
+ # Allows copying documents from one index to another, optionally filtering the source
+ # documents by a query, changing the destination index settings, or fetching the
+ # documents from a remote cluster.
+ #
+ # @option arguments [Boolean] :refresh Should the affected indexes be refreshed?
+ # @option arguments [Time] :timeout Time each individual bulk request should wait for shards that are unavailable.
+ # @option arguments [String] :wait_for_active_shards Sets the number of shard copies that must be active before proceeding with the reindex operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
+ # @option arguments [Boolean] :wait_for_completion Whether the request should block until the reindex is complete.
+ # @option arguments [Number] :requests_per_second The throttle to set on this request in sub-requests per second. -1 means no throttle.
+ # @option arguments [Time] :scroll Control how long to keep the search context alive
+ # @option arguments [Number|string] :slices The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`.
+ # @option arguments [Number] :max_docs Maximum number of documents to process (default: all documents)
+ # @option arguments [Hash] :headers Custom HTTP headers
+ # @option arguments [Hash] :body The search definition using the Query DSL and the prototype for the index request. (*Required*)
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html
+ def reindex(body:, **options)
+ throw_error_when_readonly!
+
+ Esse::Events.instrument('elasticsearch.reindex') do |payload|
+ payload[:request] = opts = options.merge(body: body)
+ payload[:response] = coerce_exception { client.reindex(**opts) }
+ end
+ end
+
+ # Performs an update on every document in the index without changing the source,
+ # for example to pick up a mapping change.
+ #
+ # @option arguments [List] :index A comma-separated list of index names to search; use `_all` or empty string to perform the operation on all indices (*Required*)
+ # @option arguments [String] :analyzer The analyzer to use for the query string
+ # @option arguments [Boolean] :analyze_wildcard Specify whether wildcard and prefix queries should be analyzed (default: false)
+ # @option arguments [String] :default_operator The default operator for query string query (AND or OR) (options: AND, OR)
+ # @option arguments [String] :df The field to use as default where no field prefix is given in the query string
+ # @option arguments [Number] :from Starting offset (default: 0)
+ # @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when unavailable (missing or closed)
+ # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
+ # @option arguments [String] :conflicts What to do when the update by query hits version conflicts? (options: abort, proceed)
+ # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices that are open, closed or both. (options: open, closed, hidden, none, all)
+ # @option arguments [Boolean] :lenient Specify whether format-based query failures (such as providing text to a numeric field) should be ignored
+ # @option arguments [String] :pipeline Ingest pipeline to set on index requests made by this action. (default: none)
+ # @option arguments [String] :preference Specify the node or shard the operation should be performed on (default: random)
+ # @option arguments [String] :q Query in the Lucene query string syntax
+ # @option arguments [List] :routing A comma-separated list of specific routing values
+ # @option arguments [Time] :scroll Specify how long a consistent view of the index should be maintained for scrolled search
+ # @option arguments [String] :search_type Search operation type (options: query_then_fetch, dfs_query_then_fetch)
+ # @option arguments [Time] :search_timeout Explicit timeout for each search request. Defaults to no timeout.
+ # @option arguments [Number] :size Deprecated, please use `max_docs` instead
+ # @option arguments [Number] :max_docs Maximum number of documents to process (default: all documents)
+ # @option arguments [List] :sort A comma-separated list of <field>:<direction> pairs
+ # @option arguments [List] :_source True or false to return the _source field or not, or a list of fields to return
+ # @option arguments [List] :_source_excludes A list of fields to exclude from the returned _source field
+ # @option arguments [List] :_source_includes A list of fields to extract and return from the _source field
+ # @option arguments [Number] :terminate_after The maximum number of documents to collect for each shard, upon reaching which the query execution will terminate early.
+ # @option arguments [List] :stats Specific 'tag' of the request for logging and statistical purposes
+ # @option arguments [Boolean] :version Specify whether to return document version as part of a hit
+ # @option arguments [Boolean] :version_type Should the document increment the version number (internal) on hit or not (reindex)
+ # @option arguments [Boolean] :request_cache Specify if request cache should be used for this request or not, defaults to index level setting
+ # @option arguments [Boolean] :refresh Should the affected indexes be refreshed?
+ # @option arguments [Time] :timeout Time each individual bulk request should wait for shards that are unavailable.
+ # @option arguments [String] :wait_for_active_shards Sets the number of shard copies that must be active before proceeding with the update by query operation. Defaults to 1, meaning the primary shard only. Set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1)
+ # @option arguments [Number] :scroll_size Size on the scroll request powering the update by query
+ # @option arguments [Boolean] :wait_for_completion Whether the request should block until the update by query operation is complete.
+ # @option arguments [Number] :requests_per_second The throttle to set on this request in sub-requests per second. -1 means no throttle.
+ # @option arguments [Number|string] :slices The number of slices this task should be divided into. Defaults to 1, meaning the task isn't sliced into subtasks. Can be set to `auto`.
+ # @option arguments [Hash] :headers Custom HTTP headers
+ # @option arguments [Hash] :body The search definition using the Query DSL
+ #
+ # @see https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html
+ def update_by_query(index:, **options)
+ throw_error_when_readonly!
+
+ Esse::Events.instrument('elasticsearch.update_by_query') do |payload|
+ payload[:request] = opts = options.merge(index: index)
+ payload[:response] = coerce_exception { client.update_by_query(**opts) }
+ end
+ end
  end

  include InstanceMethods
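
Hedged low-level usage through the cluster transport, mirroring the calls made by reset_index and update_by_query above (index names and the cluster accessor are illustrative):

    UsersIndex.cluster.api.reindex(
      body: { source: { index: 'users_20240718' }, dest: { index: 'users_20240826' } },
      wait_for_completion: true
    )
    UsersIndex.cluster.api.update_by_query(
      index: 'users_20240826',
      body: { query: { match_all: {} } },
      conflicts: 'proceed'
    )
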
data/lib/esse/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Esse
- VERSION = '0.3.4'
+ VERSION = '0.4.0.rc1'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: esse
  version: !ruby/object:Gem::Version
- version: 0.3.4
+ version: 0.4.0.rc1
  platform: ruby
  authors:
  - Marcos G. Zimmermann
  autorequire:
  bindir: exec
  cert_chain: []
- date: 2024-07-18 00:00:00.000000000 Z
+ date: 2024-08-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: multi_json
@@ -248,6 +248,7 @@ files:
  - lib/esse/deprecations/repository_backend_delegator.rb
  - lib/esse/deprecations/serializer.rb
  - lib/esse/document.rb
+ - lib/esse/document_for_partial_update.rb
  - lib/esse/document_lazy_attribute.rb
  - lib/esse/dynamic_template.rb
  - lib/esse/errors.rb
@@ -318,9 +319,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: 2.3.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">="
+ - - ">"
  - !ruby/object:Gem::Version
- version: '0'
+ version: 1.3.1
  requirements: []
  rubygems_version: 3.0.3.1
  signing_key: