clone_kit 0.3.0
- checksums.yaml +7 -0
- data/.gitignore +2 -0
- data/.rspec +2 -0
- data/.rubocop.kapost.yml +99 -0
- data/.rubocop.yml +31 -0
- data/.ruby-version +1 -0
- data/Gemfile +6 -0
- data/README.md +79 -0
- data/Rakefile +8 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/clone_kit.gemspec +37 -0
- data/lib/clone_kit.rb +46 -0
- data/lib/clone_kit/cloners/mongoid_merging_ruleset_cloner.rb +106 -0
- data/lib/clone_kit/cloners/mongoid_ruleset_cloner.rb +135 -0
- data/lib/clone_kit/cloners/no_op.rb +11 -0
- data/lib/clone_kit/decorators/embedded_cloner_decorator.rb +31 -0
- data/lib/clone_kit/emitters/empty.rb +15 -0
- data/lib/clone_kit/event_outlet.rb +27 -0
- data/lib/clone_kit/graph.rb +44 -0
- data/lib/clone_kit/merge_attributes_tool.rb +99 -0
- data/lib/clone_kit/operation.rb +79 -0
- data/lib/clone_kit/rule.rb +30 -0
- data/lib/clone_kit/rules/allow_only_mongoid_fields.rb +45 -0
- data/lib/clone_kit/rules/except.rb +21 -0
- data/lib/clone_kit/rules/remap.rb +44 -0
- data/lib/clone_kit/rules/safe_remap.rb +26 -0
- data/lib/clone_kit/shared_id_map.rb +55 -0
- data/lib/clone_kit/specification.rb +52 -0
- data/lib/clone_kit/strategies/synchronous.rb +33 -0
- data/lib/clone_kit/version.rb +5 -0
- metadata +227 -0
data/lib/clone_kit/cloners/mongoid_ruleset_cloner.rb
@@ -0,0 +1,135 @@
+# frozen_string_literal: true
+
+require "clone_kit/rules/allow_only_mongoid_fields"
+require "clone_kit/decorators/embedded_cloner_decorator"
+
+module CloneKit
+  module Cloners
+    class MongoidRulesetCloner
+      attr_accessor :rules
+
+      def initialize(model_klass, rules: [])
+        self.model_klass = model_klass
+        self.rules = [
+          CloneKit::Rules::AllowOnlyMongoidFields.new(model_klass)
+        ] + rules
+      end
+
+      def clone_ids(ids, operation)
+        initialize_cloner(operation)
+
+        map = {}
+        result = []
+
+        each_existing_record(ids) do |attributes|
+          attributes = clone(attributes)
+          result << apply_rules_and_save(map, attributes)
+        end
+
+        CloneKit::SharedIdMap.new(operation.id).insert_many(model_klass, map)
+
+        result
+      end
+
+      protected
+
+      attr_accessor :model_klass,
+                    :current_operation
+
+      def clone(attributes)
+        attributes = attributes.deep_dup
+        clone_all_embedded_fields(attributes)
+        attributes
+      end
+
+      def clone_all_embedded_fields(attributes)
+        model_klass.embedded_relations.each do |name, metadata|
+          attributes[name] = clone_embedded_field(attributes[name], metadata)
+        end
+      end
+
+      def clone_embedded_field(item, metadata)
+        first_item = if item.is_a?(Array)
+                       item = item.compact
+                       item[0]
+                     else
+                       item
+                     end
+
+        return nil if first_item.nil?
+
+        cloner = MongoidRulesetCloner.new(polymorphic_class(metadata.class_name, first_item))
+        embedded_cloner = CloneKit::Decorators::EmbeddedClonerDecorator.new(cloner, records: Array.wrap(item))
+
+        embedded_attributes = embedded_cloner.clone_embedded(current_operation)
+
+        if metadata.macro == :embeds_many
+          embedded_attributes
+        else
+          embedded_attributes[0]
+        end
+      end
+
+      def apply_rules_and_save(mapping, attributes)
+        new_id = BSON::ObjectId.new
+        old_id = attributes["_id"]
+        mapping[attributes["_id"]] = new_id
+        attributes["_id"] = new_id
+
+        rules.each do |rule|
+          begin
+            rule.fix(old_id, attributes)
+          rescue StandardError => e
+            message = "Unhandled error when applying rule #{rule.class.name} to #{model_klass} #{new_id}: #{e.class}"
+            current_operation.error(message)
+          end
+        end
+
+        save_or_fail(attributes)
+        attributes
+      end
+
+      def save_or_fail(attributes)
+        document_klass = model_klass
+        document_klass = attributes["_type"].constantize if attributes.key?("_type")
+
+        model_that_we_wont_save = document_klass.new(attributes)
+
+        if model_that_we_wont_save.valid?
+          model_klass.collection.insert(attributes)
+        else
+          details = model_that_we_wont_save.errors.full_messages.to_sentence
+          id = attributes["_id"]
+          current_operation.error("#{model_klass} #{id} failed model validation and was not cloned: #{details}")
+        end
+      end
+
+      def each_existing_record(ids)
+        ids.each do |id|
+          record = model_klass.collection.find(_id: id).one
+          next if record.nil?
+
+          yield record
+        end
+      end
+
+      def initialize_cloner(operation)
+        @current_operation = operation
+
+        rules.each do |rule|
+          rule.current_operation = @current_operation
+        end
+      end
+
+      private
+
+      def polymorphic_class(class_name, item)
+        if item.key?("_type")
+          item["_type"]
+        else
+          class_name
+        end.constantize
+      end
+    end
+  end
+end
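For orientation, a minimal usage sketch of the cloner added above (not part of the diff). Article and its published field are an assumed Mongoid model, and a configured Mongoid connection plus CloneKit setup is assumed; clone_ids copies the selected documents, applies the rules, and records the old-id/new-id pairs in the operation's SharedIdMap.

    require "clone_kit"

    operation = CloneKit::Operation.new
    cloner = CloneKit::Cloners::MongoidRulesetCloner.new(Article)

    # Returns the cloned attribute hashes; each has a freshly assigned "_id".
    cloned = cloner.clone_ids(Article.where(published: true).pluck(:id), operation)
    cloned.first["_id"]
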
data/lib/clone_kit/decorators/embedded_cloner_decorator.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require "delegate"
+
+module CloneKit
+  module Decorators
+    class EmbeddedClonerDecorator < SimpleDelegator
+      attr_reader :records
+
+      def initialize(cloner, records:)
+        @records = records
+
+        cloner.define_singleton_method(:each_existing_record) do |ids, &block|
+          records.compact.select { |r| ids.include?(r["_id"]) }.each do |record|
+            block.call(record)
+          end
+        end
+
+        cloner.define_singleton_method(:save_or_fail) do |attributes|
+          # NOP
+        end
+
+        super(cloner)
+      end
+
+      def clone_embedded(operation)
+        clone_ids(records.compact.map { |r| r["_id"] }, operation)
+      end
+    end
+  end
+end
data/lib/clone_kit/event_outlet.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module CloneKit
+  class EventOutlet
+    if defined?(Rails)
+      delegate :info, :warn, :error, to: :rails_logger
+    else
+      def info(message)
+        puts message
+      end
+
+      def warn(message)
+        puts message
+      end
+
+      def error(message)
+        puts message
+      end
+    end
+
+    private
+
+    def rails_logger
+      Rails.logger
+    end
+  end
+end
data/lib/clone_kit/graph.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require "tsort"
+
+module CloneKit
+  class Graph
+    include TSort
+
+    def initialize
+      @vertices = {}
+    end
+
+    def nodes
+      tsort
+      @vertices
+    end
+
+    def include?(vertex)
+      @vertices.key?(vertex)
+    end
+
+    alias topological_sort tsort
+
+    def tsort_each_node(&block)
+      @vertices.each_key(&block)
+    end
+
+    def tsort_each_child(node, &block)
+      @vertices[node].each(&block)
+    end
+
+    def add_vertex(vertex, *neighbors)
+      existing = @vertices[vertex]
+
+      @vertices[vertex.to_s] = if existing.nil?
+                                 Array(neighbors).uniq
+                               else
+                                 (@vertices[vertex.to_s] + Array(neighbors)).uniq
+                               end
+
+      neighbors.each { |n| add_vertex(n) }
+    end
+  end
+end
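An illustrative sketch (not part of the diff) of how the Graph class above behaves: vertices are stored under their string names, and the topological sort emits a vertex's neighbors (its dependencies) before the vertex itself.

    graph = CloneKit::Graph.new
    graph.add_vertex("comment", "article")  # comment depends on article
    graph.add_vertex("article", "author")   # article depends on author

    graph.topological_sort   # => ["author", "article", "comment"]
    graph.include?("author") # => true, neighbors are registered as vertices too
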
data/lib/clone_kit/merge_attributes_tool.rb
@@ -0,0 +1,99 @@
+# frozen_string_literal: true
+
+module CloneKit
+  # Given an array of hashes representing records, this class is able to resolve
+  # values among them using a variety of strategies. The strategies all merge right-to-left,
+  # meaning that the last record is given precedence over the first.
+  #
+  # hashes
+  #   assigns to a target hash a list of hash attributes that are (deeply)
+  #   merged from the others
+  #
+  # arrays
+  #   assigns to a target hash a list of array attributes that are concatenated
+  #   and uniquified from the others
+  #
+  # cluster
+  #   assigns to a target hash a list of attributes that are copied from the first
+  #   record that returns true using the given block
+  #
+  # last
+  #   assigns to a target hash a list of attributes that are copied from the
+  #   last record
+  #
+  # any
+  #   assigns to a target hash a list of attributes from any other record where
+  #   that attribute is not blank.
+  #
+  # max/min
+  #   assigns to a target hash the maximum/minimum value from other records for each
+  #   of a list of attributes
+  #
+  class MergeAttributesTool
+    def initialize(mergeable)
+      self.mergeable = mergeable
+    end
+
+    def hashes(target, *attributes)
+      attributes.each do |att|
+        result = {}
+        mergeable.each do |m|
+          result = result.deep_merge(m[att])
+        end
+        target[att] = result
+      end
+    end
+
+    def arrays(target, *attributes)
+      attributes.each do |att|
+        new_val = mergeable.flat_map { |m| m[att] }.uniq
+        target[att] = new_val
+      end
+    end
+
+    def cluster(target, *attributes)
+      mergeable.reverse_each do |m|
+        next unless yield m
+
+        attributes.each do |att|
+          target[att] = m[att]
+        end
+        break
+      end
+    end
+
+    def last(target, *attributes)
+      attributes.each do |att|
+        target[att] = mergeable[-1][att]
+      end
+    end
+
+    def any(target, *attributes)
+      attributes.each do |att|
+        mergeable.reverse_each do |m|
+          val = m[att]
+          unless val.blank?
+            target[att] = val
+            break
+          end
+        end
+      end
+    end
+
+    def max(target, *attributes)
+      attributes.each do |att|
+        target[att] = mergeable.map { |m| m[att] }.compact.max
+      end
+    end
+
+    def min(target, *attributes)
+      attributes.each do |att|
+        target[att] = mergeable.map { |m| m[att] }.compact.min
+      end
+    end
+
+    private
+
+    attr_accessor :mergeable
+  end
+end
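A short sketch (not part of the diff) of the strategies documented in the comment above, using plain hashes as the mergeable records. It assumes ActiveSupport is loaded, since the tool relies on Hash#deep_merge and #blank?.

    records = [
      { "tags" => ["a"], "views" => 10, "title" => "Old", "meta" => { "x" => 1 } },
      { "tags" => ["b"], "views" => 25, "title" => nil,   "meta" => { "y" => 2 } }
    ]

    tool = CloneKit::MergeAttributesTool.new(records)
    target = {}

    tool.arrays(target, "tags")   # target["tags"]  => ["a", "b"]
    tool.max(target, "views")     # target["views"] => 25
    tool.any(target, "title")     # target["title"] => "Old" (first non-blank, scanning right to left)
    tool.hashes(target, "meta")   # target["meta"]  => { "x" => 1, "y" => 2 }
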
data/lib/clone_kit/operation.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require "forwardable"
+require "securerandom"
+require "clone_kit/event_outlet"
+require "clone_kit/strategies/synchronous"
+
+module CloneKit
+  class Operation
+    extend Forwardable
+
+    attr_reader :id,
+                :arguments,
+                :already_cloned
+
+    def initialize(arguments: {},
+                   id: SecureRandom.uuid,
+                   already_cloned: [],
+                   strategy: Strategies::Synchronous,
+                   event_outlet: CloneKit::EventOutlet.new)
+      self.id = id
+      self.arguments = arguments
+      self.already_cloned = already_cloned
+      self.event_outlet = event_outlet
+      self.strategy = strategy.new(self)
+    end
+
+    def process
+      if next_batch.empty?
+        # Done!
+        after_process
+      elsif first_unspecified_model_dependency.present?
+        fail "A clone dependency was added for #{first_unspecified_model_dependency}, but it has no clone specification"
+      else
+        specs = next_batch.map { |model| CloneKit.spec[model] }
+        strategy.clone_next_batch(specs, BatchCompleteHandler)
+      end
+    end
+
+    def_delegators :event_outlet, :info, :warn, :error
+
+    private
+
+    attr_accessor :strategy,
+                  :event_outlet
+
+    attr_writer :id,
+                :arguments,
+                :already_cloned
+
+    def after_process
+      CloneKit.graph.nodes.each do |model, _|
+        CloneKit.spec[model].after_operation_block.call(self)
+      end
+
+      strategy.all_batches_complete
+    end
+
+    def next_batch
+      @next_batch ||= CloneKit.cloneable_models(already_cloned)
+    end
+
+    def first_unspecified_model_dependency
+      next_batch.detect { |model| CloneKit.spec[model].nil? }
+    end
+
+    class BatchCompleteHandler
+      def complete(success, options)
+        op = Operation.new(options.fetch("operation"))
+
+        if success
+          op.process
+        else
+          op.error(options.fetch("failure_message", "Unknown error"))
+        end
+      end
+    end
+  end
+end
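Finally, a hedged end-to-end sketch (not part of the diff) of driving an operation. It assumes clone specifications have already been registered for the models involved (see data/lib/clone_kit/specification.rb and data/lib/clone_kit.rb in the file list); each process call clones the next batch of models in dependency order, and the BatchCompleteHandler shown above rebuilds the operation and calls process again once a batch finishes.

    # "space_id" is a hypothetical argument; arguments is an arbitrary hash
    # carried on the operation via attr_reader.
    operation = CloneKit::Operation.new(arguments: { "space_id" => "abc123" })
    operation.info("Starting clone operation #{operation.id}")
    operation.process
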