elasticsearch-index-transfer 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +10 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Gemfile +6 -0
- data/Gemfile.lock +65 -0
- data/LICENSE.txt +21 -0
- data/README.md +118 -0
- data/Rakefile +2 -0
- data/bin/console +14 -0
- data/bin/setup +8 -0
- data/config/secrets.yml.sample +11 -0
- data/elasticsearch-index-transfer.gemspec +38 -0
- data/lib/elasticsearch-index-transfer.rb +1 -0
- data/lib/elasticsearch/index/transfer.rb +21 -0
- data/lib/elasticsearch/index/transfer/elasticsearch.rb +91 -0
- data/lib/elasticsearch/index/transfer/s3.rb +72 -0
- data/lib/elasticsearch/index/transfer/version.rb +7 -0
- metadata +132 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: 2efac1cf0f35abe6a31256262233fd700ee9254f
|
4
|
+
data.tar.gz: 94cf8d1b1d7af11cc3eaf9eb73994dbf58cad580
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 6e11ebc89303c538c6ba4d4539a97031295cec89a895f5e1bc59caedaef27c5bc8deeffce5fcd6bf25c835e83ae119925353dd17bb8604e41040c915a45bf6d5
|
7
|
+
data.tar.gz: c8f2fcb0e8b2c0d2b55de27e82898b3c8da4419ee59aedbe1c474678efab682beade763d7f523828464b61548865bcec79486e0fd02f0ceacb28829d88f936fc
|
data/.gitignore
ADDED
data/CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,74 @@
|
|
1
|
+
# Contributor Covenant Code of Conduct
|
2
|
+
|
3
|
+
## Our Pledge
|
4
|
+
|
5
|
+
In the interest of fostering an open and welcoming environment, we as
|
6
|
+
contributors and maintainers pledge to making participation in our project and
|
7
|
+
our community a harassment-free experience for everyone, regardless of age, body
|
8
|
+
size, disability, ethnicity, gender identity and expression, level of experience,
|
9
|
+
nationality, personal appearance, race, religion, or sexual identity and
|
10
|
+
orientation.
|
11
|
+
|
12
|
+
## Our Standards
|
13
|
+
|
14
|
+
Examples of behavior that contributes to creating a positive environment
|
15
|
+
include:
|
16
|
+
|
17
|
+
* Using welcoming and inclusive language
|
18
|
+
* Being respectful of differing viewpoints and experiences
|
19
|
+
* Gracefully accepting constructive criticism
|
20
|
+
* Focusing on what is best for the community
|
21
|
+
* Showing empathy towards other community members
|
22
|
+
|
23
|
+
Examples of unacceptable behavior by participants include:
|
24
|
+
|
25
|
+
* The use of sexualized language or imagery and unwelcome sexual attention or
|
26
|
+
advances
|
27
|
+
* Trolling, insulting/derogatory comments, and personal or political attacks
|
28
|
+
* Public or private harassment
|
29
|
+
* Publishing others' private information, such as a physical or electronic
|
30
|
+
address, without explicit permission
|
31
|
+
* Other conduct which could reasonably be considered inappropriate in a
|
32
|
+
professional setting
|
33
|
+
|
34
|
+
## Our Responsibilities
|
35
|
+
|
36
|
+
Project maintainers are responsible for clarifying the standards of acceptable
|
37
|
+
behavior and are expected to take appropriate and fair corrective action in
|
38
|
+
response to any instances of unacceptable behavior.
|
39
|
+
|
40
|
+
Project maintainers have the right and responsibility to remove, edit, or
|
41
|
+
reject comments, commits, code, wiki edits, issues, and other contributions
|
42
|
+
that are not aligned to this Code of Conduct, or to ban temporarily or
|
43
|
+
permanently any contributor for other behaviors that they deem inappropriate,
|
44
|
+
threatening, offensive, or harmful.
|
45
|
+
|
46
|
+
## Scope
|
47
|
+
|
48
|
+
This Code of Conduct applies both within project spaces and in public spaces
|
49
|
+
when an individual is representing the project or its community. Examples of
|
50
|
+
representing a project or community include using an official project e-mail
|
51
|
+
address, posting via an official social media account, or acting as an appointed
|
52
|
+
representative at an online or offline event. Representation of a project may be
|
53
|
+
further defined and clarified by project maintainers.
|
54
|
+
|
55
|
+
## Enforcement
|
56
|
+
|
57
|
+
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
58
|
+
reported by contacting the project team at imranjannatiitkgp@gmail.com. All
|
59
|
+
complaints will be reviewed and investigated and will result in a response that
|
60
|
+
is deemed necessary and appropriate to the circumstances. The project team is
|
61
|
+
obligated to maintain confidentiality with regard to the reporter of an incident.
|
62
|
+
Further details of specific enforcement policies may be posted separately.
|
63
|
+
|
64
|
+
Project maintainers who do not follow or enforce the Code of Conduct in good
|
65
|
+
faith may face temporary or permanent repercussions as determined by other
|
66
|
+
members of the project's leadership.
|
67
|
+
|
68
|
+
## Attribution
|
69
|
+
|
70
|
+
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
71
|
+
available at [http://contributor-covenant.org/version/1/4][version]
|
72
|
+
|
73
|
+
[homepage]: http://contributor-covenant.org
|
74
|
+
[version]: http://contributor-covenant.org/version/1/4/
|
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,65 @@
|
|
1
|
+
PATH
|
2
|
+
remote: .
|
3
|
+
specs:
|
4
|
+
elasticsearch-index-transfer (0.1.0)
|
5
|
+
|
6
|
+
GEM
|
7
|
+
remote: https://rubygems.org/
|
8
|
+
specs:
|
9
|
+
aws-eventstream (1.0.1)
|
10
|
+
aws-partitions (1.95.0)
|
11
|
+
aws-sdk-core (3.22.1)
|
12
|
+
aws-eventstream (~> 1.0)
|
13
|
+
aws-partitions (~> 1.0)
|
14
|
+
aws-sigv4 (~> 1.0)
|
15
|
+
jmespath (~> 1.0)
|
16
|
+
aws-sdk-kms (1.6.0)
|
17
|
+
aws-sdk-core (~> 3)
|
18
|
+
aws-sigv4 (~> 1.0)
|
19
|
+
aws-sdk-s3 (1.17.0)
|
20
|
+
aws-sdk-core (~> 3, >= 3.21.2)
|
21
|
+
aws-sdk-kms (~> 1)
|
22
|
+
aws-sigv4 (~> 1.0)
|
23
|
+
aws-sigv4 (1.0.3)
|
24
|
+
diff-lcs (1.3)
|
25
|
+
elasticsearch (6.1.0)
|
26
|
+
elasticsearch-api (= 6.1.0)
|
27
|
+
elasticsearch-transport (= 6.1.0)
|
28
|
+
elasticsearch-api (6.1.0)
|
29
|
+
multi_json
|
30
|
+
elasticsearch-transport (6.1.0)
|
31
|
+
faraday
|
32
|
+
multi_json
|
33
|
+
faraday (0.15.2)
|
34
|
+
multipart-post (>= 1.2, < 3)
|
35
|
+
jmespath (1.4.0)
|
36
|
+
multi_json (1.13.1)
|
37
|
+
multipart-post (2.0.0)
|
38
|
+
rake (10.5.0)
|
39
|
+
rspec (3.7.0)
|
40
|
+
rspec-core (~> 3.7.0)
|
41
|
+
rspec-expectations (~> 3.7.0)
|
42
|
+
rspec-mocks (~> 3.7.0)
|
43
|
+
rspec-core (3.7.1)
|
44
|
+
rspec-support (~> 3.7.0)
|
45
|
+
rspec-expectations (3.7.0)
|
46
|
+
diff-lcs (>= 1.2.0, < 2.0)
|
47
|
+
rspec-support (~> 3.7.0)
|
48
|
+
rspec-mocks (3.7.0)
|
49
|
+
diff-lcs (>= 1.2.0, < 2.0)
|
50
|
+
rspec-support (~> 3.7.0)
|
51
|
+
rspec-support (3.7.1)
|
52
|
+
|
53
|
+
PLATFORMS
|
54
|
+
ruby
|
55
|
+
|
56
|
+
DEPENDENCIES
|
57
|
+
aws-sdk-s3
|
58
|
+
bundler (~> 1.16)
|
59
|
+
elasticsearch
|
60
|
+
elasticsearch-index-transfer!
|
61
|
+
rake (~> 10.0)
|
62
|
+
rspec (~> 3.2)
|
63
|
+
|
64
|
+
BUNDLED WITH
|
65
|
+
1.16.1
|
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
The MIT License (MIT)
|
2
|
+
|
3
|
+
Copyright (c) 2018 Imran
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
13
|
+
all copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
21
|
+
THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,118 @@
|
|
1
|
+
# Elasticsearch::Index::Transfer
|
2
|
+
|
3
|
+
Ruby gem for transferring elasticsearch index data from one source to another. Currently this gem can transfer elasticsearch index data between
|
4
|
+
|
5
|
+
* elasticsearch to elasticsearch
|
6
|
+
* elasticsearch to s3(AWS S3)
|
7
|
+
* s3 to elasticsearch
|
8
|
+
|
9
|
+
This gem is using [scroll API](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html) provided by elasticsearch for backing up the elasticsearch index data.
|
10
|
+
|
11
|
+
## Installation
|
12
|
+
|
13
|
+
Add this line to your application's Gemfile:
|
14
|
+
|
15
|
+
```ruby
|
16
|
+
gem 'elasticsearch-index-transfer'
|
17
|
+
```
|
18
|
+
|
19
|
+
And then execute:
|
20
|
+
|
21
|
+
$ bundle
|
22
|
+
|
23
|
+
Or install it yourself as:
|
24
|
+
|
25
|
+
$ gem install elasticsearch-index-transfer
|
26
|
+
|
27
|
+
## Usage
|
28
|
+
|
29
|
+
If you are using irb console
|
30
|
+
|
31
|
+
```ruby
|
32
|
+
require 'elasticsearch-index-transfer'
|
33
|
+
```
|
34
|
+
|
35
|
+
#### From one elasticsearch host to another elasticsearch host
|
36
|
+
```ruby
|
37
|
+
options = {
|
38
|
+
"source": {
|
39
|
+
"elasticsearch": {
|
40
|
+
"host": * source-host-ip *,
|
41
|
+
"port": * source-host-port *,
|
42
|
+
"index": * elasticsearch-index-name *
|
43
|
+
}
|
44
|
+
},
|
45
|
+
"target": {
|
46
|
+
"elasticsearch": {
|
47
|
+
"host": * target-host-ip *,
|
48
|
+
"port": * target-host-port *,
|
49
|
+
"index": * elasticsearch-index-name *
|
50
|
+
}
|
51
|
+
}
|
52
|
+
}
|
53
|
+
Elasticsearch::Index::Transfer.execute(options)
|
54
|
+
```
|
55
|
+
|
56
|
+
#### From elasticsearch host to s3(AWS S3)
|
57
|
+
```ruby
|
58
|
+
options = {
|
59
|
+
"source": {
|
60
|
+
"elasticsearch": {
|
61
|
+
"host": * source-host-ip *,
|
62
|
+
"port": * source-host-port *,
|
63
|
+
"index": * elasticsearch-index-name *
|
64
|
+
}
|
65
|
+
},
|
66
|
+
"target": {
|
67
|
+
"s3": {
|
68
|
+
"region": * S3-region-name *,
|
69
|
+
"access_key_id": * S3-access-key-id *,
|
70
|
+
"secret_access_key": * S3-secret-access-key *,
|
71
|
+
"bucket": * S3-bucket-name *,
|
72
|
+
"prefix": * S3-folder/prefix * # optional
|
73
|
+
}
|
74
|
+
}
|
75
|
+
}
|
76
|
+
Elasticsearch::Index::Transfer.execute(options)
|
77
|
+
```
|
78
|
+
|
79
|
+
#### From s3(AWS S3) to elasticsearch host
|
80
|
+
This gem can transfer data from AWS S3 to an elasticsearch host only if the backup on S3 was made by this gem.
|
81
|
+
|
82
|
+
```ruby
|
83
|
+
options = {
|
84
|
+
"source": {
|
85
|
+
"s3": {
|
86
|
+
"region": * S3-region-name *,
|
87
|
+
"access_key_id": * S3-access-key-id *,
|
88
|
+
"secret_access_key": * S3-secret-access-key *,
|
89
|
+
"bucket": * S3-bucket-name *,
|
90
|
+
"prefix": * S3-folder/prefix * # optional
|
91
|
+
}
|
92
|
+
},
|
93
|
+
"target": {
|
94
|
+
"elasticsearch": {
|
95
|
+
"host": * target-host-ip *,
|
96
|
+
"port": * target-host-port *,
|
97
|
+
"index": * elasticsearch-index-name * # if index name not given it will use index name of backed up index.
|
98
|
+
}
|
99
|
+
},
|
100
|
+
|
101
|
+
}
|
102
|
+
Elasticsearch::Index::Transfer.execute(options)
|
103
|
+
```
|
104
|
+
|
105
|
+
## Test
|
106
|
+
```ruby
|
107
|
+
rspec spec/elasticsearch-index-transfer.rb
|
108
|
+
```
|
109
|
+
|
110
|
+
## Contribute
|
111
|
+
|
112
|
+
Issue Tracker: [https://github.com/imran3180/elasticsearch-index-transfer/issues](https://github.com/imran3180/elasticsearch-index-transfer/issues)
|
113
|
+
|
114
|
+
Pull Request: [https://github.com/imran3180/elasticsearch-index-transfer/pulls](https://github.com/imran3180/elasticsearch-index-transfer/pulls)
|
115
|
+
|
116
|
+
## License
|
117
|
+
|
118
|
+
The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
|
data/Rakefile
ADDED
data/bin/console
ADDED
@@ -0,0 +1,14 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
|
3
|
+
require "bundler/setup"
|
4
|
+
require "elasticsearch/index/transfer"
|
5
|
+
|
6
|
+
# You can add fixtures and/or initialization code here to make experimenting
|
7
|
+
# with your gem easier. You can also use a different console, if you like.
|
8
|
+
|
9
|
+
# (If you use this, don't forget to add pry to your Gemfile!)
|
10
|
+
# require "pry"
|
11
|
+
# Pry.start
|
12
|
+
|
13
|
+
require "irb"
|
14
|
+
IRB.start(__FILE__)
|
data/bin/setup
ADDED
@@ -0,0 +1,11 @@
|
|
1
|
+
elasticsearch:
|
2
|
+
host: "elasticsearch-host"
|
3
|
+
port: "elasticsearch-port"
|
4
|
+
index: "elasticsearch-index"
|
5
|
+
|
6
|
+
s3:
|
7
|
+
access_key_id: "access_key_id"
|
8
|
+
secret_access_key: "secret_access_key"
|
9
|
+
region: "region"
|
10
|
+
bucket: "bucket-name"
|
11
|
+
prefix: "prefix/folder path"
|
@@ -0,0 +1,38 @@
|
|
1
|
+
|
2
|
+
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "elasticsearch/index/transfer/version"

Gem::Specification.new do |spec|
  spec.name    = "elasticsearch-index-transfer"
  spec.version = Elasticsearch::Index::Transfer::VERSION
  spec.authors = ["Imran"]
  spec.email   = ["imranjannatiitkgp@gmail.com"]

  spec.summary     = "Ruby gem for transferring elasticsearch index from one source to another source"
  spec.description = "Ruby gem for transferring elasticsearch index from one source to another source"
  spec.homepage    = "https://github.com/imran3180/elasticsearch-index-transfer"
  spec.license     = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the
  # 'allowed_push_host' metadata to a single host or delete this section to
  # allow pushing to any host. Guard clause: older RubyGems ignores metadata.
  unless spec.respond_to?(:metadata)
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end
  # spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"

  # Package every tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }

  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.16"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "elasticsearch"
  spec.add_development_dependency "aws-sdk-s3"
  spec.add_development_dependency "rspec", "~> 3.2"
end
|
@@ -0,0 +1 @@
|
|
1
|
+
require "elasticsearch/index/transfer"
|
@@ -0,0 +1,21 @@
|
|
1
|
+
require "elasticsearch/index/transfer/version"
|
2
|
+
require "elasticsearch/index/transfer/elasticsearch"
|
3
|
+
require "elasticsearch/index/transfer/s3"
|
4
|
+
|
5
|
+
module Elasticsearch
  module Index
    module Transfer

      # Entry point for a transfer run.
      #
      # options - Hash with :source and :target keys; each maps a client kind
      #           (:elasticsearch or :s3) to that client's connection options.
      #
      # Dispatches to "<source>_extract" (defined in elasticsearch.rb / s3.rb),
      # which pulls the data from the source and pushes it into the target.
      # Returns true on completion. Raises RuntimeError when :source or
      # :target is missing from the options.
      def self.execute options
        # fetch with a block raises only for the missing key; the original
        # bare `rescue` silently converted *any* StandardError into a
        # "key missing" message.
        source = options.fetch(:source) { raise "key(source) missing in the options" }
        target = options.fetch(:target) { raise "key(target) missing in the options" }

        source_client = source.keys.first
        target_client = target.keys.first

        send("#{source_client}_extract", source_client, source[source_client], target_client, target[target_client])
        true
      end
    end
  end
end
|
@@ -0,0 +1,91 @@
|
|
1
|
+
require 'elasticsearch'
|
2
|
+
|
3
|
+
module Elasticsearch
  module Index
    module Transfer

      # Documents fetched per scroll page when the caller does not pass
      # :batch_size.
      BATCH_SIZE = 1000

      attr_accessor :host, :port, :index, :batch_size

      # Streams every document of the source elasticsearch index into the
      # target client using the scroll API.
      #
      # source_client  - source kind (used in the log lines)
      # source_options - Hash with :host, :port, :index, optional :batch_size
      # target_client  - target kind (:elasticsearch or :s3); selects the
      #                  "<target>_write_settings" / "<target>_ingest" methods
      # target_options - options forwarded to the target methods; :index
      #                  defaults to the source index name
      def self.elasticsearch_extract source_client, source_options, target_client, target_options
        elasticsearch_extract_params(source_options)
        target_options[:index] = target_options[:index] || @index # copying the source index name
        client = Elasticsearch::Client.new(host: @host, port: @port)
        batch_no = 0

        # config setup into the target: capture settings, mappings and
        # aliases so the target index can be recreated before ingesting.
        settings = client.indices.get_settings(index: @index)[@index]
        # only shard/replica counts are portable; drop read-only index
        # settings (uuid, version, creation_date, ...)
        settings["settings"]["index"] = settings["settings"]["index"].select{|key, value| ["number_of_shards", "number_of_replicas"].include?(key)}
        mapping = client.indices.get_mapping(index: @index)[@index]
        aliases = client.indices.get_alias(index: @index)[@index]

        # Transferring the first batch; the search also opens a scroll
        # context kept alive for 5 minutes between pages.
        response = client.search index: @index, scroll: '5m', body: {size: @batch_size, sort: ['_doc']}
        total = response["hits"]["total"]
        batch_size = @batch_size
        # NOTE(review): integer division; together with the inclusive
        # (0..batch_count) loop on the S3 reader side this assumes the scroll
        # yields batch_count + 1 pages — confirm before changing either side.
        batch_count = total/batch_size

        _configs = {index: @index, body: mapping.merge(aliases).merge(settings), batch_count: batch_count, total: total}
        send("#{target_client}_write_settings", target_options, _configs)

        data = process_hits(response["hits"]["hits"])
        send("#{target_client}_ingest", target_options, data, batch_no)

        puts "Elasticsearch Transfer(#{source_client}-to-#{target_client}): Total Document count - #{total}"
        puts "Elasticsearch Transfer(#{source_client}-to-#{target_client}): Batch-(#{batch_no}/#{batch_count}) transfered successfully."

        # Transferring the subsequent batches until the scroll returns an
        # empty page.
        batch_no = batch_no + 1
        while response = client.scroll(body: {scroll_id: response['_scroll_id']}, scroll: '5m') and not response['hits']['hits'].empty? do
          data = process_hits(response["hits"]["hits"])
          send("#{target_client}_ingest", target_options, data, batch_no)
          puts "Elasticsearch Transfer(#{source_client}-to-#{target_client}): Batch-(#{batch_no}/#{batch_count}) transfered successfully."
          batch_no = batch_no + 1
        end
        puts "Elasticsearch Transfer(#{source_client}-to-#{target_client}): Done."
      end

      # Bulk-indexes one batch of documents into the configured index.
      # Rewrites each record's "_index" so documents land in the target index
      # regardless of which index they were exported from.
      def self.elasticsearch_ingest options, data, batch_no
        elasticsearch_extract_params(options)
        client = Elasticsearch::Client.new(host: @host, port: @port)
        data.each do |record|
          record["index"]["_index"] = @index
        end
        client.bulk(body: data)
      end

      # Creates the target index with the settings/mappings/aliases captured
      # from the source.
      def self.elasticsearch_write_settings options, _configs
        elasticsearch_extract_params(options)
        client = Elasticsearch::Client.new(host: @host, port: @port)
        begin
          client.indices.create(index: @index, body: _configs["body"])
        rescue
          # NOTE(review): any create failure (connection refused, malformed
          # body, ...) is reported as "already present" — consider inspecting
          # the exception class before re-raising.
          raise "Index(#{@index}) is already present on (#{@host})"
        end
      end

      private
      # NOTE: `private` has no effect on `def self.` singleton methods; kept
      # to mirror the original layout.

      # Copies connection parameters from the options hash into module ivars.
      # Raises RuntimeError naming the missing key; :batch_size defaults to
      # BATCH_SIZE (1000).
      def self.elasticsearch_extract_params options
        # fetch with a block raises only for the missing key; the original
        # bare `rescue` also masked unrelated StandardErrors.
        @host = options.fetch(:host) { raise "elasticsearch-host is missing from the options" }
        @port = options.fetch(:port) { raise "elasticsearch-port is missing from the options" }
        @index = options.fetch(:index) { raise "elasticsearch-index is missing from the options" }
        @batch_size = options.fetch(:batch_size, BATCH_SIZE) # default batch size is 1000 (BATCH_SIZE)
      end

      # Converts raw scroll hits into bulk-API entries: strips scoring/sort
      # metadata, renames "_source" to "data" and nests each hit under
      # {"index" => hit}. Mutates the given hit hashes in place.
      def self.process_hits hits
        docs = []
        hits.each do |hit|
          hit.delete("_score")
          hit.delete("sort")
          hit["data"] = hit.delete("_source")
          doc = {}
          doc["index"] = hit
          docs << doc
        end
        docs
      end

    end
  end
end
|
@@ -0,0 +1,72 @@
|
|
1
|
+
require "aws-sdk-s3"
|
2
|
+
|
3
|
+
module Elasticsearch
  module Index
    module Transfer

      attr_accessor :region, :access_key_id, :secret_access_key, :bucket, :prefix

      # Restores an index previously backed up to S3 by this gem: reads
      # _config.json for the index settings and batch count, then replays
      # each batch-<n>.json into the target client via
      # "<target>_write_settings" / "<target>_ingest".
      def self.s3_extract source_client, source_options, target_client, target_options
        s3_extract_params(source_options)
        client = Aws::S3::Client.new(access_key_id: @access_key_id, secret_access_key: @secret_access_key, region: @region)
        begin
          client.head_bucket({bucket: @bucket})
        rescue Aws::S3::Errors::NotFound
          raise "Bucket - #{@bucket} not found."
        end
        begin
          response = client.get_object(bucket: @bucket, key: "#{@prefix}_config.json").body.read
        rescue Aws::S3::Errors::NoSuchKey
          # BUGFIX: was bare `prefix`, which raises NameError inside a
          # module-level singleton method; the ivar is @prefix.
          raise "_config.json not found. es-backup does not exists at specified location(S3:#{@bucket}/#{@prefix})"
        end

        # config setup into the target
        _configs = JSON.parse(response) # JSON is loaded transitively by aws-sdk-s3
        target_options[:index] = target_options[:index] || _configs["index"] # if target index is not provided, use the stored index name
        send("#{target_client}_write_settings", target_options, _configs)
        puts "Elasticsearch Transfer(#{source_client}-to-#{target_client}): Total Document count - #{_configs['total']}"

        # Transferring the index data.
        # NOTE(review): the range is inclusive, so batch_count + 1 objects
        # are fetched — this must match how many pages the extractor wrote;
        # verify before changing either side.
        batch_count = _configs["batch_count"]
        (0..batch_count).each do |batch_no|
          data = JSON.parse(client.get_object(bucket: @bucket, key: "#{@prefix}batch-#{batch_no}.json").body.read)
          send("#{target_client}_ingest", target_options, data, batch_no)
          puts "Elasticsearch Transfer(#{source_client}-to-#{target_client}): Batch-(#{batch_no}/#{batch_count}) transfered successfully."
        end
      end

      # Writes one batch of documents to S3 as "<prefix>batch-<n>.json".
      def self.s3_ingest options, data, batch_no
        s3_extract_params(options)
        client = Aws::S3::Client.new(access_key_id: @access_key_id, secret_access_key: @secret_access_key, region: @region)
        client.put_object(body: data.to_json,
                          bucket: @bucket,
                          key: "#{@prefix}batch-#{batch_no}.json")
      end

      # Stores the captured index configuration as "<prefix>_config.json" so
      # s3_extract can later rebuild the index.
      def self.s3_write_settings options, _configs
        s3_extract_params(options)
        client = Aws::S3::Client.new(access_key_id: @access_key_id, secret_access_key: @secret_access_key, region: @region)
        begin
          client.head_bucket({bucket: @bucket})
        rescue Aws::S3::Errors::NotFound
          raise "Bucket - #{@bucket} not found. Please create the bucket if it does not exists"
        end
        client.put_object(body: _configs.to_json,
                          bucket: @bucket,
                          key: "#{@prefix}_config.json")
      end

      private
      # NOTE: `private` has no effect on `def self.` singleton methods; kept
      # to mirror the original layout.

      # Copies S3 connection parameters from the options hash into module
      # ivars. :region/:access_key_id/:secret_access_key/:bucket are required
      # (KeyError when absent); :prefix defaults to "" and is normalized to
      # end with "/" when non-empty so it behaves like a folder path.
      def self.s3_extract_params options
        @region = options.fetch(:region)
        @access_key_id = options.fetch(:access_key_id)
        @secret_access_key = options.fetch(:secret_access_key)
        @bucket = options.fetch(:bucket)
        # fetch with a default instead of the original bare `rescue`, which
        # also masked unrelated errors.
        @prefix = options.fetch(:prefix, "")
        @prefix = "#{@prefix}/" if not @prefix.empty? and @prefix[-1] != '/'
      end

    end
  end
end
|
metadata
ADDED
@@ -0,0 +1,132 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: elasticsearch-index-transfer
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Imran
|
8
|
+
autorequire:
|
9
|
+
bindir: exe
|
10
|
+
cert_chain: []
|
11
|
+
date: 2018-07-17 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: bundler
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '1.16'
|
20
|
+
type: :development
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '1.16'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: rake
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '10.0'
|
34
|
+
type: :development
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '10.0'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: elasticsearch
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - ">="
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '0'
|
48
|
+
type: :development
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - ">="
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '0'
|
55
|
+
- !ruby/object:Gem::Dependency
|
56
|
+
name: aws-sdk-s3
|
57
|
+
requirement: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - ">="
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '0'
|
62
|
+
type: :development
|
63
|
+
prerelease: false
|
64
|
+
version_requirements: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - ">="
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: rspec
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - "~>"
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '3.2'
|
76
|
+
type: :development
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - "~>"
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '3.2'
|
83
|
+
description: Ruby gem for transferring elasticsearch index from one source to another
|
84
|
+
source
|
85
|
+
email:
|
86
|
+
- imranjannatiitkgp@gmail.com
|
87
|
+
executables: []
|
88
|
+
extensions: []
|
89
|
+
extra_rdoc_files: []
|
90
|
+
files:
|
91
|
+
- ".gitignore"
|
92
|
+
- CODE_OF_CONDUCT.md
|
93
|
+
- Gemfile
|
94
|
+
- Gemfile.lock
|
95
|
+
- LICENSE.txt
|
96
|
+
- README.md
|
97
|
+
- Rakefile
|
98
|
+
- bin/console
|
99
|
+
- bin/setup
|
100
|
+
- config/secrets.yml.sample
|
101
|
+
- elasticsearch-index-transfer.gemspec
|
102
|
+
- lib/elasticsearch-index-transfer.rb
|
103
|
+
- lib/elasticsearch/index/transfer.rb
|
104
|
+
- lib/elasticsearch/index/transfer/elasticsearch.rb
|
105
|
+
- lib/elasticsearch/index/transfer/s3.rb
|
106
|
+
- lib/elasticsearch/index/transfer/version.rb
|
107
|
+
homepage: https://github.com/imran3180/elasticsearch-index-transfer
|
108
|
+
licenses:
|
109
|
+
- MIT
|
110
|
+
metadata: {}
|
111
|
+
post_install_message:
|
112
|
+
rdoc_options: []
|
113
|
+
require_paths:
|
114
|
+
- lib
|
115
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
116
|
+
requirements:
|
117
|
+
- - ">="
|
118
|
+
- !ruby/object:Gem::Version
|
119
|
+
version: '0'
|
120
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
121
|
+
requirements:
|
122
|
+
- - ">="
|
123
|
+
- !ruby/object:Gem::Version
|
124
|
+
version: '0'
|
125
|
+
requirements: []
|
126
|
+
rubyforge_project:
|
127
|
+
rubygems_version: 2.6.14
|
128
|
+
signing_key:
|
129
|
+
specification_version: 4
|
130
|
+
summary: Ruby gem for transferring elasticsearch index from one source to another
|
131
|
+
source
|
132
|
+
test_files: []
|