migrate-ssdb2pika 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.gitignore +2 -0
- data/Gemfile +7 -0
- data/Gemfile.lock +31 -0
- data/LICENSE +22 -0
- data/README.md +86 -0
- data/Rakefile +10 -0
- data/bin/ssdb2pika +59 -0
- data/lib/migrate-ssdb2pika.rb +9 -0
- data/lib/migrate_ssdb2pika/migration.rb +289 -0
- data/lib/migrate_ssdb2pika/redis_ssdb_proxy.rb +115 -0
- data/lib/migrate_ssdb2pika/version.rb +4 -0
- data/lib/migrate_ssdb2pika.rb +2 -0
- data/migrate-ssdb2pika.gemspec +25 -0
- metadata +99 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 14d100832a2fcc8b020eda9a27aa38cd55859f077b69753a56242b7a39af13c5
|
4
|
+
data.tar.gz: a8053aecb1241cde07d94afa5785eeabd0026e76e206d519aaa6f90ecbec39bf
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: bd9db88bf123cdeca2f46ccd60344169a4aec4e4981590a207b4d1b8dcb374917dcd15bc17f0f1702bbf96298910492089c1fd2a39236cc2fc158f2dcbf45152
|
7
|
+
data.tar.gz: 931ae8685de07b2c3d0673cbdd985984f94cfadca8551948de417d1fbab2dd49c83316a347c247ba3b1490f5e17474ee119be6e4afe59aed02df4c5aa00b4a8b
|
data/.gitignore
ADDED
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,31 @@
|
|
1
|
+
GIT
|
2
|
+
remote: https://github.com/xiewenwei/ssdb-rb.git
|
3
|
+
revision: 911a14a06545431b547fb61379050c3a53b0ce29
|
4
|
+
branch: feature/support-hlist-and-hscan
|
5
|
+
specs:
|
6
|
+
ssdb (0.1.3)
|
7
|
+
|
8
|
+
PATH
|
9
|
+
remote: .
|
10
|
+
specs:
|
11
|
+
migrate-ssdb2pika (0.1.0)
|
12
|
+
redis (>= 3)
|
13
|
+
|
14
|
+
GEM
|
15
|
+
remote: https://rubygems.org/
|
16
|
+
specs:
|
17
|
+
minitest (5.11.3)
|
18
|
+
rake (10.5.0)
|
19
|
+
redis (4.1.0)
|
20
|
+
|
21
|
+
PLATFORMS
|
22
|
+
ruby
|
23
|
+
|
24
|
+
DEPENDENCIES
|
25
|
+
migrate-ssdb2pika!
|
26
|
+
minitest (~> 5.0)
|
27
|
+
rake (~> 10.0)
|
28
|
+
ssdb!
|
29
|
+
|
30
|
+
BUNDLED WITH
|
31
|
+
1.16.2
|
data/LICENSE
ADDED
@@ -0,0 +1,22 @@
|
|
1
|
+
Copyright (c) 2019 Vincent Xie
|
2
|
+
|
3
|
+
MIT License
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining
|
6
|
+
a copy of this software and associated documentation files (the
|
7
|
+
"Software"), to deal in the Software without restriction, including
|
8
|
+
without limitation the rights to use, copy, modify, merge, publish,
|
9
|
+
distribute, sublicense, and/or sell copies of the Software, and to
|
10
|
+
permit persons to whom the Software is furnished to do so, subject to
|
11
|
+
the following conditions:
|
12
|
+
|
13
|
+
The above copyright notice and this permission notice shall be
|
14
|
+
included in all copies or substantial portions of the Software.
|
15
|
+
|
16
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
17
|
+
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
18
|
+
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
19
|
+
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
20
|
+
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
21
|
+
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
22
|
+
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,86 @@
|
|
1
|
+
|
2
|
+
# 迁移 SSDB 到 Redis/Pika 工具
|
3
|
+
|
4
|
+
该 Gem 用于 迁移 SSDB 到 Redis/Pika, 包括支持双写工具和迁移历史数据工具,下面分别介绍用法。
|
5
|
+
|
6
|
+
## 1、SSDB 和 Redis/Pika 双写代理
|
7
|
+
|
8
|
+
### 使用方法
|
9
|
+
|
10
|
+
* 在目标项目(通常是 Rails 应用)引入 `migrate-ssdb2pika` gem
|
11
|
+
|
12
|
+
```ruby
|
13
|
+
gem 'migrate-ssdb2pika',
|
14
|
+
git: 'https://github.com/xiewenwei/migrate-ssdb2pika.git'
|
15
|
+
```
|
16
|
+
|
17
|
+
* 配置 SSDB 连接生成以支持双写
|
18
|
+
|
19
|
+
例如:
|
20
|
+
|
21
|
+
```ruby
|
22
|
+
ssdb = Redis.new(host: '192.168.1.3', port: 7981)
|
23
|
+
pika = Redis.new(host: '192.168.1.5', port: 7981)
|
24
|
+
|
25
|
+
$ssdb = MigrateSsdb2pika.new_client(master: ssdb, slave: pika, master_is_ssdb: true)
|
26
|
+
```
|
27
|
+
|
28
|
+
$ssdb 就是支持双写的 client connection
|
29
|
+
|
30
|
+
### 特别注意事项
|
31
|
+
|
32
|
+
SSDB 使用 zclear 方法删除 zset 的 key,而 Redis/Pika 并不支持 zclear 方法,所以需要特殊处理。
|
33
|
+
|
34
|
+
如果原来有直接使用 zclear 的话,需要改写成如下的形式:
|
35
|
+
|
36
|
+
```ruby
|
37
|
+
if $ssdb.respond_to? :clear_zset
|
38
|
+
$ssdb.clear_zset key
|
39
|
+
else
|
40
|
+
$ssdb.del key
|
41
|
+
end
|
42
|
+
```
|
43
|
+
|
44
|
+
迁移完成后改为只使用 `$ssdb.del key` 即可。
|
45
|
+
|
46
|
+
SSDB 和 Redis/Pika 双写代理和迁移历史数据工具尽管放在一个 Gem 里,但它们是相互独立的。
|
47
|
+
|
48
|
+
## 2、迁移历史数据工具
|
49
|
+
|
50
|
+
### 安装
|
51
|
+
|
52
|
+
* 克隆 migrate-ssdb2pika 项目到运行机器
|
53
|
+
* 执行 `bundle install`
|
54
|
+
|
55
|
+
### 运行方法
|
56
|
+
|
57
|
+
```shell
|
58
|
+
bin/ssdb2pika --ssdb_host=xxx --ssdb_port=xxx \
|
59
|
+
--pika_host=xxx --pika_port=xx \
|
60
|
+
-m <kv|hash|zset|queue|all>
|
61
|
+
```
|
62
|
+
|
63
|
+
例如:
|
64
|
+
|
65
|
+
```shell
|
66
|
+
bin/ssdb2pika --ssdb_host=192.168.0.10 --ssdb_port=7981 \
|
67
|
+
--pika_host=192.168.0.12 --pika_port=7981 -m all
|
68
|
+
```
|
69
|
+
|
70
|
+
如果担心执行时间过长,可以使用 `nohup` 方式执行。
|
71
|
+
|
72
|
+
```shell
|
73
|
+
nohup bin/ssdb2pika --ssdb_host=192.168.0.10 --ssdb_port=7981 \
|
74
|
+
--pika_host=192.168.0.12 --pika_port=7981 -m all &
|
75
|
+
```
|
76
|
+
|
77
|
+
### 原理
|
78
|
+
|
79
|
+
直接使用 `ssdb-rb` sdk
|
80
|
+
|
81
|
+
* 通过 `scan` 遍历 key-value 数据
|
82
|
+
* 通过 `hlist` 遍历 hash 所有 name,通过 `hscan` 遍历某一个 hash 里所有 field-value
|
83
|
+
* 通过 `zlist` 遍历 zset 所有 name,通过 `zscan` 遍历某一个 zset 里所有 member-score
|
84
|
+
* 通过 `qlist` 遍历 queue 所有 name
|
85
|
+
|
86
|
+
更改 `ssdb-rb` 代码以支持 hlist/hscan/qlist/qrange
|
data/Rakefile
ADDED
data/bin/ssdb2pika
ADDED
@@ -0,0 +1,59 @@
#!/usr/bin/env ruby

require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
  Pathname.new(__FILE__).realpath)

require "rubygems"
require "bundler/setup"

require 'optparse'
require 'migrate_ssdb2pika/migration'

# Parses the command line into an options hash. Prints the error plus
# usage and exits when parsing fails or a mandatory argument is missing.
def parse_options
  # Defaults: migrate every data type, and actually write to pika.
  options = { mode: 'all', write: 'yes' }

  parser = OptionParser.new do |opts|
    opts.banner = "Usage: bin/ssdb2pika <options>"

    opts.on('', '--ssdb_host host', 'host of ssdb') { |value| options[:ssdb_host] = value }
    opts.on('', '--ssdb_port port', 'port of ssdb') { |value| options[:ssdb_port] = value }

    opts.on('', '--pika_host host', 'host of pika/redis') { |value| options[:pika_host] = value }
    opts.on('', '--pika_port port', 'port of pika/redis') { |value| options[:pika_port] = value }

    opts.on('-m', '--mode mode', 'Mode (kv/hash/zset/queue default all)') { |value| options[:mode] = value }
    opts.on('-w', '--write write', 'write pika or not (yes/no)') { |value| options[:write] = value }
  end

  # Validate that every mandatory connection argument was supplied.
  begin
    parser.parse!
    mandatory = [:ssdb_host, :ssdb_port, :pika_host, :pika_port]
    missing = mandatory.select { |param| options[param].nil? }
    raise OptionParser::MissingArgument, missing.join(', ') unless missing.empty?

    options
  rescue OptionParser::ParseError => e
    puts e
    puts parser
    exit
  end
end

MigrateSsdb2pika::Migration.new(parse_options).start
@@ -0,0 +1,289 @@

require 'redis'
require 'ssdb'

module MigrateSsdb2pika
  # One-shot migration of SSDB data (kv, hash, zset, queue) into a
  # Redis/Pika instance.
  #
  # Options (string values, straight from the CLI parser):
  #   :ssdb_host, :ssdb_port - source SSDB server
  #   :pika_host, :pika_port - destination Redis/Pika server
  #   :mode  - 'kv' | 'hash' | 'zset' | 'queue'; anything else runs all
  #   :write - 'yes' writes to pika; any other value is a dry run that
  #            only prints batch sizes
  class Migration
    # Page size for the SSDB scan/list commands.
    SCAN_LIMIT = 100

    def initialize(options)
      @options = options
    end

    # Entry point: migrates the data types selected by @options[:mode],
    # logging progress to stdout.
    def start
      setup_client
      log_message("start")
      case @options[:mode]
      when 'kv'
        scan_key_values
        log_message("scan_key_values done")
      when 'hash'
        scan_all_hashs
        log_message("scan_all_hashs done")
      when 'zset'
        scan_all_zsets
        log_message("scan_all_zsets done")
      when 'queue'
        scan_all_queues
        log_message("scan_all_queues done")
      else
        # Default branch ('all' or any unrecognized mode): migrate everything.
        scan_key_values
        log_message("scan_key_values done")
        scan_all_hashs
        log_message("scan_all_hashs done")
        scan_all_zsets
        log_message("scan_all_zsets done")
        scan_all_queues
        log_message("scan_all_queues done")
      end
    end

    private

    def log_message(msg)
      puts "[#{Time.now}] #{msg}"
    end

    # @ssdb speaks the native SSDB protocol (scan/hlist/zlist/qlist);
    # @ssdb_redis talks to the same SSDB server over the Redis protocol
    # for pipelined reads; @pika is the migration destination.
    def setup_client
      @ssdb = SSDB.new url: "ssdb://#{@options[:ssdb_host]}:#{@options[:ssdb_port]}"
      @ssdb_redis = Redis.new url: "redis://#{@options[:ssdb_host]}:#{@options[:ssdb_port]}"
      @pika = Redis.new url: "redis://#{@options[:pika_host]}:#{@options[:pika_port]}"
    end

    # Pages through all key-value pairs with `scan` and copies each page
    # to pika with a single MSET.
    def scan_key_values
      count = 0
      key_start = ''
      loop do
        entries = @ssdb.scan(key_start, '', limit: SCAN_LIMIT)
        count += 1
        puts "scan_key_values #{count} name_start: #{key_start}"
        break if entries.empty?

        array = entries.flat_map { |key, value| [key, value] }

        if @options[:write] == 'yes'
          @pika.mset(*array)
        else
          puts "array size #{array.size}"
        end
        key_start = entries.last[0]
      end
    end

    # Pages through all hash names with `hlist`. Small hashes (<= 1000
    # fields) are copied in pipelined batches; larger ones are paged
    # field by field to bound memory use.
    def scan_all_hashs(limit: SCAN_LIMIT)
      count = 0
      name_start = ''
      loop do
        names = @ssdb.hlist(name_start, '', limit: limit)
        count += 1
        puts "scan_all_hashs #{count} name_start: #{name_start}"
        break if names.empty?

        # Pipeline HLEN for the whole page; each element is a
        # Redis::Future that resolves after the pipelined block returns.
        hash_len = []
        @ssdb_redis.pipelined do |redis|
          names.each { |name| hash_len << redis.hlen(name) }
        end

        short_names = []
        long_names = []
        names.each_with_index do |name, index|
          if hash_len[index].value.to_i > 1000
            long_names << name
          else
            short_names << name
          end
        end

        scan_group_hashs(short_names)

        long_names.each { |name| scan_one_hash(name) }

        name_start = names.last
      end
    end

    # Bulk-copies a batch of small hashes: pipelined HGETALL reads,
    # then pipelined HMSET writes.
    def scan_group_hashs(names)
      result = []
      @ssdb_redis.pipelined do |redis|
        names.each { |name| result << redis.hgetall(name) }
      end

      if @options[:write] == 'yes'
        @pika.pipelined do |redis|
          names.each_with_index do |name, index|
            array = result[index].value.to_a
            if array && array.size > 0
              array.flatten!
              redis.hmset(name, *array)
            end
          end
        end
      else
        puts "result size #{result.size}"
      end
    end

    # Copies a single large hash, paging its fields with `hscan`.
    def scan_one_hash(name)
      key_start = ''
      loop do
        entries = @ssdb.hscan(name, key_start, '', limit: 1000)
        break if entries.empty?

        array = entries.flat_map { |key, value| [key, value] }

        if @options[:write] == 'yes'
          @pika.hmset(name, *array)
        else
          puts "#{name} array size #{array.size}"
        end

        key_start = entries.last[0]
      end
    end

    # Pages through all zset names with `zlist`; small zsets are copied
    # in pipelined batches, large ones are paged by score.
    def scan_all_zsets(limit: SCAN_LIMIT)
      count = 0
      name_start = ''
      loop do
        names = @ssdb.zlist(name_start, '', limit: limit)

        count += 1
        puts "scan_all_zsets #{count} name_start: #{name_start}"

        break if names.empty?

        zset_len = []
        @ssdb_redis.pipelined do |redis|
          names.each { |name| zset_len << redis.zcard(name) }
        end

        short_names = []
        long_names = []
        names.each_with_index do |name, index|
          if zset_len[index].value.to_i > 1000
            long_names << name
          else
            short_names << name
          end
        end

        scan_group_zsets(short_names)

        long_names.each { |name| scan_one_zset(name) }

        name_start = names.last
      end
    end

    # Bulk-copies a batch of small zsets: pipelined ZRANGE reads, then
    # pipelined ZADD writes.
    def scan_group_zsets(names)
      result = []
      @ssdb_redis.pipelined do |redis|
        names.each { |name| result << redis.zrange(name, 0, -1, with_scores: true) }
      end

      if @options[:write] == 'yes'
        @pika.pipelined do |redis|
          names.each_with_index do |name, index|
            array = result[index].value.to_a
            if array && array.size > 0
              # ZRANGE yields [member, score]; ZADD expects [score, member].
              array.each { |pair| pair.reverse! }
              redis.zadd(name, array)
            end
          end
        end
      else
        puts "result size #{result.size}"
      end
    end

    # Copies a single large zset, paging members by score with `zscan`.
    def scan_one_zset(name)
      score_start = ''
      loop do
        entries = @ssdb.zscan(name, score_start, '', limit: SCAN_LIMIT)
        break if entries.empty?

        # zscan yields [member, score]; ZADD expects [score, member].
        array = entries.map { |key, value| [value, key] }

        if @options[:write] == 'yes'
          @pika.zadd(name, array)
        else
          puts "#{name} array size #{array.size}"
        end

        # NOTE(review): paging by last-score.to_i + 1 truncates
        # fractional scores and can skip members whose score falls in
        # the skipped range, or members sharing the last page's score --
        # confirm all scores in migrated zsets are distinct integers.
        score_start = entries.last[1].to_i + 1
      end
    end

    # Pages through all queue names with `qlist` and copies each queue.
    def scan_all_queues
      count = 0
      name_start = ''
      loop do
        names = @ssdb.qlist(name_start, '', limit: 1000)
        count += 1
        puts "scan_all_queues #{count} name_start: #{name_start}"
        break if names.empty?

        names.each_with_index do |name, index|
          scan_one_queue(name)
          puts "process #{name} #{index}" if index % 10 == 0
        end
        name_start = names.last
      end
    end

    # Copies one queue in full via `qrange`.
    def scan_one_queue(name)
      array = @ssdb.qrange(name, 0, -1)
      if array && array.size > 0
        if @options[:write] == 'yes'
          # NOTE(review): LPUSH-ing items one by one stores the list in
          # reverse order relative to what qrange returned -- confirm
          # this matches how consumers pop, otherwise RPUSH preserves
          # the original queue order.
          array.each do |item|
            @pika.lpush name, item
          end
        else
          puts "#{name} array size #{array.size}"
        end
      end
    end
  end
end
@@ -0,0 +1,115 @@

module MigrateSsdb2pika
  # Dual-write proxy wrapping two connections (one SSDB, one
  # Redis/Pika) during a migration. Read commands are forwarded to the
  # master only; any other command the master responds to is forwarded
  # to both connections (slave first), returning the master's result.
  class RedisSsdbProxy
    attr_accessor :master, :slave

    # master/slave are Redis-compatible connections. master_is_ssdb
    # tells the proxy which side needs SSDB-specific clear commands
    # (SSDB has no DEL for hashes/zsets; it uses hclear/zclear).
    def initialize(master: nil, slave: nil, master_is_ssdb: true)
      self.master = master
      self.slave = slave

      # Tag the master connection so clear_* methods can branch on it.
      if master_is_ssdb
        master.define_singleton_method(:ssdb?) { true }
      else
        master.define_singleton_method(:ssdb?) { false }
      end
    end

    # Deletes a zset on both sides: the SSDB side needs zclear, the
    # Redis/Pika side uses plain del.
    def clear_zset(key)
      if master.ssdb?
        master.zclear key
        slave.del key
      else
        master.del key
        slave.zclear key
      end
    end

    # Deletes a hash on both sides: the SSDB side needs hclear, the
    # Redis/Pika side uses plain del.
    def clear_hash(key)
      if master.ssdb?
        master.hclear key
        slave.del key
      else
        master.del key
        slave.hclear key
      end
    end

    class << self
      private

      # Defines an instance method that forwards the command to the
      # slave connection only.
      def send_to_slave(command)
        class_eval <<-EOS
          def #{command}(*args, &block)
            slave.#{command}(*args, &block)
          end
        EOS
      end

      # Defines an instance method that forwards the command to the
      # master connection only (used for all read commands).
      def send_to_master(command)
        class_eval <<-EOS
          def #{command}(*args, &block)
            master.#{command}(*args, &block)
          end
        EOS
      end

      # Defines an instance method that forwards the command to both
      # connections, slave first; the master's result is returned.
      def send_to_both(command)
        class_eval <<-EOS
          def #{command}(*args, &block)
            slave.#{command}(*args, &block)
            master.#{command}(*args, &block)
          end
        EOS
      end
    end

    # Read commands go to the master only.
    send_to_master :dbsize
    send_to_master :exists
    send_to_master :get
    send_to_master :getbit
    send_to_master :getrange
    send_to_master :hexists
    send_to_master :hget
    send_to_master :hgetall
    send_to_master :hkeys
    send_to_master :hlen
    send_to_master :hmget
    send_to_master :hvals
    send_to_master :keys
    send_to_master :lindex
    send_to_master :llen
    send_to_master :lrange
    send_to_master :mget
    send_to_master :randomkey
    send_to_master :scard
    send_to_master :sdiff
    send_to_master :sinter
    send_to_master :sismember
    send_to_master :smembers
    send_to_master :sort
    send_to_master :srandmember
    send_to_master :strlen
    send_to_master :sunion
    send_to_master :ttl
    send_to_master :type
    send_to_master :zcard
    send_to_master :zcount
    send_to_master :zrange
    send_to_master :zrangebyscore
    send_to_master :zrank
    send_to_master :zrevrange
    send_to_master :zscore

    # All write commands are sent to both master and slave. The first
    # call lands here, defines a real forwarding method for subsequent
    # calls, and performs the dual write (slave first, master's result
    # is returned).
    def method_missing(name, *args, &block)
      if master.respond_to?(name)
        self.class.send(:send_to_both, name)
        slave.send(name, *args, &block)
        master.send(name, *args, &block)
      else
        super
      end
    end

    # Keep respond_to? consistent with method_missing: the proxy
    # responds to everything its master responds to.
    def respond_to_missing?(name, include_private = false)
      master.respond_to?(name) || super
    end
  end # RedisSsdbProxy
end # MigrateSsdb2pika
@@ -0,0 +1,25 @@
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'migrate_ssdb2pika/version'

Gem::Specification.new do |spec|
  spec.name          = "migrate-ssdb2pika"
  spec.version       = MigrateSsdb2pika::VERSION
  spec.authors       = ["Vincent Xie"]
  spec.email         = ["xiewenwei@gmail.com"]

  spec.summary       = %q{迁移 SSDB 数据到 Redis 或 Pika 工具集}
  spec.description   = %q{迁移 SSDB 数据到 Redis 或 Pika 工具集,提供迁移过程中的双写工具类和迁移历史数据工具}
  spec.homepage      = "http://github.com/xiewenwei/migrate-ssdb2pika.git"
  # The gem ships an MIT LICENSE file but never declared it, so
  # registries showed "licenses: []".
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "bin"
  spec.executables   = ["ssdb2pika"]
  spec.require_paths = ["lib"]

  spec.add_dependency "redis", ">= 3"

  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.0"
end
metadata
ADDED
@@ -0,0 +1,99 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: migrate-ssdb2pika
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Vincent Xie
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2019-03-29 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: redis
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - ">="
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '3'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - ">="
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '3'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: rake
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '10.0'
|
34
|
+
type: :development
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '10.0'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: minitest
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - "~>"
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '5.0'
|
48
|
+
type: :development
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - "~>"
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '5.0'
|
55
|
+
description: 迁移 SSDB 数据到 Redis 或 Pika 工具集,提供迁移过程中的双写工具类和迁移历史数据工具
|
56
|
+
email:
|
57
|
+
- xiewenwei@gmail.com
|
58
|
+
executables:
|
59
|
+
- ssdb2pika
|
60
|
+
extensions: []
|
61
|
+
extra_rdoc_files: []
|
62
|
+
files:
|
63
|
+
- ".gitignore"
|
64
|
+
- Gemfile
|
65
|
+
- Gemfile.lock
|
66
|
+
- LICENSE
|
67
|
+
- README.md
|
68
|
+
- Rakefile
|
69
|
+
- bin/ssdb2pika
|
70
|
+
- lib/migrate-ssdb2pika.rb
|
71
|
+
- lib/migrate_ssdb2pika.rb
|
72
|
+
- lib/migrate_ssdb2pika/migration.rb
|
73
|
+
- lib/migrate_ssdb2pika/redis_ssdb_proxy.rb
|
74
|
+
- lib/migrate_ssdb2pika/version.rb
|
75
|
+
- migrate-ssdb2pika.gemspec
|
76
|
+
homepage: http://github.com/xiewenwei/migrate-ssdb2pika.git
|
77
|
+
licenses: []
|
78
|
+
metadata: {}
|
79
|
+
post_install_message:
|
80
|
+
rdoc_options: []
|
81
|
+
require_paths:
|
82
|
+
- lib
|
83
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
84
|
+
requirements:
|
85
|
+
- - ">="
|
86
|
+
- !ruby/object:Gem::Version
|
87
|
+
version: '0'
|
88
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
89
|
+
requirements:
|
90
|
+
- - ">="
|
91
|
+
- !ruby/object:Gem::Version
|
92
|
+
version: '0'
|
93
|
+
requirements: []
|
94
|
+
rubyforge_project:
|
95
|
+
rubygems_version: 2.7.7
|
96
|
+
signing_key:
|
97
|
+
specification_version: 4
|
98
|
+
summary: 迁移 SSDB 数据到 Redis 或 Pika 工具集
|
99
|
+
test_files: []
|