fluent-plugin-mysql-replicator 0.2.2 → 0.2.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/README.md CHANGED
@@ -16,7 +16,47 @@ gem install fluent-plugin-mysql-replicator
  /usr/lib64/fluent/ruby/bin/fluent-gem install fluent-plugin-mysql-replicator
  `````

- ## Tutorial for Quickstart
+ ## Included plugins
+
+ * Input Plugin: mysql_replicator
+ * Input Plugin: mysql_replicator_multi
+ * Output Plugin: mysql_replicator_elasticsearch
+ * Output Plugin: mysql_replicator_solr (experimental)
+
+ ## Output example
+
+ This is an example of detecting insert/update/delete events.
+
+ ### sample query
+
+ `````
+ $ mysql -e "create database myweb"
+ $ mysql myweb -e "create table search_test(id int auto_increment, text text, PRIMARY KEY (id))"
+ $ sleep 10
+ $ mysql myweb -e "insert into search_test(text) values('aaa')"
+ $ sleep 10
+ $ mysql myweb -e "update search_test set text='bbb' where text = 'aaa'"
+ $ sleep 10
+ $ mysql myweb -e "delete from search_test where text='bbb'"
+ `````
+
+ ### result
+
+ `````
+ $ tail -f /var/log/td-agent/td-agent.log
+ 2013-11-25 18:22:25 +0900 replicator.myweb.search_test.insert.id: {"id":"1","text":"aaa"}
+ 2013-11-25 18:22:35 +0900 replicator.myweb.search_test.update.id: {"id":"1","text":"bbb"}
+ 2013-11-25 18:22:45 +0900 replicator.myweb.search_test.delete.id: {"id":"1"}
+ `````
+
+ ## Configuration Examples
+
+ * [mysql_single_table_to_elasticsearch.md](https://github.com/y-ken/fluent-plugin-mysql-replicator/blob/master/example/mysql_single_table_to_elasticsearch.md)
+ * [mysql_multi_table_to_elasticsearch.md](https://github.com/y-ken/fluent-plugin-mysql-replicator/blob/master/example/mysql_multi_table_to_elasticsearch.md)
+ * [mysql_single_table_to_solr.md](https://github.com/y-ken/fluent-plugin-mysql-replicator/blob/master/example/mysql_single_table_to_solr.md)
+ * [mysql_multi_table_to_solr.md](https://github.com/y-ken/fluent-plugin-mysql-replicator/blob/master/example/mysql_multi_table_to_solr.md)
+
+ ## Tutorial for Quickstart (mysql_replicator)

  It is useful for these purpose.

@@ -67,7 +107,7 @@ On syncing 300 million rows table, it will consume around 800MB of memory with r
  port 9200

  # Set Elasticsearch index, type, and unique id (primary_key) from tag.
- tag_format (?<index_name>[^\.]+)\.(?<type_name>[^\.]+).(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+ tag_format (?<index_name>[^\.]+)\.(?<type_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$

  # Set frequency of sending bulk request to Elasticsearch node.
  flush_interval 5s
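The `tag_format` fix in this hunk adds the `\.` that was missing between the `type_name` and `event` captures, so the pattern now matches the documented `index.type.event.primary_key` tag layout end to end. A minimal Ruby sketch (not part of the plugin; the tag value is taken from the "result" example above) of what the corrected pattern captures:

```ruby
# Sketch only: apply the corrected tag_format to a tag emitted by mysql_replicator.
tag_format = /(?<index_name>[^\.]+)\.(?<type_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$/

m = "replicator.myweb.search_test.insert.id".match(tag_format)

m['index_name']   # => "myweb"        (Elasticsearch index)
m['type_name']    # => "search_test"  (Elasticsearch type)
m['event']        # => "insert"
m['primary_key']  # => "id"
```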
@@ -78,29 +118,7 @@ On syncing 300 million rows table, it will consume around 800MB of memory with r
  </match>
  `````

- ### sample query
-
- `````
- $ mysql -e "create database myweb"
- $ mysql myweb -e "create table search_test(id int auto_increment, text text, PRIMARY KEY (id))"
- $ sleep 10
- $ mysql myweb -e "insert into search_test(text) values('aaa')"
- $ sleep 10
- $ mysql myweb -e "update search_test set text='bbb' where text = 'aaa'"
- $ sleep 10
- $ mysql myweb -e "delete from search_test where text='bbb'"
- `````
-
- ### result
-
- `````
- $ tail -f /var/log/td-agent/td-agent.log
- 2013-11-25 18:22:25 +0900 replicator.insert.id: {"id":"1","text":"aaa"}
- 2013-11-25 18:22:35 +0900 replicator.update.id: {"id":"1","text":"bbb"}
- 2013-11-25 18:22:45 +0900 replicator.delete.id: {"id":"1"}
- `````
-
- ## Tutorial for Production
+ ## Tutorial for Production (mysql_replicator_multi)

  It is very useful to replicate a millions of records and/or multiple tables with multiple threads.
  This architecture is storing hash table in mysql management table instead of ruby internal memory.
@@ -197,11 +215,14 @@ it is a sample which you have inserted row.
  port 9200

  # Set Elasticsearch index, type, and unique id (primary_key) from tag.
- tag_format (?<index_name>[^\.]+)\.(?<type_name>[^\.]+).(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+ tag_format (?<index_name>[^\.]+)\.(?<type_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$

  # Set frequency of sending bulk request to Elasticsearch node.
  flush_interval 5s
-
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
  # Queued chunks are flushed at shutdown process.
  flush_at_shutdown yes
  </match>
data/example/mysql_multi_table_to_elasticsearch.md ADDED
@@ -0,0 +1,43 @@
+ ## case study
+
+ This is a guide to replicating multiple MySQL tables to Elasticsearch.
+
+ ## configuration
+
+ ```
+ <source>
+ type mysql_replicator_multi
+
+ # Database connection setting for manager table.
+ manager_host localhost
+ manager_username your_mysql_user
+ manager_password your_mysql_password
+ manager_database replicator_manager
+
+ # Format output tag for each events. Placeholders usage as described below.
+ tag replicator.${name}.${event}.${primary_key}
+ # ${name} : the value of `replicator_manager.settings.name` in manager table.
+ # ${event} : the variation of row event type by insert/update/delete.
+ # ${primary_key} : the value of `replicator_manager.settings.primary_key` in manager table.
+ </source>
+
+ <match replicator.**>
+ type mysql_replicator_elasticsearch
+
+ # Set Elasticsearch connection.
+ host localhost
+ port 9200
+
+ # Set Elasticsearch index, type, and unique id (primary_key) from tag.
+ tag_format (?<index_name>[^\.]+)\.(?<type_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+
+ # Set frequency of sending bulk request to Elasticsearch node.
+ flush_interval 5s
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
+ # Queued chunks are flushed at shutdown process.
+ flush_at_shutdown yes
+ </match>
+ ```
data/example/mysql_multi_table_to_solr.md ADDED
@@ -0,0 +1,69 @@
+ ## case study
+
+ This is a guide to replicating multiple MySQL tables to Solr.
+
+ ## configuration
+
+ ```
+ <source>
+ type mysql_replicator_multi
+
+ # Database connection setting for manager table.
+ manager_host localhost
+ manager_username your_mysql_user
+ manager_password your_mysql_password
+ manager_database replicator_manager
+
+ # Format output tag for each events. Placeholders usage as described below.
+ tag replicator.${name}.${event}.${primary_key}
+ # ${name} : the value of `replicator_manager.settings.name` in manager table.
+ # ${event} : the variation of row event type by insert/update/delete.
+ # ${primary_key} : the value of `replicator_manager.settings.primary_key` in manager table.
+ </source>
+
+ <match replicator.**>
+ type mysql_replicator_solr
+
+ # Set Solr connection.
+ host localhost
+ port 8983
+
+ # Set Solr core name and unique id (primary_key) from tag.
+ # On this case, solr url will be http://localhost:8983/solr/${core_name}
+ tag_format (?<core_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+
+ # Set frequency of sending bulk request to Solr.
+ flush_interval 5s
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
+ # Queued chunks are flushed at shutdown process.
+ flush_at_shutdown yes
+ </match>
+ ```
+
+ When you use the default core (that is, no core is specified), change the value of `tag_format` as shown below.
+
+ ```
+ <match replicator.**>
+ type mysql_replicator_solr
+
+ # Set Solr connection.
+ host localhost
+ port 8983
+
+ # Set Solr core name and unique id (primary_key) from tag.
+ # On this case, solr url will be http://localhost:8983/solr/
+ tag_format (?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+
+ # Set frequency of sending bulk request to Solr.
+ flush_interval 5s
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
+ # Queued chunks are flushed at shutdown process.
+ flush_at_shutdown yes
+ </match>
+ ```
data/example/mysql_single_table_to_elasticsearch.md ADDED
@@ -0,0 +1,52 @@
+ ## case study
+
+ This is a guide to replicating a single MySQL table to Elasticsearch.
+
+ ## configuration
+
+ ```
+ <source>
+ type mysql_replicator
+
+ # Set connection settings for replicate source.
+ host localhost
+ username your_mysql_user
+ password your_mysql_password
+ database myweb
+
+ # Set replicate query configuration.
+ query SELECT id, text, updated_at from search_test;
+ primary_key id # specify unique key (default: id)
+ interval 10s # execute query interval (default: 1m)
+
+ # Enable detect deletion event not only insert/update events. (default: yes)
+ # It is useful to use `enable_delete no` that keep following recently updated record with this query.
+ # `SELECT * FROM search_test WHERE DATE_ADD(updated_at, INTERVAL 5 MINUTE) > NOW();`
+ enable_delete yes
+
+ # Format output tag for each events. Placeholders usage as described below.
+ tag replicator.myweb.search_test.${event}.${primary_key}
+ # ${event} : the variation of row event type by insert/update/delete.
+ # ${primary_key} : the value of `replicator_manager.settings.primary_key` in manager table.
+ </source>
+
+ <match replicator.**>
+ type mysql_replicator_elasticsearch
+
+ # Set Elasticsearch connection.
+ host localhost
+ port 9200
+
+ # Set Elasticsearch index, type, and unique id (primary_key) from tag.
+ tag_format (?<core_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+
+ # Set frequency of sending bulk request to Elasticsearch node.
+ flush_interval 5s
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
+ # Queued chunks are flushed at shutdown process.
+ flush_at_shutdown yes
+ </match>
+ ```
data/example/mysql_single_table_to_solr.md ADDED
@@ -0,0 +1,79 @@
+ ## case study
+
+ This is a guide to replicating a single MySQL table to Solr.
+
+ ## configuration
+
+ ```
+ <source>
+ type mysql_replicator
+
+ # Set connection settings for replicate source.
+ host localhost
+ username your_mysql_user
+ password your_mysql_password
+ database myweb
+
+ # Set replicate query configuration.
+ query SELECT id, text, updated_at from search_test;
+ primary_key id # specify unique key (default: id)
+ interval 10s # execute query interval (default: 1m)
+
+ # Enable detect deletion event not only insert/update events. (default: yes)
+ # It is useful to use `enable_delete no` that keep following recently updated record with this query.
+ # `SELECT * FROM search_test WHERE DATE_ADD(updated_at, INTERVAL 5 MINUTE) > NOW();`
+ enable_delete yes
+
+ # Format output tag for each events. Placeholders usage as described below.
+ tag replicator.myweb.search_test.${event}.${primary_key}
+ # ${event} : the variation of row event type by insert/update/delete.
+ # ${primary_key} : the value of `replicator_manager.settings.primary_key` in manager table.
+ </source>
+
+ <match replicator.**>
+ type mysql_replicator_solr
+
+ # Set Solr connection.
+ host localhost
+ port 8983
+
+ # Set Solr core name and unique id (primary_key) from tag.
+ # On this case, solr url will be http://localhost:8983/solr/${core_name}
+ tag_format (?<core_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+
+ # Set frequency of sending bulk request to Solr.
+ flush_interval 5s
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
+ # Queued chunks are flushed at shutdown process.
+ flush_at_shutdown yes
+ </match>
+ ```
+
+ When you use the default core (that is, no core is specified), change the value of `tag_format` as shown below.
+ In this case, the Solr URL will be `http://localhost:8983/solr`.
+
+ ```
+ <match replicator.**>
+ type mysql_replicator_solr
+
+ # Set Solr connection.
+ host localhost
+ port 8983
+
+ # Set Solr core name and unique id (primary_key) from tag.
+ # On this case, solr url will be http://localhost:8983/solr/
+ tag_format (?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+
+ # Set frequency of sending bulk request to Solr.
+ flush_interval 5s
+
+ # Set maximum retry interval (required fluentd >= 0.10.41)
+ max_retry_wait 1800
+
+ # Queued chunks are flushed at shutdown process.
+ flush_at_shutdown yes
+ </match>
+ ```
data/fluent-plugin-mysql-replicator.gemspec CHANGED
@@ -1,11 +1,11 @@
  # -*- encoding: utf-8 -*-
  Gem::Specification.new do |s|
  s.name = "fluent-plugin-mysql-replicator"
- s.version = "0.2.2"
+ s.version = "0.2.3"
  s.authors = ["Kentaro Yoshida"]
  s.email = ["y.ken.studio@gmail.com"]
  s.homepage = "https://github.com/y-ken/fluent-plugin-mysql-replicator"
- s.summary = %q{Fluentd input plugin to track insert/update/delete event from MySQL database server. Not only that, it could multiple table replication into Elasticsearch nodes. It's comming support replicate to another RDB/noSQL.}
+ s.summary = %q{Fluentd input plugin to track insert/update/delete event from MySQL database server. Not only that, it could multiple table replication into Elasticsearch/Solr. It's comming support replicate to another RDB/noSQL.}

  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
@@ -17,4 +17,5 @@ Gem::Specification.new do |s|

  s.add_runtime_dependency "fluentd"
  s.add_runtime_dependency "mysql2"
+ s.add_runtime_dependency "rsolr"
  end
data/lib/fluent/plugin/in_mysql_replicator.rb CHANGED
@@ -28,7 +28,7 @@ module Fluent
  raise Fluent::ConfigError, "mysql_replicator: missing 'tag' parameter. Please add following line into config like 'tag replicator.mydatabase.mytable.${event}.${primary_key}'"
  end

- $log.info "adding mysql_replicator worker. :tag=>#{tag} :query=>[#{@query}] :interval=>#{@interval}sec :enable_delete=>#{enable_delete}"
+ $log.info "adding mysql_replicator worker. :tag=>#{tag} :query=>#{@query} :interval=>#{@interval}sec :enable_delete=>#{enable_delete}"
  end

  def start
@@ -89,7 +89,7 @@ module Fluent
  end
  end
  elapsed_time = sprintf("%0.02f", Time.now - start_time)
- $log.info "mysql_replicator: finished execution :tag=>#{tag} :elapsed_time=>#{elapsed_time} seconds"
+ $log.info "mysql_replicator: finished execution :tag=>#{tag} :elapsed_time=>#{elapsed_time} sec"
  sleep @interval
  end
  end
data/lib/fluent/plugin/in_mysql_replicator_multi.rb CHANGED
@@ -58,7 +58,7 @@ module Fluent
  def poll(config)
  begin
  @manager_db = get_manager_connection
- masked_config = config.map {|k,v| (k == 'password') ? v.to_s.gsub(/./, '*') : v}
+ masked_config = Hash[config.map {|k,v| (k == 'password') ? [k, v.to_s.gsub(/./, '*')] : [k,v]}]
  @mutex.synchronize {
  $log.info "mysql_replicator_multi: polling start. :config=>#{masked_config}"
  }
@@ -83,7 +83,7 @@ module Fluent
  end
  db.close
  elapsed_time = sprintf("%0.02f", Time.now - start_time)
- $log.info "mysql_replicator_multi: finished execution :setting_name=>#{config['name']} :elapsed_time=>#{elapsed_time} seconds"
+ $log.info "mysql_replicator_multi: finished execution :setting_name=>#{config['name']} :elapsed_time=>#{elapsed_time} sec"
  sleep config['interval']
  end
  rescue StandardError => e
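The `masked_config` change above is worth a note: `Hash#map` returns an Array, so the old expression logged a bare list of config values with the keys dropped; wrapping the mapped pairs in `Hash[...]` keeps the key/value structure and masks only the password. A small Ruby sketch (the config hash below is hypothetical, not taken from the plugin) illustrating the difference:

```ruby
# Illustration only; the config hash is made up for this comparison.
config = { 'name' => 'myweb', 'host' => 'localhost', 'password' => 'secret' }

# Old expression: keys are lost, only values remain in the log line.
config.map {|k,v| (k == 'password') ? v.to_s.gsub(/./, '*') : v}
# => ["myweb", "localhost", "******"]

# New expression: key/value pairs are preserved and only the password is masked.
Hash[config.map {|k,v| (k == 'password') ? [k, v.to_s.gsub(/./, '*')] : [k,v]}]
# => {"name"=>"myweb", "host"=>"localhost", "password"=>"******"}
```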
data/lib/fluent/plugin/out_mysql_replicator_elasticsearch.rb CHANGED
@@ -8,7 +8,7 @@ class Fluent::MysqlReplicatorElasticsearchOutput < Fluent::BufferedOutput
  config_param :port, :integer, :default => 9200
  config_param :tag_format, :string, :default => nil

- DEFAULT_TAG_FORMAT = /(?<index_name>[^\.]+)\.(?<type_name>[^\.]+).(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$/
+ DEFAULT_TAG_FORMAT = /(?<index_name>[^\.]+)\.(?<type_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$/

  def initialize
  super
data/lib/fluent/plugin/out_mysql_replicator_solr.rb ADDED
@@ -0,0 +1,57 @@
+ require 'rsolr'
+
+ class Fluent::MysqlReplicatorSolrOutput < Fluent::BufferedOutput
+   Fluent::Plugin.register_output('mysql_replicator_solr', self)
+
+   config_param :host, :string, :default => 'localhost'
+   config_param :port, :integer, :default => 8983
+   config_param :tag_format, :string, :default => nil
+
+   DEFAULT_TAG_FORMAT = /(?<core_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$/
+
+   def initialize
+     super
+   end
+
+   def configure(conf)
+     super
+
+     if @tag_format.nil? || @tag_format == DEFAULT_TAG_FORMAT
+       @tag_format = DEFAULT_TAG_FORMAT
+     else
+       @tag_format = Regexp.new(conf['tag_format'])
+     end
+   end
+
+   def start
+     super
+   end
+
+   def format(tag, time, record)
+     [tag, time, record].to_msgpack
+   end
+
+   def shutdown
+     super
+   end
+
+   def write(chunk)
+     solr_connection = {}
+
+     chunk.msgpack_each do |tag, time, record|
+       tag_parts = tag.match(@tag_format)
+       id_key = tag_parts['primary_key']
+       core_name = tag_parts['core_name'].nil? ? '' : tag_parts['core_name']
+       url = "http://#{@host}:#{@port}/solr/#{core_name}"
+       solr_connection[url] = RSolr.connect(:url => url) if solr_connection[url].nil?
+       if tag_parts['event'] == 'delete'
+         solr_connection[url].delete_by_id record[id_key]
+       else
+         message = Hash[record.map{ |k, v| [k.to_sym, v] }]
+         message[:id] = record[id_key] if id_key && record[id_key]
+         solr_connection[url].add message
+       end
+     end
+     solr_connection.each {|solr| solr.commit }
+   end
+ end
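For readers skimming the new Solr output above: with the default `tag_format`, the last three dot-separated tag components select the Solr core, the event, and the primary key, and the core name is appended to the Solr URL. A Ruby sketch (not part of the plugin; the tag is the one from the README example):

```ruby
# Sketch only: how the plugin's DEFAULT_TAG_FORMAT reads a replicator tag.
default_tag_format = /(?<core_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$/

tag_parts = "replicator.myweb.search_test.insert.id".match(default_tag_format)

tag_parts['core_name']    # => "search_test"
tag_parts['event']        # => "insert"
tag_parts['primary_key']  # => "id"

# With the default host/port, documents for this tag would be sent to:
# http://localhost:8983/solr/search_test
```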
data/test/helper.rb CHANGED
@@ -25,6 +25,7 @@ end
  require 'fluent/plugin/in_mysql_replicator'
  require 'fluent/plugin/in_mysql_replicator_multi'
  require 'fluent/plugin/out_mysql_replicator_elasticsearch'
+ require 'fluent/plugin/out_mysql_replicator_solr'

  class Test::Unit::TestCase
  end
data/test/plugin/test_out_mysql_replicator_solr.rb ADDED
@@ -0,0 +1,24 @@
+ require 'helper'
+
+ class MysqlReplicatorSolrOutput < Test::Unit::TestCase
+
+   def setup
+     Fluent::Test.setup
+   end
+
+   CONFIG = %[
+     host localhost
+     port 8983
+     tag_format (?<core_name>[^\.]+)\.(?<event>[^\.]+)\.(?<primary_key>[^\.]+)$
+   ]
+
+   def create_driver(conf=CONFIG,tag='test')
+     Fluent::Test::OutputTestDriver.new(Fluent::MysqlReplicatorSolrOutput, tag).configure(conf)
+   end
+
+   def test_configure
+     d = create_driver(%[])
+     assert_equal 'localhost', d.instance.host
+     assert_equal 8983, d.instance.port
+   end
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-mysql-replicator
  version: !ruby/object:Gem::Version
- version: 0.2.2
+ version: 0.2.3
  prerelease:
  platform: ruby
  authors:
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2013-12-13 00:00:00.000000000 Z
+ date: 2013-12-16 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake
@@ -75,6 +75,22 @@ dependencies:
  - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
+ - !ruby/object:Gem::Dependency
+ name: rsolr
+ requirement: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ none: false
+ requirements:
+ - - ! '>='
+ - !ruby/object:Gem::Version
+ version: '0'
  description:
  email:
  - y.ken.studio@gmail.com
@@ -88,15 +104,21 @@ files:
  - LICENSE
  - README.md
  - Rakefile
+ - example/mysql_multi_table_to_elasticsearch.md
+ - example/mysql_multi_table_to_solr.md
+ - example/mysql_single_table_to_elasticsearch.md
+ - example/mysql_single_table_to_solr.md
  - fluent-plugin-mysql-replicator.gemspec
  - lib/fluent/plugin/in_mysql_replicator.rb
  - lib/fluent/plugin/in_mysql_replicator_multi.rb
  - lib/fluent/plugin/out_mysql_replicator_elasticsearch.rb
+ - lib/fluent/plugin/out_mysql_replicator_solr.rb
  - setup_mysql_replicator_multi.sql
  - test/helper.rb
  - test/plugin/test_in_mysql_replicator.rb
  - test/plugin/test_in_mysql_replicator_multi.rb
  - test/plugin/test_out_mysql_replicator_elasticsearch.rb
+ - test/plugin/test_out_mysql_replicator_solr.rb
  homepage: https://github.com/y-ken/fluent-plugin-mysql-replicator
  licenses: []
  post_install_message:
@@ -121,10 +143,11 @@ rubygems_version: 1.8.23
  signing_key:
  specification_version: 3
  summary: Fluentd input plugin to track insert/update/delete event from MySQL database
- server. Not only that, it could multiple table replication into Elasticsearch nodes.
+ server. Not only that, it could multiple table replication into Elasticsearch/Solr.
  It's comming support replicate to another RDB/noSQL.
  test_files:
  - test/helper.rb
  - test/plugin/test_in_mysql_replicator.rb
  - test/plugin/test_in_mysql_replicator_multi.rb
  - test/plugin/test_out_mysql_replicator_elasticsearch.rb
+ - test/plugin/test_out_mysql_replicator_solr.rb