droonga-engine 1.0.9 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as published in their respective public registries.
Files changed (195)
  1. checksums.yaml +4 -4
  2. data/.travis.yml +1 -0
  3. data/benchmark/timer-watcher/benchmark.rb +44 -0
  4. data/bin/droonga-engine-absorb-data +246 -187
  5. data/bin/droonga-engine-catalog-generate +12 -12
  6. data/bin/droonga-engine-catalog-modify +4 -4
  7. data/bin/droonga-engine-join +352 -171
  8. data/bin/droonga-engine-set-role +54 -0
  9. data/bin/droonga-engine-unjoin +107 -112
  10. data/droonga-engine.gemspec +3 -3
  11. data/install.sh +55 -36
  12. data/install/centos/functions.sh +2 -2
  13. data/install/debian/functions.sh +2 -2
  14. data/lib/droonga/address.rb +26 -24
  15. data/lib/droonga/buffered_tcp_socket.rb +65 -10
  16. data/lib/droonga/catalog/base.rb +9 -6
  17. data/lib/droonga/catalog/dataset.rb +17 -41
  18. data/lib/droonga/catalog/fetcher.rb +64 -0
  19. data/lib/droonga/catalog/generator.rb +245 -0
  20. data/lib/droonga/catalog/loader.rb +66 -0
  21. data/lib/droonga/{catalog_modifier.rb → catalog/modifier.rb} +11 -18
  22. data/lib/droonga/catalog/replicas_volume.rb +123 -0
  23. data/lib/droonga/catalog/schema.rb +37 -37
  24. data/lib/droonga/catalog/single_volume.rb +11 -3
  25. data/lib/droonga/catalog/slice.rb +10 -6
  26. data/lib/droonga/catalog/{collection_volume.rb → slices_volume.rb} +47 -11
  27. data/lib/droonga/catalog/version1.rb +47 -19
  28. data/lib/droonga/catalog/version2.rb +11 -10
  29. data/lib/droonga/catalog/version2_validator.rb +4 -4
  30. data/lib/droonga/catalog/volume.rb +17 -5
  31. data/lib/droonga/changable.rb +25 -0
  32. data/lib/droonga/cluster.rb +237 -0
  33. data/lib/droonga/collector_runner.rb +4 -0
  34. data/lib/droonga/collectors.rb +2 -1
  35. data/lib/droonga/collectors/recursive_sum.rb +26 -0
  36. data/lib/droonga/command/droonga_engine.rb +404 -127
  37. data/lib/droonga/command/droonga_engine_service.rb +47 -11
  38. data/lib/droonga/command/droonga_engine_worker.rb +21 -1
  39. data/lib/droonga/command/remote_command_base.rb +78 -0
  40. data/lib/droonga/command/serf_event_handler.rb +29 -20
  41. data/lib/droonga/data_absorber_client.rb +222 -0
  42. data/lib/droonga/database_scanner.rb +106 -0
  43. data/lib/droonga/{live_nodes_list_loader.rb → deferrable.rb} +11 -24
  44. data/lib/droonga/differ.rb +58 -0
  45. data/lib/droonga/dispatcher.rb +155 -32
  46. data/lib/droonga/distributed_command_planner.rb +9 -11
  47. data/lib/droonga/engine.rb +83 -78
  48. data/lib/droonga/engine/version.rb +1 -1
  49. data/lib/droonga/engine_node.rb +301 -0
  50. data/lib/droonga/engine_state.rb +62 -40
  51. data/lib/droonga/farm.rb +44 -5
  52. data/lib/droonga/file_observer.rb +16 -12
  53. data/lib/droonga/fluent_message_receiver.rb +98 -29
  54. data/lib/droonga/fluent_message_sender.rb +30 -23
  55. data/lib/droonga/forward_buffer.rb +160 -0
  56. data/lib/droonga/forwarder.rb +73 -40
  57. data/lib/droonga/handler.rb +7 -6
  58. data/lib/droonga/handler_messenger.rb +15 -6
  59. data/lib/droonga/handler_runner.rb +6 -1
  60. data/lib/droonga/internal_fluent_message_receiver.rb +28 -8
  61. data/lib/droonga/job_pusher.rb +10 -7
  62. data/lib/droonga/job_receiver.rb +6 -4
  63. data/lib/droonga/logger.rb +7 -1
  64. data/lib/droonga/node_name.rb +90 -0
  65. data/lib/droonga/node_role.rb +72 -0
  66. data/lib/droonga/path.rb +34 -9
  67. data/lib/droonga/planner.rb +73 -7
  68. data/lib/droonga/plugin/async_command.rb +154 -0
  69. data/lib/droonga/plugins/catalog.rb +1 -0
  70. data/lib/droonga/plugins/crud.rb +22 -6
  71. data/lib/droonga/plugins/dump.rb +66 -135
  72. data/lib/droonga/plugins/groonga/delete.rb +13 -0
  73. data/lib/droonga/plugins/search/distributed_search_planner.rb +4 -4
  74. data/lib/droonga/plugins/system.rb +5 -26
  75. data/lib/droonga/plugins/system/absorb_data.rb +405 -0
  76. data/lib/droonga/plugins/system/statistics.rb +71 -0
  77. data/lib/droonga/plugins/system/status.rb +53 -0
  78. data/lib/droonga/process_control_protocol.rb +3 -1
  79. data/lib/droonga/process_supervisor.rb +32 -15
  80. data/lib/droonga/reducer.rb +69 -0
  81. data/lib/droonga/safe_file_writer.rb +1 -1
  82. data/lib/droonga/serf.rb +207 -276
  83. data/lib/droonga/serf/agent.rb +228 -0
  84. data/lib/droonga/serf/command.rb +94 -0
  85. data/lib/droonga/serf/downloader.rb +120 -0
  86. data/lib/droonga/serf/remote_command.rb +348 -0
  87. data/lib/droonga/serf/tag.rb +56 -0
  88. data/lib/droonga/service_installation.rb +2 -2
  89. data/lib/droonga/session.rb +49 -1
  90. data/lib/droonga/single_step.rb +6 -11
  91. data/lib/droonga/single_step_definition.rb +32 -1
  92. data/lib/droonga/slice.rb +14 -9
  93. data/lib/droonga/supervisor.rb +27 -20
  94. data/lib/droonga/test/stub_handler_messenger.rb +2 -1
  95. data/lib/droonga/timestamp.rb +69 -0
  96. data/lib/droonga/worker_process_agent.rb +33 -15
  97. data/sample/cluster-state.json +8 -0
  98. data/sample/cluster/Rakefile +30 -6
  99. data/test/command/fixture/integer-key-table.jsons +11 -0
  100. data/test/command/fixture/string-key-table.jsons +11 -0
  101. data/test/command/run-test.rb +4 -0
  102. data/test/command/suite/add/error/invalid-integer.expected +3 -3
  103. data/test/command/suite/add/error/invalid-time.expected +3 -3
  104. data/test/command/suite/add/{minimum.expected → key-integer.expected} +0 -0
  105. data/test/command/suite/add/{minimum.test → key-integer.test} +0 -0
  106. data/test/command/suite/add/key-string.expected +6 -0
  107. data/test/command/suite/add/key-string.test +9 -0
  108. data/test/command/suite/add/mismatched-key-type/acceptable/integer-for-string.expected +6 -0
  109. data/test/command/suite/add/mismatched-key-type/acceptable/integer-for-string.test +9 -0
  110. data/test/command/suite/add/mismatched-key-type/acceptable/string-for-integer.expected +6 -0
  111. data/test/command/suite/add/mismatched-key-type/acceptable/string-for-integer.test +9 -0
  112. data/test/command/suite/add/without-values.expected +6 -0
  113. data/test/command/suite/add/without-values.test +11 -0
  114. data/test/command/suite/dump/column/index.expected +33 -1
  115. data/test/command/suite/dump/column/index.test +1 -0
  116. data/test/command/suite/dump/column/scalar.expected +29 -1
  117. data/test/command/suite/dump/column/scalar.test +1 -0
  118. data/test/command/suite/dump/column/vector.expected +29 -1
  119. data/test/command/suite/dump/column/vector.test +1 -0
  120. data/test/command/suite/dump/record/scalar.catalog.json +12 -0
  121. data/test/command/suite/dump/record/scalar.expected +84 -0
  122. data/test/command/suite/dump/record/scalar.test +16 -0
  123. data/test/command/suite/dump/record/vector/reference.expected +83 -1
  124. data/test/command/suite/dump/record/vector/reference.test +1 -0
  125. data/test/command/suite/dump/table/array.expected +27 -1
  126. data/test/command/suite/dump/table/array.test +1 -0
  127. data/test/command/suite/dump/table/double_array_trie.expected +27 -1
  128. data/test/command/suite/dump/table/double_array_trie.test +1 -0
  129. data/test/command/suite/dump/table/hash.expected +27 -1
  130. data/test/command/suite/dump/table/hash.test +1 -0
  131. data/test/command/suite/dump/table/patricia_trie.expected +27 -1
  132. data/test/command/suite/dump/table/patricia_trie.test +1 -0
  133. data/test/command/suite/groonga/delete/{success.expected → key-integer.expected} +0 -0
  134. data/test/command/suite/groonga/delete/key-integer.test +17 -0
  135. data/test/command/suite/groonga/delete/key-string.expected +19 -0
  136. data/test/command/suite/groonga/delete/{success.test → key-string.test} +4 -6
  137. data/test/command/suite/groonga/delete/mismatched-type-key/acceptable/integer-for-string.expected +19 -0
  138. data/test/command/suite/groonga/delete/mismatched-type-key/acceptable/integer-for-string.test +17 -0
  139. data/test/command/suite/groonga/delete/mismatched-type-key/acceptable/string-for-integer.expected +19 -0
  140. data/test/command/suite/groonga/delete/mismatched-type-key/acceptable/string-for-integer.test +17 -0
  141. data/test/command/suite/message/error/missing-dataset.test +1 -0
  142. data/test/command/suite/system/absorb-data/records.catalog.json +58 -0
  143. data/test/command/suite/system/absorb-data/records.expected +32 -0
  144. data/test/command/suite/system/absorb-data/records.test +24 -0
  145. data/test/command/suite/system/statistics/object/count/empty.expected +11 -0
  146. data/test/command/suite/system/statistics/object/count/empty.test +12 -0
  147. data/test/command/suite/system/statistics/object/count/per-volume/empty.catalog.json +36 -0
  148. data/test/command/suite/system/statistics/object/count/per-volume/empty.expected +19 -0
  149. data/test/command/suite/system/statistics/object/count/per-volume/empty.test +12 -0
  150. data/test/command/suite/system/statistics/object/count/per-volume/record.catalog.json +40 -0
  151. data/test/command/suite/system/statistics/object/count/per-volume/record.expected +19 -0
  152. data/test/command/suite/system/statistics/object/count/per-volume/record.test +23 -0
  153. data/test/command/suite/system/statistics/object/count/per-volume/schema.catalog.json +40 -0
  154. data/test/command/suite/system/statistics/object/count/per-volume/schema.expected +19 -0
  155. data/test/command/suite/system/statistics/object/count/per-volume/schema.test +13 -0
  156. data/test/command/suite/system/statistics/object/count/record.catalog.json +12 -0
  157. data/test/command/suite/system/statistics/object/count/record.expected +11 -0
  158. data/test/command/suite/system/statistics/object/count/record.test +23 -0
  159. data/test/command/suite/system/statistics/object/count/schema.catalog.json +12 -0
  160. data/test/command/suite/system/statistics/object/count/schema.expected +11 -0
  161. data/test/command/suite/system/statistics/object/count/schema.test +13 -0
  162. data/test/command/suite/system/status.expected +3 -2
  163. data/test/unit/catalog/test_dataset.rb +4 -1
  164. data/test/unit/{test_catalog_generator.rb → catalog/test_generator.rb} +2 -2
  165. data/test/unit/catalog/test_replicas_volume.rb +79 -0
  166. data/test/unit/catalog/test_single_volume.rb +2 -2
  167. data/test/unit/catalog/test_slice.rb +33 -1
  168. data/test/unit/catalog/{test_collection_volume.rb → test_slices_volume.rb} +72 -11
  169. data/test/unit/catalog/test_version2.rb +3 -0
  170. data/test/unit/helper/distributed_search_planner_helper.rb +2 -2
  171. data/test/unit/plugins/catalog/test_fetch.rb +4 -4
  172. data/test/unit/plugins/crud/test_add.rb +44 -4
  173. data/test/unit/plugins/groonga/test_column_create.rb +4 -4
  174. data/test/unit/plugins/groonga/test_column_list.rb +4 -4
  175. data/test/unit/plugins/groonga/test_column_remove.rb +4 -4
  176. data/test/unit/plugins/groonga/test_column_rename.rb +4 -4
  177. data/test/unit/plugins/groonga/test_delete.rb +73 -10
  178. data/test/unit/plugins/groonga/test_table_create.rb +4 -4
  179. data/test/unit/plugins/groonga/test_table_list.rb +4 -4
  180. data/test/unit/plugins/groonga/test_table_remove.rb +4 -4
  181. data/test/unit/plugins/search/test_handler.rb +4 -4
  182. data/test/unit/plugins/search/test_planner.rb +4 -2
  183. data/test/unit/plugins/system/test_status.rb +31 -15
  184. data/test/unit/plugins/test_watch.rb +16 -16
  185. data/test/unit/test_address.rb +4 -4
  186. metadata +134 -35
  187. data/lib/droonga/catalog/volume_collection.rb +0 -79
  188. data/lib/droonga/catalog_fetcher.rb +0 -53
  189. data/lib/droonga/catalog_generator.rb +0 -243
  190. data/lib/droonga/catalog_loader.rb +0 -56
  191. data/lib/droonga/command/remote.rb +0 -404
  192. data/lib/droonga/data_absorber.rb +0 -264
  193. data/lib/droonga/node_status.rb +0 -71
  194. data/lib/droonga/serf_downloader.rb +0 -115
  195. data/test/unit/catalog/test_volume_collection.rb +0 -78
@@ -21,17 +21,17 @@ require "json"
21
21
  require "pathname"
22
22
 
23
23
  require "droonga/engine/version"
24
- require "droonga/catalog_generator"
24
+ require "droonga/catalog/generator"
25
25
  require "droonga/safe_file_writer"
26
26
  require "droonga/service_installation"
27
27
 
28
28
  service_installation = Droonga::ServiceInstallation.new
29
29
  service_installation.ensure_using_service_base_directory
30
30
 
31
- generator = Droonga::CatalogGenerator.new
31
+ generator = Droonga::Catalog::Generator.new
32
32
  current_dataset = {}
33
33
  datasets = {
34
- Droonga::CatalogGenerator::DEFAULT_DATASET => current_dataset
34
+ Droonga::Catalog::Generator::DEFAULT_DATASET => current_dataset
35
35
  }
36
36
 
37
37
  options = OpenStruct.new
@@ -51,37 +51,37 @@ end
51
51
  parser.on("--dataset=NAME",
52
52
  "Add a dataset its name is NAME.",
53
53
  "And set the NAME to the current dataset.",
54
- "(#{Droonga::CatalogGenerator::DEFAULT_DATASET})") do |name|
54
+ "(#{Droonga::Catalog::Generator::DEFAULT_DATASET})") do |name|
55
55
  current_dataset = datasets[name] = {}
56
56
  end
57
57
  parser.on("--n-workers=N", Integer,
58
58
  "Use N workers for the current dataset.",
59
- "(#{Droonga::CatalogGenerator::DEFAULT_N_WORKERS})") do |n|
59
+ "(#{Droonga::Catalog::Generator::DEFAULT_N_WORKERS})") do |n|
60
60
  current_dataset[:n_workers] = n
61
61
  end
62
62
  parser.on("--hosts=NAME1,NAME2,...", Array,
63
63
  "Use given hosts for replicas of the current dataset.",
64
- "(#{Droonga::CatalogGenerator::DEFAULT_HOSTS.join(",")})") do |hosts|
64
+ "(#{Droonga::Catalog::Generator::DEFAULT_HOSTS.join(",")})") do |hosts|
65
65
  current_dataset[:hosts] = hosts
66
66
  end
67
67
  parser.on("--port=PORT", Integer,
68
68
  "Use the PORT as the port for the current dataset.",
69
- "(#{Droonga::CatalogGenerator::DEFAULT_PORT})") do |port|
69
+ "(#{Droonga::Catalog::Generator::DEFAULT_PORT})") do |port|
70
70
  current_dataset[:port] = port
71
71
  end
72
72
  parser.on("--tag=TAG",
73
73
  "Use the TAG as the tag for the current dataset.",
74
- "(#{Droonga::CatalogGenerator::DEFAULT_TAG})") do |tag|
74
+ "(#{Droonga::Catalog::Generator::DEFAULT_TAG})") do |tag|
75
75
  current_dataset[:tag] = tag
76
76
  end
77
77
  parser.on("--n-slices=N", Integer,
78
78
  "Use N slices for each replica.",
79
- "(#{Droonga::CatalogGenerator::DEFAULT_N_SLICES})") do |n|
79
+ "(#{Droonga::Catalog::Generator::DEFAULT_N_SLICES})") do |n|
80
80
  current_dataset[:n_slices] = n
81
81
  end
82
82
  parser.on("--plugins=PLUGIN1,PLUGIN2,...", Array,
83
83
  "Use PLUGINS for the current dataset.",
84
- "(#{Droonga::CatalogGenerator::DEFAULT_PLUGINS.join(",")})") do |plugins|
84
+ "(#{Droonga::Catalog::Generator::DEFAULT_PLUGINS.join(",")})") do |plugins|
85
85
  current_dataset[:plugins] = plugins
86
86
  end
87
87
  parser.on("--schema=PATH",
@@ -102,8 +102,8 @@ parser.on("--replicas=PATH",
102
102
  end
103
103
  parser.parse!(ARGV)
104
104
 
105
- if datasets[Droonga::CatalogGenerator::DEFAULT_DATASET].empty?
106
- datasets.delete(Droonga::CatalogGenerator::DEFAULT_DATASET)
105
+ if datasets[Droonga::Catalog::Generator::DEFAULT_DATASET].empty?
106
+ datasets.delete(Droonga::Catalog::Generator::DEFAULT_DATASET)
107
107
  end
108
108
 
109
109
  if service_installation.user_exist? and
@@ -21,17 +21,17 @@ require "json"
21
21
  require "pathname"
22
22
 
23
23
  require "droonga/engine/version"
24
- require "droonga/catalog_generator"
24
+ require "droonga/catalog/generator"
25
25
  require "droonga/safe_file_writer"
26
26
  require "droonga/service_installation"
27
27
 
28
28
  service_installation = Droonga::ServiceInstallation.new
29
29
  service_installation.ensure_using_service_base_directory
30
30
 
31
- generator = Droonga::CatalogGenerator.new
31
+ generator = Droonga::Catalog::Generator.new
32
32
  current_dataset = {}
33
33
  datasets = {
34
- Droonga::CatalogGenerator::DEFAULT_DATASET => current_dataset
34
+ Droonga::Catalog::Generator::DEFAULT_DATASET => current_dataset
35
35
  }
36
36
 
37
37
  options = OpenStruct.new
@@ -65,7 +65,7 @@ end
65
65
  parser.on("--dataset=NAME",
66
66
  "Add a dataset its name is NAME.",
67
67
  "And set the NAME to the current dataset.",
68
- "(#{Droonga::CatalogGenerator::DEFAULT_DATASET})") do |name|
68
+ "(#{Droonga::Catalog::Generator::DEFAULT_DATASET})") do |name|
69
69
  current_dataset = datasets[name] = {}
70
70
  end
71
71
  parser.on("--replica-hosts=NAME1,NAME2,...", Array,
@@ -1,6 +1,6 @@
1
1
  #!/usr/bin/env ruby
2
2
  #
3
- # Copyright (C) 2014 Droonga Project
3
+ # Copyright (C) 2014-2015 Droonga Project
4
4
  #
5
5
  # This library is free software; you can redistribute it and/or
6
6
  # modify it under the terms of the GNU Lesser General Public
@@ -19,207 +19,388 @@ require "slop"
19
19
  require "json"
20
20
  require "pathname"
21
21
  require "socket"
22
+ require "coolio"
22
23
 
23
24
  require "droonga/engine/version"
24
25
  require "droonga/path"
25
- require "droonga/catalog_generator"
26
+ require "droonga/node_name"
27
+ require "droonga/node_role"
28
+ require "droonga/catalog/dataset"
29
+ require "droonga/catalog/fetcher"
30
+ require "droonga/catalog/loader"
26
31
  require "droonga/safe_file_writer"
27
- require "droonga/data_absorber"
32
+ require "droonga/data_absorber_client"
28
33
  require "droonga/serf"
29
34
 
30
- class JoinCommand
31
- def run
32
- parse_options
33
- trap_signals
34
-
35
- puts "Start to join a new node #{@options[:host]}"
36
- puts " to the cluster of #{@options["replica-source-host"]}"
37
- puts " via #{@options["receiver-host"]} (this host)"
38
- puts ""
39
-
40
- set_node_role
41
- do_join
42
- sleep(5) #TODO: wait for restarting of the joining node. this should be done more safely.
43
- do_copy unless @options["no-copy"]
44
- set_effective_message_timestamp
45
- update_other_nodes
46
- reset_node_role
47
- puts("Done.")
48
- exit(true)
49
- end
35
+ module Droonga
36
+ module Command
37
+ class Join
38
+ class MissingRequiredParameter < StandardError
39
+ end
50
40
 
51
- private
52
- def parse_options
53
- options = Slop.parse(:help => true) do |option|
54
- option.on("no-copy", "Don't copy data from the source cluster.",
55
- :default => false)
56
-
57
- option.separator("Connections:")
58
- option.on(:host=,
59
- "Host name of the new node to be joined.",
60
- :required => true)
61
- option.on("replica-source-host=",
62
- "Host name of the soruce node in the cluster to be connected.",
63
- :required => true)
64
- option.on("receiver-host=",
65
- "Host name of this host.",
66
- :default => Socket.gethostname)
67
- option.on(:dataset=,
68
- "Dataset name of for the node to be joined.",
69
- :default => Droonga::CatalogGenerator::DEFAULT_DATASET)
70
- option.on(:port=,
71
- "Port number of the source cluster to be connected.",
72
- :as => Integer,
73
- :default => Droonga::CatalogGenerator::DEFAULT_PORT)
74
- option.on(:tag=,
75
- "Tag name of the soruce cluster to be connected.",
76
- :default => Droonga::CatalogGenerator::DEFAULT_TAG)
77
- option.on("records-per-second=",
78
- "Maximum number of records per second to be copied. " +
79
- "'#{Droonga::Client::RateLimiter::NO_LIMIT}' means no limit.",
80
- :as => Integer,
81
- :default => Droonga::DataAbsorber::DEFAULT_MESSAGES_PER_SECOND)
82
- end
83
- @options = options
84
- rescue Slop::MissingOptionError => error
85
- $stderr.puts(error)
86
- exit(false)
87
- end
41
+ def run
42
+ @loop = Coolio::Loop.default
88
43
 
89
- def joining_node
90
- "#{@options[:host]}:#{@options[:port]}/#{@options[:tag]}"
91
- end
44
+ parse_options
45
+ trap_signals
92
46
 
93
- def source_node
94
- "#{@options["replica-source-host"]}:#{@options[:port]}/#{@options[:tag]}"
95
- end
47
+ puts "Start to join a new node #{joining_node.host}"
48
+ puts " to the cluster of #{source_node.host}"
49
+ puts " via #{@options["receiver-host"]} (this host)"
50
+ puts " port = #{joining_node.port}"
51
+ puts " tag = #{joining_node.tag}"
52
+ puts " dataset = #{dataset}"
53
+ puts ""
96
54
 
97
- def run_remote_command(target, command, options)
98
- serf = Droonga::Serf.new(nil, target)
99
- result = serf.send_query(command, options)
100
- #puts(result[:result])
101
- puts(result[:error]) unless result[:error].empty?
102
- result[:response]
103
- end
55
+ if should_copy? and not absorber.empty_destination?
56
+ $stderr.puts("Error: The joining node's dataset #{dataset} is not empty.")
57
+ $stderr.puts(" You must clear all data of the node before joining.")
58
+ return false
59
+ end
104
60
 
105
- def absorber
106
- @absorber ||= prepare_absorber
107
- end
61
+ puts "Source Cluster ID: #{source_cluster_id}"
62
+ puts ""
108
63
 
109
- def prepare_absorber
110
- absorber_options = {
111
- :dataset => @options[:dataset],
112
- :source_host => @options["replica-source-host"],
113
- :destination_host => @options[:host],
114
- :receiver_host => @options["receiver-host"],
115
- :port => @options[:port],
116
- :tag => @options[:tag],
117
- :messages_per_second => @options["records-per-second"],
118
- }
119
- Droonga::DataAbsorber.new(absorber_options)
120
- end
64
+ begin
65
+ set_joining_node_role
66
+ do_join
67
+ register_to_existing_nodes
68
+ set_source_node_role
69
+ if should_copy?
70
+ #XXX If any command is received by the source node after changing of its role,
71
+ # the timestamp of last processed mesasge is unexpectedly updated by them.
72
+ # Be careful to not send any command to the source node on this timing!!
73
+ update_accept_messages_newer_than_timestamp
74
+ successed = copy_data
75
+ unless successed
76
+ do_cancel
77
+ return false
78
+ end
79
+ end
80
+ reset_source_node_role
81
+ reset_joining_node_role
82
+ puts("Done.")
83
+ true
84
+ rescue Exception => exception
85
+ puts("Unexpected exception: #{exception.message}")
86
+ puts(exception.backtrace.join("\n"))
87
+ do_cancel
88
+ false
89
+ end
90
+ end
91
+
92
+ private
93
+ def parse_options
94
+ options = Slop.parse(:help => true) do |option|
95
+ option.on("no-copy", "Don't copy data from the source cluster.",
96
+ :default => false)
121
97
 
122
- def set_node_role
123
- if absorber.source_node_suspendable?
124
- run_remote_command(source_node, "change_role",
125
- "node" => source_node,
126
- "role" => "source")
98
+ option.separator("Target:")
99
+ option.on(:host=,
100
+ "Host name of the new node to be joined.",
101
+ :required => true)
102
+ option.on("replica-source-host=",
103
+ "Host name of the soruce node in the cluster to be connected.",
104
+ :required => true)
105
+
106
+ option.on(:port=,
107
+ "Port number of the source cluster to be connected.",
108
+ :as => Integer,
109
+ :default => NodeName::DEFAULT_PORT)
110
+ option.on(:tag=,
111
+ "Tag name of the soruce cluster to be connected.",
112
+ :default => NodeName::DEFAULT_TAG)
113
+ option.on(:dataset=,
114
+ "Dataset name of for the node to be joined.",
115
+ :default => Catalog::Dataset::DEFAULT_NAME)
116
+
117
+ option.separator("Connections:")
118
+ option.on("receiver-host=",
119
+ "Host name of this host.",
120
+ :default => Socket.gethostname)
121
+
122
+ option.separator("Miscellaneous:")
123
+ option.on("records-per-second=",
124
+ "Maximum number of records per second to be copied. " +
125
+ "'#{Client::RateLimiter::NO_LIMIT}' means no limit.",
126
+ :as => Integer,
127
+ :default => DataAbsorberClient::DEFAULT_MESSAGES_PER_SECOND)
128
+ option.on("progress-interval-seconds=",
129
+ "Interval seconds to report progress.",
130
+ :as => Integer,
131
+ :default => DataAbsorberClient::DEFAULT_PROGRESS_INTERVAL_SECONDS)
132
+ option.on(:verbose, "Output details for internal operations.",
133
+ :default => false)
134
+ end
135
+ @options = options
136
+ rescue Slop::MissingOptionError => error
137
+ $stderr.puts(error)
138
+ raise MissingRequiredParameter.new
127
139
  end
128
- run_remote_command(joining_node, "change_role",
129
- "node" => joining_node,
130
- "role" => "destination")
131
- @node_role_changed = true
132
- end
133
140
 
134
- def reset_node_role
135
- if absorber.source_node_suspendable?
136
- run_remote_command(source_node, "change_role",
137
- "node" => source_node,
138
- "role" => "")
141
+ def dataset
142
+ @options[:dataset]
139
143
  end
140
- run_remote_command(joining_node, "change_role",
141
- "node" => joining_node,
142
- "role" => "")
143
- @node_role_changed = false
144
- end
145
144
 
146
- def do_join
147
- puts("Joining new replica to the cluster...")
148
- run_remote_command(joining_node, "join",
149
- "node" => joining_node,
150
- "type" => "replica",
151
- "source" => source_node,
152
- "dataset" => @options[:dataset],
153
- "copy" => !@options["no-copy"])
154
- end
145
+ def should_copy?
146
+ not @options["no-copy"]
147
+ end
148
+
149
+ def joining_node
150
+ @joining_node ||= NodeName.new(:host => @options[:host],
151
+ :port => @options[:port],
152
+ :tag => @options[:tag])
153
+ end
154
+
155
+ def source_node
156
+ @source_node ||= NodeName.new(:host => @options["replica-source-host"],
157
+ :port => @options[:port],
158
+ :tag => @options[:tag])
159
+ end
160
+
161
+ def source_node_serf
162
+ @source_node_serf ||= Serf.new(source_node.to_s,
163
+ :verbose => @options[:verbose])
164
+ end
165
+
166
+ def joining_node_serf
167
+ @joining_node_serf ||= Serf.new(joining_node.to_s,
168
+ :verbose => @options[:verbose])
169
+ end
170
+
171
+ def source_cluster_id
172
+ source_catalog.cluster_id
173
+ end
174
+
175
+ def all_nodes
176
+ existing_nodes + [joining_node]
177
+ end
178
+
179
+ def existing_nodes
180
+ @existing_nodes ||= prepare_existing_nodes
181
+ end
182
+
183
+ def prepare_existing_nodes
184
+ generator = Catalog::Generator.new
185
+ generator.load(raw_source_catalog)
186
+
187
+ dataset = generator.dataset_for_host(source_node.host)
188
+ other_hosts = dataset.replicas.hosts
189
+ other_hosts.collect do |host|
190
+ NodeName.new(:host => host,
191
+ :port => source_node.port,
192
+ :tag => source_node.tag)
193
+ end
194
+ end
195
+
196
+ def source_catalog
197
+ @source_catalog ||= parse_source_catalog
198
+ end
199
+
200
+ def parse_source_catalog
201
+ loader = Catalog::Loader.new
202
+ loader.parse(raw_source_catalog)
203
+ end
204
+
205
+ def raw_source_catalog
206
+ @raw_source_catalog ||= fetch_source_catalog
207
+ end
208
+
209
+ def fetch_source_catalog
210
+ fetcher = Catalog::Fetcher.new(:host => source_node.host,
211
+ :port => source_node.port,
212
+ :tag => source_node.tag,
213
+ :receiver_host => @options["receiver-host"])
214
+ fetcher.fetch(:dataset => dataset)
215
+ end
216
+
217
+ def absorber
218
+ @absorber ||= prepare_absorber
219
+ end
220
+
221
+ def prepare_absorber
222
+ absorber_options = {
223
+ :host => joining_node.host,
224
+ :port => joining_node.port,
225
+ :tag => joining_node.tag,
226
+ :dataset => dataset,
227
+
228
+ :source_host => source_node.host,
229
+ :source_port => source_node.port,
230
+ :source_tag => source_node.tag,
231
+ :source_dataset => dataset,
232
+
233
+ :receiver_host => @options["receiver-host"],
234
+
235
+ :messages_per_second => @options["records-per-second"],
236
+ :progress_interval_seconds => @options["progress-interval-seconds"],
237
+ :target_role => NodeRole::ABSORB_DESTINATION,
238
+
239
+ :client_options => {
240
+ :backend => :coolio,
241
+ :loop => @loop,
242
+ },
243
+ }
244
+ DataAbsorberClient.new(absorber_options)
245
+ end
246
+
247
+ def set_source_node_role
248
+ return if source_node_serf.role == NodeRole::ABSORB_SOURCE
249
+ if absorber.source_node_suspendable?
250
+ puts("Changing role of the source node...")
251
+ source_node_serf.ensure_restarted do
252
+ source_node_serf.send_query("change_role",
253
+ "node" => source_node.to_s,
254
+ "role" => NodeRole::ABSORB_SOURCE)
255
+ end
256
+ end
257
+ end
155
258
 
156
- def do_copy
157
- @start_time_in_seconds = Time.new.to_i
158
- puts("Copying data from the source node...")
159
- last_progress = ""
160
- while true
161
- sleep(3)
162
- response = run_remote_command(joining_node, "report_status",
163
- "node" => joining_node,
164
- "key" => "absorbing")
165
- if response
166
- absorbing = response["value"]
167
- break unless absorbing
168
- end
169
-
170
- progress = absorber.report_progress(@start_time_in_seconds)
171
- if progress
172
- printf("%s", "#{" " * last_progress.size}\r")
173
- printf("%s", "#{progress}\r")
259
+ def set_joining_node_role
260
+ return if joining_node_serf.role == NodeRole::ABSORB_DESTINATION
261
+ puts("Changing role of the joining node...")
262
+ joining_node_serf.ensure_restarted do
263
+ joining_node_serf.send_query("change_role",
264
+ "node" => joining_node.to_s,
265
+ "role" => NodeRole::ABSORB_DESTINATION)
266
+ end
267
+ end
268
+
269
+ def reset_source_node_role
270
+ return if source_node_serf.role == NodeRole::SERVICE_PROVIDER
271
+ if absorber.source_node_suspendable?
272
+ puts("Restoring role of the source node...")
273
+ source_node_serf.ensure_restarted do
274
+ source_node_serf.send_query("change_role",
275
+ "node" => source_node.to_s,
276
+ "role" => NodeRole::SERVICE_PROVIDER)
277
+ end
278
+ end
279
+ end
280
+
281
+ def reset_joining_node_role
282
+ return if joining_node_serf.role == NodeRole::SERVICE_PROVIDER
283
+ puts("Restoring role of the joining node...")
284
+ joining_node_serf.ensure_restarted do
285
+ joining_node_serf.send_query("change_role",
286
+ "node" => joining_node.to_s,
287
+ "role" => NodeRole::SERVICE_PROVIDER)
288
+ end
289
+ end
290
+
291
+ def do_join
292
+ puts("Configuring the joining node as a new replica for the cluster...")
293
+ joining_node_serf.ensure_restarted do
294
+ joining_node_serf.send_query("join",
295
+ "node" => joining_node.to_s,
296
+ "type" => "replica",
297
+ "source" => source_node.to_s,
298
+ "dataset" => dataset)
299
+ end
300
+ end
301
+
302
+ def copy_data
303
+ puts("Copying data from the source node...")
304
+
305
+ last_progress = nil
306
+ absorber.run do |progress|
307
+ if last_progress
308
+ printf("%s", "#{" " * last_progress[:message].size}\r")
309
+ end
310
+ printf("%s", "#{progress[:message]}\r")
174
311
  last_progress = progress
175
312
  end
313
+ @loop.run
314
+
315
+ if absorber.error_message
316
+ puts(absorber.error_message)
317
+ return false
318
+ end
319
+
320
+ puts ""
321
+ true
176
322
  end
177
- puts ""
178
- end
179
323
 
180
- def set_effective_message_timestamp
181
- response = run_remote_command(source_node, "report_status",
182
- "node" => source_node,
183
- "key" => "last_processed_message_timestamp")
184
- timestamp = response["value"]
185
- if timestamp and not timestamp.empty?
186
- puts "The timestamp of the last processed message in the source node: #{timestamp}"
187
- puts "Setting effective message timestamp for the destination node..."
188
- response = run_remote_command(joining_node, "set_status",
189
- "node" => joining_node,
190
- "key" => "effective_message_timestamp",
191
- "value" => timestamp)
324
+ GETTING_LAST_MESSAGE_TIMESTAMP_MAX_RETRY_COUNT = 10
325
+ GETTING_LAST_MESSAGE_TIMESTAMP_RETRY_INTERVAL_SECONDS = 10
326
+
327
+ def try_get_last_message_timestamp(retry_count=0)
328
+ puts "Getting the timestamp of the last processed message in the source node..."
329
+ timestamp = source_node_serf.last_message_timestamp
330
+ unless timestamp
331
+ if retry_count < GETTING_LAST_MESSAGE_TIMESTAMP_MAX_RETRY_COUNT
332
+ puts "Failed. Retrying..."
333
+ sleep(GETTING_LAST_MESSAGE_TIMESTAMP_RETRY_INTERVAL_SECONDS)
334
+ timestamp = try_get_last_message_timestamp(retry_count + 1)
335
+ end
336
+ end
337
+ timestamp
192
338
  end
193
- end
194
339
 
195
- def update_other_nodes
196
- puts("Update existing hosts in the cluster...")
197
- run_remote_command(source_node, "add_replicas",
198
- "dataset" => @options[:dataset],
199
- "hosts" => [@options[:host]])
200
- end
340
+ def update_accept_messages_newer_than_timestamp
341
+ timestamp = try_get_last_message_timestamp
342
+ if timestamp and not timestamp.empty?
343
+ puts "The timestamp of the last processed message at the source node: #{timestamp}"
344
+ puts "Setting new node to ignore messages older than the timestamp..."
345
+ joining_node_serf.ensure_restarted do
346
+ joining_node_serf.send_query("accept_messages_newer_than",
347
+ "node" => joining_node.to_s,
348
+ "timestamp" => timestamp)
349
+ end
350
+ else
351
+ $stderr.puts("WARNING: Couldn't get the time stamp of " +
352
+ "the last processed message from the source node. " +
353
+ "Any message will be forwarded to the joining node.")
354
+ end
355
+ end
201
356
 
202
- def trap_signals
203
- trap(:TERM) do
204
- do_cancel
205
- trap(:TERM, "DEFAULT")
357
+ def register_to_existing_nodes
358
+ puts("Registering new node to existing nodes...")
359
+ source_node_serf.ensure_restarted(*existing_nodes) do
360
+ source_node_serf.send_query("add_replicas",
361
+ "cluster_id" => source_cluster_id,
362
+ "dataset" => dataset,
363
+ "hosts" => [joining_node.host])
364
+ end
365
+ @node_registered = true
206
366
  end
207
367
 
208
- trap(:INT) do
209
- do_cancel
210
- trap(:INT, "DEFAULT")
368
+ def unregister_from_existing_nodes
369
+ puts("Unregistering new node from existing nodes...")
370
+ source_node_serf.ensure_restarted(*existing_nodes) do
371
+ source_node_serf.send_query("remove_replicas",
372
+ "cluster_id" => source_cluster_id,
373
+ "dataset" => dataset,
374
+ "hosts" => [joining_node.host])
375
+ end
376
+ @node_registered = false
211
377
  end
212
378
 
213
- trap(:QUIT) do
214
- do_cancel
215
- trap(:QUIT, "DEFAULT")
379
+ def trap_signals
380
+ trap(:TERM) do
381
+ trap(:TERM, "DEFAULT")
382
+ do_cancel
383
+ end
384
+
385
+ trap(:INT) do
386
+ trap(:INT, "DEFAULT")
387
+ do_cancel
388
+ end
389
+
390
+ trap(:QUIT) do
391
+ trap(:QUIT, "DEFAULT")
392
+ do_cancel
393
+ end
216
394
  end
217
- end
218
395
 
219
- def do_cancel
220
- #XXX we have to write more codes to cancel remote processes!
221
- reset_node_role if @node_role_changed
396
+ def do_cancel
397
+ #XXX we have to write more codes to cancel remote processes!
398
+ unregister_from_existing_nodes if @node_registered
399
+ reset_joining_node_role
400
+ reset_source_node_role
401
+ end
402
+ end
222
403
  end
223
404
  end
224
405
 
225
- JoinCommand.new.run
406
+ exit(Droonga::Command::Join.new.run)