filecluster 0.4.5 → 0.4.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: 65712f8a51ce15000b05922338f8621f5cd4caec
4
+ data.tar.gz: 4802352703fc5964a40517f8c7d95eb2a87c0892
5
+ SHA512:
6
+ metadata.gz: 57a574d9d3ab392e09861425b267fda89c7485e9b107253db65edf77d58809067eb60e1d1b1072cdfd7cd1befc2578ab83eb65063cee9121553a129c78cb9340
7
+ data.tar.gz: 94dc33d6b766cfd6958ae38f0e1760ef5557dd81fd63ed314687ce08e3c925cbe4f7aea0d6483a4750b3a2cb4c89a5413c01482ea00878e8db9b7b9ecbf2b7d3
data/README.md CHANGED
@@ -59,6 +59,7 @@ Can be used the following variables:
59
59
  |daemon_tasks_copy_threads_limit|10|copy tasks threads count limit for one storage|
60
60
  |daemon_tasks_delete_threads_limit|10|delete tasks threads count limit for one storage|
61
61
  |daemon_copy_tasks_per_host_limit|10|copy tasks count limit for one host|
62
+ |daemon_copy_speed_per_host_limit|0|copy tasks speed limit for hosts, change via fc-manage copy_speed|
62
63
  |daemon_global_tasks_group_limit|1000|select limit for create copy tasks|
63
64
  |daemon_global_error_items_ttl|86400|ttl for items with error status before delete|
64
65
  |daemon_global_error_items_storages_ttl|86400|ttl for items_storages with error status before delete|
data/bin/fc-daemon CHANGED
@@ -17,6 +17,7 @@ $tasks_copy_threads = {} # copy threads by storage name
17
17
  $tasks_delete_threads = {} # delete threads by storage name
18
18
  $check_threads = {} # check threads by storage name
19
19
  $copy_count = 0 # copy tasks count for current host
20
+ $copy_speed = 0 # copy tasks speed sum for current host
20
21
  $exit_signal = false
21
22
  $global_daemon_thread = nil
22
23
  $update_tasks_thread = nil
data/bin/fc-manage CHANGED
@@ -64,6 +64,14 @@ Command:
64
64
  list show all FC::Var-s
65
65
  show <variable> show current value for variable
66
66
  change <variable> change variable
67
+ }],
68
+ 'copy_speed' => [
69
+ 'show and change copy speed limit for FC hosts',
70
+ %q{Usage: fc-manage [options] copy_speed <command>
71
+ Command:
72
+ list show all limits
73
+ change <host> change current copy speed limit for host
74
+ add add copy speed limit for host
67
75
  }],
68
76
  'item' => [
69
77
  'show and manage items',
@@ -79,8 +87,8 @@ desc = %q{Get info and manage for storages, policies and items.
79
87
  Usage: fc-manage [options] <command> [<args>]
80
88
  Commands:
81
89
  }
82
- commands_help.each{|key, val| desc << " #{key}#{" "*(10-key.size)}#{val[0]}\n"}
83
- desc << " help show help for commands ('fc-manage help <command>')\n"
90
+ commands_help.each{|key, val| desc << " #{key}#{" "*(11-key.size)}#{val[0]}\n"}
91
+ desc << " help show help for commands ('fc-manage help <command>')\n"
84
92
  $options = option_parser_init(descriptions, desc)
85
93
  FC::Storage.instance_variable_set(:@uname, $options[:curr_host]) if $options[:curr_host] && $options[:curr_host] != FC::Storage.curr_host
86
94
  trap("INT", proc {exit})
data/bin/fc-setup-db CHANGED
@@ -8,22 +8,35 @@ require 'utils'
8
8
  require 'readline'
9
9
 
10
10
  descriptions = {
11
- :host => {:short => 'h', :full => 'host', :default => 'localhost', :text => 'mysql host name, default "localhost"', :save => true},
12
- :database => {:short => 'd', :full => 'db', :default => 'fc', :text => 'mysql database, default "fc"', :save => true},
13
- :username => {:short => 'u', :full => 'user', :default => 'root', :text => 'mysql user, default "root"', :save => true},
14
- :password => {:short => 'p', :full => 'password', :default => '', :text => 'mysql password, default ""', :save => true},
15
- :port => {:short => 'P', :full => 'port', :default => '3306', :text => 'mysql port, default "3306"', :save => true},
16
- :prefix => {:short => 't', :full => 'prefix', :default => '', :text => 'tables prefix, default ""', :save => true},
17
- :init_tables =>{:short => 'i', :full => 'init', :default => false, :text => 'init tables, default no', :no_val => true},
18
- :force => {:short => 'f', :full => 'force', :default => false, :text => 'do not ask questions', :no_val => true}
11
+ :host => {:short => 'h', :full => 'host', :default => 'localhost', :text => 'mysql host name, default "localhost"', :save => true},
12
+ :database => {:short => 'd', :full => 'db', :default => 'fc', :text => 'mysql database, default "fc"', :save => true},
13
+ :username => {:short => 'u', :full => 'user', :default => 'root', :text => 'mysql user, default "root"', :save => true},
14
+ :password => {:short => 'p', :full => 'password', :default => '', :text => 'mysql password, default ""', :save => true},
15
+ :port => {:short => 'P', :full => 'port', :default => '3306', :text => 'mysql port, default "3306"', :save => true},
16
+ :prefix => {:short => 't', :full => 'prefix', :default => '', :text => 'tables prefix, default ""', :save => true},
17
+ :init_tables =>{:short => 'i', :full => 'init', :default => false, :text => 'init tables, default no', :no_val => true},
18
+ :force => {:short => 'f', :full => 'force', :default => false, :text => 'do not ask questions', :no_val => true},
19
+ :migrations => {:short => 'm', :full => 'migrations',:default => false, :text => 'Make not ask questions', :no_val => true}
19
20
  }
20
21
  desc = %q{Setup FileCluster database connection options.
21
- Create tables if nessary.
22
+ Create tables on --init.
23
+ Make database migrations on --migrations.
24
+ If no host, database, username, password, port, prefix try to use current db.yml.
22
25
  Usage: fc-init-db [options]}
23
26
  options = option_parser_init(descriptions, desc)
24
- options.delete('optparse')
25
27
  trap("INT", proc {exit})
26
28
 
29
+ if !options[:__keys][:host] && !options[:__keys][:database] && !options[:__keys][:username] && !options[:__keys][:password] && !options[:__keys][:port] && !options[:__keys][:prefix]
30
+ default_db_config = File.expand_path(File.dirname(__FILE__))+'/db.yml'
31
+ if File.exists?(default_db_config)
32
+ db_options = Psych.load(File.read(default_db_config))
33
+ options.merge!(db_options)
34
+
35
+ end
36
+ end
37
+ options.delete('optparse')
38
+ options.delete(:__keys)
39
+
27
40
  puts options.inspect.gsub(/[\{\}\:]/, "").gsub(", ", "\n").gsub(/(.{7,})=>/, "\\1:\t").gsub("=>", ":\t\t")
28
41
 
29
42
  s = options[:force] ? 'y' : Readline.readline("Continue? (y/n) ", false).strip.downcase
@@ -37,12 +50,19 @@ if s == "y" || s == "yes"
37
50
  FC::DB.init_db
38
51
  puts "ok"
39
52
  end
40
- print "Save to config.. "
41
- options.select!{|key, val| descriptions[key][:save]}
42
- File.open(File.expand_path(File.dirname(__FILE__))+'/db.yml', 'w') do |f|
43
- f.write(options.to_yaml)
53
+ unless default_db_config
54
+ print "Save to config.. "
55
+ options.select!{|key, val| descriptions[key][:save]}
56
+ File.open(File.expand_path(File.dirname(__FILE__))+'/db.yml', 'w') do |f|
57
+ f.write(options.to_yaml)
58
+ end
59
+ puts "ok"
60
+ end
61
+ if options[:migrations]
62
+ print "Make migrations.. "
63
+ FC::DB.migrations
64
+ puts "ok"
44
65
  end
45
- puts "ok"
46
66
  else
47
67
  puts "Canceled."
48
68
  end
@@ -4,7 +4,7 @@ class CopyTaskThread < BaseThread
4
4
  Thread.current[:tasks_processed] = 0 unless Thread.current[:tasks_processed]
5
5
  while task = $tasks_copy[storage_name].shift do
6
6
  $curr_tasks << task
7
- $log.debug("CopyTaskThread(#{storage_name}): run task for item_storage ##{task.id}, copy_count=#{$copy_count}")
7
+ $log.debug("CopyTaskThread(#{storage_name}): run task for item_storage ##{task.id}, copy_count=#{$copy_count}, copy_speed=#{$copy_speed}")
8
8
  make_copy(task)
9
9
  $curr_tasks.delete(task)
10
10
  $log.debug("CopyTaskThread(#{storage_name}): finish task for item_storage ##{task.id}")
@@ -15,7 +15,18 @@ class CopyTaskThread < BaseThread
15
15
 
16
16
  def make_copy(task)
17
17
  sleep 0.1 while $copy_count > FC::Var.get('daemon_copy_tasks_per_host_limit', 10).to_i
18
+ limit = FC::Var.get_current_speed_limit
19
+ speed_limit = nil
20
+ if limit
21
+ if $copy_count == 0
22
+ speed_limit = (limit - $copy_speed) / 0.75
23
+ else
24
+ sleep 0.1 while (speed_limit = (limit - $copy_speed) / ($copy_count + 0.4)) < limit*0.1
25
+ end
26
+ end
18
27
  $copy_count += 1
28
+ $copy_speed += speed_limit if speed_limit
29
+
19
30
  storage = $storages.detect{|s| s.name == task.storage_name}
20
31
  begin
21
32
  item = FC::Item.find(task.item_id)
@@ -35,11 +46,12 @@ class CopyTaskThread < BaseThread
35
46
  end
36
47
  src_storage = $all_storages.detect{|s| s.name == src_item_storage.storage_name}
37
48
  $log.debug("Copy from #{src_storage.name} to #{storage.name} #{storage.path}#{item.name}")
38
- item.copy_item_storage(src_storage, storage, task)
49
+ item.copy_item_storage(src_storage, storage, task, speed_limit)
39
50
  rescue Exception => e
40
51
  error "Copy item_storage error: #{e.message}; #{e.backtrace.join(', ')}", :item_id => task.item_id, :item_storage_id => task.id
41
52
  $curr_tasks.delete(task)
42
53
  ensure
43
54
  $copy_count -= 1 if $copy_count > 0
55
+ $copy_speed -= speed_limit if speed_limit
44
56
  end
45
57
  end
data/lib/fc/copy_rule.rb CHANGED
@@ -30,14 +30,10 @@ module FC
30
30
 
31
31
  # get available storage for copy
32
32
  def self.get_proper_storage_for_copy(options)
33
- exclude = options[:exclude] || []
34
33
  rules = check_all(options[:item_id].to_i, options[:size].to_i, options[:item_copies].to_i, options[:name].to_s, options[:tag].to_s, options[:dir] ? true : false, options[:src_storage])
35
34
  result = nil
36
- rules.each do |rule|
37
- result = rule.get_copy_storages.select do |storage|
38
- !exclude.include?(storage.name) && storage.up? && storage.size + options[:size].to_i < storage.size_limit
39
- end.first
40
- break if result
35
+ rules.detect do |rule|
36
+ result = FC::Storage.select_proper_storage_for_create(rule.get_copy_storages, options[:size].to_i, options[:exclude] || [])
41
37
  end
42
38
  result
43
39
  end
data/lib/fc/db.rb CHANGED
@@ -6,13 +6,27 @@ module FC
6
6
 
7
7
  def self.connect_by_config(options)
8
8
  @options = options.clone
9
- @options[:port] = options[:port].to_i if options[:port]
10
- @prefix = options[:prefix].to_s if options[:prefix]
11
- @connects = {}
9
+ @options[:port] = @options[:port].to_i if @options[:port]
10
+ @prefix = @options[:prefix].to_s if @options[:prefix]
11
+ @connects = {} unless @connects
12
12
  @connects[Thread.current.object_id] = Mysql2::Client.new(@options)
13
13
  end
14
14
 
15
- def self.connect
15
+ def self.connect(options = {})
16
+ if !@options
17
+ if defined?(ActiveRecord::Base) && ActiveRecord::Base.connection
18
+ connection = ActiveRecord::Base.connection.instance_variable_get(:@connection)
19
+ @options = connection.query_options.clone
20
+ @options.merge!(options)
21
+ @prefix = @options[:prefix].to_s if @options[:prefix]
22
+ @connects = {} unless @connects
23
+ @connects[Thread.current.object_id] = connection
24
+ else
25
+ self.connect_by_config(options)
26
+ end
27
+ else
28
+ @options.merge!(options)
29
+ end
16
30
  if @options[:multi_threads]
17
31
  @connects[Thread.current.object_id] ||= Mysql2::Client.new(@options)
18
32
  else
@@ -20,9 +34,18 @@ module FC
20
34
  end
21
35
  end
22
36
 
37
+ def self.connect!(options = {})
38
+ self.connect(options)
39
+ end
40
+
41
+ # deprecated!
23
42
  def self.connect=(connect, options = {})
24
43
  self.connect_by_config connect.query_options.merge(options).merge(:as => :hash)
25
44
  end
45
+ class << self
46
+ extend Gem::Deprecate
47
+ deprecate :connect=, :connect!, 2016, 01
48
+ end
26
49
 
27
50
  def self.close
28
51
  if @options[:multi_threads]
@@ -37,7 +60,9 @@ module FC
37
60
 
38
61
  # connect.query with deadlock solution
39
62
  def self.query(sql)
40
- FC::DB.connect.query(sql)
63
+ r = FC::DB.connect.query(sql)
64
+ r = r.each(:as => :hash){} if r
65
+ r
41
66
  rescue Mysql2::Error => e
42
67
  if e.message.match('Deadlock found when trying to get lock')
43
68
  puts "Deadlock"
@@ -51,14 +76,14 @@ module FC
51
76
  else
52
77
  raise e
53
78
  end
54
- end
79
+ end
55
80
 
56
81
  def self.server_time
57
82
  FC::DB.query("SELECT UNIX_TIMESTAMP() as curr_time").first['curr_time'].to_i
58
83
  end
59
84
 
60
- def self.init_db
61
- FC::DB.connect.query(%{
85
+ def self.init_db(silent = false)
86
+ FC::DB.query(%{
62
87
  CREATE TABLE #{@prefix}items (
63
88
  id int NOT NULL AUTO_INCREMENT,
64
89
  name varchar(1024) NOT NULL DEFAULT '',
@@ -78,10 +103,10 @@ module FC
78
103
  proc_time = %{
79
104
  SET NEW.time = UNIX_TIMESTAMP();
80
105
  }
81
- FC::DB.connect.query("CREATE TRIGGER fc_items_before_insert BEFORE INSERT on #{@prefix}items FOR EACH ROW BEGIN #{proc_time} END")
82
- FC::DB.connect.query("CREATE TRIGGER fc_items_before_update BEFORE UPDATE on #{@prefix}items FOR EACH ROW BEGIN #{proc_time} END")
106
+ FC::DB.query("CREATE TRIGGER fc_items_before_insert BEFORE INSERT on #{@prefix}items FOR EACH ROW BEGIN #{proc_time} END")
107
+ FC::DB.query("CREATE TRIGGER fc_items_before_update BEFORE UPDATE on #{@prefix}items FOR EACH ROW BEGIN #{proc_time} END")
83
108
 
84
- FC::DB.connect.query(%{
109
+ FC::DB.query(%{
85
110
  CREATE TABLE #{@prefix}storages (
86
111
  id int NOT NULL AUTO_INCREMENT,
87
112
  name varchar(255) NOT NULL DEFAULT '',
@@ -108,10 +133,10 @@ module FC
108
133
  #{proc}
109
134
  END IF;
110
135
  }
111
- FC::DB.connect.query("CREATE TRIGGER fc_storages_after_delete AFTER DELETE on #{@prefix}storages FOR EACH ROW BEGIN #{proc} END")
112
- FC::DB.connect.query("CREATE TRIGGER fc_storages_after_update AFTER UPDATE on #{@prefix}storages FOR EACH ROW BEGIN #{proc_update} END")
136
+ FC::DB.query("CREATE TRIGGER fc_storages_after_delete AFTER DELETE on #{@prefix}storages FOR EACH ROW BEGIN #{proc} END")
137
+ FC::DB.query("CREATE TRIGGER fc_storages_after_update AFTER UPDATE on #{@prefix}storages FOR EACH ROW BEGIN #{proc_update} END")
113
138
 
114
- FC::DB.connect.query(%{
139
+ FC::DB.query(%{
115
140
  CREATE TABLE #{@prefix}policies (
116
141
  id int NOT NULL AUTO_INCREMENT,
117
142
  name varchar(255) NOT NULL DEFAULT '',
@@ -125,10 +150,10 @@ module FC
125
150
  SELECT GROUP_CONCAT(name ORDER BY FIND_IN_SET(name, NEW.create_storages)) INTO @create_storages_list FROM #{@prefix}storages WHERE FIND_IN_SET(name, NEW.create_storages);
126
151
  SET NEW.create_storages = @create_storages_list;
127
152
  }
128
- FC::DB.connect.query("CREATE TRIGGER fc_policies_before_insert BEFORE INSERT on #{@prefix}policies FOR EACH ROW BEGIN #{proc} END")
129
- FC::DB.connect.query("CREATE TRIGGER fc_policies_before_update BEFORE UPDATE on #{@prefix}policies FOR EACH ROW BEGIN #{proc} END")
153
+ FC::DB.query("CREATE TRIGGER fc_policies_before_insert BEFORE INSERT on #{@prefix}policies FOR EACH ROW BEGIN #{proc} END")
154
+ FC::DB.query("CREATE TRIGGER fc_policies_before_update BEFORE UPDATE on #{@prefix}policies FOR EACH ROW BEGIN #{proc} END")
130
155
 
131
- FC::DB.connect.query(%{
156
+ FC::DB.query(%{
132
157
  CREATE TABLE #{@prefix}items_storages (
133
158
  id int NOT NULL AUTO_INCREMENT,
134
159
  item_id int DEFAULT NULL,
@@ -165,13 +190,13 @@ module FC
165
190
  #{proc.gsub('NEW', 'OLD')}
166
191
  UPDATE #{@prefix}storages SET size=size-@item_size WHERE name = OLD.storage_name;
167
192
  }
168
- FC::DB.connect.query("CREATE TRIGGER fc_items_storages_before_insert BEFORE INSERT on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_time} END")
169
- FC::DB.connect.query("CREATE TRIGGER fc_items_storages_before_update BEFORE UPDATE on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_time} END")
170
- FC::DB.connect.query("CREATE TRIGGER fc_items_storages_after_update AFTER UPDATE on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc} END")
171
- FC::DB.connect.query("CREATE TRIGGER fc_items_storages_after_insert AFTER INSERT on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_add} END")
172
- FC::DB.connect.query("CREATE TRIGGER fc_items_storages_after_delete AFTER DELETE on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_del} END")
193
+ FC::DB.query("CREATE TRIGGER fc_items_storages_before_insert BEFORE INSERT on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_time} END")
194
+ FC::DB.query("CREATE TRIGGER fc_items_storages_before_update BEFORE UPDATE on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_time} END")
195
+ FC::DB.query("CREATE TRIGGER fc_items_storages_after_update AFTER UPDATE on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc} END")
196
+ FC::DB.query("CREATE TRIGGER fc_items_storages_after_insert AFTER INSERT on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_add} END")
197
+ FC::DB.query("CREATE TRIGGER fc_items_storages_after_delete AFTER DELETE on #{@prefix}items_storages FOR EACH ROW BEGIN #{proc_del} END")
173
198
 
174
- FC::DB.connect.query(%{
199
+ FC::DB.query(%{
175
200
  CREATE TABLE #{@prefix}errors (
176
201
  id int NOT NULL AUTO_INCREMENT,
177
202
  item_id int DEFAULT NULL,
@@ -182,9 +207,9 @@ module FC
182
207
  PRIMARY KEY (id), KEY (item_id), KEY (item_storage_id), KEY (host), KEY (time)
183
208
  ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci
184
209
  })
185
- FC::DB.connect.query("CREATE TRIGGER fc_errors_before_insert BEFORE INSERT on #{@prefix}errors FOR EACH ROW BEGIN #{proc_time} END")
210
+ FC::DB.query("CREATE TRIGGER fc_errors_before_insert BEFORE INSERT on #{@prefix}errors FOR EACH ROW BEGIN #{proc_time} END")
186
211
 
187
- FC::DB.connect.query(%{
212
+ FC::DB.query(%{
188
213
  CREATE TABLE #{@prefix}copy_rules (
189
214
  id int NOT NULL AUTO_INCREMENT,
190
215
  copy_storages text NOT NULL DEFAULT '',
@@ -193,7 +218,7 @@ module FC
193
218
  ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci
194
219
  })
195
220
 
196
- FC::DB.connect.query(%{
221
+ FC::DB.query(%{
197
222
  CREATE TABLE #{@prefix}vars (
198
223
  name varchar(255) DEFAULT NULL,
199
224
  val varchar(255) DEFAULT NULL,
@@ -202,19 +227,41 @@ module FC
202
227
  PRIMARY KEY (name)
203
228
  ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci
204
229
  })
205
- FC::DB.connect.query("CREATE TRIGGER fc_vars_before_insert BEFORE INSERT on #{@prefix}vars FOR EACH ROW BEGIN #{proc_time} END")
206
- FC::DB.connect.query("CREATE TRIGGER fc_vars_before_update BEFORE UPDATE on #{@prefix}vars FOR EACH ROW BEGIN #{proc_time} END")
207
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_cycle_time', val='30', descr='time between global daemon checks and storages available checks'")
208
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_global_wait_time', val='120', descr='time between runs global daemon if it does not running'")
209
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_copy_group_limit', val='1000', descr='select limit for copy tasks'")
210
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_delete_group_limit', val='10000', descr='select limit for delete tasks'")
211
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_copy_threads_limit', val='10', descr='copy tasks threads count limit for one storage'")
212
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_delete_threads_limit', val='10', descr='delete tasks threads count limit for one storage'")
213
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_copy_tasks_per_host_limit', val='10', descr='copy tasks count limit for one host'")
214
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_global_tasks_group_limit', val='1000', descr='select limit for create copy tasks'")
215
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_global_error_items_ttl', val='86400', descr='ttl for items with error status before delete'")
216
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_global_error_items_storages_ttl', val='86400', descr='ttl for items_storages with error status before delete'")
217
- FC::DB.connect.query("INSERT INTO #{@prefix}vars SET name='daemon_restart_period', val='86400', descr='time between fc-daemon self restart'")
230
+ FC::DB.query("CREATE TRIGGER fc_vars_before_insert BEFORE INSERT on #{@prefix}vars FOR EACH ROW BEGIN #{proc_time} END")
231
+ FC::DB.query("CREATE TRIGGER fc_vars_before_update BEFORE UPDATE on #{@prefix}vars FOR EACH ROW BEGIN #{proc_time} END")
232
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_cycle_time', val='30', descr='time between global daemon checks and storages available checks'")
233
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_global_wait_time', val='120', descr='time between runs global daemon if it does not running'")
234
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_copy_group_limit', val='1000', descr='select limit for copy tasks'")
235
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_delete_group_limit', val='10000', descr='select limit for delete tasks'")
236
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_copy_threads_limit', val='10', descr='copy tasks threads count limit for one storage'")
237
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_tasks_delete_threads_limit', val='10', descr='delete tasks threads count limit for one storage'")
238
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_copy_tasks_per_host_limit', val='10', descr='copy tasks count limit for one host'")
239
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_global_tasks_group_limit', val='1000', descr='select limit for create copy tasks'")
240
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_global_error_items_ttl', val='86400', descr='ttl for items with error status before delete'")
241
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_global_error_items_storages_ttl', val='86400', descr='ttl for items_storages with error status before delete'")
242
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_restart_period', val='86400', descr='time between fc-daemon self restart'")
243
+
244
+ FC::DB.migrations(silent)
245
+ end
246
+
247
+ def self.version
248
+ return 1
249
+ end
250
+
251
+ def self.migrations(silent = false)
252
+ next_version = FC::DB.query("SELECT val FROM #{FC::DB.prefix}vars WHERE name='db_version'").first['val'].to_i + 1 rescue 1
253
+ while self.respond_to?("migrate_#{next_version}")
254
+ puts "migrate to #{next_version}" unless silent
255
+ self.send("migrate_#{next_version}")
256
+ FC::DB.query("REPLACE #{FC::DB.prefix}vars SET val=#{next_version}, name='db_version'")
257
+ next_version += 1
258
+ end
259
+ end
260
+
261
+ def self.migrate_1
262
+ FC::DB.query("ALTER TABLE #{@prefix}storages ADD COLUMN url_weight int NOT NULL DEFAULT 0")
263
+ FC::DB.query("ALTER TABLE #{@prefix}storages ADD COLUMN write_weight int NOT NULL DEFAULT 0")
264
+ FC::DB.query("INSERT INTO #{@prefix}vars SET name='daemon_copy_speed_per_host_limit', val='', descr='copy tasks speed limit for hosts, change via fc-manage copy_speed'")
218
265
  end
219
266
  end
220
267
  end
data/lib/fc/item.rb CHANGED
@@ -67,19 +67,23 @@ module FC
67
67
  def make_item_storage(storage, status = 'new')
68
68
  # new storage_item?
69
69
  item_storage = FC::ItemStorage.where('item_id=? AND storage_name=?', id, storage.name).first
70
- item_storage.delete if item_storage
70
+ if item_storage
71
+ item_storage.delete
72
+ storage.size = storage.size.to_i - size.to_i
73
+ end
71
74
 
72
75
  item_storage = FC::ItemStorage.new({:item_id => id, :storage_name => storage.name, :status => status})
73
76
  item_storage.save
77
+ storage.size = storage.size.to_i + size.to_i
74
78
  item_storage
75
79
  end
76
80
 
77
- def copy_item_storage(src, storage, item_storage, remove_local = false)
81
+ def copy_item_storage(src, storage, item_storage, remove_local = false, speed_limit = nil)
78
82
  begin
79
83
  if src.instance_of?(FC::Storage)
80
- src.copy_to_local(name, "#{storage.path}#{name}")
84
+ src.copy_to_local(name, "#{storage.path}#{name}", speed_limit)
81
85
  else
82
- storage.copy_path(src, name, remove_local)
86
+ storage.copy_path(src, name, remove_local, speed_limit)
83
87
  end
84
88
  md5_on_storage = storage.md5_sum(name)
85
89
  rescue Exception => e
@@ -130,7 +134,24 @@ module FC
130
134
  def get_available_storages
131
135
  r = FC::DB.query("SELECT st.* FROM #{FC::Storage.table_name} as st, #{FC::ItemStorage.table_name} as ist WHERE
132
136
  ist.item_id = #{id} AND ist.status='ready' AND ist.storage_name = st.name")
133
- r.map{|data| FC::Storage.create_from_fiels(data)}.select {|storage| storage.up? }
137
+ r.map{|data| FC::Storage.create_from_fiels(data)}.select {|storage| storage.up? && storage.url_weight.to_i >= 0}
138
+ end
139
+
140
+ def urls
141
+ get_available_storages.map{|storage| File.join(storage.url, name)}
142
+ end
143
+
144
+ def url
145
+ available_storages = get_available_storages()
146
+ # sort by random(url_weight)
147
+ best_storage = available_storages.map{ |storage|
148
+ [storage, Kernel.rand(storage.url_weight.to_i * 100)]
149
+ }.sort{ |a, b|
150
+ a[1] <=> b[1]
151
+ }.map{|el| el[0]}.last
152
+ best_storage = available_storages.sample unless best_storage
153
+ raise "URL find - no avable storage for item #{id}" unless best_storage
154
+ File.join(best_storage.url, name)
134
155
  end
135
156
  end
136
157
  end
data/lib/fc/policy.rb CHANGED
@@ -28,36 +28,20 @@ module FC
28
28
  @create_storages_cache
29
29
  end
30
30
 
31
- # get available storages for create by size
32
- def get_proper_storages_for_create(size, exclude = [])
33
- get_create_storages.select do |storage|
34
- !exclude.include?(storage.name) && storage.up? && storage.size + size < storage.size_limit
35
- end
36
- end
37
-
38
31
  # get available storage for create by size and local item path
39
32
  def get_proper_storage_for_create(size, local_path = nil)
40
- storages = get_proper_storages_for_create(size)
41
- dev = File.stat(local_path).dev if local_path
42
-
43
- # sort by current_host and free size
44
- storages.sort do |a, b|
45
- if FC::Storage.curr_host == a.host && FC::Storage.curr_host == b.host
46
- if local_path && dev == File.stat(a.path).dev
47
- 1
48
- elsif local_path && dev == File.stat(b.path).dev
49
- -1
50
- else
51
- a.free_rate <=> b.free_rate
52
- end
53
- elsif FC::Storage.curr_host == a.host
54
- 1
55
- elsif FC::Storage.curr_host == b.host
56
- -1
57
- else
58
- a.free_rate <=> b.free_rate
33
+ FC::Storage.select_proper_storage_for_create(get_create_storages, size) do |storages|
34
+ local_storages = storages.select{|storage| FC::Storage.curr_host == storage.host}
35
+ # find same storage device as local_path device
36
+ if local_path && !local_storages.empty?
37
+ dev = File.stat(local_path).dev
38
+ dev_storage = local_storages.select{|storage| dev == File.stat(storage.path).dev}.first
39
+ local_storages = [dev_storage] if dev_storage
59
40
  end
60
- end.last
41
+ # if no local storages - use all storages
42
+ local_storages = storages if local_storages.empty?
43
+ local_storages
44
+ end
61
45
  end
62
46
  end
63
47
  end
data/lib/fc/storage.rb CHANGED
@@ -3,7 +3,7 @@ require 'shellwords'
3
3
 
4
4
  module FC
5
5
  class Storage < DbBase
6
- set_table :storages, 'name, host, path, url, size, size_limit, check_time, copy_storages'
6
+ set_table :storages, 'name, host, path, url, size, size_limit, check_time, copy_storages, url_weight, write_weight'
7
7
 
8
8
  class << self
9
9
  attr_accessor :check_time_limit, :storages_cache_time, :get_copy_storages_mutex
@@ -16,6 +16,19 @@ module FC
16
16
  @uname || @uname = `uname -n`.chomp
17
17
  end
18
18
 
19
+ def self.select_proper_storage_for_create(storages, size, exclude = [])
20
+ list = storages.select do |storage|
21
+ !exclude.include?(storage.name) && storage.up? && storage.size + size < storage.size_limit && storage.write_weight.to_i >= 0
22
+ end
23
+ list = yield(list) if block_given?
24
+ # sort by random(free_rate * write_weight)
25
+ list.map{ |storage|
26
+ [storage, Kernel.rand(storage.free_rate * (storage.write_weight.to_i == 0 ? 0.01 : storage.write_weight.to_i) * 1000000000)]
27
+ }.sort{ |a, b|
28
+ a[1] <=> b[1]
29
+ }.map{|el| el[0]}.last
30
+ end
31
+
19
32
  def initialize(params = {})
20
33
  path = (params['path'] || params[:path])
21
34
  if path && !path.to_s.empty?
@@ -35,7 +48,8 @@ module FC
35
48
  end
36
49
 
37
50
  def free_rate
38
- free.to_f / size_limit
51
+ rate = free.to_f / size_limit
52
+ rate < 0 ? 0.0 : rate
39
53
  end
40
54
 
41
55
  def get_copy_storages
@@ -63,7 +77,7 @@ module FC
63
77
  end
64
78
 
65
79
  # copy local_path to storage
66
- def copy_path(local_path, file_name, try_move = false)
80
+ def copy_path(local_path, file_name, try_move = false, speed_limit = nil)
67
81
  dst_path = "#{self.path}#{file_name}"
68
82
 
69
83
  cmd = "rm -rf #{dst_path.shellescape}; mkdir -p #{File.dirname(dst_path).shellescape}"
@@ -72,23 +86,25 @@ module FC
72
86
  raise r if $?.exitstatus != 0
73
87
 
74
88
  op = try_move && self.class.curr_host == host && File.stat(local_path).dev == File.stat(File.dirname(dst_path)).dev ? 'mv' : 'cp -r'
89
+ speed_limit = (speed_limit * 1000).to_i if speed_limit.to_i > 0
75
90
  cmd = self.class.curr_host == host ?
76
91
  "#{op} #{local_path.shellescape} #{dst_path.shellescape}" :
77
- "scp -r -q -oBatchMode=yes -oStrictHostKeyChecking=no #{local_path.shellescape} #{self.host}:\"#{dst_path.shellescape}\""
92
+ "scp -r -q -oBatchMode=yes -oStrictHostKeyChecking=no #{speed_limit.to_i > 0 ? '-l '+speed_limit.to_s : ''} #{local_path.shellescape} #{self.host}:\"#{dst_path.shellescape}\""
78
93
  r = `#{cmd} 2>&1`
79
94
  raise r if $?.exitstatus != 0
80
95
  end
81
96
 
82
97
  # copy object to local_path
83
- def copy_to_local(file_name, local_path)
98
+ def copy_to_local(file_name, local_path, speed_limit = nil)
84
99
  src_path = "#{self.path}#{file_name}"
85
100
 
86
101
  r = `rm -rf #{local_path.shellescape}; mkdir -p #{File.dirname(local_path).shellescape} 2>&1`
87
102
  raise r if $?.exitstatus != 0
88
103
 
104
+ speed_limit = (speed_limit * 1000).to_i if speed_limit.to_i > 0
89
105
  cmd = self.class.curr_host == host ?
90
106
  "cp -r #{src_path.shellescape} #{local_path.shellescape}" :
91
- "scp -r -q -oBatchMode=yes -oStrictHostKeyChecking=no #{self.host}:\"#{src_path.shellescape}\" #{local_path.shellescape}"
107
+ "scp -r -q -oBatchMode=yes -oStrictHostKeyChecking=no #{speed_limit.to_i > 0 ? '-l '+speed_limit.to_s : ''} #{self.host}:\"#{src_path.shellescape}\" #{local_path.shellescape}"
92
108
  r = `#{cmd} 2>&1`
93
109
  raise r if $?.exitstatus != 0
94
110
  end
@@ -102,6 +118,7 @@ module FC
102
118
  r = `#{cmd} 2>&1`
103
119
  raise r if $?.exitstatus != 0
104
120
 
121
+ speed_limit = (speed_limit * 1000).to_i if speed_limit.to_i > 0
105
122
  cmd = self.class.curr_host == host ?
106
123
  "ls -la #{dst_path.shellescape}" :
107
124
  "ssh -q -oBatchMode=yes -oStrictHostKeyChecking=no #{self.host} \"ls -la #{dst_path.shellescape}\""
@@ -134,9 +151,7 @@ module FC
134
151
 
135
152
  # get available storage for copy by size
136
153
  def get_proper_storage_for_copy(size, exclude = [])
137
- get_copy_storages.select do |storage|
138
- !exclude.include?(storage.name) && storage.up? && storage.size + size < storage.size_limit
139
- end.first
154
+ FC::Storage.select_proper_storage_for_create(get_copy_storages, size, exclude)
140
155
  end
141
156
  end
142
157
  end
data/lib/fc/var.rb CHANGED
@@ -35,5 +35,28 @@ module FC
35
35
  end
36
36
  @all_vars
37
37
  end
38
+
39
# Read the per-host copy speed limits from the
# 'daemon_copy_speed_per_host_limit' var (format: "host::limit;;host::limit").
# Returns a hash of host name => positive Float limit (Mbit/s) or nil for
# unlimited; the pseudo-host 'all' is always present.
def self.get_speed_limits
  limits = { 'all' => nil }
  raw = self.get('daemon_copy_speed_per_host_limit', '').to_s
  limits.merge! Hash[raw.split(';;').map { |entry| entry.split('::') }]
  # Normalize: keep only positive numeric limits, everything else => nil.
  limits.each_key do |host|
    value = limits[host].to_f
    limits[host] = value > 0 ? value : nil
  end
end
47
+
48
# Store a copy speed limit (Mbit/s) for `host` ('all' = default for every
# host) into the 'daemon_copy_speed_per_host_limit' var, preserving the
# limits already configured for other hosts.
def self.set_speed_limit(host, val)
  limits = self.get_speed_limits
  limits[host.to_s] = val.to_f
  serialized = limits.each_with_object([]) do |(name, value), acc|
    acc << "#{name}::#{value}"
  end.join(';;')
  self.set('daemon_copy_speed_per_host_limit', serialized)
end
54
+
55
# Copy speed limit for the current host: the host-specific limit when one
# is configured, otherwise the 'all' default. nil means unlimited.
def self.get_current_speed_limit
  limits = self.get_speed_limits
  limits[FC::Storage.curr_host] || limits['all']
end
38
61
  end
39
62
  end
data/lib/fc/version.rb CHANGED
@@ -1,3 +1,3 @@
1
1
  module FC
2
- VERSION = "0.4.5"
2
+ VERSION = "0.4.6"
3
3
  end
data/lib/manage.rb CHANGED
@@ -5,3 +5,4 @@ require "manage/show"
5
5
  require "manage/copy_rules"
6
6
  require "manage/var"
7
7
  require "manage/item"
8
+ require "manage/copy_speed"
@@ -0,0 +1,74 @@
1
+ # encoding: utf-8
2
+ require 'shellwords'
3
+
4
# Print every configured copy speed limit, one host per line.
def copy_speed_list
  FC::Var.get_speed_limits.each do |name, val|
    suffix = val ? " - limit: #{val}Mbit" : " - unlimit"
    puts "#{name}#{suffix}"
  end
end
9
+
10
# Interactive `fc-manage copy_speed add`: prompt for a host and a speed
# limit (Mbit/s), confirm, then persist via FC::Var.set_speed_limit.
def copy_speed_add
  # 'all' is a pseudo-host: the default limit applied to every host.
  hosts = ['all'] + all_hosts
  puts "Add copy speed limit"
  # Re-prompt until a known host (or 'all') is entered; empty input
  # selects the current host.
  begin
    host = stdin_read_val("Host (default #{FC::Storage.curr_host})", true).strip
    host = FC::Storage.curr_host if host.empty?
    puts "Host can be one of: #{hosts.join(', ')}" unless hosts.index(host)
  end until hosts.index(host)
  limit = stdin_read_val("Speed limit, Mbit/s (default 0 - unlimit)", true).to_f
  puts %Q{\nCopy speed limit
  Host: #{host}
  Speed limit: #{limit > 0 ? limit : 'unlimit'}}
  s = Readline.readline("Continue? (y/n) ", false).strip.downcase
  puts ""
  if s == "y" || s == "yes"
    begin
      FC::Var.set_speed_limit(host, limit)
    rescue Exception => e
      puts "Error: #{e.message}"
      exit
    end
    puts "ok"
  else
    puts "Canceled."
  end
end
36
+
37
# Interactive `fc-manage copy_speed change <host>`: update the stored copy
# speed limit for the host given on the command line.
#
# Fixes vs previous revision: removed leftover debug output
# (`puts limit.to_f`, `puts limit == ''`) and guarded the nil case —
# `limit > 0` raised NoMethodError when input was empty while the current
# limit was unlimited (curr_limit == nil).
def copy_speed_change
  if host = find_host
    puts "Change copy speed limit for host #{host}"
    curr_limit = FC::Var.get_speed_limits[host]
    limit = stdin_read_val("Speed limit, Mbit/s (now #{curr_limit ? curr_limit.to_s+', 0 to unlimit' : 'unlimit'})", true)
    # Empty input keeps the current limit (which may be nil = unlimited).
    limit = limit == '' ? curr_limit : limit.to_f
    puts %Q{\nCopy speed limit
  Host: #{host}
  Speed limit: #{limit && limit > 0 ? limit : 'unlimit'}}
    s = Readline.readline("Continue? (y/n) ", false).strip.downcase
    puts ""
    if s == "y" || s == "yes"
      begin
        # nil.to_f == 0.0 inside set_speed_limit, i.e. stored as unlimited.
        FC::Var.set_speed_limit(host, limit)
      rescue Exception => e
        puts "Error: #{e.message}"
        exit
      end
      puts "ok"
    else
      puts "Canceled."
    end
  end
end
63
+
64
+ private
65
+
66
# All distinct hosts that have at least one registered storage.
# NOTE(review): '1' looks like an always-true WHERE condition passed to
# FC::Storage.where — confirm against its implementation.
def all_hosts
  FC::Storage.where('1').map(&:host).uniq
end
69
+
70
# Resolve the host given as the third CLI argument (ARGV[2]).
# Returns the host string when it is 'all' or a known storage host;
# otherwise prints an error and returns nil.
#
# Fix: the previous revision printed "not found" but still returned the
# invalid host (always truthy, even ""), so callers using
# `if host = find_host` as a guard proceeded anyway.
def find_host
  host = ARGV[2].to_s.strip
  return host if (['all'] + all_hosts).index(host)
  puts "Storage with host #{host} not found."
  nil
end
@@ -24,6 +24,8 @@ def storages_show
24
24
  Host: #{storage.host}
25
25
  Path: #{storage.path}
26
26
  Url: #{storage.url}
27
+ Url weight: #{storage.url_weight}
28
+ Write weight #{storage.write_weight}
27
29
  Size: #{size_to_human storage.size} (#{(storage.size_rate*100).to_i}%)
28
30
  Free: #{size_to_human storage.free} (#{(storage.free_rate*100).to_i}%)
29
31
  Size limit: #{size_to_human storage.size_limit}
@@ -40,6 +42,8 @@ def storages_add
40
42
  name = stdin_read_val('Name')
41
43
  path = stdin_read_val('Path')
42
44
  url = stdin_read_val('Url')
45
+ url_weight = stdin_read_val('URL weight', true).to_i
46
+ write_weight = stdin_read_val('Write weight', true).to_i
43
47
  size_limit = human_to_size stdin_read_val('Size limit') {|val| "Size limit not is valid size." unless human_to_size(val)}
44
48
  copy_storages = stdin_read_val('Copy storages', true)
45
49
  storages = FC::Storage.where.map(&:name)
@@ -47,7 +51,7 @@ def storages_add
47
51
  begin
48
52
  path = path +'/' unless path[-1] == '/'
49
53
  path = '/' + path unless path[0] == '/'
50
- storage = FC::Storage.new(:name => name, :host => host, :path => path, :url => url, :size_limit => size_limit, :copy_storages => copy_storages)
54
+ storage = FC::Storage.new(:name => name, :host => host, :path => path, :url => url, :size_limit => size_limit, :copy_storages => copy_storages, :url_weight => url_weight, :write_weight => write_weight)
51
55
  print "Calc current size.. "
52
56
  size = storage.file_size('', true)
53
57
  puts "ok"
@@ -61,6 +65,8 @@ def storages_add
61
65
  Host: #{host}
62
66
  Path: #{path}
63
67
  Url: #{url}
68
+ URL weight: #{url_weight}
69
+ Write weight: #{write_weight}
64
70
  Size: #{size_to_human size} (#{(size.to_f*100 / size_limit).to_i}%)
65
71
  Free: #{size_to_human free} (#{(free.to_f*100 / size_limit).to_i}%)
66
72
  Size limit: #{size_to_human size_limit}
@@ -117,6 +123,8 @@ def storages_change
117
123
  host = stdin_read_val("Host (now #{storage.host})", true)
118
124
  path = stdin_read_val("Path (now #{storage.path})", true)
119
125
  url = stdin_read_val("Url (now #{storage.url})", true)
126
+ url_weight = stdin_read_val("URL weight (now #{storage.url_weight})", true)
127
+ write_weight = stdin_read_val("Write weight (now #{storage.write_weight})", true)
120
128
  size_limit = stdin_read_val("Size (now #{size_to_human(storage.size_limit)})", true) {|val| "Size limit not is valid size." if !val.empty? && !human_to_size(val)}
121
129
  copy_storages = stdin_read_val("Copy storages (now #{storage.copy_storages})", true)
122
130
 
@@ -130,6 +138,8 @@ def storages_change
130
138
  puts "ok"
131
139
  end
132
140
  storage.url = url unless url.empty?
141
+ storage.url_weight = url_weight.to_i unless url_weight.empty?
142
+ storage.write_weight = write_weight.to_i unless write_weight.empty?
133
143
  storage.size_limit = human_to_size(size_limit) unless size_limit.empty?
134
144
  storages = FC::Storage.where.map(&:name)
135
145
  storage.copy_storages = copy_storages.split(',').select{|s| storages.member?(s.strip)}.join(',').strip unless copy_storages.empty?
@@ -139,6 +149,8 @@ def storages_change
139
149
  Host: #{storage.host}
140
150
  Path: #{storage.path}
141
151
  Url: #{storage.url}
152
+ URL weight: #{storage.url_weight}
153
+ Write weight: #{storage.write_weight}
142
154
  Size: #{size_to_human storage.size} (#{(storage.size_rate*100).to_i}%)
143
155
  Free: #{size_to_human storage.free} (#{(storage.free_rate*100).to_i}%)
144
156
  Size limit: #{size_to_human storage.size_limit}
data/lib/utils.rb CHANGED
@@ -1,12 +1,17 @@
1
1
  def option_parser_init(descriptions, text)
2
- options = {}
2
+ options = {
3
+ :__keys => {}
4
+ }
3
5
  optparse = OptionParser.new do |opts|
4
6
  opts.banner = text
5
7
  opts.separator "Options:"
6
8
 
7
9
  descriptions.each_entry do |key, desc|
8
10
  options[key] = desc[:default]
9
- opts.on("-#{desc[:short]}", "--#{desc[:full]}#{desc[:no_val] ? '' : '='+desc[:full].upcase}", desc[:text]) {|s| options[key] = s }
11
+ opts.on("-#{desc[:short]}", "--#{desc[:full]}#{desc[:no_val] ? '' : '='+desc[:full].upcase}", desc[:text]) do |s|
12
+ options[:__keys][key] = s
13
+ options[key] = s
14
+ end
10
15
  end
11
16
  opts.on_tail("-?", "--help", "Show this message") do
12
17
  puts opts
@@ -25,9 +30,11 @@ end
25
30
# Format a byte count as a short human-readable string, e.g.
# 1536 -> "1.5KB", -2048 -> "-2KB", 0 -> "0". Trailing zeros (and a bare
# decimal point) are stripped before the unit suffix is appended.
def size_to_human(size)
  return "0" if size == 0
  units = %w{B KB MB GB TB}
  minus = size < 0
  size = -1 * size if minus
  e = (Math.log(size)/Math.log(1024)).floor
  # Clamp to the largest known unit: sizes >= 1024**5 used to yield a nil
  # suffix (units[e] out of range) and crash String#sub with TypeError.
  e = units.size - 1 if e >= units.size
  s = "%.2f" % (size.to_f / 1024**e)
  (minus ? '-' : '')+s.sub(/\.?0*$/, units[e])
end
32
39
 
33
40
  def human_to_size(size)
@@ -75,6 +75,15 @@ class CopyRuleTest < Test::Unit::TestCase
75
75
  @@storages[1].update_check_time
76
76
  assert_equal 'rec2-sda', FC::CopyRule.get_proper_storage_for_copy(:name => 'test1/test', :size => 5).name, 'second storages up, small file'
77
77
  assert_equal 'rec1-sda', FC::CopyRule.get_proper_storage_for_copy(:name => 'test1/test', :size => 20).name, 'second storages up, big file'
78
- assert_nil FC::CopyRule.get_proper_storage_for_copy(:name => 'test1/test', :size => 1000), 'second storages up, big file'
78
+ assert_nil FC::CopyRule.get_proper_storage_for_copy(:name => 'test1/test', :size => 1000), 'second storages up, very big file'
79
+ @@storages[1].write_weight = 100
80
+ @@storages[1].save
81
+ @@storages[2].update_check_time
82
+ assert_equal 'rec2-sda', FC::CopyRule.get_proper_storage_for_copy(:name => 'test1/test', :size => 1).name, 'all storages up, choose by weight'
83
+ @@storages[1].write_weight = -1
84
+ @@storages[1].save
85
+ @@storages[2].write_weight = -1
86
+ @@storages[2].save
87
+ assert_equal 'rec1-sda', FC::CopyRule.get_proper_storage_for_copy(:name => 'test1/test', :size => 20).name, 'all storages up, 2 disabled by weight, work second rule'
79
88
  end
80
89
  end
data/test/helper.rb CHANGED
@@ -13,5 +13,5 @@ FC::DB.connect_by_config(:username => TEST_USER, :password => TEST_PASSWORD)
13
13
  FC::DB.query("DROP DATABASE IF EXISTS #{TEST_DATABASE}")
14
14
  FC::DB.query("CREATE DATABASE #{TEST_DATABASE}")
15
15
  FC::DB.query("USE #{TEST_DATABASE}")
16
- FC::DB.init_db
16
+ FC::DB.init_db(true)
17
17
  FC::DB.options[:database] = TEST_DATABASE
data/test/item_test.rb CHANGED
@@ -3,18 +3,19 @@ require 'helper'
3
3
  class ItemTest < Test::Unit::TestCase
4
4
  class << self
5
5
  def startup
6
- @@item = FC::Item.new(:name => 'test item', :policy_id => 1, :size => 150)
6
+ @@item = FC::Item.new(:name => '/test item', :policy_id => 1, :size => 150)
7
7
  @@item.save
8
8
 
9
9
  @@storages = []
10
- @@storages << FC::Storage.new(:name => 'rec1-sda', :host => 'rec1')
11
- @@storages << FC::Storage.new(:name => 'rec2-sda', :host => 'rec2')
10
+ @@storages << FC::Storage.new(:name => 'rec1-sda', :host => 'rec1', :url => 'http://rec1/sda/')
11
+ @@storages << FC::Storage.new(:name => 'rec2-sda', :host => 'rec2', :url => 'http://rec2/sda/')
12
12
  @@item_storages = @@storages.map do |storage|
13
13
  storage.save
14
14
  item_storage = FC::ItemStorage.new(:item_id => @@item.id, :storage_name => storage.name, :status => 'ready')
15
15
  item_storage.save
16
16
  item_storage
17
17
  end
18
+ @@storages << FC::Storage.new(:name => 'rec3-sda', :host => 'rec3', :url => 'http://rec3/sda/')
18
19
  end
19
20
  def shutdown
20
21
  FC::DB.query("DELETE FROM items_storages")
@@ -29,7 +30,7 @@ class ItemTest < Test::Unit::TestCase
29
30
  assert_raise(ArgumentError) { FC::Item.create_from_local '/bla/bla' }
30
31
  assert_raise(ArgumentError) { FC::Item.create_from_local '/bla/bla', 'test' }
31
32
  assert_raise(RuntimeError) { FC::Item.create_from_local '/bla/bla', 'test', {}}
32
- assert_raise(RuntimeError) { FC::Item.create_from_local '/bla/bla/bla', 'test', policy}
33
+ assert_raise() { FC::Item.create_from_local '/bla/bla/bla', 'test', policy}
33
34
  end
34
35
 
35
36
  should "mark_deleted" do
@@ -42,13 +43,42 @@ class ItemTest < Test::Unit::TestCase
42
43
  end
43
44
  end
44
45
 
46
+ should "make_item_storage" do
47
+ storage_size = @@storages[2].size.to_i
48
+ assert_kind_of FC::ItemStorage, @@item.make_item_storage(@@storages[2])
49
+ assert_equal storage_size+@@item.size, @@storages[2].size
50
+ end
51
+
45
52
  should "get_item_storages" do
46
53
  assert_same_elements @@item_storages.map(&:id), @@item.get_item_storages.map(&:id)
47
54
  end
48
55
 
49
56
  should "item get_available_storages" do
57
+ @@storages.each{|s| s.check_time = 0; s.save}
50
58
  @@storages[0].update_check_time
51
59
  assert_equal 1, @@item.get_available_storages.count
52
60
  assert_equal @@storages[0].name, @@item.get_available_storages.first.name
53
61
  end
62
+
63
+ should "item urls" do
64
+ @@storages.each{|s| s.check_time = 0; s.save}
65
+ assert_equal 0, @@item.urls.count
66
+ @@storages.each(&:update_check_time)
67
+ assert_same_elements ["http://rec1/sda/test item", "http://rec2/sda/test item"], @@item.urls
68
+ end
69
+
70
+ should "item url by url_weight" do
71
+ @@storages.each(&:update_check_time)
72
+ @@storages.each{|s| s.url_weight = -1; s.save}
73
+ assert_raise(RuntimeError) { @@item.url }
74
+
75
+ @@storages[0].url_weight = 1
76
+ @@storages[0].save
77
+ assert_equal "http://rec1/sda/test item", @@item.url
78
+
79
+ @@storages[1].url_weight = 2
80
+ @@storages[1].save
81
+ Kernel.stubs(:rand).returns(1)
82
+ assert_equal "http://rec2/sda/test item", @@item.url
83
+ end
54
84
  end
data/test/policy_test.rb CHANGED
@@ -64,6 +64,10 @@ class PolicyTest < Test::Unit::TestCase
64
64
  assert_equal 'rec2-sdb', @@policy.get_proper_storage_for_create(9, 'test-rec2-sdb').name, 'current host, dev match'
65
65
  @@storages[3].check_time = 0;
66
66
  @@storages[3].save
67
+ original_rand = Kernel.method(:rand)
68
+ def Kernel.rand(a)
69
+ a
70
+ end
67
71
  assert_equal 'rec2-sdc', @@policy.get_proper_storage_for_create(9, 'test-rec2-sdb').name, 'current host, most free storage'
68
72
  FC::Storage.stubs(:curr_host).returns('rec3')
69
73
  @@storages[5].check_time = 0;
@@ -72,6 +76,14 @@ class PolicyTest < Test::Unit::TestCase
72
76
  FC::Storage.stubs(:curr_host).returns('rec5')
73
77
  assert_equal 'rec1-sda', @@policy.get_proper_storage_for_create(9, 'test-rec2-sdb').name, 'not current host, most free storage'
74
78
  assert_equal 'rec2-sdc', @@policy.get_proper_storage_for_create(10, 'test-rec2-sdb').name, 'not current host, big file, most free storage with free space'
79
+ @@storages[4].write_weight = -1;
80
+ @@storages[4].save
81
+ assert_equal 'rec3-sdb', @@policy.get_proper_storage_for_create(10, 'test-rec2-sdb').name, 'not current host, big file, most weight storage with free space'
82
+ @@storages[2].write_weight = 10;
83
+ @@storages[2].save
84
+ assert_equal 'rec2-sda', @@policy.get_proper_storage_for_create(10, 'test-rec2-sdb').name, 'not current host, big file, most weight storage with free space'
75
85
  File.unstub(:stat)
86
+ Kernel.send(:remove_method, :rand)
87
+ Kernel.define_singleton_method(:rand, original_rand)
76
88
  end
77
89
  end
data/test/storage_test.rb CHANGED
@@ -65,6 +65,8 @@ class StorageTest < Test::Unit::TestCase
65
65
  assert_equal 'rec1-sda', @@storages[2].get_proper_storage_for_copy(5).name, 'first storages up'
66
66
  assert_nil @@storages[2].get_proper_storage_for_copy(20), 'first storage full'
67
67
  @@storages[1].update_check_time
68
+ @@storages[0].write_weight = 100
69
+ @@storages[0].save
68
70
  assert_equal 'rec1-sda', @@storages[2].get_proper_storage_for_copy(5).name, 'second storages up, small file'
69
71
  assert_equal 'rec2-sda', @@storages[2].get_proper_storage_for_copy(20).name, 'second storages up, big file'
70
72
  assert_nil @@storages[2].get_proper_storage_for_copy(1000), 'second storages up, huge file'
metadata CHANGED
@@ -1,110 +1,97 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: filecluster
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.4.5
5
- prerelease:
4
+ version: 0.4.6
6
5
  platform: ruby
7
6
  authors:
8
7
  - sh
9
8
  autorequire:
10
9
  bindir: bin
11
10
  cert_chain: []
12
- date: 2015-02-11 00:00:00.000000000 Z
11
+ date: 2015-04-22 00:00:00.000000000 Z
13
12
  dependencies:
14
13
  - !ruby/object:Gem::Dependency
15
14
  name: mysql2
16
15
  requirement: !ruby/object:Gem::Requirement
17
- none: false
18
16
  requirements:
19
- - - ! '>='
17
+ - - ">="
20
18
  - !ruby/object:Gem::Version
21
19
  version: '0'
22
20
  type: :runtime
23
21
  prerelease: false
24
22
  version_requirements: !ruby/object:Gem::Requirement
25
- none: false
26
23
  requirements:
27
- - - ! '>='
24
+ - - ">="
28
25
  - !ruby/object:Gem::Version
29
26
  version: '0'
30
27
  - !ruby/object:Gem::Dependency
31
28
  name: bundler
32
29
  requirement: !ruby/object:Gem::Requirement
33
- none: false
34
30
  requirements:
35
- - - ! '>='
31
+ - - ">="
36
32
  - !ruby/object:Gem::Version
37
33
  version: '0'
38
34
  type: :development
39
35
  prerelease: false
40
36
  version_requirements: !ruby/object:Gem::Requirement
41
- none: false
42
37
  requirements:
43
- - - ! '>='
38
+ - - ">="
44
39
  - !ruby/object:Gem::Version
45
40
  version: '0'
46
41
  - !ruby/object:Gem::Dependency
47
42
  name: test-unit
48
43
  requirement: !ruby/object:Gem::Requirement
49
- none: false
50
44
  requirements:
51
- - - ! '>='
45
+ - - ">="
52
46
  - !ruby/object:Gem::Version
53
47
  version: '0'
54
48
  type: :development
55
49
  prerelease: false
56
50
  version_requirements: !ruby/object:Gem::Requirement
57
- none: false
58
51
  requirements:
59
- - - ! '>='
52
+ - - ">="
60
53
  - !ruby/object:Gem::Version
61
54
  version: '0'
62
55
  - !ruby/object:Gem::Dependency
63
56
  name: rake
64
57
  requirement: !ruby/object:Gem::Requirement
65
- none: false
66
58
  requirements:
67
- - - ! '>='
59
+ - - ">="
68
60
  - !ruby/object:Gem::Version
69
61
  version: '0'
70
62
  type: :development
71
63
  prerelease: false
72
64
  version_requirements: !ruby/object:Gem::Requirement
73
- none: false
74
65
  requirements:
75
- - - ! '>='
66
+ - - ">="
76
67
  - !ruby/object:Gem::Version
77
68
  version: '0'
78
69
  - !ruby/object:Gem::Dependency
79
70
  name: shoulda-context
80
71
  requirement: !ruby/object:Gem::Requirement
81
- none: false
82
72
  requirements:
83
- - - ! '>='
73
+ - - ">="
84
74
  - !ruby/object:Gem::Version
85
75
  version: '0'
86
76
  type: :development
87
77
  prerelease: false
88
78
  version_requirements: !ruby/object:Gem::Requirement
89
- none: false
90
79
  requirements:
91
- - - ! '>='
80
+ - - ">="
92
81
  - !ruby/object:Gem::Version
93
82
  version: '0'
94
83
  - !ruby/object:Gem::Dependency
95
84
  name: mocha
96
85
  requirement: !ruby/object:Gem::Requirement
97
- none: false
98
86
  requirements:
99
- - - ! '>='
87
+ - - ">="
100
88
  - !ruby/object:Gem::Version
101
89
  version: 0.13.3
102
90
  type: :development
103
91
  prerelease: false
104
92
  version_requirements: !ruby/object:Gem::Requirement
105
- none: false
106
93
  requirements:
107
- - - ! '>='
94
+ - - ">="
108
95
  - !ruby/object:Gem::Version
109
96
  version: 0.13.3
110
97
  description: Distributed storage
@@ -117,7 +104,7 @@ executables:
117
104
  extensions: []
118
105
  extra_rdoc_files: []
119
106
  files:
120
- - .gitignore
107
+ - ".gitignore"
121
108
  - Gemfile
122
109
  - LICENSE
123
110
  - README.md
@@ -148,6 +135,7 @@ files:
148
135
  - lib/filecluster.rb
149
136
  - lib/manage.rb
150
137
  - lib/manage/copy_rules.rb
138
+ - lib/manage/copy_speed.rb
151
139
  - lib/manage/item.rb
152
140
  - lib/manage/policies.rb
153
141
  - lib/manage/show.rb
@@ -169,33 +157,26 @@ files:
169
157
  - test/version_test.rb
170
158
  homepage: ''
171
159
  licenses: []
160
+ metadata: {}
172
161
  post_install_message:
173
162
  rdoc_options: []
174
163
  require_paths:
175
164
  - lib
176
165
  required_ruby_version: !ruby/object:Gem::Requirement
177
- none: false
178
166
  requirements:
179
- - - ! '>='
167
+ - - ">="
180
168
  - !ruby/object:Gem::Version
181
169
  version: '0'
182
- segments:
183
- - 0
184
- hash: -4597744036765809736
185
170
  required_rubygems_version: !ruby/object:Gem::Requirement
186
- none: false
187
171
  requirements:
188
- - - ! '>='
172
+ - - ">="
189
173
  - !ruby/object:Gem::Version
190
174
  version: '0'
191
- segments:
192
- - 0
193
- hash: -4597744036765809736
194
175
  requirements: []
195
176
  rubyforge_project:
196
- rubygems_version: 1.8.24
177
+ rubygems_version: 2.4.3
197
178
  signing_key:
198
- specification_version: 3
179
+ specification_version: 4
199
180
  summary: Distributed storage
200
181
  test_files:
201
182
  - test/base_test.rb