filecluster 0.5.24 → 0.5.28
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/daemon/check_thread.rb +8 -2
- data/lib/daemon/copy_task_thread.rb +1 -0
- data/lib/daemon/delete_task_thread.rb +1 -0
- data/lib/fc/item.rb +15 -3
- data/lib/fc/storage.rb +8 -3
- data/lib/fc/version.rb +1 -1
- data/lib/manage/storages.rb +22 -17
- metadata +3 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9de6ac005874dd4c6bcb167cc9c71b6417f384c4116668ebcda03f6741f3a2ce
+  data.tar.gz: ec2aff79aff42a1b27d68168d2260e8720b676542bf2563ffcbc03a6880c7d37
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bf0c2a5c574e188f9af197b0acf9d328cf49eee1b89708b52f96b63cd3b60c974c2d9f832790619648cf3e67ec8904072965623b2afad01149668e9f13b9d4da
+  data.tar.gz: 96e31037d021574d7e6c033dea7a7c5fe685c8166b72ed257d41d8b115c23d0fe3d8892a94ed43bd8cdca31ce6fc1d222242aff297c55c762cfe56c51f9bd649
data/lib/daemon/check_thread.rb
CHANGED
@@ -1,5 +1,9 @@
 class CheckThread < BaseThread
   require "net/http"
+
+  @@storage_http_check = {}
+  HTTP_RETRIES = 3
+
   def go(storage_name)
     $log.debug("CheckThread: Run stotage check for #{storage_name}")
     storage = $storages.detect{|s| s.name == storage_name}
@@ -10,7 +14,7 @@ class CheckThread < BaseThread
     else
       error "Storage #{storage.name} with path #{storage.path} not writable"
     end
-    check_http(storage)
+    check_http(storage) if storage.http_check_enabled?
    $log.debug("CheckThread: Finish stotage check for #{storage_name}")
   end
 
@@ -22,8 +26,10 @@ class CheckThread < BaseThread
     resp = request.start { |http| http.get(uri.path) } rescue nil
     if resp && resp.code.to_i == 200 && resp.body.to_s.chomp == 'OK'
       storage.update_http_check_time
+      @@storage_http_check[storage.name] = 0
     else
-
+      @@storage_http_check[storage.name] = @@storage_http_check[storage.name].to_i + 1
+      error("Storage #{storage.name} with url #{storage.url} not readable") if @@storage_http_check[storage.name] > HTTP_RETRIES
     end
   rescue => err
     $log.error("CheckThread: check_http error: #{err}")
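What this change does: instead of reporting a storage as unreadable after a single failed HTTP probe, CheckThread now keeps a per-storage counter of consecutive failures in the @@storage_http_check class variable and only calls error once the counter exceeds HTTP_RETRIES; a successful probe resets the counter, and the probe is skipped entirely for storages whose HTTP check is disabled. A minimal standalone sketch of the same counter pattern (class and method names below are illustrative, not part of the gem):

    # Minimal sketch of the consecutive-failure counter (illustrative names).
    class HttpCheckCounter
      HTTP_RETRIES = 3

      def initialize
        @failures = Hash.new(0) # storage name => consecutive failed probes
      end

      # Returns true only when the failure should be reported,
      # i.e. after more than HTTP_RETRIES consecutive failed probes.
      def record(storage_name, ok)
        if ok
          @failures[storage_name] = 0
          false
        else
          @failures[storage_name] += 1
          @failures[storage_name] > HTTP_RETRIES
        end
      end
    end

    counter = HttpCheckCounter.new
    3.times { counter.record('storage1', false) } # still below the threshold, no error
    counter.record('storage1', false)             # => true, fourth consecutive failure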
data/lib/fc/item.rb
CHANGED
@@ -13,6 +13,7 @@ module FC
     # :additional_fields - hash of additional FC:Item fields
     # :no_md5 - don't use md5
     # :speed_limit - limit copy speed
+    # :force_local_storage_name
     # If item_name is part of local_path it processed as inplace - local_path is valid path to the item for policy
     def self.create_from_local(local_path, item_name, policy, options={}, &block)
       raise 'Path not exists' unless File.exists?(local_path)
@@ -32,14 +33,25 @@ module FC
       item_params.delete(:not_local)
       item_params.delete(:no_md5)
       item_params.delete(:speed_limit)
+      item_params.delete(:force_local_storage_name)
       raise 'Name is empty' if item_params[:name].empty?
       raise 'Zero size path' if item_params[:size] == 0
 
       if local_path.include?(item_name) && !options[:not_local]
-
-
+        if options[:force_local_storage_name]
+          storage = policy.get_create_storages.detect do |s|
+            s.name == options[:force_local_storage_name]
+          end
+          FC::Error.raise "force_local_storage #{storage.name} is not valid path for policy ##{policy.id}" unless storage
+          if local_path.index(storage.path) != 0 || local_path.sub(storage.path, '').sub(/\/$/, '').sub(/^\//, '') != item_params[:name]
+            FC::Error.raise "force_local_storage #{storage.name} is not valid path for local path ##{local_path}"
+          end
+        else
+          storage = policy.get_create_storages.detect do |s|
+            s.host == FC::Storage.curr_host && local_path.index(s.path) == 0 && local_path.sub(s.path, '').sub(/\/$/, '').sub(/^\//, '') == item_params[:name]
+          end
+          FC::Error.raise "local_path #{local_path} is not valid path for policy ##{policy.id}" unless storage
         end
-        FC::Error.raise "local_path #{local_path} is not valid path for policy ##{policy.id}" unless storage
       end
 
       # new item?
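The new :force_local_storage_name option lets a caller pin an in-place item to a specific storage instead of letting create_from_local pick whichever of the policy's create storages matches the current host and path. If the named storage is not among the policy's create storages, or local_path does not resolve to item_name under that storage's path, FC::Error.raise is called. A hypothetical call (the policy object, storage name and paths are invented for illustration):

    # Hypothetical call; `policy` is assumed to be an FC::Policy whose create
    # storages include one named 'storage1' rooted at /mnt/storage1/.
    item = FC::Item.create_from_local(
      '/mnt/storage1/video/12345',            # local_path, already inside the storage
      'video/12345',                          # item_name
      policy,
      :force_local_storage_name => 'storage1' # must name a create storage of the policy
    )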
data/lib/fc/storage.rb
CHANGED
@@ -95,6 +95,7 @@ module FC
     end
 
     def update_http_check_time
+      return unless http_check_enabled?
       self.http_check_time = Time.new.to_i
       save
     end
@@ -104,9 +105,13 @@ module FC
     end
 
     def http_check_time_delay
-      Time.new.to_i - http_check_time.to_i
+      http_check_enabled? ? Time.new.to_i - http_check_time.to_i : 0
     end
 
+    def http_check_enabled?
+      http_check_time.to_i >= 0
+    end
+
     def up?
       check_time_delay < self.class.check_time_limit
     end
@@ -137,7 +142,7 @@ module FC
         raise r if $?.exitstatus != 0
       else
         local_path += '/' if File.stat(local_path).directory?
-        cmd = "ionice -c 2 -n 7 rsync -e \"ssh -o StrictHostKeyChecking=no\" -a
+        cmd = "ionice -c 2 -n 7 rsync -e \"ssh -o StrictHostKeyChecking=no\" -a #{FC::Storage.speed_limit_to_rsync_opt(speed_limit)}--rsync-path=\"#{recreate_dirs_cmd} && ionice -c 2 -n 7 rsync\" #{local_path.shellescape} #{self.host}:\"#{dst_path.shellescape}\""
         r = `#{cmd} 2>&1`
         raise r if $?.exitstatus != 0
       end
@@ -155,7 +160,7 @@ module FC
       r = `#{cmd} 2>&1`
       src_path += '/' if $?.exitstatus == 0
 
-      cmd = "ionice -c 2 -n 7 rsync -e \"ssh -o StrictHostKeyChecking=no\" -a
+      cmd = "ionice -c 2 -n 7 rsync -e \"ssh -o StrictHostKeyChecking=no\" -a #{FC::Storage.speed_limit_to_rsync_opt(speed_limit)}--rsync-path=\"ionice -c 2 -n 7 rsync\" #{self.host}:\"#{src_path.shellescape}\" #{local_path.shellescape}"
       r = `#{cmd} 2>&1`
      raise r if $?.exitstatus != 0
     end
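http_check_enabled? turns the existing http_check_time field into a switch: any non-negative value means the HTTP check is active, while -1 (the value the management CLI writes for "no") disables it. When disabled, http_check_time_delay returns 0 and update_http_check_time returns without saving. A rough sketch against a loaded FC::Storage instance (attribute access only; a real record also needs host, path, and so on):

    # `storage` is assumed to be an FC::Storage loaded elsewhere.
    storage.http_check_time = 123    # any value >= 0 keeps HTTP checks enabled
    storage.http_check_enabled?      # => true
    storage.http_check_time_delay    # => Time.new.to_i - 123

    storage.http_check_time = -1     # the CLI writes -1 when checks are switched off
    storage.http_check_enabled?      # => false
    storage.http_check_time_delay    # => 0
    storage.update_http_check_time   # returns early, the -1 sentinel is preserved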
data/lib/fc/version.rb
CHANGED
data/lib/manage/storages.rb
CHANGED
@@ -20,21 +20,22 @@ def storages_show
   if storage = find_storage
     count = FC::DB.query("SELECT count(*) as cnt FROM #{FC::ItemStorage.table_name} WHERE storage_name='#{Mysql2::Client.escape(storage.name)}'").first['cnt']
     puts %Q{Storage
-Name:
-Host:
-DC:
-Path:
-Url:
-Url weight:
-Write weight
-Size:
-Free:
-Size limit:
-Size type:
-Copy storages:
-Check time:
-
-
+Name: #{storage.name}
+Host: #{storage.host}
+DC: #{storage.dc}
+Path: #{storage.path}
+Url: #{storage.url}
+Url weight: #{storage.url_weight}
+Write weight #{storage.write_weight}
+Size: #{size_to_human storage.size} (#{(storage.size_rate*100).to_i}%)
+Free: #{size_to_human storage.free} (#{(storage.free_rate*100).to_i}%)
+Size limit: #{size_to_human storage.size_limit}
+Size type: #{storage.auto_size? ? "Auto (min #{ size_to_human storage.auto_size })" : 'Static'}
+Copy storages: #{storage.copy_storages}
+Check time: #{storage.check_time ? "#{Time.at(storage.check_time)} (#{storage.check_time_delay} seconds ago)" : ''}
+Check http time: #{storage.http_check_enabled? ? "#{Time.at(storage.http_check_time)} (#{storage.http_check_time_delay} seconds ago)" : 'disabled'}
+Status: #{storage.up? ? colorize_string('UP', :green) : colorize_string('DOWN', :red)}
+Items storages: #{count}}
   end
 end
 
@@ -56,13 +57,14 @@ def storages_add
     size_limit = human_to_size stdin_read_val('Size limit') {|val| "Size limit not is valid size." unless human_to_size(val)}
   end
 
+  check_http = %(y yes).include?(stdin_read_val('Check http (y/n)?').downcase) ? 0 : -1
   copy_storages = stdin_read_val('Copy storages', true)
   storages = FC::Storage.where.map(&:name)
   copy_storages = copy_storages.split(',').select{|s| storages.member?(s.strip)}.join(',').strip
   begin
     path = path +'/' unless path[-1] == '/'
     path = '/' + path unless path[0] == '/'
-    storage = FC::Storage.new(:name => name, :dc => dc, :host => host, :path => path, :url => url, :size_limit => size_limit, :copy_storages => copy_storages, :url_weight => url_weight, :write_weight => write_weight, :auto_size => auto_size)
+    storage = FC::Storage.new(:name => name, :dc => dc, :host => host, :path => path, :url => url, :size_limit => size_limit, :copy_storages => copy_storages, :url_weight => url_weight, :write_weight => write_weight, :auto_size => auto_size, :http_check_time => check_http)
     print 'Calc current size.. '
     size = storage.file_size('', true)
     puts "ok"
@@ -88,6 +90,7 @@ def storages_add
 Free: #{size_to_human free} (#{(free.to_f*100 / size_limit).to_i}%)
 Size type: #{storage.auto_size? ? "Auto (min #{ size_to_human(auto_size) })" : 'Static' }
 Size limit: #{size_to_human size_limit}
+Check http: #{storage.http_check_enabled? ? 'yes' : 'no' }
 Copy storages #{copy_storages}}
   s = Readline.readline("Continue? (y/n) ", false).strip.downcase
   puts ""
@@ -152,6 +155,7 @@ def storages_change
     auto_size = 0
     size_limit = stdin_read_val("Size (now #{size_to_human(storage.size_limit)})", true) {|val| "Size limit not is valid size." if !val.empty? && !human_to_size(val)}
   end
+  check_http = %(y yes).include?(stdin_read_val("Check http (now #{storage.http_check_enabled? ? 'yes' : 'no'})", true, storage.http_check_enabled? ? 'yes' : 'no').downcase)
   copy_storages = stdin_read_val("Copy storages (now #{storage.copy_storages})", true)
 
   storage.dc = dc unless dc.empty?
@@ -172,7 +176,7 @@ def storages_change
   storage.size_limit = human_to_size(size_limit) unless size_limit.empty?
   storages = FC::Storage.where.map(&:name)
   storage.copy_storages = copy_storages.split(',').select{|s| storages.member?(s.strip)}.join(',').strip unless copy_storages.empty?
-
+  storage.http_check_time = (check_http ? 0 : -1) if storage.http_check_enabled? != check_http
   puts %Q{\nStorage
 Name: #{storage.name}
 DC: #{storage.dc}
@@ -185,6 +189,7 @@ def storages_change
 Free: #{size_to_human storage.free} (#{(storage.free_rate*100).to_i}%)
 Size type: #{storage.auto_size? ? "Auto (Min #{size_to_human auto_size})" : 'Static' }
 Size limit: #{size_to_human storage.size_limit }
+Check http: #{storage.http_check_enabled? ? 'yes' : 'no' }
 Copy storages: #{storage.copy_storages}}
   s = Readline.readline("Continue? (y/n) ", false).strip.downcase
   puts ""
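storages_add and storages_change now prompt "Check http (y/n)?" and fold the answer into the http_check_time sentinel described above: an affirmative answer ends up as 0 (enabled, not yet probed), anything else as -1 (disabled). The gem itself tests the answer with %(y yes).include?(...), i.e. a substring match against the string "y yes"; the sketch below expresses the intended mapping with an explicit list (the helper name is made up):

    # Hypothetical helper mirroring the prompt-to-sentinel mapping used above.
    def http_check_time_for(answer)
      %w[y yes].include?(answer.strip.downcase) ? 0 : -1
    end

    http_check_time_for('y')   # => 0  (HTTP check enabled)
    http_check_time_for('no')  # => -1 (HTTP check disabled)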
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: filecluster
 version: !ruby/object:Gem::Version
-  version: 0.5.24
+  version: 0.5.28
 platform: ruby
 authors:
 - sh
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-11-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: mysql2
@@ -216,8 +216,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-
-rubygems_version: 2.7.9
+rubygems_version: 3.0.8
 signing_key:
 specification_version: 4
 summary: Distributed storage