content_server 1.0.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/bin/backup_server +2 -2
- data/bin/content_server +2 -2
- data/lib/content_server/backup_server.rb +16 -7
- data/lib/content_server/content_receiver.rb +7 -11
- data/lib/content_server/content_server.rb +9 -1
- data/lib/content_server/file_streamer.rb +5 -5
- data/lib/content_server/queue_copy.rb +16 -16
- data/lib/content_server/queue_indexer.rb +20 -16
- data/lib/content_server/remote_content.rb +8 -6
- data/lib/content_server/version.rb +1 -1
- metadata +34 -34
data/bin/backup_server
CHANGED
@@ -30,9 +30,9 @@ rescue SystemExit, SignalException => exc
 rescue Exception => exc
   Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}")
   if retries > 0
-    Log.
+    Log.debug1("Restarting (retries:#{retries}).\nBacktrace:\n#{exc.backtrace.join("\n")}")
   else
-    Log.
+    Log.debug1("Exiting...\nBacktrace:\n#{exc.backtrace.join("\n")}")
     Log.flush
   end
   retries -= 1
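Both launcher scripts (data/bin/backup_server and data/bin/content_server) wrap their main loop in the same rescue-and-retry structure that these hunks extend with backtrace logging. The following is a minimal sketch of that pattern, not the gem's actual script: run_server, the Logger instance, and the retry count of 3 are illustrative stand-ins.

require 'logger'

LOG = Logger.new($stdout)

# Placeholder for the real server entry point.
def run_server
  sleep 1
end

retries = 3  # assumed default; the real scripts manage their own count
begin
  run_server
rescue SystemExit, SignalException => exc
  LOG.info("Shutting down: #{exc.class}")
rescue Exception => exc
  LOG.error("Exception happened in server: #{exc.class}:#{exc.message}")
  if retries > 0
    LOG.debug("Restarting (retries:#{retries}).\nBacktrace:\n#{exc.backtrace.join("\n")}")
  else
    LOG.debug("Exiting...\nBacktrace:\n#{exc.backtrace.join("\n")}")
  end
  retries -= 1
  retry if retries >= 0
end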
data/bin/content_server
CHANGED
@@ -27,9 +27,9 @@ rescue SystemExit, SignalException => exc
 rescue Exception => exc
   Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}")
   if retries > 0
-    Log.
+    Log.debug1("Restarting (retries:#{retries}).\nBacktrace:\n#{exc.backtrace.join("\n")}")
   else
-    Log.
+    Log.debug1("Exiting...\nBacktrace:\n#{exc.backtrace.join("\n")}")
     Log.flush
   end
   retries -= 1
data/lib/content_server/backup_server.rb
CHANGED
@@ -27,6 +27,7 @@ module ContentServer
   Params.integer('backup_check_delay', 5, 'Delay in seconds between two content vs backup checks.')

   def run_backup_server
+    Log.info('Start backup server')
     Thread.abort_on_exception = true
     all_threads = []

@@ -35,6 +36,10 @@ module ContentServer

     # # # # # # # # # # # #
     # Initialize/Start monitoring
+    Log.info('Start monitoring following directories:')
+    Params['monitoring_paths'].each {|path|
+      Log.info(" Path:'#{path['path']}'")
+    }
     monitoring_events = Queue.new
     fm = FileMonitoring::FileMonitoring.new
     fm.set_event_queue(monitoring_events)
@@ -45,6 +50,7 @@ module ContentServer

     # # # # # # # # # # # # # #
     # Initialize/Start local indexer
+    Log.debug1('Start indexer')
     local_server_content_data_queue = Queue.new
     queue_indexer = QueueIndexer.new(monitoring_events,
                                      local_server_content_data_queue,
@@ -54,6 +60,7 @@ module ContentServer

     # # # # # # # # # # # # # # # # # # # # # # # # # # #
     # Initialize/Start backup server content data sender
+    Log.debug1('Start backup server content data sender')
     dynamic_content_data = ContentData::DynamicContentData.new
     #content_data_sender = ContentDataSender.new(
     #    Params['remote_server'],
@@ -61,14 +68,14 @@ module ContentServer
     # Start sending to backup server
     all_threads << Thread.new do
       while true do
-        Log.
+        Log.debug1 'Waiting on local server content data queue.'
         cd = local_server_content_data_queue.pop
         # content_data_sender.send_content_data(cd)
         dynamic_content_data.update(cd)
       end
     end
-
     Params['backup_destination_folder'] = File.expand_path(Params['monitoring_paths'][0]['path'])
+    Log.info("backup_destination_folder is:#{Params['backup_destination_folder']}")
     content_server_dynamic_content_data = ContentData::DynamicContentData.new
     remote_content = ContentServer::RemoteContentClient.new(content_server_dynamic_content_data,
                                                             Params['content_server_hostname'],
@@ -83,21 +90,23 @@ module ContentServer
     all_threads.concat(file_copy_client.threads)

     # Each
+    Log.info('Start remote and local contents comparator')
     all_threads << Thread.new do
       loop do
         sleep(Params['backup_check_delay'])
         local_cd = dynamic_content_data.last_content_data()
         remote_cd = content_server_dynamic_content_data.last_content_data()
         diff = ContentData::ContentData.remove(local_cd, remote_cd)
-        Log.debug2("Files to send? #{!diff.empty?}")
         #file_copy_client.request_copy(diff) unless diff.empty?
         if !diff.empty?
-          Log.info('Backup and remote contents need a sync:')
-          Log.
-          Log.
+          Log.info('Start sync check. Backup and remote contents need a sync:')
+          Log.debug2("Backup content:\n#{local_cd}")
+          Log.debug2("Remote content:\n#{remote_cd}")
           Log.info("Missing contents:\n#{diff}")
-          Log.info('Requesting
+          Log.info('Requesting copy files')
           file_copy_client.request_copy(diff)
+        else
+          Log.info("Start sync check. Local and remote contents are equal. No sync required.")
         end
       end
     end
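The last hunk above instruments the comparator thread that the backup server runs: every backup_check_delay seconds it diffs the remote content snapshot against its own and requests a copy only when something is missing. Below is a simplified sketch of that compare-and-request loop, using plain Sets of checksums instead of the gem's ContentData objects; the delay and log wording follow the diff, and request_copy is a stub standing in for file_copy_client.request_copy.

require 'set'

# Stand-ins for the two DynamicContentData snapshots.
backup_checksums = Set.new(%w[aaa bbb])
remote_checksums = Set.new(%w[aaa bbb ccc])

backup_check_delay = 5  # seconds, as in the 'backup_check_delay' param

def request_copy(checksums)
  # The real code calls file_copy_client.request_copy(diff).
  puts "Requesting copy files: #{checksums.to_a.join(', ')}"
end

1.times do  # the real thread loops forever
  sleep(backup_check_delay)
  # Roughly what ContentData::ContentData.remove(local_cd, remote_cd) yields here:
  # contents present remotely but missing from the backup.
  diff = remote_checksums - backup_checksums
  if diff.empty?
    puts 'Start sync check. Local and remote contents are equal. No sync required.'
  else
    puts 'Start sync check. Backup and remote contents need a sync:'
    puts "Missing contents: #{diff.to_a.join(', ')}"
    request_copy(diff)
  end
end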
data/lib/content_server/content_receiver.rb
CHANGED
@@ -14,17 +14,15 @@ module ContentServer
       Socket.tcp_server_loop(@port) do |sock, client_addrinfo|
         while size_of_data = sock.read(4)
           size_of_data = size_of_data.unpack("l")[0]
-          Log.
+          Log.debug2("Size of data: #{size_of_data}")
           data = sock.read(size_of_data)
-          #Log.debug3 "Data received: #{data}"
           unmarshaled_data = Marshal.load(data)
-          #Log.debug3 "Unmarshaled data: #{unmarshaled_data}"
           @queue.push unmarshaled_data
-          Log.
+          Log.debug2("Socket closed? #{sock.closed?}.")
           break if sock.closed?
-          Log.
+          Log.debug2 'Waiting on sock.read'
         end
-        Log.
+        Log.debug2 'Exited, socket closed or read returned nil.'
       end
     end
   end
@@ -38,19 +36,17 @@ module ContentServer
     end

     def open_socket
-      Log.debug1
+      Log.debug1("Connecting to content server #{@host}:#{@port}.")
       @tcp_socket = TCPSocket.new(@host, @port)
     end

     def send_content_data content_data
       open_socket if @tcp_socket.closed?
-      #Log.debug3 "Data to send: #{content_data}"
       marshal_data = Marshal.dump(content_data)
-      Log.
+      Log.debug2("Marshaled size: #{marshal_data.length}.")
       data_size = [marshal_data.length].pack("l")
-      #Log.debug3 "Marshaled data: #{marshal_data}."
       if data_size.nil? || marshal_data.nil?
-        Log.
+        Log.debug2('Send data is nil!!!!!!!!')
       end
       @tcp_socket.write data_size
       @tcp_socket.write marshal_data
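The receiver loop and send_content_data above exchange Ruby objects over TCP with a simple frame: a 4-byte native-endian length written with Array#pack("l"), followed by a Marshal dump of the object. Here is a self-contained sketch of that framing over a local socket pair; it uses only the Ruby standard library and is not the gem's own receiver/sender classes.

require 'socket'

# One frame = 4-byte length prefix (pack 'l', as in the diff) + Marshal payload.
def send_framed(sock, obj)
  payload = Marshal.dump(obj)
  sock.write([payload.bytesize].pack('l'))
  sock.write(payload)
end

def read_framed(sock)
  size_bytes = sock.read(4)
  return nil if size_bytes.nil?   # peer closed the connection
  size = size_bytes.unpack('l')[0]
  # Note: Marshal.load must only be used with trusted peers.
  Marshal.load(sock.read(size))
end

a, b = UNIXSocket.pair
send_framed(a, { checksum: 'abc123', size: 42 })
p read_framed(b)   # => {:checksum=>"abc123", :size=>42}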
data/lib/content_server/content_server.rb
CHANGED
@@ -24,6 +24,7 @@ module ContentServer
   Params.integer('local_content_data_port', 3333, 'Listen to incoming content data requests.')

   def run_content_server
+    Log.info('Content server start')
     all_threads = []

     @process_variables = ThreadSafeHash::ThreadSafeHash.new
@@ -31,6 +32,10 @@ module ContentServer

     # # # # # # # # # # # #
     # Initialize/Start monitoring
+    Log.info('Start monitoring following directories:')
+    Params['monitoring_paths'].each {|path|
+      Log.info(" Path:'#{path['path']}'")
+    }
     monitoring_events = Queue.new
     fm = FileMonitoring::FileMonitoring.new
     fm.set_event_queue(monitoring_events)
@@ -41,6 +46,7 @@ module ContentServer

     # # # # # # # # # # # # # #
     # Initialize/Start local indexer
+    Log.debug1('Start indexer')
     local_server_content_data_queue = Queue.new
     queue_indexer = QueueIndexer.new(monitoring_events,
                                      local_server_content_data_queue,
@@ -50,12 +56,13 @@ module ContentServer

     # # # # # # # # # # # # # # # # # # # # # #
     # Initialize/Start content data comparator
+    Log.debug1('Start content data comparator')
     copy_files_events = Queue.new
     local_dynamic_content_data = ContentData::DynamicContentData.new
     all_threads << Thread.new do
       while true do
         # Note: This thread should be the only consumer of local_server_content_data_queue
-        Log.
+        Log.debug1 'Waiting on local server content data.'
         local_server_content_data = local_server_content_data_queue.pop
         local_dynamic_content_data.update(local_server_content_data)
       end
@@ -67,6 +74,7 @@ module ContentServer

     # # # # # # # # # # # # # # # #
     # Start copying files on demand
+    Log.debug1('Start copy data on demand')
     copy_server = FileCopyServer.new(copy_files_events, Params['local_files_port'])
     all_threads.concat(copy_server.run())

data/lib/content_server/file_streamer.rb
CHANGED
@@ -109,7 +109,7 @@ module ContentServer
           @send_chunk_clb.call(checksum, offset, @streams[checksum].size, chunk, chunk_checksum)
         end
       else
-        Log.
+        Log.debug1("No checksum found to copy chunk. #{checksum}.")
       end
     end

@@ -213,7 +213,7 @@ module ContentServer
     def handle_new_chunk(file_checksum, offset, content)
       if offset == @streams[file_checksum].file.pos
         FileReceiver.write_string_to_file(content, @streams[file_checksum].file)
-        Log.
+        Log.debug1("Written already #{@streams[file_checksum].file.pos} bytes, " \
                    "out of #{@streams[file_checksum].size} " \
                    "(#{100.0*@streams[file_checksum].file.size/@streams[file_checksum].size}%)")
         return true
@@ -247,10 +247,10 @@ module ContentServer
       local_file_checksum = FileIndexing::IndexAgent.get_checksum(tmp_file_path)
       message = "Local checksum (#{local_file_checksum}) received checksum (#{file_checksum})."
       if local_file_checksum == file_checksum
-        Log.
+        Log.debug1(message)
         begin
           File.rename(tmp_file_path, path)
-          Log.
+          Log.debug1("End move tmp file to permanent location #{path}.")
           @file_done_clb.call(local_file_checksum, path) unless @file_done_clb.nil?
         rescue IOError => e
           Log.warning("Could not move tmp file to permanent file #{path}. #{e.to_s}")
@@ -267,7 +267,7 @@ module ContentServer

     def self.write_string_to_file(str, file)
       bytes_to_write = str.bytesize
-      Log.
+      Log.debug1("writing to file: #{file.to_s}, #{bytes_to_write} bytes.")
       while bytes_to_write > 0
         bytes_to_write -= file.write(str)
       end
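The progress line added in handle_new_chunk reports how many bytes of a streamed file have been written so far. Below is a generic sketch of a write loop with that kind of progress report; it handles partial writes by advancing through the string with byteslice, and it is an illustrative variant rather than the gem's write_string_to_file.

# Write 'str' to 'io', looping on partial writes, then report progress
# against an expected total size (illustrative only).
def write_with_progress(io, str, total_size)
  remaining = str.dup
  until remaining.empty?
    written = io.write(remaining)
    remaining = remaining.byteslice(written..-1) || ''
  end
  pct = 100.0 * io.pos / total_size
  puts format('Written already %d bytes, out of %d (%.1f%%)', io.pos, total_size, pct)
end

File.open('chunk_demo.tmp', 'wb') do |f|
  write_with_progress(f, 'a' * 1024, 4096)  # first 1 KiB of an assumed 4 KiB file
end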
data/lib/content_server/queue_copy.rb
CHANGED
@@ -30,7 +30,7 @@ module ContentServer
       # resend the ack request.
       @copy_prepare = {}
       @file_streamer = FileStreamer.new(method(:send_chunk))
-      Log.
+      Log.debug3("initialize FileCopyServer on port:#{port}")
     end

     def send_chunk(*arg)
@@ -39,7 +39,7 @@ module ContentServer

     def receive_message(addr_info, message)
       # Add ack message to copy queue.
-      Log.
+      Log.debug2("Content server Copy message received: #{message}")
       @copy_input_queue.push(message)
     end

@@ -48,17 +48,17 @@ module ContentServer
       threads << @backup_tcp.tcp_thread if @backup_tcp != nil
       threads << Thread.new do
         while true do
-          Log.
+          Log.debug1 'Waiting on copy files events.'
           message_type, message_content = @copy_input_queue.pop

           if message_type == :COPY_MESSAGE
-            Log.
+            Log.debug1 "Copy files event: #{message_content}"
             # Prepare source,dest map for copy.
             message_content.instances.each { |key, instance|
               # If not already sending.
               if !@copy_prepare.key?(instance.checksum) || !@copy_prepare[instance.checksum][1]
                 @copy_prepare[instance.checksum] = [instance.full_path, false]
-                Log.
+                Log.debug1("Sending ack for: #{instance.checksum}")
                 @backup_tcp.send_obj([:ACK_MESSAGE, [instance.checksum, Time.now.to_i]])
               end
             }
@@ -67,7 +67,7 @@ module ContentServer
             # The timestamp is of local content server! not backup server!
             timestamp, ack, checksum = message_content

-            Log.
+            Log.debug1("Ack (#{ack}) received for content: #{checksum}, timestamp: #{timestamp} " \
                        "now: #{Time.now.to_i}")

             # Copy file if ack (does not exists on backup and not too much time passed)
@@ -76,7 +76,7 @@ module ContentServer
               Log.warning("File was aborted, copied, or started copy just now: #{checksum}")
             else
               path = @copy_prepare[checksum][0]
-              Log.info "Streaming
+              Log.info "Streaming to backup server. content: #{checksum} path:#{path}."
               @file_streamer.start_streaming(checksum, path)
               # Ack received, setting prepare to true
               @copy_prepare[checksum][1] = true
@@ -99,15 +99,15 @@ module ContentServer
               @copy_prepare.delete(file_checksum)
             end
           elsif message_type == :ABORT_COPY
-            Log.
+            Log.debug1("Aborting file copy: #{message_content}")
             if @copy_prepare.key?(message_content)
-              Log.
+              Log.debug1("Aborting: #{@copy_prepare[message_content][0]}")
               @copy_prepare.delete(message_content)
             end
             @file_streamer.abort_streaming(message_content)
           elsif message_type == :RESET_RESUME_COPY
             file_checksum, new_offset = message_content
-            Log.
+            Log.debug1("Resetting/Resuming file (#{file_checksum}) copy to #{new_offset}")
             @file_streamer.reset_streaming(file_checksum, new_offset)
           else
             Log.error("Copy event not supported: #{message_type}")
@@ -133,7 +133,7 @@ module ContentServer
       end
       @local_thread.abort_on_exception = true
       @process_variables = process_variables
-      Log.
+      Log.debug3("initialize FileCopyClient host:#{host} port:#{port}")
     end

     def threads
@@ -156,7 +156,7 @@ module ContentServer

     def done_copy(local_file_checksum, local_path)
       add_process_variables_info()
-      Log.
+      Log.debug1("Done copy file: #{local_path}, #{local_file_checksum}")
     end

     def add_process_variables_info()
@@ -164,7 +164,7 @@ module ContentServer
     end

     def handle_message(message)
-      Log.
+      Log.debug3('QueueFileReceiver handle message')
       @local_queue.push(message)
     end

@@ -174,9 +174,9 @@ module ContentServer
       message_type, message_content = message
       if message_type == :SEND_COPY_MESSAGE
         Log.debug1("Requesting file (content data) to copy.")
-        Log.
+        Log.debug2("File requested: #{message_content.to_s}")
         bytes_written = @tcp_client.send_obj([:COPY_MESSAGE, message_content])
-        Log.
+        Log.debug2("Sending copy message succeeded? bytes_written: #{bytes_written}.")
       elsif message_type == :COPY_CHUNK
         Log.debug1('Chunk received.')
         if @file_receiver.receive_chunk(*message_content)
@@ -186,7 +186,7 @@ module ContentServer
       elsif message_type == :ACK_MESSAGE
         checksum, timestamp = message_content
         # Here we should check file existence
-        Log.
+        Log.info("Returning ack for content: #{checksum}, timestamp: #{timestamp}")
         Log.debug1("Ack: #{!@dynamic_content_data.exists?(checksum)}")
         @tcp_client.send_obj([:ACK_MESSAGE, [timestamp,
                               !@dynamic_content_data.exists?(checksum),
data/lib/content_server/queue_indexer.rb
CHANGED
@@ -4,6 +4,8 @@ require 'log'

 module ContentServer

+  Params.integer('data_flush_delay', 300, 'Number of seconds to delay content data file flush to disk.')
+
   # Simple indexer, gets inputs events (files to index) and outputs
   # content data updates into output queue.
   class QueueIndexer
@@ -12,6 +14,7 @@ module ContentServer
       @input_queue = input_queue
       @output_queue = output_queue
       @content_data_path = content_data_path
+      @last_data_flush_time = nil
     end

     def run
@@ -21,13 +24,13 @@ module ContentServer
       tmp_content_data.from_file(@content_data_path) if File.exists?(@content_data_path)
       tmp_content_data.instances.each_value do |instance|
         # Skipp instances (files) which did not pass the shallow check.
-        Log.
+        Log.debug1('Shallow checking content data:')
         if shallow_check(instance)
-          Log.
+          Log.debug1("exists: #{instance.full_path}")
           server_content_data.add_content(tmp_content_data.contents[instance.checksum])
           server_content_data.add_instance(instance)
         else
-          Log.
+          Log.debug1("changed: #{instance.full_path}")
           # Add non existing and changed files to index queue.
           @input_queue.push([FileMonitoring::FileStatEnum::STABLE, instance.full_path])
         end
@@ -36,9 +39,9 @@ module ContentServer
       # Start indexing on demand and write changes to queue
       thread = Thread.new do
         while true do
-          Log.
+          Log.debug1 'Waiting on index input queue.'
           state, is_dir, path = @input_queue.pop
-          Log.
+          Log.debug1 "event: #{state}, #{is_dir}, #{path}."

           # index files and add to copy queue
           # delete directory with it's sub files
@@ -49,16 +52,16 @@ module ContentServer
             indexer_patterns = FileIndexing::IndexerPatterns.new
             indexer_patterns.add_pattern(path)
             index_agent.index(indexer_patterns, server_content_data)
-            Log.
+            Log.debug1("Failed files: #{index_agent.failed_files.to_a.join(',')}.") \
                 if !index_agent.failed_files.empty?
-            Log.
+            Log.debug1("indexed content #{index_agent.indexed_content}.")
             server_content_data.merge index_agent.indexed_content
           elsif ((state == FileMonitoring::FileStatEnum::NON_EXISTING ||
                   state == FileMonitoring::FileStatEnum::CHANGED) && !is_dir)
             # If file content changed, we should remove old instance.
             key = FileIndexing::IndexAgent.global_path(path)
             # Check if deleted file exists at content data.
-            Log.
+            Log.debug1("Instance to remove: #{key}")
             if server_content_data.instances.key?(key)
               instance_to_remove = server_content_data.instances[key]
               # Remove file from content data only if it does not pass the shallow check, i.e.,
@@ -75,20 +78,21 @@ module ContentServer
             end
           end
           elsif state == FileMonitoring::FileStatEnum::NON_EXISTING && is_dir
-            Log.
+            Log.debug1("NonExisting/Changed: #{path}")
             # Remove directory but only when non-existing.
-            Log.
+            Log.debug1("Directory to remove: #{path}")
             global_dir = FileIndexing::IndexAgent.global_path(path)
             server_content_data = ContentData::ContentData.remove_directory(
                 server_content_data, global_dir)
           else
-            Log.
+            Log.debug1("This case should not be handled: #{state}, #{is_dir}, #{path}.")
           end
-
-
-
-
-
+          if @last_data_flush_time.nil? || @last_data_flush_time + Params['data_flush_delay'] < Time.now.to_i
+            Log.debug1 "Writing server content data to #{@content_data_path}."
+            server_content_data.to_file(@content_data_path)
+            @last_data_flush_time = Time.now.to_i
+          end
+          Log.debug1 'Adding server content data to queue.'
           @output_queue.push(ContentData::ContentData.new(server_content_data))
         end # while true do
       end # Thread.new do
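The main functional change in this file is the new data_flush_delay parameter: instead of writing the content data file on every loop iteration, the indexer now flushes it at most once per delay window, tracked through @last_data_flush_time. Below is a stripped-down sketch of that throttle; the 300-second default mirrors the diff, while the class name and the flush callback are illustrative.

# Flush at most once every 'delay_seconds', as the indexer now does.
class ThrottledFlusher
  def initialize(delay_seconds = 300, &flush)  # 300 matches 'data_flush_delay'
    @delay = delay_seconds
    @flush = flush
    @last_flush_time = nil                     # like @last_data_flush_time
  end

  def maybe_flush
    now = Time.now.to_i
    return unless @last_flush_time.nil? || @last_flush_time + @delay < now
    @flush.call
    @last_flush_time = now
  end
end

flusher = ThrottledFlusher.new(300) { puts 'Writing server content data to disk.' }
flusher.maybe_flush   # first call flushes
flusher.maybe_flush   # skipped: still inside the delay window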
data/lib/content_server/remote_content.rb
CHANGED
@@ -19,11 +19,12 @@ module ContentServer
       @last_save_timestamp = nil
       @content_server_content_data_path = File.join(local_backup_folder, 'remote',
                                                     host + '_' + port.to_s)
-      Log.
+      Log.debug3("Initialized RemoteContentClient: host:#{host} port:#{port} local_backup_folder:#{local_backup_folder}")
     end

     def receive_content(message)
       Log.debug1("Backup server received Remote content data:#{message.to_s}")
+      Log.info("Backup server received Remote content data")
       ref = @dynamic_content_data.last_content_data
       @dynamic_content_data.update(message)
       @last_fetch_timestamp = Time.now.to_i
@@ -40,9 +41,9 @@ module ContentServer
         FileUtils.makedirs(@content_server_content_data_path) unless \
             File.directory?(@content_server_content_data_path)
         count = File.open(write_to, 'wb') { |f| f.write(message.to_s) }
-        Log.
+        Log.debug1("Written content data to file:#{write_to}.")
       else
-        Log.
+        Log.debug1("No need to write remote content data, it has not changed.")
       end
     end

@@ -74,14 +75,15 @@ module ContentServer
     def initialize(dynamic_content_data, port)
       @dynamic_content_data = dynamic_content_data
       @tcp_server = Networking::TCPServer.new(port, method(:content_requested))
-      Log.
+      Log.debug3("initialize RemoteContentServer on port:#{port}")
     end

     def content_requested(addr_info, message)
       # Send response.
-      Log.info("
+      Log.info("Content server received content data request")
+      Log.debug1("Sending content data:#{@dynamic_content_data.last_content_data}")
       @tcp_server.send_obj(@dynamic_content_data.last_content_data)
-      Log.info('
+      Log.info('Content server sent content data')
     end

     def tcp_thread
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: content_server
 version: !ruby/object:Gem::Version
-  version: 1.0.
+  version: 1.0.1
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-05-
+date: 2013-05-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: content_data
@@ -18,7 +18,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -26,7 +26,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: file_indexing
   requirement: !ruby/object:Gem::Requirement
@@ -34,7 +34,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -42,7 +42,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: file_monitoring
   requirement: !ruby/object:Gem::Requirement
@@ -50,7 +50,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -58,7 +58,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: log
   requirement: !ruby/object:Gem::Requirement
@@ -66,7 +66,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -74,7 +74,7 @@ dependencies:
     requirements:
     - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: networking
   requirement: !ruby/object:Gem::Requirement
@@ -82,7 +82,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -90,7 +90,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: params
   requirement: !ruby/object:Gem::Requirement
@@ -98,7 +98,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -106,7 +106,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: process_monitoring
   requirement: !ruby/object:Gem::Requirement
@@ -114,7 +114,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -122,7 +122,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -146,7 +146,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -154,7 +154,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: content_data
   requirement: !ruby/object:Gem::Requirement
@@ -162,7 +162,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -170,7 +170,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: file_indexing
   requirement: !ruby/object:Gem::Requirement
@@ -178,7 +178,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -186,7 +186,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: file_monitoring
   requirement: !ruby/object:Gem::Requirement
@@ -194,7 +194,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -202,7 +202,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: log
   requirement: !ruby/object:Gem::Requirement
@@ -210,7 +210,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -218,7 +218,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: networking
   requirement: !ruby/object:Gem::Requirement
@@ -226,7 +226,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -234,7 +234,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: params
   requirement: !ruby/object:Gem::Requirement
@@ -242,7 +242,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -250,7 +250,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: process_monitoring
   requirement: !ruby/object:Gem::Requirement
@@ -258,7 +258,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -266,7 +266,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
@@ -290,7 +290,7 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
@@ -298,7 +298,7 @@ dependencies:
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version: 1.0.
+        version: 1.0.1
 description: Monitor and Index a directory and back it up to backup server.
 email: bbfsdev@gmail.com
 executables: