content_server 1.2.1 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/bin/backup_server CHANGED
@@ -20,8 +20,6 @@ ContentServer.init_globals
 
 Thread.abort_on_exception = true # TODO (genadyp) should be treated globally? by param for example.
 
-retries = 0 # number of retries to run a server
-
 begin
   RunInBackground.run { ContentServer.run_backup_server }
 rescue SystemExit, SignalException => exception
@@ -31,11 +29,6 @@ rescue SystemExit, SignalException => exception
   exit
 rescue Exception => exception
   ContentServer.handle_program_termination(exception)
-  if retries > 0
-    Log.debug1("Restarting (retries:#{retries}).")
-  else
-    Log.debug1("Exiting...")
-    Log.flush
-  end
-  retries -= 1
-end while retries >= 0
+  Log.flush
+  exit
+end
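All four bin scripts drop the retry counter and the trailing end-while loop; after this change a fatal error is reported once and the process exits. A compressed, self-contained sketch of the resulting shape (run_server and the warn call are stand-ins for the gem's RunInBackground.run block and ContentServer.handle_program_termination):

    # Stand-in body for illustration only; the real scripts call
    # RunInBackground.run { ContentServer.run_backup_server } here.
    def run_server
      raise 'simulated failure'
    end

    begin
      run_server
    rescue SystemExit, SignalException => exception
      # signal/exit path: log and flush (omitted in this sketch), then leave
      exit
    rescue Exception => exception
      # 1.2.1 looped here while retries >= 0; 1.3.0 reports once and exits
      warn("terminating: #{exception.class}: #{exception.message}")
      exit
    end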
data/bin/content_server CHANGED
@@ -17,8 +17,6 @@ ContentServer.init_globals
 
 Thread.abort_on_exception = true # TODO (genadyp) should be treated globally? by param for example.
 
-retries = 0 # number of retries to run a server
-
 begin
   RunInBackground.run { ContentServer.run_content_server }
 rescue SystemExit, SignalException => exception
@@ -28,11 +26,6 @@ rescue SystemExit, SignalException => exception
   exit
 rescue Exception => exception
   ContentServer.handle_program_termination(exception)
-  if retries > 0
-    Log.debug1("Restarting (retries:#{retries}).")
-  else
-    Log.debug1("Exiting...")
-    Log.flush
-  end
-  retries -= 1
-end while retries >= 0
+  Log.flush
+  exit
+end
data/bin/testing_memory CHANGED
@@ -24,8 +24,6 @@ ContentServer.init_globals
 
 Thread.abort_on_exception = true
 
-retries = 0 # number of retries to run a server
-
 begin
   case Params['server_to_test']
   when 'content'
@@ -35,7 +33,6 @@ begin
   else
     raise ArgumentError.new "Incorrect server_to_test parameter value: #{Params['server_to_test']}"
   end
-  retries -=1
 
 rescue SystemExit, SignalException => exc
   # TODO (genadyp) do we need to trap signals by types?
@@ -50,11 +47,6 @@ rescue Exception => exc
             "#{exc.backtrace.join("\n")}")
   Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n" +
             "#{exc.backtrace.join("\n")}")
-  if retries > 0
-    Log.debug1("Restarting (retries:#{retries}).")
-  else
-    Log.debug1("Exiting...")
-    Log.flush
-  end
-  retries -= 1
-end while retries >= 0
+  Log.flush
+  exit
+end
data/bin/testing_server CHANGED
@@ -22,8 +22,6 @@ ContentServer.init_globals
 
 Thread.abort_on_exception = true
 
-retries = 0 # number of retries to run a server
-
 begin
   case Params['server_to_test']
   when 'content'
@@ -47,11 +45,6 @@ rescue Exception => exc
             "#{exc.backtrace.join("\n")}")
   Log.error("Exception happened in #{Params['service_name']} server: #{exc.class}:#{exc.message}\nBacktrace:\n" +
             "#{exc.backtrace.join("\n")}")
-  if retries > 0
-    Log.debug1("Restarting (retries:#{retries}).")
-  else
-    Log.debug1("Exiting...")
-    Log.flush
-  end
-  retries -= 1
-end while retries >= 0
+  Log.flush
+  exit
+end
@@ -144,7 +144,7 @@ module ContentData
                             modification_time]
       else
         if size != content_info[0]
-          Log.warning 'File size different from content size while same checksum'
+          Log.warning('File size different from content size while same checksum')
           Log.warning("instance location:server:'#{location[0]}' path:'#{location[1]}'")
           Log.warning("instance mod time:'#{modification_time}'")
         end
@@ -168,13 +168,6 @@ module ContentData
       @instances_info.has_key?([server, path])
     end
 
-    def stats_by_location(location)
-      checksum = @instances_info[location]
-      content_info = @contents_info[checksum]
-      return nil if content_info.nil?
-      return [content_info[0], content_info[1][location]]
-    end
-
     # removes an instance record both in @instances_info and @instances_info.
     # input params: server & path - are the instance unique key (called location)
     # removes also the content, if content becomes empty after removing the instance
@@ -260,11 +253,11 @@ module ContentData
       file = File.open(filename, 'w')
       file.write("#{@contents_info.length}\n")
       each_content { |checksum, size, content_mod_time|
-        file.write("#{checksum},#{size},#{content_mod_time}\n")
+        file.write("#{checksum},#{size},#{Time.at(content_mod_time)}\n")
       }
       file.write("#{@instances_info.length}\n")
       each_instance { |checksum, size, _, instance_mod_time, server, path|
-        file.write("#{checksum},#{size},#{server},#{path},#{instance_mod_time}\n")
+        file.write("#{checksum},#{size},#{server},#{path},#{Time.at(instance_mod_time)}\n")
      }
      file.close
    end
@@ -278,7 +271,7 @@ module ContentData
       i += 1
       number_of_instances.times {
         if lines[i].nil?
-          Log.warning "line ##{i} is nil !!!, Backing filename: #{filename} to #{filename}.bad"
+          Log.warning("line ##{i} is nil !!!, Backing filename: #{filename} to #{filename}.bad")
           FileUtils.cp(filename, "#{filename}.bad")
           Log.warning("Lines:\n#{lines[i].join("\n")}")
         else
@@ -297,7 +290,7 @@ module ContentData
                        parameters[1].to_i,
                        parameters[2],
                        parameters[3],
-                       parameters[4].to_i)
+                       DateTime.parse(parameters[4]).to_time.to_i)
         end
         i += 1
       }
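The two timestamp hunks above change the on-disk format: to_file now writes modification times through Time.at (human-readable) and from_file converts them back to epoch seconds with DateTime.parse(...).to_time.to_i. A minimal round-trip sketch of that convention; the epoch value here is made up for illustration:

    require 'date'

    epoch = 1357638000                                   # hypothetical modification time (epoch seconds)
    serialized = Time.at(epoch).to_s                     # what to_file now writes, e.g. "2013-01-08 ..."
    restored = DateTime.parse(serialized).to_time.to_i   # what from_file now reads back
    raise 'round-trip mismatch' unless restored == epoch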
@@ -397,19 +390,19 @@ module ContentData
         if File.size(path) != size
           is_valid = false
           err_msg = "#{path} size #{File.size(path)} differs from indexed size #{size}"
-          Log.warning err_msg
+          Log.warning(err_msg)
         end
         #if ContentData.format_time(File.mtime(path)) != instance.modification_time
         if File.mtime(path).to_i != instance_mtime
           is_valid = false
           err_msg = "#{path} modification time #{File.mtime(path).to_i} differs from " \
                     + "indexed #{instance_mtime}"
-          Log.warning err_msg
+          Log.warning(err_msg)
         end
       else
         is_valid = false
         err_msg = "Indexed file #{path} doesn't exist"
-        Log.warning err_msg
+        Log.warning(err_msg)
       end
       is_valid
     end
@@ -430,7 +423,7 @@ module ContentData
           true
         else
           err_msg = "#{path} checksum #{current_checksum} differs from indexed #{instance_checksum}"
-          Log.warning err_msg
+          Log.warning(err_msg)
           false
         end
       else
@@ -39,19 +39,19 @@ module ContentServer
     $tmp_content_data_file = File.join(Params['tmp_path'], 'backup.data')
 
     if Params['enable_monitoring']
-      Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
+      Log.info("Initializing monitoring of process params on port:%s", Params['process_monitoring_web_port'])
       $process_vars.set('server_name', 'backup_server')
     end
 
     # # # # # # # # # # # #
     # Initialize/start monitoring and destination folder
     Params['backup_destination_folder'][0]['path']=File.expand_path(Params['backup_destination_folder'][0]['path'])
-    Log.info("backup_destination_folder is:#{Params['backup_destination_folder'][0]['path']}")
+    Log.info("backup_destination_folder is:%s", Params['backup_destination_folder'][0]['path'])
     #adding destination folder to monitoring paths
     Params['monitoring_paths'] << Params['backup_destination_folder'][0]
     Log.info('Start monitoring following directories:')
     Params['monitoring_paths'].each { |path|
-      Log.info("  Path:'#{path['path']}'")
+      Log.info("  Path:'%s'", path['path'])
     }
 
     # initial global local content data object
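Most log calls in this release switch from eager string interpolation to a format string plus arguments. A self-contained illustration of why that matters, using a stand-in logger (the gem's own Log module is assumed to format arguments in a similar, deferred way):

    # DemoLog is only a stand-in to show deferred formatting; it is not the gem's Log.
    module DemoLog
      LEVEL = :warn  # pretend info messages are filtered out

      def self.info(msg, *args)
        return if LEVEL == :warn          # cheap early exit: args are never formatted
        puts(args.empty? ? msg : format(msg, *args))
      end
    end

    path = '/var/backup'
    DemoLog.info("backup_destination_folder is:%s", path)  # no string is built when filtered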
@@ -60,12 +60,14 @@ module ContentServer
 
     # Read here for initial content data that exist from previous system run
     content_data_path = Params['local_content_data_path']
+    last_content_data_id = nil
     if File.exists?(content_data_path) and !File.directory?(content_data_path)
-      Log.info("reading initial content data that exist from previous system run from file:#{content_data_path}")
+      Log.info("reading initial content data that exist from previous system run from file:%s", content_data_path)
       $local_content_data.from_file(content_data_path)
+      last_content_data_id = $local_content_data.unique_id
     else
       if File.directory?(content_data_path)
-        raise("Param:'local_content_data_path':'#{Params['local_content_data_path']}'cannot be a directory name")
+        raise("Param:'local_content_data_path':'%s' cannot be a directory name", Params['local_content_data_path'])
       end
       # create directory if needed
       dir = File.dirname(Params['local_content_data_path'])
@@ -93,7 +95,6 @@ module ContentServer
     Log.debug1('Init thread: flush local content data to file')
     all_threads << Thread.new do
       FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
-      last_content_data_id = nil
       loop{
         sleep(Params['data_flush_delay'])
         Log.info('Start flush local content data to file.')
@@ -135,9 +136,9 @@ module ContentServer
         $remote_content_data_lock.synchronize{
           diff = ContentData.remove($local_content_data, $remote_content_data)
           unless diff.nil? || diff.empty?
-            Log.debug2("Backup content:\n#{$local_content_data}")
-            Log.debug2("Remote content:\n#{$remote_content_data}")
-            Log.debug2("Missing contents:\n#{diff}")
+            Log.debug2("Backup content:\n%s", $local_content_data)
+            Log.debug2("Remote content:\n%s", $remote_content_data)
+            Log.debug2("Missing contents:\n%s", diff)
             Log.info('Start sync check. Backup and remote contents need a sync, requesting copy files:')
             file_copy_client.request_copy(diff)
           else
@@ -14,15 +14,15 @@ module ContentServer
       Socket.tcp_server_loop(@port) do |sock, client_addrinfo|
         while size_of_data = sock.read(4)
           size_of_data = size_of_data.unpack("l")[0]
-          Log.debug2("Size of data: #{size_of_data}")
+          Log.debug2("Size of data: %s", size_of_data)
           data = sock.read(size_of_data)
           unmarshaled_data = Marshal.load(data)
           @queue.push unmarshaled_data
-          Log.debug2("Socket closed? #{sock.closed?}.")
+          Log.debug2("Socket closed? %s.", sock.closed?)
           break if sock.closed?
-          Log.debug2 'Waiting on sock.read'
+          Log.debug2('Waiting on sock.read')
         end
-        Log.debug2 'Exited, socket closed or read returned nil.'
+        Log.debug2('Exited, socket closed or read returned nil.')
       end
     end
   end
@@ -36,14 +36,14 @@ module ContentServer
     end
 
     def open_socket
-      Log.debug1("Connecting to content server #{@host}:#{@port}.")
+      Log.debug1("Connecting to content server %s:%s.", @host, @port)
       @tcp_socket = TCPSocket.new(@host, @port)
     end
 
     def send_content_data content_data
       open_socket if @tcp_socket.closed?
       marshal_data = Marshal.dump(content_data)
-      Log.debug2("Marshaled size: #{marshal_data.length}.")
+      Log.debug2("Marshaled size: %s.", marshal_data.length)
       data_size = [marshal_data.length].pack("l")
       if data_size.nil? || marshal_data.nil?
         Log.debug2('Send data is nil!!!!!!!!')
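These two hunks sit on both ends of the same wire format: the sender packs the Marshal payload length into 4 bytes with pack("l"), and the receiver reads 4 bytes, unpacks the length, then reads exactly that many bytes before Marshal.load. A stand-alone sketch of the framing, with StringIO standing in for the TCP socket and a made-up message:

    require 'stringio'

    payload = Marshal.dump({ 'checksum' => 'abc', 'size' => 42 })  # hypothetical message
    wire = [payload.length].pack('l') + payload                    # sender side

    sock = StringIO.new(wire)                                      # receiver side
    size_of_data = sock.read(4).unpack('l')[0]
    message = Marshal.load(sock.read(size_of_data))
    # message == { 'checksum' => 'abc', 'size' => 42 }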
@@ -32,7 +32,7 @@ module ContentServer
     $tmp_content_data_file = File.join(Params['tmp_path'], 'content.data')
 
     if Params['enable_monitoring']
-      Log.info("Initializing monitoring of process params on port:#{Params['process_monitoring_web_port']}")
+      Log.info("Initializing monitoring of process params on port:%s", Params['process_monitoring_web_port'])
       $process_vars.set('server_name', 'content_server')
     end
 
@@ -40,9 +40,8 @@ module ContentServer
     # Initialize/Start monitoring
     Log.info('Start monitoring following directories:')
     Params['monitoring_paths'].each {|path|
-      Log.info("  Path:'#{path['path']}'")
+      Log.info("  Path:'%s'", path['path'])
     }
-    monitoring_events = Queue.new
 
     # initial global local content data object
     $local_content_data_lock = Mutex.new
@@ -50,19 +49,21 @@ module ContentServer
 
     # Read here for initial content data that exist from previous system run
     content_data_path = Params['local_content_data_path']
+    last_content_data_id = nil
     if File.exists?(content_data_path) and !File.directory?(content_data_path)
-      Log.info("reading initial content data that exist from previous system run from file:#{content_data_path}")
+      Log.info("reading initial content data that exist from previous system run from file:%s", content_data_path)
       $local_content_data.from_file(content_data_path)
+      last_content_data_id = $local_content_data.unique_id
     else
       if File.directory?(content_data_path)
-        raise("Param:'local_content_data_path':'#{Params['local_content_data_path']}'cannot be a directory name")
+        raise("Param:'local_content_data_path':'%s'cannot be a directory name", Params['local_content_data_path'])
       end
       # create directory if needed
       dir = File.dirname(Params['local_content_data_path'])
       FileUtils.mkdir_p(dir) unless File.exists?(dir)
     end
 
-    Log.info("Init monitoring")
+    Log.info('Init monitoring')
     monitoring_events = Queue.new
     fm = FileMonitoring::FileMonitoring.new()
     fm.set_event_queue(monitoring_events)
@@ -83,11 +84,10 @@ module ContentServer
     Log.debug1('Init thread: flush local content data to file')
     all_threads << Thread.new do
       FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
-      last_content_data_id = nil
       loop{
         sleep(Params['data_flush_delay'])
         Log.info('Start flush local content data to file.')
-        $testing_memory_log.info('Start flush content data to file') if $testing_memory_log
+        $testing_memory_log.info('Start flush content data to file') if $testing_memory_active
         written_to_file = false
         $local_content_data_lock.synchronize{
           local_content_data_unique_id = $local_content_data.unique_id
@@ -96,11 +96,11 @@ module ContentServer
             $local_content_data.to_file($tmp_content_data_file)
             written_to_file = true
           else
-            Log.debug1('no need to flush. content data has not changed')
+            Log.info('no need to flush. content data has not changed')
           end
         }
         File.rename($tmp_content_data_file, Params['local_content_data_path']) if written_to_file
-        $testing_memory_log.info("End flush content data to file") if $testing_memory_log
+        $testing_memory_log.info("End flush content data to file") if $testing_memory_active
       }
     end
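Both servers now hoist last_content_data_id out of the flush thread and seed it from the content data loaded at startup, so the periodic flush skips the file write when nothing has changed. A condensed, self-contained sketch of that guard; ContentDataStub stands in for the gem's $local_content_data and unique_id is just a bumped counter here:

    require 'tmpdir'

    # Stand-in for the gem's content data object, illustration only.
    class ContentDataStub
      attr_accessor :unique_id
      def initialize; @unique_id = 1; end
      def to_file(path); File.write(path, "snapshot #{@unique_id}"); end
    end

    local_content_data = ContentDataStub.new
    last_content_data_id = local_content_data.unique_id   # seeded right after from_file in 1.3.0
    tmp_file = File.join(Dir.tmpdir, 'content.data.tmp')

    3.times do
      current_id = local_content_data.unique_id
      if current_id != last_content_data_id               # only write when content changed
        last_content_data_id = current_id
        local_content_data.to_file(tmp_file)              # the real code then File.rename's it into place
      end
      local_content_data.unique_id += 1                   # simulate new content arriving
    end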
@@ -24,12 +24,12 @@ module ContentServer
 
     def self.close_delete_stream(checksum, streams_hash)
       if streams_hash.key?(checksum)
-        Log.debug1("close_delete_stream #{streams_hash[checksum].file}")
+        Log.debug1("close_delete_stream %s", streams_hash[checksum].path)
         begin
           streams_hash[checksum].file.close()
         rescue IOError => e
           Log.warning("While closing stream, could not close file #{streams_hash[checksum].path}." \
-                      " #{e.to_s}")
+                      " IOError msg:#{e.to_s}")
         end
         streams_hash.delete(checksum)
         $process_vars.set('Streams size', streams_hash.size)
@@ -105,7 +105,7 @@ module ContentServer
       checksum = content
       if @streams.key?(checksum)
         offset = @streams[checksum].file.pos
-        Log.debug1("Sending chunk for #{checksum}, offset #{offset}.")
+        Log.debug1("Sending chunk for %s, offset %s.",checksum, offset)
         chunk = @streams[checksum].file.read(Params['streaming_chunk_size'])
         if chunk.nil?
           # No more to read, send end of file.
@@ -116,7 +116,7 @@ module ContentServer
           @send_chunk_clb.call(checksum, offset, @streams[checksum].size, chunk, chunk_checksum)
         end
       else
-        Log.debug1("No checksum found to copy chunk. #{checksum}.")
+        Log.debug1("No stream found for copy chunk checksum %s.", checksum)
       end
     end
 
@@ -129,7 +129,7 @@ module ContentServer
       if offset > 0
         file.seek(offset)
       end
-      Log.debug1("File streamer: #{file.to_s}.")
+      Log.debug1("File streamer: %s.", file)
       @streams[checksum] = Stream.new(checksum, path, file, file.size)
       $process_vars.set('Streams size', @streams.size)
     rescue IOError, Errno::ENOENT => e
@@ -223,13 +223,15 @@ module ContentServer
     def handle_new_chunk(file_checksum, offset, content)
       if offset == @streams[file_checksum].file.pos
         FileReceiver.write_string_to_file(content, @streams[file_checksum].file)
-        Log.debug1("Written already #{@streams[file_checksum].file.pos} bytes, " \
-                   "out of #{@streams[file_checksum].size} " \
-                   "(#{100.0*@streams[file_checksum].file.size/@streams[file_checksum].size}%)")
+        if Params['log_debug_level'] >= 1 # added the condition here to avoid calculations
+          Log.debug1("Written already %s bytes, out of %s (%s%%)",
+                     @streams[file_checksum].file.pos, @streams[file_checksum].size,
+                     100.0*@streams[file_checksum].file.size/@streams[file_checksum].size)
+        end
         return true
       else
         # Offset is wrong, send reset/resume copy from correct offset.
-        Log.warning("Received chunk with incorrect offset #{offset}, should " \
+        Log.warning("Received chunk with incorrect offset #{offset}, should " + \
                     "be #{@streams[file_checksum].file.pos}, file_checksum:#{file_checksum}")
         @reset_copy.call(file_checksum, @streams[file_checksum].file.pos) unless @reset_copy.nil?
         return false
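The progress percentage in the hunk above is now computed only when Params['log_debug_level'] is at least 1, so the arithmetic is skipped entirely when debug output is off. A generic, stand-alone sketch of that guard pattern with stand-in names (DEBUG_LEVEL plays the role of the param):

    DEBUG_LEVEL = 0   # stand-in for Params['log_debug_level']

    def debug1(msg, *args)
      puts(format(msg, *args)) if DEBUG_LEVEL >= 1
    end

    bytes_written = 4096
    total_size = 1_000_000
    if DEBUG_LEVEL >= 1  # guard: the percentage below is never calculated otherwise
      debug1("Written already %s bytes, out of %s (%s%%)",
             bytes_written, total_size, 100.0 * bytes_written / total_size)
    end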
@@ -246,7 +248,7 @@ module ContentServer
       # Make the directory if does not exists.
       path = FileReceiver.destination_filename(Params['backup_destination_folder'][0]['path'],
                                                file_checksum)
-      Log.debug1("Moving tmp file #{@streams[file_checksum].path} to #{path}")
+      Log.debug1("Moving tmp file %s to %s", @streams[file_checksum].path, path)
       file_dir = File.dirname(path)
       FileUtils.makedirs(file_dir) unless File.directory?(file_dir)
       # Move tmp file to permanent location.
@@ -259,7 +261,7 @@ module ContentServer
       Log.debug1(message)
       begin
         File.rename(tmp_file_path, path)
-        Log.debug1("End move tmp file to permanent location #{path}.")
+        Log.debug1("End move tmp file to permanent location %s.", path)
         @file_done_clb.call(local_file_checksum, path) unless @file_done_clb.nil?
       rescue Exception => e
         Log.warning("Could not move tmp file to permanent path #{path}." +
@@ -268,7 +270,7 @@ module ContentServer
     else
       begin
         Log.error(message)
-        Log.debug1("Deleting tmp file: #{tmp_file_path}")
+        Log.debug1("Deleting tmp file: %s", tmp_file_path)
         File.delete(tmp_file_path)
       rescue Exception => e
         Log.warning("Could not delete tmp file from tmp path #{tmp_file_path}." +
@@ -282,7 +284,7 @@ module ContentServer
 
     def self.write_string_to_file(str, file)
       bytes_to_write = str.bytesize
-      Log.debug1("writing to file: #{file.to_s}, #{bytes_to_write} bytes.")
+      Log.debug1("writing to file: %s, %s bytes.", file, bytes_to_write)
       while bytes_to_write > 0
         bytes_to_write -= file.write(str)
       end
@@ -36,7 +36,7 @@ module ContentServer
     # Add content to copy process. If already in copy process or waiting for copy then skip.
     # If no open places for copy then put in waiting list
     def add_content(checksum, path)
-      Log.debug2("Try to add content:#{checksum} to copy waiting list")
+      Log.debug2("Try to add content:%s to copy waiting list", checksum)
       @keeper.synchronize{
         # if content is being copied or waiting then skip it
         if !@contents_under_copy[checksum]
@@ -48,16 +48,16 @@ module ContentServer
             $process_vars.set('Copy File Queue Size', @copy_input_queue.size)
           else
             # no place in copy streams. Add to waiting list
-            Log.debug2("add content:#{checksum} to copy waiting list")
+            Log.debug2("add content:%s to copy waiting list", checksum)
             @contents_to_copy[checksum] = true # replace with a set
             @contents_to_copy_queue.push([checksum, path])
             $process_vars.set('contents to copy queue', @contents_to_copy_queue.size)
           end
         else
-          Log.debug2("content:#{checksum} already in waiting list. skipping.")
+          Log.debug2("content:%s already in waiting list. skipping.", checksum)
         end
       else
-        Log.debug2("content:#{checksum} is being copied. skipping.")
+        Log.debug2("content:%s is being copied. skipping.", checksum)
       end
     }
   end
@@ -68,22 +68,22 @@ module ContentServer
       if content_record
         if !content_record[1]
           path = content_record[0]
-          Log.debug1("Streaming to backup server. content: #{checksum} path:#{path}.")
+          Log.debug1("Streaming to backup server. content: %s path:%s.", checksum, path)
           @file_streamer.start_streaming(checksum, path)
           # updating Ack
           content_record[1] = true
         else
-          Log.warning("File already received ack: #{checksum}")
+          Log.warning("File already received ack: %s", checksum)
         end
       else
-        Log.warning("File was aborted or copied: #{checksum}")
+        Log.warning("File was aborted or copied: %s", checksum)
       end
     }
   end
 
   def remove_content(checksum)
     @keeper.synchronize{
-      Log.debug3("removing checksum:#{checksum} from contents under copy")
+      Log.debug3("removing checksum:%s from contents under copy", checksum)
       @contents_under_copy.delete(checksum)
       $process_vars.set('contents under copy', @contents_under_copy.size)
       #1 place is became available. Put another file in copy process if waiting
@@ -146,7 +146,7 @@ module ContentServer
       # resend the ack request.
       @copy_prepare = {}
       @file_streamer = FileStreamer.new(method(:send_chunk))
-      Log.debug3("initialize FileCopyServer on port:#{port}")
+      Log.debug3("initialize FileCopyServer on port:%s", port)
       @file_copy_manager = FileCopyManager.new(@copy_input_queue, @file_streamer)
     end
 
@@ -157,7 +157,7 @@ module ContentServer
 
     def receive_message(addr_info, message)
       # Add ack message to copy queue.
-      Log.debug2("Content server Copy message received: #{message}")
+      Log.debug2("Content server Copy message received: %s", message)
       @copy_input_queue.push(message)
       $process_vars.set('Copy File Queue Size', @copy_input_queue.size)
     end
@@ -170,10 +170,10 @@ module ContentServer
         Log.debug1 'Waiting on copy files events.'
         (message_type, message_content) = @copy_input_queue.pop
         $process_vars.set('Copy File Queue Size', @copy_input_queue.size)
-        Log.debug1("Content copy message:#{[message_type, message_content]}")
+        Log.debug1("Content copy message:Type:%s content:%s", message_type, message_content)
 
         if message_type == :SEND_ACK_MESSAGE
-          Log.debug1("Sending ack for: #{message_content}")
+          Log.debug1("Sending ack for: %s", message_content)
           @backup_tcp.send_obj([:ACK_MESSAGE, [message_content, Time.now.to_i]])
         elsif message_type == :COPY_MESSAGE
           message_content.each_instance { |checksum, size, content_mod_time, instance_mod_time, server, path|
@@ -184,7 +184,7 @@ module ContentServer
           # The timestamp is of local content server! not backup server!
           timestamp, ack, checksum = message_content
           Log.debug1("Ack (#{ack}) received for content: #{checksum}, timestamp: #{timestamp} " \
-                     "now: #{Time.now.to_i}")
+                     "now: #{Time.now.to_i}") if Params['log_debug_level'] >= 1 # adding to avoid Time.now
 
           # Copy file if ack (does not exists on backup and not too much time passed)
           if ack
@@ -192,7 +192,7 @@ module ContentServer
             @file_copy_manager.receive_ack(checksum)
           else
             Log.debug1("Ack timed out span: #{Time.now.to_i - timestamp} > " \
-                       "timeout: #{Params['ack_timeout']}")
+                       "timeout: #{Params['ack_timeout']}") if Params['log_debug_level'] >= 1 # adding to avoid Time.now
             # remove only content under copy
             @file_copy_manager.remove_content(checksum)
           end
@@ -207,8 +207,9 @@ module ContentServer
         elsif message_type == :COPY_CHUNK
           # We open the message here for printing info and deleting copy_prepare!
           file_checksum, offset, file_size, content, content_checksum = message_content
-          Log.debug1("Send chunk for file #{file_checksum}, offset: #{offset} " \
-                     "filesize: #{file_size}, checksum:#{content_checksum}")
+          Log.debug1("Send chunk for file %s, offset: %s " \
+                     "filesize: %s, checksum: %s",
+                     file_checksum, offset, file_size, content_checksum)
           # Blocking send.
           @backup_tcp.send_obj([:COPY_CHUNK, message_content])
           if content.nil? and content_checksum.nil?
@@ -216,13 +217,14 @@ module ContentServer
             @file_copy_manager.remove_content(file_checksum)
           end
         elsif message_type == :ABORT_COPY
-          Log.debug1("Aborting file copy: #{message_content}")
+          Log.debug1("Aborting file copy: %s", message_content)
          @file_streamer.abort_streaming(message_content)
          # remove only content under copy
          @file_copy_manager.remove_content(message_content)
        elsif message_type == :RESET_RESUME_COPY
          (file_checksum, new_offset) = message_content
-          Log.debug1("Resetting/Resuming file (#{file_checksum}) copy to #{new_offset}")
+          Log.debug1("Resetting/Resuming file (%s) copy to %s",
+                     file_checksum, new_offset)
          @file_streamer.reset_streaming(file_checksum, new_offset)
        else
          Log.error("Copy event not supported: #{message_type}")
@@ -248,7 +250,7 @@ module ContentServer
         end
       end
       @local_thread.abort_on_exception = true
-      Log.debug3("initialize FileCopyClient host:#{host} port:#{port}")
+      Log.debug3("initialize FileCopyClient host:%s port:%s", host, port)
     end
 
     def threads
@@ -271,7 +273,7 @@ module ContentServer
 
     def done_copy(local_file_checksum, local_path)
       $process_vars.inc('num_files_received')
-      Log.debug1("Done copy file: #{local_path}, #{local_file_checksum}")
+      Log.debug1("Done copy file: path %s, checksum %s", local_path, local_file_checksum)
     end
 
     def handle_message(message)
@@ -284,10 +286,10 @@ module ContentServer
     # of ack. Note that it is being executed from the class thread only!
     def handle(message)
       message_type, message_content = message
-      Log.debug1("backup copy message: Type #{message_type}. message: #{message_content}")
+      Log.debug1("backup copy message: Type %s. message: %s", message_type, message_content)
       if message_type == :SEND_COPY_MESSAGE
         bytes_written = @tcp_client.send_obj([:COPY_MESSAGE, message_content])
-        Log.debug2("Sending copy message succeeded? bytes_written: #{bytes_written}.")
+        Log.debug2("Sending copy message succeeded? bytes_written: %s.", bytes_written)
       elsif message_type == :COPY_CHUNK
         if @file_receiver.receive_chunk(*message_content)
           file_checksum, offset, file_size, content, content_checksum = message_content
@@ -301,7 +303,7 @@ module ContentServer
       # check if checksum exists in final destination
       dest_path = FileReceiver.destination_filename(Params['backup_destination_folder'][0]['path'], checksum)
       need_to_copy = !File.exists?(dest_path)
-      Log.debug1("Returning ack for content:'#{checksum}' timestamp:'#{timestamp}' Ack:'#{need_to_copy}'")
+      Log.debug1("Returning ack for content:'%s' timestamp:'%s' Ack:'%s'", checksum, timestamp, need_to_copy)
       @tcp_client.send_obj([:ACK_MESSAGE, [timestamp,
                                            need_to_copy,
                                            checksum]])
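The ack sent back here is simply "does the checksum-derived destination path already exist". A stand-alone sketch of that decision; destination_filename below is a simplified stand-in, since the exact layout produced by FileReceiver.destination_filename is not shown in this diff:

    # Simplified stand-in helper, illustration only.
    def destination_filename(folder, checksum)
      File.join(folder, checksum[0, 2], checksum)
    end

    backup_folder = '/tmp/backup_data'      # hypothetical Params['backup_destination_folder'][0]['path']
    checksum = 'ab12cd34'
    dest_path = destination_filename(backup_folder, checksum)
    need_to_copy = !File.exist?(dest_path)  # ack 'true' means the content server should stream the file
    puts "Ack for #{checksum}: #{need_to_copy}"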