content_server 1.3.1 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. data/lib/content_data/content_data.rb +194 -56
  2. data/lib/content_data/version.rb +1 -1
  3. data/lib/content_server/backup_server.rb +4 -27
  4. data/lib/content_server/content_server.rb +3 -27
  5. data/lib/content_server/file_streamer.rb +2 -0
  6. data/lib/content_server/remote_content.rb +1 -2
  7. data/lib/content_server/server.rb +23 -3
  8. data/lib/content_server/version.rb +1 -1
  9. data/lib/email/version.rb +1 -1
  10. data/lib/file_copy/version.rb +1 -1
  11. data/lib/file_indexing/index_agent.rb +1 -1
  12. data/lib/file_indexing/version.rb +1 -1
  13. data/lib/file_monitoring/file_monitoring.rb +45 -32
  14. data/lib/file_monitoring/monitor_path.rb +219 -181
  15. data/lib/file_monitoring/version.rb +1 -1
  16. data/lib/file_utils/file_generator/file_generator.rb +1 -1
  17. data/lib/file_utils/file_utils.rb +2 -2
  18. data/lib/file_utils/version.rb +1 -1
  19. data/lib/log/version.rb +1 -1
  20. data/lib/networking/version.rb +1 -1
  21. data/lib/params/version.rb +1 -1
  22. data/lib/process_monitoring/version.rb +1 -1
  23. data/lib/run_in_background/version.rb +1 -1
  24. data/lib/testing_memory/testing_memory.rb +1 -1
  25. data/lib/testing_server/testing_server.rb +1 -1
  26. data/lib/testing_server/version.rb +1 -1
  27. data/spec/content_data/validations_spec.rb +2 -2
  28. data/spec/content_server/file_streamer_spec.rb +5 -0
  29. data/spec/networking/tcp_spec.rb +1 -3
  30. data/spec/validations/index_validations_spec.rb +2 -2
  31. data/test/content_data/content_data_test.rb +8 -7
  32. data/test/file_generator/file_generator_spec.rb +3 -2
  33. data/test/file_monitoring/monitor_path_test.rb +38 -4
  34. data/test/file_utils/fileutil_mksymlink_test.rb +9 -0
  35. data/test/file_utils/time_modification_test.rb +6 -2
  36. data/test/run_in_background/test_app +17 -15
  37. metadata +2 -93
  38. data/lib/content_server/queue_indexer.rb +0 -86
  39. data/test/file_indexing/index_agent_test.rb +0 -51
  40. data/test/file_indexing/index_agent_test/New.txt +0 -0
  41. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/bin/libexslt.dll +0 -0
  42. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/bin/libxslt.dll +0 -0
  43. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/bin/xsltproc.exe +0 -0
  44. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libexslt/exslt.h +0 -102
  45. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libexslt/exsltconfig.h +0 -73
  46. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libexslt/exsltexports.h +0 -140
  47. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libexslt/libexslt.h +0 -29
  48. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/attributes.h +0 -38
  49. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/documents.h +0 -93
  50. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/extensions.h +0 -262
  51. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/extra.h +0 -80
  52. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/functions.h +0 -78
  53. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/imports.h +0 -75
  54. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/keys.h +0 -53
  55. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/libxslt.h +0 -30
  56. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/namespaces.h +0 -68
  57. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/numbersInternals.h +0 -69
  58. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/pattern.h +0 -81
  59. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/preproc.h +0 -43
  60. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/security.h +0 -104
  61. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/templates.h +0 -77
  62. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/transform.h +0 -207
  63. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/trio.h +0 -216
  64. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/triodef.h +0 -220
  65. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/variables.h +0 -91
  66. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/win32config.h +0 -101
  67. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xslt.h +0 -103
  68. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xsltInternals.h +0 -1967
  69. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xsltconfig.h +0 -172
  70. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xsltexports.h +0 -142
  71. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xsltlocale.h +0 -57
  72. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xsltutils.h +0 -309
  73. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/include/libxslt/xsltwin32config.h +0 -105
  74. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/lib/libexslt.lib +0 -0
  75. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/lib/libexslt_a.lib +0 -0
  76. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/lib/libxslt.lib +0 -0
  77. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/lib/libxslt_a.lib +0 -0
  78. data/test/file_indexing/index_agent_test/libxslt-1.1.26.win32/readme.txt +0 -22
  79. data/test/file_indexing/index_agent_test/patterns.input +0 -3
  80. data/test/file_monitoring/file_monitoring_test.rb +0 -0
  81. data/test/file_monitoring/file_monitoring_test/conf.yml +0 -4
  82. data/test/file_monitoring/file_monitoring_test/conf_win32.yml +0 -5
  83. data/test/file_monitoring/file_monitoring_test/log +0 -56

data/lib/content_data/content_data.rb

@@ -29,6 +29,8 @@ module ContentData
   # instances which was added to @contents_info
   class ContentData
 
+    CHUNK_SIZE = 5000
+
     def initialize(other = nil)
       if other.nil?
         @contents_info = {} # Checksum --> [size, paths-->time(instance), time(content)]

@@ -46,34 +48,44 @@ module ContentData
     end
 
     def clone_instances_info
-      @instances_info.keys.inject({}) { |clone_instances_info, location|
+      clone_instances_info = {}
+      instances_info_enum = @instances_info.each_key
+      loop {
+        location = instances_info_enum.next rescue break
         clone_instances_info[[location[0].clone, location[1].clone]] = @instances_info[location].clone
-        clone_instances_info
       }
+      clone_instances_info
     end
 
     def clone_contents_info
-      @contents_info.keys.inject({}) { |clone_contents_info, checksum|
+      clone_contents_info = {}
+      contents_info_enum = @contents_info.each_key
+      loop {
+        checksum = contents_info_enum.next rescue break
         instances = @contents_info[checksum]
         size = instances[0]
         content_time = instances[2]
         instances_db = instances[1]
         instances_db_cloned = {}
-        instances_db.keys.each { |location|
+        instances_db_enum = instances_db.each_key
+        loop {
+          location = instances_db_enum.next rescue break
           instance_mtime = instances_db[location]
          instances_db_cloned[[location[0].clone,location[1].clone]]=instance_mtime
         }
         clone_contents_info[checksum] = [size,
                                          instances_db_cloned,
                                          content_time]
-        clone_contents_info
       }
+      clone_contents_info
     end
 
     # iterator over @contents_info data structure (not including instances)
     # block is provided with: checksum, size and content modification time
     def each_content(&block)
-      @contents_info.keys.each { |checksum|
+      contents_enum = @contents_info.each_key
+      loop {
+        checksum = contents_enum.next rescue break
         content_val = @contents_info[checksum]
         # provide checksum, size and content modification time to the block
         block.call(checksum,content_val[0], content_val[2])
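
Throughout content_data.rb, this release swaps `Hash#keys`-based iteration (`keys.each`, `keys.inject`) for `each_key` enumerators drained with `loop` and `Enumerator#next`. The motivation is memory: `Hash#keys` snapshots every key into a fresh Array, which is expensive when @contents_info holds millions of checksums, while an enumerator walks the hash lazily. A minimal standalone sketch of the pattern (illustrative data, not taken from the gem):

    big = {}
    100_000.times { |i| big["checksum_#{i}"] = i }

    # Old style: allocates a 100_000-element Array of keys up front.
    big.keys.each { |k| big[k] }

    # New style: each_key without a block returns a lazy Enumerator;
    # Enumerator#next raises StopIteration when exhausted, which the
    # gem turns into `break` via the rescue modifier.
    enum = big.each_key
    loop {
      k = enum.next rescue break
      big[k]
    }

Two caveats: `loop` already rescues StopIteration on its own, so the explicit `rescue break` is redundant but harmless, and unlike the `keys` snapshot, a live enumerator raises if new keys are inserted into the hash mid-iteration.
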

@@ -84,9 +96,13 @@ module ContentData
     # block is provided with: checksum, size, content modification time,
     # instance modification time, server and file path
     def each_instance(&block)
-      @contents_info.keys.each { |checksum|
+      contents_enum = @contents_info.each_key
+      loop {
+        checksum = contents_enum.next rescue break
         content_info = @contents_info[checksum]
-        content_info[1].keys.each {|location|
+        content_info_enum = content_info[1].each_key
+        loop {
+          location = content_info_enum.next rescue break
           # provide the block with: checksum, size, content modification time,instance modification time,
           # server and path.
           instance_modification_time = content_info[1][location]

@@ -101,7 +117,9 @@ module ContentData
     # instance modification time, server and file path
     def content_each_instance(checksum, &block)
       content_info = @contents_info[checksum]
-      content_info[1].keys.each {|location|
+      instances_db_enum = content_info[1].each_key
+      loop {
+        location = instances_db_enum.next rescue break
         # provide the block with: checksum, size, content modification time,instance modification time,
         # server and path.
         instance_modification_time = content_info[1][location]

@@ -115,11 +133,7 @@ module ContentData
     end
 
     def instances_size()
-      counter=0
-      @contents_info.values.each { |content_info|
-        counter += content_info[1].length
-      }
-      counter
+      @instances_info.length
     end
 
     def checksum_instances_size(checksum)
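
The `instances_size` rewrite replaces an O(contents) scan with an O(1) lookup. It is valid because the class maintains both indexes in lockstep: every instance is recorded once under its content in @contents_info and once in the location-to-checksum map @instances_info, so the per-content counts always sum to the map's size. A toy check of that invariant (made-up data, simplified shapes):

    # contents_info: checksum -> [size, {[server, path] -> mtime}, content_time]
    contents_info = {
      'aaa' => [10, { ['s1', '/a'] => 1, ['s2', '/a'] => 2 }, 1],
      'bbb' => [20, { ['s1', '/b'] => 3 }, 3]
    }
    # instances_info: [server, path] -> checksum (one entry per instance)
    instances_info = {
      ['s1', '/a'] => 'aaa', ['s2', '/a'] => 'aaa', ['s1', '/b'] => 'bbb'
    }

    old_count = contents_info.values.reduce(0) { |sum, info| sum + info[1].length }
    p old_count == instances_info.length # => true, so the O(1) form is safe
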

@@ -137,6 +151,13 @@ module ContentData
 
     def add_instance(checksum, size, server, path, modification_time)
       location = [server, path]
+
+      # file was changed but remove_instance was not called
+      if (@instances_info.include?(location) && @instances_info[location] != checksum)
+        Log.warning("#{server}:#{path} file already exists with different checksum")
+        remove_instance(server, path)
+      end
+
       content_info = @contents_info[checksum]
       if content_info.nil?
         @contents_info[checksum] = [size,
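
The new guard in `add_instance` covers the case where a monitored file's content changed but no `remove_instance` call arrived: without it, the location would be repointed at the new checksum while the old content record still listed the instance. A toy model of the eviction (hypothetical, heavily simplified bookkeeping, not the gem's class):

    # contents_info:  checksum -> [size, {location -> mtime}, content_time]
    # instances_info: location -> checksum
    contents_info  = {}
    instances_info = {}

    add = lambda { |checksum, size, server, path, mtime|
      location = [server, path]
      # Evict the stale record if the same path reappears with new content.
      if instances_info.include?(location) && instances_info[location] != checksum
        old = instances_info.delete(location)
        contents_info[old][1].delete(location)
        contents_info.delete(old) if contents_info[old][1].empty?
      end
      contents_info[checksum] ||= [size, {}, mtime]
      contents_info[checksum][1][location] = mtime
      instances_info[location] = checksum
    }

    add.call('aaa', 10, 'srv', '/f.txt', 100)
    add.call('bbb', 12, 'srv', '/f.txt', 200) # same file, new content
    p instances_info     # {["srv", "/f.txt"]=>"bbb"}
    p contents_info.keys # ["bbb"] - the stale "aaa" record is gone
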

@@ -186,8 +207,10 @@ module ContentData
     # found records are removed from both @instances_info and @instances_info.
     # input params: server & dir_to_remove - are used to check each instance unique key (called location)
     # removes also content\s, if a content\s become\s empty after removing instance\s
-    def remove_directory(server, dir_to_remove)
-      @contents_info.keys.each { |checksum|
+    def remove_directory(dir_to_remove, server)
+      contents_enum = @contents_info.each_key
+      loop {
+        checksum = contents_enum.next rescue break
         instances = @contents_info[checksum][1]
         instances.each_key { |location|
           if location[0] == server and location[1].scan(dir_to_remove).size > 0

@@ -247,70 +270,181 @@ module ContentData
       return_str
     end
 
+    # Write content data to file.
+    # Write is using chunks (for both content chunks and instances chunks)
+    # Chunk is used to maximize GC affect. The temporary memory of each chunk is GCed.
+    # Without the chunks used in a dipper stack level, GC keeps the temporary objects as part of the stack context.
     def to_file(filename)
       content_data_dir = File.dirname(filename)
       FileUtils.makedirs(content_data_dir) unless File.directory?(content_data_dir)
-      file = File.open(filename, 'w')
-      file.write("#{@contents_info.length}\n")
-      each_content { |checksum, size, content_mod_time|
-        file.write("#{checksum},#{size},#{content_mod_time}\n")
-      }
-      file.write("#{@instances_info.length}\n")
-      each_instance { |checksum, size, _, instance_mod_time, server, path|
-        file.write("#{checksum},#{size},#{server},#{path},#{instance_mod_time}\n")
+      File.open(filename, 'w') { |file|
+        file.write("#{@contents_info.length}\n")
+        contents_enum = @contents_info.each_key
+        content_chunks = @contents_info.length / CHUNK_SIZE + 1
+        chunks_counter = 0
+        while chunks_counter < content_chunks
+          to_file_contents_chunk(file,contents_enum, CHUNK_SIZE)
+          GC.start
+          chunks_counter += 1
+        end
+        file.write("#{@instances_info.length}\n")
+        contents_enum = @contents_info.each_key
+        chunks_counter = 0
+        while chunks_counter < content_chunks
+          to_file_instances_chunk(file,contents_enum, CHUNK_SIZE)
+          GC.start
+          chunks_counter += 1
+        end
       }
-      file.close
+    end
+
+    def to_file_contents_chunk(file, contents_enum, chunk_size)
+      chunk_counter = 0
+      while chunk_counter < chunk_size
+        checksum = contents_enum.next rescue return
+        content_info = @contents_info[checksum]
+        file.write("#{checksum},#{content_info[0]},#{content_info[2]}\n")
+        chunk_counter += 1
+      end
+    end
+
+    def to_file_instances_chunk(file, contents_enum, chunk_size)
+      chunk_counter = 0
+      while chunk_counter < chunk_size
+        checksum = contents_enum.next rescue return
+        content_info = @contents_info[checksum]
+        instances_db_enum = content_info[1].each_key
+        loop {
+          location = instances_db_enum.next rescue break
+          # provide the block with: checksum, size, content modification time,instance modification time,
+          # server and path.
+          instance_modification_time = content_info[1][location]
+          file.write("#{checksum},#{content_info[0]},#{location[0]},#{location[1]},#{instance_modification_time}\n")
+        }
+        chunk_counter += 1
+        break if chunk_counter == chunk_size
+      end
     end
 
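
`to_file` now streams both sections in CHUNK_SIZE batches and calls GC.start between batches; per the comment in the hunk above, running GC from the shallow chunk loop lets each batch's temporaries be reclaimed instead of being kept alive by deeper iteration frames. A minimal sketch of the write pattern (made-up data shapes, contents section only):

    CHUNK_SIZE = 5000
    contents = {} # checksum -> [size, {[server, path] -> mtime}, content_time]
    50_000.times { |i| contents["sum#{i}"] = [i, { ['srv', "/f#{i}"] => i }, i] }

    File.open('content.data', 'w') do |file|
      file.write("#{contents.length}\n")
      enum = contents.each_key
      chunks = contents.length / CHUNK_SIZE + 1
      chunks.times do
        CHUNK_SIZE.times do
          checksum = enum.next rescue break # enumerator exhausted
          size, _instances, ctime = contents[checksum]
          file.write("#{checksum},#{size},#{ctime}\n")
        end
        GC.start # reclaim this chunk's temporaries from a shallow frame
      end
    end

The trade-off is CPU for peak memory: GC.start forces a full collection per chunk, which is slow, but it caps the transient allocations of a multi-million-line dump.
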

     # TODO validation that file indeed contains ContentData missing
+    # Loading db from file using chunks for better memory performance
     def from_file(filename)
-      lines = IO.readlines(filename)
-      number_of_contents = lines[0].to_i
-      i = 1 + number_of_contents
-      number_of_instances = lines[i].to_i
-      i += 1
-      number_of_instances.times {
-        if lines[i].nil?
-          Log.warning("line ##{i} is nil !!!, Backing filename: #{filename} to #{filename}.bad")
-          FileUtils.cp(filename, "#{filename}.bad")
-          Log.warning("Lines:\n#{lines[i].join("\n")}")
-        else
-          parameters = lines[i].split(',')
-          # bugfix: if file name consist a comma then parsing based on comma separating fails
-          if (parameters.size > 5)
-            (4..parameters.size-2).each do |i|
-              parameters[3] = [parameters[3], parameters[i]].join(",")
-            end
-            (4..parameters.size-2).each do |i|
-              parameters.delete_at(4)
-            end
+      # read first line (number of contents)
+      # calculate line number (number of instances)
+      # read number of instances.
+      # loop over instances lines (using chunks) and add instances
+
+      File.open(filename, 'r') { |file|
+        # Get number of contents (at first line)
+        number_of_contents = file.gets # this gets the next line or return nil at EOF
+        unless (number_of_contents and number_of_contents.match(/^[\d]+$/)) # check that line is of Number format
+          return reset_load_from_file(filename, file, "number of contents should be a number. We got:#{number_of_contents}")
+        end
+        number_of_contents = number_of_contents.to_i
+        # advance file lines over all contents. We need only the instances data to build the content data object
+        # use chunks and GC
+        contents_chunks = number_of_contents / CHUNK_SIZE
+        contents_chunks += 1 if (contents_chunks * CHUNK_SIZE < number_of_contents)
+        chunk_index = 0
+        while chunk_index < contents_chunks
+          chunk_size = CHUNK_SIZE
+          if chunk_index + 1 == contents_chunks
+            # update last chunk size
+            chunk_size = number_of_contents - (chunk_index * CHUNK_SIZE)
           end
+          return unless read_contents_chunk(filename, file, chunk_size)
+          GC.start
+          chunk_index += 1
+        end
 
-          add_instance(parameters[0],
-                       parameters[1].to_i,
-                       parameters[2],
-                       parameters[3],
-                       parameters[4].to_i)
+        # get number of instances
+        number_of_instances = file.gets
+        unless (number_of_instances and number_of_instances.match(/^[\d]+$/)) # check that line is of Number format
+          return reset_load_from_file(filename, file, "number of instances should be a Number. We got:#{number_of_instances}")
+        end
+        number_of_instances = number_of_instances.to_i
+        # read in instances chunks and GC
+        instances_chunks = number_of_instances / CHUNK_SIZE
+        instances_chunks += 1 if (instances_chunks * CHUNK_SIZE < number_of_instances)
+        chunk_index = 0
+        while chunk_index < instances_chunks
+          chunk_size = CHUNK_SIZE
+          if chunk_index + 1 == instances_chunks
+            # update last chunk size
+            chunk_size = number_of_instances - (chunk_index * CHUNK_SIZE)
+          end
+          return unless read_instances_chunk(filename, file, chunk_size)
+          GC.start
+          chunk_index += 1
         end
-        i += 1
       }
     end
 
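
For reference, the on-disk format `from_file` consumes, reconstructed from the write side shown above (values illustrative): a content count, one `checksum,size,content_mtime` line per content, an instance count, then one `checksum,size,server,path,instance_mtime` line per instance. Note that `from_file` merely skips over the content lines, because `add_instance` rebuilds every content record from the instance lines alone.

    2
    aaa111,1024,1388531871
    bbb222,2048,1388531899
    3
    aaa111,1024,serverA,/data/a.txt,1388531871
    aaa111,1024,serverB,/mirror/a.txt,1388531902
    bbb222,2048,serverA,/data/b.txt,1388531899
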

+    def read_contents_chunk(filename, file, chunk_size)
+      chunk_index = 0
+      while chunk_index < chunk_size
+        return reset_load_from_file(filename, file, "Expecting content line but " +
+                                    "reached end of file after line #{$.}") unless file.gets
+        chunk_index += 1
+      end
+      true
+    end
+
+    def read_instances_chunk(filename, file, chunk_size)
+      chunk_index = 0
+      while chunk_index < chunk_size
+        instance_line = file.gets
+        return reset_load_from_file(filename, file, "Expected to read Instance line but reached EOF") unless instance_line
+        parameters = instance_line.split(',')
+        # bugfix: if file name consist a comma then parsing based on comma separating fails
+        if (parameters.size > 5)
+          (4..parameters.size-2).each do |i|
+            parameters[3] = [parameters[3], parameters[i]].join(",")
+          end
+          (4..parameters.size-2).each do |i|
+            parameters.delete_at(4)
+          end
+        end
+
+        add_instance(parameters[0],
+                     parameters[1].to_i,
+                     parameters[2],
+                     parameters[3],
+                     parameters[4].to_i)
+        chunk_index += 1
+      end
+      true
+    end
+
+    def reset_load_from_file(file_name, file_io, err_msg)
+      Log.error("unexpected error reading file:#{file_name}\nError message:#{err_msg}")
+      @contents_info = {} # Checksum --> [size, paths-->time(instance), time(content)]
+      @instances_info = {} # location --> checksum to optimize instances query
+      file_io.close
+      nil
+    end
+
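
`read_instances_chunk` carries over the old parser's bugfix for paths that themselves contain commas: a well-formed line has exactly five fields, so any surplus tokens produced by split(',') must belong to the path. Tokens at indices 4..n-2 are folded back into field 3, leaving the final token as the mtime. The same logic, standalone, with an illustrative line:

    line = 'aaa111,1024,serverA,/data/report,final,v2.txt,1388531899'
    parameters = line.split(',')
    if parameters.size > 5
      # Fold tokens 4..n-2 back into the path field...
      (4..parameters.size - 2).each { |i|
        parameters[3] = [parameters[3], parameters[i]].join(',')
      }
      # ...then drop them (each delete_at(4) shifts the remainder left).
      (4..parameters.size - 2).each { parameters.delete_at(4) }
    end
    p parameters
    # ["aaa111", "1024", "serverA", "/data/report,final,v2.txt", "1388531899"]
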
     # for each content, all time fields (content and instances) are replaced with the
     # min time found, while going through all time fields.
     def unify_time()
-      @contents_info.keys.each { |checksum|
+      contents_enum = @contents_info.each_key
+      loop {
+        checksum = contents_enum.next rescue break
         content_info = @contents_info[checksum]
         min_time_per_checksum = content_info[2]
         instances = content_info[1]
-        instances.keys.each { |location|
+        instances_enum = instances.each_key
+        loop {
+          location = instances_enum.next rescue break
           instance_mod_time = instances[location]
           if instance_mod_time < min_time_per_checksum
             min_time_per_checksum = instance_mod_time
           end
         }
         # update all instances with min time
-        instances.keys.each { |location|
+        instances_enum = instances.each_key
+        loop {
+          location = instances_enum.next rescue break
           instances[location] = min_time_per_checksum
         }
         # update content time with min time

@@ -353,11 +487,15 @@ module ContentData
       end
 
       is_valid = true
-      @contents_info.keys.each { |checksum|
+      contents_enum = @contents_info.each_key
+      loop {
+        checksum = contents_enum.next rescue break
         instances = @contents_info[checksum]
         content_size = instances[0]
         content_mtime = instances[2]
-        instances[1].keys.each { |unique_path|
+        instances_enum = instances[1].each_key
+        loop {
+          unique_path = instances_enum.next rescue break
           instance_mtime = instances[1][unique_path]
           instance_info = [checksum, content_mtime, content_size, instance_mtime]
           instance_info.concat(unique_path)

data/lib/content_data/version.rb

@@ -1,3 +1,3 @@
 module ContentData
-  VERSION = "1.1.0"
+  VERSION = "1.2.0"
 end

data/lib/content_server/backup_server.rb

@@ -4,7 +4,6 @@ require 'thread'
 
 require 'content_data'
 require 'content_server/content_receiver'
-require 'content_server/queue_indexer'
 require 'content_server/queue_copy'
 require 'content_server/remote_content'
 require 'content_server/server'

@@ -57,14 +56,14 @@ module ContentServer
     # initial global local content data object
     $local_content_data_lock = Mutex.new
     $local_content_data = ContentData::ContentData.new
+    $last_content_data_id = $local_content_data.unique_id
 
     # Read here for initial content data that exist from previous system run
     content_data_path = Params['local_content_data_path']
-    last_content_data_id = nil
     if File.exists?(content_data_path) and !File.directory?(content_data_path)
       Log.info("reading initial content data that exist from previous system run from file:%s", content_data_path)
       $local_content_data.from_file(content_data_path)
-      last_content_data_id = $local_content_data.unique_id
+      $last_content_data_id = $local_content_data.unique_id
     else
       if File.directory?(content_data_path)
         raise("Param:'local_content_data_path':'%s' cannot be a directory name", Params['local_content_data_path'])

@@ -75,21 +74,13 @@ module ContentServer
     end
 
     Log.info("Init monitoring")
-    monitoring_events = Queue.new
     fm = FileMonitoring::FileMonitoring.new()
-    fm.set_event_queue(monitoring_events)
+
     # Start monitoring and writing changes to queue
     all_threads << Thread.new do
      fm.monitor_files
     end
 
-    # # # # # # # # # # # # # #
-    # Initialize/Start local indexer
-    Log.debug1('Start indexer')
-    queue_indexer = QueueIndexer.new(monitoring_events)
-    # Start indexing on demand and write changes to queue
-    all_threads << queue_indexer.run
-
     # # # # # # # # # # # # # # # # # # # # # # # #
     # thread: Start dump local content data to file
     Log.debug1('Init thread: flush local content data to file')

@@ -97,21 +88,7 @@ module ContentServer
       FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
       loop{
         sleep(Params['data_flush_delay'])
-        Log.info('Start flush local content data to file.')
-        $testing_memory_log.info('Start flush content data to file') if $testing_memory_active
-        written_to_file = false
-        $local_content_data_lock.synchronize{
-          local_content_data_unique_id = $local_content_data.unique_id
-          if (local_content_data_unique_id != last_content_data_id)
-            last_content_data_id = local_content_data_unique_id
-            $local_content_data.to_file($tmp_content_data_file)
-            written_to_file = true
-          else
-            Log.info('no need to flush. content data has not changed')
-          end
-        }
-        File.rename($tmp_content_data_file, Params['local_content_data_path']) if written_to_file
-        $testing_memory_log.info("End flush content data to file") if $testing_memory_active
+        ContentServer.flush_content_data
       }
     end
 
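
Both servers now delegate the periodic flush to a shared `ContentServer.flush_content_data` (its body is not shown in this diff; it presumably accounts for part of server.rb's +23 lines in the file list, together with the promotion of `last_content_data_id` to the global `$last_content_data_id`). A plausible reconstruction, assembled from the inline code this hunk removes; treat it as a sketch, not the shipped source:

    module ContentServer
      def self.flush_content_data
        Log.info('Start flush local content data to file.')
        $testing_memory_log.info('Start flush content data to file') if $testing_memory_active
        written_to_file = false
        $local_content_data_lock.synchronize {
          local_content_data_unique_id = $local_content_data.unique_id
          if local_content_data_unique_id != $last_content_data_id
            $last_content_data_id = local_content_data_unique_id
            $local_content_data.to_file($tmp_content_data_file)
            written_to_file = true
          else
            Log.info('no need to flush. content data has not changed')
          end
        }
        File.rename($tmp_content_data_file, Params['local_content_data_path']) if written_to_file
        $testing_memory_log.info('End flush content data to file') if $testing_memory_active
      end
    end
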

data/lib/content_server/content_server.rb

@@ -4,7 +4,6 @@ require 'thread'
 
 require 'content_data'
 require 'content_server/content_receiver'
-require 'content_server/queue_indexer'
 require 'content_server/queue_copy'
 require 'content_server/remote_content'
 require 'file_indexing'

@@ -46,14 +45,14 @@ module ContentServer
     # initial global local content data object
     $local_content_data_lock = Mutex.new
     $local_content_data = ContentData::ContentData.new
+    $last_content_data_id = $local_content_data.unique_id
 
     # Read here for initial content data that exist from previous system run
     content_data_path = Params['local_content_data_path']
-    last_content_data_id = nil
     if File.exists?(content_data_path) and !File.directory?(content_data_path)
       Log.info("reading initial content data that exist from previous system run from file:%s", content_data_path)
       $local_content_data.from_file(content_data_path)
-      last_content_data_id = $local_content_data.unique_id
+      $last_content_data_id = $local_content_data.unique_id
     else
       if File.directory?(content_data_path)
         raise("Param:'local_content_data_path':'%s'cannot be a directory name", Params['local_content_data_path'])

@@ -64,21 +63,12 @@ module ContentServer
     end
 
     Log.info('Init monitoring')
-    monitoring_events = Queue.new
     fm = FileMonitoring::FileMonitoring.new()
-    fm.set_event_queue(monitoring_events)
     # Start monitoring and writing changes to queue
     all_threads << Thread.new do
       fm.monitor_files
     end
 
-    # # # # # # # # # # # # # #
-    # Initialize/Start local indexer
-    Log.debug1('Start indexer')
-    queue_indexer = QueueIndexer.new(monitoring_events)
-    # Start indexing on demand and write changes to queue
-    all_threads << queue_indexer.run
-
     # # # # # # # # # # # # # # # # # # # # # # # #
     # thread: Start dump local content data to file
     Log.debug1('Init thread: flush local content data to file')

@@ -86,21 +76,7 @@ module ContentServer
       FileUtils.mkdir_p(Params['tmp_path']) unless File.directory?(Params['tmp_path'])
       loop{
         sleep(Params['data_flush_delay'])
-        Log.info('Start flush local content data to file.')
-        $testing_memory_log.info('Start flush content data to file') if $testing_memory_active
-        written_to_file = false
-        $local_content_data_lock.synchronize{
-          local_content_data_unique_id = $local_content_data.unique_id
-          if (local_content_data_unique_id != last_content_data_id)
-            last_content_data_id = local_content_data_unique_id
-            $local_content_data.to_file($tmp_content_data_file)
-            written_to_file = true
-          else
-            Log.info('no need to flush. content data has not changed')
-          end
-        }
-        File.rename($tmp_content_data_file, Params['local_content_data_path']) if written_to_file
-        $testing_memory_log.info("End flush content data to file") if $testing_memory_active
+        ContentServer.flush_content_data
       }
     end