content_server 1.6.0 → 1.6.1

checksums.yaml CHANGED
@@ -1,15 +1,7 @@
  ---
- !binary "U0hBMQ==":
- metadata.gz: !binary |-
- ODk2ZWZlZGIwODU5Y2I1YTg3YzIyZWZmNDQyOGRiOTJhMWMwODJiZg==
- data.tar.gz: !binary |-
- NWQxY2E1NGE2ZmQ3YzRlYzFjN2QwNTRlMWMxYzFjMmZhODlhNDVlNA==
+ SHA1:
+ metadata.gz: 40813241e0bc388830199783e2c5b6d5e042ce6e
+ data.tar.gz: 59340d51c6c59f5d73460796f790c39de77444be
  SHA512:
- metadata.gz: !binary |-
- MmViNDAxZDU0NTk4ZDY0YzNiYTkxZWVkZjAxYzMyNjZhM2Y0ZDE2MmE3NDNl
- YTE3MTJlZGU0NzBjZjJjYzNmM2Q5YWM1NzEyNzgxZDM4MmRmZDIyOWUxMGY3
- MjU4YjMxMDFlOThkNDM1M2ZiZTJhN2RjNzFkZmZhYWZkZWE4MTM=
- data.tar.gz: !binary |-
- YmQxOWYzZGFhZDY5YjNmOTBkN2VhMDBiOTQwZTdjMmQxNzhmZDgxMzE3YzVl
- OTg4NDRmYmE5ZDU3ZWJhMGU2YzNiYTQ2Y2M4OTEyMGQzYzNmOWJkYzc0YmMw
- OTJhNmQ3YTY3MDIzZDI5NmYzOThhNjA0MjQ4MTI0ZGY5OGI1Nzc=
+ metadata.gz: 7741efd82d5a28a6c93bdf067fb3d53310155799ed8d3bb3803e5a299b7f78e15aa36b2583674311e8bda4813f4aadc5efac5930728bd068dcdc88b9a5271494
+ data.tar.gz: 12192e8cbce686cef6c473cc1668da1c887c22444de5d27b39e76af8e6d344c92d1dd43226be721654b07f3d02c8daa8f1d8beb40f0b0a8d153ed6f85d8a8c29
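The checksums switch from base64-wrapped digests to plain hex SHA1/SHA512 digests of the gem's two tar members. A minimal verification sketch, assuming a locally downloaded copy of the gem (the file name below is an assumption, not part of this changeset):

  require 'digest'
  require 'rubygems/package'

  gem_path = 'content_server-1.6.1.gem' # hypothetical local copy of the gem

  File.open(gem_path, 'rb') do |io|
    Gem::Package::TarReader.new(io).each do |entry|
      next unless ['metadata.gz', 'data.tar.gz'].include?(entry.full_name)
      bytes = entry.read
      puts "#{entry.full_name} SHA1:   #{Digest::SHA1.hexdigest(bytes)}"
      puts "#{entry.full_name} SHA512: #{Digest::SHA512.hexdigest(bytes)}"
    end
  end

The printed digests can be compared directly against the values in the hunk above.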
@@ -1,6 +1,8 @@
+ require 'csv'
  require 'content_server/server'
  require 'log'
  require 'params'
+ require 'zlib'

  module ContentData
  Params.string('instance_check_level', 'shallow', 'Defines check level. Supported levels are: ' \
@@ -31,6 +33,8 @@ module ContentData

  CHUNK_SIZE = 5000

+ # NOTE Cloning is a time/memory expensive operation.
+ # It is highly recommended to avoid it.
  def initialize(other = nil)
  if other.nil?
  @contents_info = {} # Checksum --> [size, paths-->time(instance), time(content)]
@@ -292,6 +296,7 @@ module ContentData
  end
  end

+ # Don't use in production, only for testing.
  def to_s
  return_str = ""
  contents_str = ""
@@ -306,14 +311,92 @@ module ContentData
  return_str << contents_str
  return_str << "%d\n" % [@instances_info.length]
  return_str << instances_str
+
+
+ symlinks_str = ""
+ each_symlink { |server, path, target|
+ symlinks_str << "%s,%s,%s\n" % [server, path, target]
+ }
+ return_str << symlinks_str
+
  return_str
  end

+ # Write content data to file.
+ def to_file(filename)
+ content_data_dir = File.dirname(filename)
+ FileUtils.makedirs(content_data_dir) unless File.directory?(content_data_dir)
+ Zlib::GzipWriter.open(filename) do |gz|
+ gz.write [@instances_info.length].to_csv
+ each_instance { |checksum, size, content_mod_time, instance_mod_time, server, path, inst_index_time|
+ gz.write [checksum, size, server, path, instance_mod_time, inst_index_time].to_csv
+ }
+ gz.write [@symlinks_info.length].to_csv
+ each_symlink { |file, path, target|
+ gz.write [file, path, target].to_csv
+ }
+ end
+ end
+
+ # Imports content data from file.
+ # This method will throw an exception if the file is not in the correct format.
+ def from_file(filename)
+ unless File.exists? filename
+ raise ArgumentError.new "No such file #{filename}"
+ end
+
+ number_of_instances = nil
+ number_of_symlinks = nil
+ Zlib::GzipReader.open(filename) { |gz|
+ gz.each_line do |line|
+ row = line.parse_csv
+ if number_of_instances == nil
+ begin
+ # get number of instances
+ number_of_instances = row[0].to_i
+ rescue ArgumentError
+ raise("Parse error of content data file:#{filename} line ##{$.}\n" +
+ "number of instances should be a Number. We got:#{number_of_instances}")
+ end
+ elsif number_of_instances > 0
+ if (6 != row.length)
+ raise("Parse error of content data file:#{filename} line ##{$.}\n" +
+ "Expected to read 6 fields (comma separated) but got #{row.length}.\nLine:#{line}")
+ end
+ add_instance(row[0], # checksum
+ row[1].to_i, # size
+ row[2], # server
+ row[3], # path
+ row[4].to_i, # mod time
+ row[5].to_i) # index time
+ number_of_instances -= 1
+ elsif number_of_symlinks == nil
+ begin
+ # get number of symlinks
+ number_of_symlinks = row[0].to_i
+ rescue ArgumentError
+ raise("Parse error of content data file:#{filename} line ##{$.}\n" +
+ "number of symlinks should be a Number. We got:#{number_of_symlinks}")
+ end
+ elsif number_of_symlinks > 0
+ if (3 != row.length)
+ raise("Parse error of content data file:#{filename} line ##{$.}\n" +
+ "Expected to read 3 fields (comma separated) but got #{row.length}.\nLine:#{line}")
+ end
+ @symlinks_info[[row[0], row[1]]] = row[2]
+ number_of_symlinks -= 1
+ end
+ end
+ }
+ end
+
+ ############## DEPRECATED: Old from/to file methods, still needed for migration purposes
  # Write content data to file.
  # Writing uses chunks (for both content chunks and instance chunks).
  # Chunks are used to maximize the GC effect. The temporary memory of each chunk is GCed.
  # Without chunks, when the work runs at a deeper stack level, GC keeps the temporary objects as part of the stack context.
- def to_file(filename)
+ # @deprecated
+ def to_file_old(filename)
  content_data_dir = File.dirname(filename)
  FileUtils.makedirs(content_data_dir) unless File.directory?(content_data_dir)
  File.open(filename, 'w') { |file|
@@ -323,7 +406,7 @@ module ContentData
  content_chunks = @contents_info.length / CHUNK_SIZE + 1
  chunks_counter = 0
  while chunks_counter < content_chunks
- to_file_contents_chunk(file,contents_enum, CHUNK_SIZE)
+ to_old_file_contents_chunk(file,contents_enum, CHUNK_SIZE)
  GC.start
  chunks_counter += 1
  end
@@ -333,7 +416,7 @@ module ContentData
  contents_enum = @contents_info.each_key
  chunks_counter = 0
  while chunks_counter < content_chunks
- to_file_instances_chunk(file,contents_enum, CHUNK_SIZE)
+ to_old_file_instances_chunk(file,contents_enum, CHUNK_SIZE)
  GC.start
  chunks_counter += 1
  end
@@ -343,22 +426,22 @@ module ContentData
  file.write("#{@symlinks_info.length}\n")
  loop {
  symlink_key = symlinks_info_enum.next rescue break
- file.write("#{symlink_key[0]}<#{symlink_key[1]}<#{@symlinks_info[symlink_key]}\n")
+ file.write("#{symlink_key[0]},#{symlink_key[1]},#{@symlinks_info[symlink_key]}\n")
  }
  }
  end

- def to_file_contents_chunk(file, contents_enum, chunk_size)
+ def to_old_file_contents_chunk(file, contents_enum, chunk_size)
  chunk_counter = 0
  while chunk_counter < chunk_size
  checksum = contents_enum.next rescue return
  content_info = @contents_info[checksum]
- file.write("#{checksum}<#{content_info[0]}<#{content_info[2]}\n")
+ file.write("#{checksum},#{content_info[0]},#{content_info[2]}\n")
  chunk_counter += 1
  end
  end

- def to_file_instances_chunk(file, contents_enum, chunk_size)
+ def to_old_file_instances_chunk(file, contents_enum, chunk_size)
  chunk_counter = 0
  while chunk_counter < chunk_size
  checksum = contents_enum.next rescue return
@@ -369,8 +452,8 @@ module ContentData
  # provide the block with: checksum, size, content modification time, instance modification time,
  # server and path.
  instance_modification_time,instance_index_time = content_info[1][location]
- file.write("#{checksum}<#{content_info[0]}<#{location[0]}<#{location[1]}<" +
- "#{instance_modification_time}<#{instance_index_time}\n")
+ file.write("#{checksum},#{content_info[0]},#{location[0]},#{location[1]}," +
+ "#{instance_modification_time},#{instance_index_time}\n")
  }
  chunk_counter += 1
  break if chunk_counter == chunk_size
@@ -380,7 +463,7 @@ module ContentData
  # TODO: validation that the file indeed contains ContentData is missing
  # TODO class level method?
  # Loading db from file using chunks for better memory performance
- def from_file(filename)
+ def from_file_old(filename)
  # read first line (number of contents)
  # calculate line number (number of instances)
  # read number of instances.
@@ -409,7 +492,7 @@ module ContentData
  # update last chunk size
  chunk_size = number_of_contents - (chunk_index * CHUNK_SIZE)
  end
- return unless read_contents_chunk(filename, file, chunk_size)
+ return unless read_old_contents_chunk(filename, file, chunk_size)
  GC.start
  chunk_index += 1
  end
@@ -431,7 +514,7 @@ module ContentData
  # update last chunk size
  chunk_size = number_of_instances - (chunk_index * CHUNK_SIZE)
  end
- return unless read_instances_chunk(filename, file, chunk_size)
+ return unless read_old_instances_chunk(filename, file, chunk_size)
  GC.start
  chunk_index += 1
  end
@@ -443,15 +526,15 @@ module ContentData
  "number of symlinks should be a Number. We got:#{number_of_symlinks}")
  end
  number_of_symlinks.to_i.times {
- symlinks_line = file.gets
+ symlinks_line = file.gets.strip
  unless symlinks_line
  raise("Parse error of content data file:#{filename} line ##{$.}\n" +
  "Expected to read symlink line but reached EOF")
  end
- parameters = symlinks_line.split('<')
+ parameters = symlinks_line.split(',')
  if (3 != parameters.length)
  raise("Parse error of content data file:#{filename} line ##{$.}\n" +
- "Expected to read 3 fields ('<' separated) but got #{parameters.length}.\nLine:#{symlinks_line}")
+ "Expected to read 3 fields (comma separated) but got #{parameters.length}.\nLine:#{symlinks_line}")
  end

  @symlinks_info[[parameters[0],parameters[1]]] = parameters[2]
@@ -459,7 +542,7 @@ module ContentData
  }
  end

- def read_contents_chunk(filename, file, chunk_size)
+ def read_old_contents_chunk(filename, file, chunk_size)
  chunk_index = 0
  while chunk_index < chunk_size
  unless file.gets
@@ -471,7 +554,7 @@ module ContentData
  true
  end

- def read_instances_chunk(filename, file, chunk_size)
+ def read_old_instances_chunk(filename, file, chunk_size)
  chunk_index = 0
  while chunk_index < chunk_size
  instance_line = file.gets
@@ -480,11 +563,17 @@ module ContentData
  "Expected to read Instance line but reached EOF")
  end

- parameters = instance_line.split('<')
- if (6 != parameters.length)
- raise("Parse error of content data file:#{filename} line ##{$.}\n" +
- "Expected to read 6 fields ('<' separated) but got #{parameters.length}.\nLine:#{instance_line}")
+ parameters = instance_line.split(',')
+ # bugfix: if a file name contains a comma, then parsing based on comma separation fails
+ if (parameters.size > 6)
+ (4..parameters.size-3).each do |i|
+ parameters[3] = [parameters[3], parameters[i]].join(",")
+ end
+ (4..parameters.size-3).each do |i|
+ parameters.delete_at(4)
+ end
  end
+
  add_instance(parameters[0], #checksum
  parameters[1].to_i, # size
  parameters[2], # server
@@ -495,6 +584,7 @@ module ContentData
  end
  true
  end
+ ########################## END OF DEPRECATED PART #######################

  # for each content, all time fields (content and instances) are replaced with the
  # min time found, while going through all time fields.
@@ -763,11 +853,17 @@ module ContentData
  def self.remove(a, b)
  return nil if b.nil?
  return ContentData.new(b) if a.nil?
- c = ContentData.new(b) # create new cloned content C from B
- # remove contents of A from newly cloned content A
- a.each_content { |checksum, size, content_mod_time|
- c.remove_content(checksum)
- }
+ c = ContentData.new
+ b.each_instance do |checksum, size, _, instance_mtime, server, path, index_time|
+ unless (a.content_exists(checksum))
+ c.add_instance(checksum,
+ size,
+ server,
+ path,
+ instance_mtime,
+ index_time)
+ end
+ end
  c
  end

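Taken together, the new gzip/CSV serializers and the reworked remove are used roughly as follows. This is a minimal sketch: the file paths, checksum, and size values are invented for illustration, and the ContentData.remove call mirrors the one in the performance spec added in this release.

  require 'content_data'

  cd = ContentData::ContentData.new
  cd.add_instance('a1b2c3', 1000, 'server_1', '/home/file_1', 1_400_000_000)

  cd.to_file('/tmp/content_data.gz')       # new gzipped CSV format

  copy = ContentData::ContentData.new
  copy.from_file('/tmp/content_data.gz')   # round trip back into memory

  # Migrating data written by content_server <= 1.6.0: read it with the
  # deprecated reader kept above, then rewrite it in the new format.
  legacy = ContentData::ContentData.new
  legacy.from_file_old('/tmp/content_data.old')
  legacy.to_file('/tmp/content_data.migrated.gz')

  # remove(a, b) now rebuilds the result instance by instance instead of cloning b.
  only_in_copy = ContentData.remove(cd, copy)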
@@ -1,3 +1,3 @@
  module ContentServer
- VERSION = "1.6.0"
+ VERSION = "1.6.1"
  end
@@ -7,7 +7,6 @@ require 'content_data'
  require 'file_indexing/indexer_patterns'
  require 'log'

-
  module FileIndexing
  ####################
  # Index Agent
@@ -0,0 +1,321 @@
+ # NOTE Code Coverage block must be issued before any of your application code is required
+ if ENV['BBFS_COVERAGE']
+ require_relative '../spec_helper.rb'
+ SimpleCov.command_name 'content_data'
+ end
+
+ require 'rspec'
+ require 'tempfile'
+ require_relative '../../lib/content_data/content_data.rb'
+
+ # NOTE the results are not exact because they do not run in a clean environment and are influenced by
+ # monitoring/testing code, but they give a good approximation
+ # Supposition: monitoring code penalty is insignificant against time/memory usage of the tested code
+ describe 'Content Data Performance Test', :perf => true do
+
+ NUMBER_INSTANCES = 350_000
+ MAX_CHECKSUM = NUMBER_INSTANCES
+ INSTANCE_SIZE = 1000
+ SERVER = "server"
+ PATH = "file_"
+ MTIME = 1000
+
+ # in kilobytes
+ LIMIT_MEMORY = 250*(1024) # 250 MB
+ # in seconds
+ LIMIT_TIME = 5*60; # 5 minutes
+
+ before :all do
+ Params.init Array.new
+ Params['print_params_to_stdout'] = false
+ # must precede Log.init, otherwise a log containing default values will be created
+ Params['log_write_to_file'] = false
+ Params['log_write_to_console'] = true
+ Params['log_debug_level'] = 1 # set it > 0 to enable print-outs of used time/memory
+ Log.init
+
+ # module variable contains an initialized ContentData
+ @test_cd = get_initialized
+ end
+
+ let (:terminator) { Limit.new(LIMIT_TIME, LIMIT_MEMORY) }
+
+ # Print-out status messages
+ after :each do
+ unless (terminator.nil?)
+ Log.debug1("#{self.class.description} #{example.description}: #{terminator.msg}")
+ end
+ GC.start
+ end
+
+ # Initialize ContentData object with generated instances
+ # @return [ContentData] an initialized object
+ def get_initialized
+ initialized_cd = ContentData::ContentData.new
+ NUMBER_INSTANCES.times do |i|
+ initialized_cd.add_instance(i.to_s, INSTANCE_SIZE+i, SERVER, PATH + i.to_s, MTIME+i)
+ end
+ initialized_cd
+ end
+
+ # TODO consider separating it into 2 derived classes: one for memory, one for time monitoring
+ class Limit
+ attr_reader :elapsed_time, :memory_usage, :msg
+
+ def initialize(time_limit, memory_limit)
+ @elapsed_time = 0
+ @memory_usage = 0
+ @msg = String.new
+ @time_limit = time_limit
+ @memory_limit = memory_limit
+ end
+
+ def get_timer_thread(watched_thread)
+ Thread.new do
+ while (@elapsed_time < @time_limit && watched_thread.alive?)
+ @elapsed_time += 1
+ sleep 1
+ end
+ is_succeeded = true
+ if (watched_thread.alive?)
+ Thread.kill(watched_thread)
+ is_succeeded = false
+ end
+ @msg = (is_succeeded ? "" : "did not ") + "finished in #{@elapsed_time} seconds"
+ end
+ end
+
+ def get_memory_limit_thread(watched_thread)
+ Thread.new do
+ init_memory_usage = Process.get_memory_usage
+ while (@memory_usage < @memory_limit && watched_thread.alive?)
+ cur_memory_usage = Process.get_memory_usage - init_memory_usage
+ if (cur_memory_usage > @memory_usage)
+ @memory_usage = cur_memory_usage
+ end
+ sleep 1
+ end
+ if (watched_thread.alive?)
+ Thread.kill(watched_thread)
+ end
+ @msg = "memory usage: #{@memory_usage}"
+ end
+ end
+ end
+
+ # TODO consider a more general public method call_with_limit
+ class Proc
+ # Run a procedure.
+ # Terminate it if it is running longer than a time limit
+ # TODO consider usage of Process::setrlimit
+ # @param [Limit] limit object
+ def call_with_timer(limit)
+ call_thread = Thread.new { self.call }
+ limit_thread = limit.get_timer_thread(call_thread)
+ [call_thread, limit_thread].each { |th| th.join }
+ end
+
+ # Run a procedure.
+ # Terminate it if it is running beyond a memory limit
+ # TODO consider usage of Process::setrlimit
+ # @param [Limit] limit object
+ def call_with_memory_limit(limit)
+ call_thread = Thread.new { self.call }
+ limit_thread = limit.get_memory_limit_thread(call_thread)
+ [call_thread, limit_thread].each { |th| th.join }
+ end
+ end
+
+ module Process
+ # @return [boolean] true when process alive, false otherwise
+ # @raise [Errno::EPERM] if process owned by another user and there are no permissions to check
+ # (not our case)
+ def Process.alive?(pid)
+ begin
+ Process.kill(0, pid)
+ true
+ rescue Errno::ESRCH
+ false
+ end
+ end
+
+ # Get memory consumed by the process
+ # @param [Integer] pid, default is current pid
+ # @return [Integer] memory usage in kilobytes
+ def Process.get_memory_usage(pid = Process.pid)
+ if Gem::win_platform?
+ `tasklist /FI \"PID eq #{pid}\" /NH /FO \"CSV\"`.split(',')[4]
+ else
+ `ps -o rss= -p #{pid}`.to_i
+ end
+ end
+ end
+
+ context 'Object initialization' do
+ context 'Init one object' do
+ it "#{NUMBER_INSTANCES} instances in less than #{LIMIT_TIME} seconds" do
+ cd1 = nil
+ init_proc = Proc.new { cd1 = get_initialized }
+ init_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checks that test was correct
+ cd1.should be
+ cd1.instances_size.should == NUMBER_INSTANCES
+ end
+
+ it "#{NUMBER_INSTANCES} instances consume less than #{LIMIT_MEMORY} KB" do
+ cd1 = nil
+ init_proc = Proc.new { cd1 = get_initialized }
+ init_proc.call_with_memory_limit(terminator)
+ terminator.memory_usage.should < LIMIT_MEMORY
+
+ # checks that test was correct
+ cd1.should be
+ cd1.instances_size.should == NUMBER_INSTANCES
+ end
+
+ it "clone of #{NUMBER_INSTANCES} in less than #{LIMIT_TIME} seconds" do
+ cd2 = nil
+ clone_proc = Proc.new { cd2 = ContentData::ContentData.new(@test_cd) }
+ clone_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checks that test was correct
+ cd2.should be
+ cd2.instances_size.should == @test_cd.instances_size
+ end
+
+ it "clone of #{NUMBER_INSTANCES} consumes less than #{LIMIT_MEMORY} KB" do
+ cd2 = nil
+ clone_proc = Proc.new { cd2 = ContentData::ContentData.new(@test_cd) }
+ clone_proc.call_with_memory_limit(terminator)
+ terminator.memory_usage.should < LIMIT_MEMORY
+
+ # checks that test was correct
+ cd2.should be
+ cd2.instances_size.should == @test_cd.instances_size
+ end
+ end
+
+
+ context 'Init more than one object' do
+ it "two objects of #{NUMBER_INSTANCES} instances each in less than #{2*LIMIT_TIME} seconds" do
+ cd1 = nil
+ cd2 = nil
+ build_proc = Proc.new do
+ cd1 = get_initialized
+ cd2 = get_initialized
+ end
+ build_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checks that test was correct
+ cd1.should be
+ cd1.instances_size.should == NUMBER_INSTANCES
+ cd2.should be
+ cd2.instances_size.should == NUMBER_INSTANCES
+ end
+
+ it "three objects of #{NUMBER_INSTANCES} instances each in less than #{3*LIMIT_TIME} seconds" do
+ cd1 = nil
+ cd2 = nil
+ cd3 = nil
+ build_proc = Proc.new do
+ cd1 = get_initialized
+ cd2 = get_initialized
+ cd3 = get_initialized
+ end
+ build_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checks that test was correct
+ cd1.should be
+ cd1.instances_size.should == NUMBER_INSTANCES
+ cd2.should be
+ cd2.instances_size.should == NUMBER_INSTANCES
+ cd3.should be
+ cd3.instances_size.should == NUMBER_INSTANCES
+ end
+ end
+ end
+
+ context 'Iteration' do
+ it "each instance on #{NUMBER_INSTANCES} instances in less than #{LIMIT_TIME}" do
+ each_thread = Proc.new { @test_cd.each_instance { |ch,_,_,_,_,_,_| ch } }
+ each_thread.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+ end
+ end
+
+ context 'File operations' do
+ before :all do
+ @file = Tempfile.new('to_file_test')
+ @path = @file.path
+ end
+
+ after :all do
+ @file.close!
+ end
+
+ it "save/load on #{NUMBER_INSTANCES} instances in less than #{LIMIT_TIME} seconds" do
+ # Checking to_file
+ to_file_proc = Proc.new do
+ begin
+ @test_cd.to_file(@path)
+ ensure
+ @file.close
+ end
+ end
+ to_file_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checking from_file
+ cd2 = ContentData::ContentData.new
+ from_file_proc = Proc.new do
+ begin
+ cd2.from_file(@path)
+ ensure
+ @file.close
+ end
+ end
+ terminator = Limit.new(LIMIT_TIME, LIMIT_MEMORY)
+ from_file_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checks that test was correct
+ cd2.instances_size.should == @test_cd.instances_size
+ end
+ end
+
+ describe 'Set operations' do
+ before :all do
+ @cd2 = get_initialized
+ @cd2.add_instance((MAX_CHECKSUM+1).to_s, INSTANCE_SIZE, SERVER, PATH + "new", MTIME)
+ end
+
+ context "minus of two objects with #{NUMBER_INSTANCES} instances each" do
+ it "finishes in less than #{LIMIT_TIME} seconds" do
+ res_cd = nil
+ minus_proc = Proc.new { res_cd = ContentData.remove(@test_cd, @cd2) }
+ minus_proc.call_with_timer(terminator)
+ terminator.elapsed_time.should < LIMIT_TIME
+
+ # checks that test was correct
+ res_cd.should be
+ res_cd.instances_size.should == 1
+ end
+
+ it "consumes less than #{LIMIT_MEMORY} KB" do
+ res_cd = nil
+ minus_proc = Proc.new { res_cd = ContentData.remove(@test_cd, @cd2) }
+ minus_proc.call_with_memory_limit(terminator)
+ terminator.memory_usage.should < LIMIT_MEMORY
+
+ # checks that test was correct
+ res_cd.should be
+ res_cd.instances_size.should == 1
+ end
+ end
+ end
+ end
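The Limit class and the Proc monkey patches above implement a simple thread watchdog: the tested code runs in one thread while a second thread counts time (or samples memory) and kills the worker once the budget is exceeded. A minimal standalone sketch of the same pattern, outside RSpec (the helper name and the sample block are illustrative, not part of the gem):

  # Run a block in a worker thread and kill it once a time budget is exceeded.
  def run_with_time_limit(seconds)
    worker = Thread.new { yield }
    elapsed = 0
    watcher = Thread.new do
      while elapsed < seconds && worker.alive?
        sleep 1
        elapsed += 1
      end
      worker.kill if worker.alive? # over budget: terminate the worker
    end
    [worker, watcher].each(&:join)
    elapsed
  end

  # Example: give the block at most 10 seconds.
  elapsed = run_with_time_limit(10) { 100_000.times { |i| i.to_s } }
  puts "finished (or was killed) after ~#{elapsed} seconds"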
@@ -1,3 +1,4 @@
+ # coding: UTF-8
  # NOTE Code Coverage block must be issued before any of your application code is required
  if ENV['BBFS_COVERAGE']
  require_relative '../spec_helper.rb'
@@ -125,6 +126,9 @@ describe 'Content Data Test' do
  "/home/file_2", 44444444444)
  content_data.add_instance("B1", 60, "server_1",
  "/home/file_3", 55555555555)
+ content_data.add_symlink("A1", "/home/symlink_1", "home/file_1")
+ content_data.add_symlink("B1", "/home/symlink_2", "home/file_xxx")
+ content_data.add_symlink("B1", "/home/symlink_1", "home/file_3")
  file_moc_object = StringIO.new
  file_moc_object.write(content_data.to_s)
  test_file = Tempfile.new('content_data_spec.test')
@@ -134,6 +138,26 @@ describe 'Content Data Test' do
  (content_data == content_data_2).should == true
  end

+ it 'test old format with comma' do
+ content_data = ContentData::ContentData.new
+ content_data.add_instance("A1", 50, "server_1",
+ "/home/file,<><,ласкдфй_1", 22222222222)
+ content_data.add_instance("B1", 60, "server_1",
+ "/home/filךלחת:,!גדכשe_2", 44444444444)
+ content_data.add_instance("B1", 60, "server_1",
+ "/home/filкакакаe_3", 55555555555)
+ content_data.add_symlink("A1", "/home/syласдфйmlink_1", "home/file_1")
+ content_data.add_symlink("B1", "/home/symlinkדגכע_2", "home/file_xxx")
+ content_data.add_symlink("B1", "/home/symlinkדכע_1", "home/filкакакаe_3")
+ file_moc_object = StringIO.new
+ file_moc_object.write(content_data.to_s)
+ test_file = Tempfile.new('content_data_spec.test')
+ content_data.to_file_old(test_file)
+ content_data_2 = ContentData::ContentData.new
+ content_data_2.from_file_old(test_file)
+ (content_data == content_data_2).should == true
+ end
+
  it 'test merge' do
  content_data_a = ContentData::ContentData.new
  content_data_a.add_instance("A1", 50, "server_1",
@@ -15,7 +15,7 @@ Params['log_write_to_console'] = false
  Params['log_write_to_file'] = false
  Params['log_debug_level'] = 0
  Params['streaming_chunk_size'] = 5
- Params.init ARGV
+ Params.init Array.new
  Params['log_write_to_file'] = false
  Params['log_write_to_console'] = false
  Params['enable_monitoring'] = false
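The 'test old format with comma' spec above exercises the comma workaround added to read_old_instances_chunk. The idea, shown here as a simplified equivalent of that loop with a made-up line (not the gem's literal code): when the old comma-separated format yields more than 6 fields, the extra fields belong to a path that itself contained commas, so they are glued back onto the path field.

  line = "B1,60,server_1,/home/file,<><,1,44444444444,55555555555"
  parameters = line.split(',')
  if parameters.size > 6
    extra = parameters.size - 6
    parameters[3] = parameters[3..3 + extra].join(',') # glue the path back together
    extra.times { parameters.delete_at(4) }
  end
  # parameters => ["B1", "60", "server_1", "/home/file,<><,1", "44444444444", "55555555555"]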
metadata CHANGED
@@ -1,209 +1,97 @@
  --- !ruby/object:Gem::Specification
  name: content_server
  version: !ruby/object:Gem::Version
- version: 1.6.0
+ version: 1.6.1
  platform: ruby
  authors:
  - BBFS Team
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-03-15 00:00:00.000000000 Z
+ date: 2014-05-08 00:00:00.000000000 Z
  dependencies:
- - !ruby/object:Gem::Dependency
- name: rake
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - '='
- - !ruby/object:Gem::Version
- version: 0.9.2.2
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - '='
- - !ruby/object:Gem::Version
- version: 0.9.2.2
- - !ruby/object:Gem::Dependency
- name: algorithms
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- name: log4r
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- name: eventmachine
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- name: json
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- name: sinatra
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- name: thin
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - ! '>='
- - !ruby/object:Gem::Version
- version: '0'
- - !ruby/object:Gem::Dependency
- name: rake
- requirement: !ruby/object:Gem::Requirement
- requirements:
- - - '='
- - !ruby/object:Gem::Version
- version: 0.9.2.2
- type: :runtime
- prerelease: false
- version_requirements: !ruby/object:Gem::Requirement
- requirements:
- - - '='
- - !ruby/object:Gem::Version
- version: 0.9.2.2
  - !ruby/object:Gem::Dependency
  name: algorithms
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: log4r
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: eventmachine
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: json
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: sinatra
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
  name: thin
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  description: Monitor and Index a directory and back it up to backup server.
@@ -218,18 +106,24 @@ extensions:
  - ext/run_in_background/mkrf_conf.rb
  extra_rdoc_files: []
  files:
+ - bin/backup_server
+ - bin/content_server
+ - bin/file_utils
+ - bin/testing_memory
+ - bin/testing_server
+ - ext/run_in_background/mkrf_conf.rb
  - lib/content_data.rb
  - lib/content_data/content_data.rb
  - lib/content_data/version.rb
  - lib/content_server.rb
+ - lib/content_server/backup_server.rb
+ - lib/content_server/content_receiver.rb
+ - lib/content_server/content_server.rb
  - lib/content_server/file_streamer.rb
  - lib/content_server/queue_copy.rb
- - lib/content_server/server.rb
- - lib/content_server/content_server.rb
- - lib/content_server/content_receiver.rb
  - lib/content_server/remote_content.rb
+ - lib/content_server/server.rb
  - lib/content_server/version.rb
- - lib/content_server/backup_server.rb
  - lib/email.rb
  - lib/email/email.rb
  - lib/email/version.rb
@@ -237,13 +131,13 @@ files:
  - lib/file_copy/copy.rb
  - lib/file_copy/version.rb
  - lib/file_indexing.rb
- - lib/file_indexing/indexer_patterns.rb
  - lib/file_indexing/index_agent.rb
+ - lib/file_indexing/indexer_patterns.rb
  - lib/file_indexing/version.rb
  - lib/file_monitoring.rb
+ - lib/file_monitoring/file_monitoring.rb
  - lib/file_monitoring/monitor_path.rb
  - lib/file_monitoring/version.rb
- - lib/file_monitoring/file_monitoring.rb
  - lib/file_utils.rb
  - lib/file_utils/file_generator/README
  - lib/file_utils/file_generator/file_generator.rb
@@ -257,20 +151,21 @@ files:
  - lib/params.rb
  - lib/params/version.rb
  - lib/process_monitoring.rb
- - lib/process_monitoring/thread_safe_hash.rb
+ - lib/process_monitoring/monitoring.rb
  - lib/process_monitoring/monitoring_info.rb
  - lib/process_monitoring/send_email.rb
- - lib/process_monitoring/monitoring.rb
+ - lib/process_monitoring/thread_safe_hash.rb
  - lib/process_monitoring/version.rb
  - lib/run_in_background.rb
  - lib/run_in_background/version.rb
+ - lib/testing_memory/testing_memory.rb
  - lib/testing_server.rb
  - lib/testing_server/testing_server.rb
  - lib/testing_server/version.rb
- - lib/testing_memory/testing_memory.rb
  - lib/validations.rb
  - lib/validations/index_validations.rb
  - lib/validations/version.rb
+ - spec/content_data/content_data_performance_spec.rb
  - spec/content_data/content_data_spec.rb
  - spec/content_data/validations_spec.rb
  - spec/content_server/content_server_spec.rb
@@ -279,12 +174,6 @@ files:
  - spec/file_indexing/index_agent_spec.rb
  - spec/networking/tcp_spec.rb
  - spec/validations/index_validations_spec.rb
- - bin/content_server
- - bin/backup_server
- - bin/file_utils
- - bin/testing_server
- - bin/testing_memory
- - ext/run_in_background/mkrf_conf.rb
  homepage: http://github.com/bbfsdev/bbfs
  licenses: []
  metadata: {}
@@ -294,23 +183,24 @@ require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ">="
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.1.11
+ rubygems_version: 2.2.2
  signing_key:
  specification_version: 4
  summary: Servers for backing up content.
  test_files:
  - spec/content_data/content_data_spec.rb
  - spec/content_data/validations_spec.rb
+ - spec/content_data/content_data_performance_spec.rb
  - spec/content_server/content_server_spec.rb
  - spec/content_server/file_streamer_spec.rb
  - spec/file_copy/copy_spec.rb