content_data 0.0.2 → 0.0.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/lib/content_data.rb +2 -2
- data/lib/content_data/content_data.rb +69 -113
- data/lib/content_data/dynamic_content_data.rb +43 -0
- data/lib/content_data/version.rb +5 -0
- data/test/content_data/content_data_test.rb +54 -54
- metadata +39 -5
data/lib/content_data.rb
CHANGED
@@ -1,9 +1,9 @@
-
+require 'content_data/content_data'
+require 'content_data/dynamic_content_data'
 
 # Data structure for an abstract layer over files.
 # Each binary sequence is a content, each file is content instance.
 module BBFS
   module ContentData
-    VERSION = "0.0.1"
   end
 end
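Note on the hunk above: the gem's entry point now loads both the core class and the new dynamic wrapper, and the VERSION constant moves out of this file (a new lib/content_data/version.rb appears in the file list). A minimal consumer sketch, assuming the gem and its new log/params dependencies are installed; names mirror the modules defined in this diff:

    require 'content_data'

    # Both classes are available after the single top-level require.
    cd  = BBFS::ContentData::ContentData.new
    dyn = BBFS::ContentData::DynamicContentData.new
    dyn.update(cd)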
data/lib/content_data/content_data.rb
CHANGED
@@ -1,3 +1,5 @@
+require 'log'
+require 'params'
 require 'time'
 
 module BBFS
@@ -31,8 +33,6 @@ module BBFS
        end
      end
 
-
-
      def to_s
        "%s,%d,%s" % [@checksum, @size, ContentData.format_time(@first_appearance_time)]
      end
@@ -42,16 +42,6 @@ module BBFS
                self.size.eql? other.size and
                self.first_appearance_time.to_i.eql? other.first_appearance_time.to_i)
      end
-
-      # Support for protocol buffers
-      #def serialize
-      #  serializer = ContentMessage.new
-      #  serializer.checksum = checksum
-      #  serializer.size = size
-      #  serializer.first_appearance_time = ContentData.format_time(first_appearance_time)
-      #
-      #  serializer
-      #end
    end
 
    class ContentInstance
@@ -100,7 +90,11 @@ module BBFS
      end
 
      def global_path
-
+        ContentInstance.instance_global_path(@server_name, @full_path)
+      end
+
+      def ContentInstance.instance_global_path(server_name, full_path)
+        "%s:%s" % [server_name, full_path]
      end
 
      def to_s
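The hunk above moves the "server:path" formatting into a class-level helper, so a global path can be built without constructing a ContentInstance. A small sketch with made-up values; the constructor argument order (checksum, size, server_name, device, full_path, modification_time) follows the calls visible elsewhere in this diff:

    instance = BBFS::ContentData::ContentInstance.new(
        "D12A1C98A3", 765, "large_server_1", "dev1",
        "/home/kuku/dev/lala/k.txt", Time.at(1296527039))

    instance.global_path
    # => "large_server_1:/home/kuku/dev/lala/k.txt"

    # Same string, no instance needed:
    BBFS::ContentData::ContentInstance.instance_global_path(
        "large_server_1", "/home/kuku/dev/lala/k.txt")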
@@ -116,96 +110,40 @@ module BBFS
                self.full_path.eql? other.full_path and
                self.modification_time.to_i.eql? other.modification_time.to_i)
      end
-
-      # Support for protocol buffers
-      # #def serialize
-      #  serializer = ContentInstanceMessage.new
-      #  serializer.checksum = checksum
-      #  serializer.size = size
-      #  serializer.modification_time = ContentData.format_time(modification_time)
-      #  serializer.server_name = server_name
-      #  serializer.device = device.to_s
-      #  serializer.full_path = full_path
-      #
-      #  serializer
-      #end
-
-      #@checksum
-      #@size
-      #@server_name
-      #@device
-      #@full_path
-      #@modification_time
    end
 
+    # Unfortunately this class is used as mutable for now. So need to be carefull.
+    # TODO(kolman): Make this class imutable, but add indexing structure to it.
+    # TODO(kolman): Add wrapper to the class to enable dynamic content data
+    # (with easy access indexes)
    class ContentData
      attr_reader :contents, :instances
 
      # @param content_data_serializer_str [String]
-      def initialize(
-
-
-
-
-
-
-
-          raise ArgumentError.new("content have to be defined") if content_serializer.nil?
-          content = Content.new(nil, nil, nil, content_serializer)
-          @contents[key] = content
-        end
-        content_data_serializer.instances.each do |entry|
-          key = entry.key
-          value = entry.value
-          content_instance_serializer = value.instance
-          raise ArgumentError.new("instance have to be defined") if content_instance_serializer.nil?
-          content_instance = ContentInstance.new(nil, nil, nil, nil, nil, nil, content_instance_serializer)
-          @instances[key] = content_instance
-        end
+      def initialize(copy = nil)
+        if copy.nil?
+          @contents = Hash.new # key is a checksum , value is a refernce to the Content object
+          @instances = Hash.new # key is an instance global path , value is a reference to the ContentInstance object
+        else
+          # Regenerate only the hashes, the values are immutable.
+          @contents = copy.contents.clone
+          @instances = copy.instances.clone
        end
      end
 
-      # Support for protocol buffers
-      #def serialize (serializer = nil)
-      #  serializer = ContentDataMessage.new if (serializer == nil)
-      #  contents.each do |key, value|
-      #    hash_value = ContentDataMessage::HashEntry::HashValue.new
-      #    content_serializer = value.serialize
-      #    #content_serializer = ContentMessage.new
-      #    #content_serializer.parse_from_string(content_serializer_str)
-      #    hash_value.content = content_serializer
-      #    hash_entry = ContentDataMessage::HashEntry.new
-      #    hash_entry.key = key
-      #    hash_entry.value = hash_value
-      #    serializer.contents << hash_entry
-      #  end
-      #  instances.each do |key, value|
-      #    hash_value = ContentDataMessage::HashEntry::HashValue.new
-      #    instance_serializer = value.serialize
-      #    #instance_serializer = ContentInstanceMessage.new
-      #    #instance_serializer.parse_from_string(instance_serializer_str)
-      #    hash_value.instance = instance_serializer
-      #    hash_entry = ContentDataMessage::HashEntry.new
-      #    hash_entry.key = key
-      #    hash_entry.value = hash_value
-      #    serializer.instances << hash_entry
-      #  end
-      #  serializer
-      #end
-
      def add_content(content)
        @contents[content.checksum] = content
      end
 
      def add_instance(instance)
        if (not @contents.key?(instance.checksum))
-
+          Log.warning sprintf("Adding instance while it's" +
                              " checksum %s does not exists.\n", instance.checksum)
-
+          Log.warning sprintf("%s\n", instance.to_s)
          return false
        elsif (@contents[instance.checksum].size != instance.size)
-
-
+          Log.warning 'File size different from content size while same checksum'
+          Log.warning instance.to_s
          return false
        end
 
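The rewritten initializer above doubles as a copy constructor: passing an existing ContentData clones only the two lookup hashes, while the stored Content/ContentInstance values are shared (the new comments treat them as effectively immutable). A sketch of the intent, assuming `original` was filled elsewhere via add_content/add_instance:

    original = BBFS::ContentData::ContentData.new
    # ... add_content / add_instance calls ...

    snapshot = BBFS::ContentData::ContentData.new(original)
    # The hash containers are independent copies ...
    snapshot.contents.equal?(original.contents)   # => false
    # ... but the Content objects they point at are the same instances in both copies.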
@@ -234,33 +172,22 @@ module BBFS
      end
 
      def ==(other)
-
-        #print "size:%s\n" % @contents.size
-        #print "other size:%s\n" % other.contents.size
        return false if other == nil
        return false unless @contents.size == other.contents.size
        return false unless @instances.size == other.instances.size
 
        @contents.keys.each { |key|
          if (@contents[key] != other.contents[key])
-
-
-            #puts " compare - false"
-            puts @contents[key].first_appearance_time.to_i
-            puts other.contents[key].first_appearance_time.to_i
+            Log.info @contents[key].first_appearance_time.to_i
+            Log.info other.contents[key].first_appearance_time.to_i
            return false
          end
        }
-
        @instances.keys.each { |key|
          if (@instances[key] != other.instances[key])
-            #print "%s-" % @instances[key].to_s
-            #print other.instances[key].to_s
-            #puts " compare - false"
            return false
          end
        }
-        #puts "compare - true"
        return true
      end
 
@@ -278,6 +205,8 @@ module BBFS
      end
 
      def to_file(filename)
+        content_data_dir = File.dirname(filename)
+        FileUtils.makedirs(content_data_dir) unless File.exists?(content_data_dir)
        File.open(filename, 'w') {|f| f.write(to_s) }
      end
 
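to_file now creates the target directory on demand, so callers no longer need to pre-create it before serializing. A sketch of the round trip (the path is illustrative):

    cd = BBFS::ContentData::ContentData.new
    cd.to_file('/tmp/bbfs_example/deep/dir/content.data')   # missing directories are created

    restored = BBFS::ContentData::ContentData.new
    restored.from_file('/tmp/bbfs_example/deep/dir/content.data')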
@@ -297,7 +226,7 @@ module BBFS
        number_of_instances = lines[i].to_i
        i += 1
        number_of_instances.times {
-          parameters = lines[i].split(
+          parameters = lines[i].split(',')
          # bugfix: if file name consist a comma then parsing based on comma separating fails
          if (parameters.size > 6)
            (5..parameters.size-2).each do |i|
@@ -318,20 +247,15 @@ module BBFS
        }
      end
 
-      def self.parse_time
+      def self.parse_time time_str
        return nil unless time_str.instance_of? String
-
-
-        #require 'scanf.rb'
-        #time_arr = time_str.scanf("%d/%d/%d %d:%d:%d.%d")
-        #time = Time.utc(time_arr[0], time_arr[1],time_arr[2],time_arr[3],time_arr[4],time_arr[5],time_arr[6])
+        seconds_from_epoch = Integer time_str  # Not using to_i here because it does not check string is integer.
+        time = Time.at seconds_from_epoch
      end
 
      def self.format_time(time)
        return nil unless time.instance_of?Time
-
-        str = time.strftime( '%Y/%m/%d %H:%M:%S.%L' )
-        #puts str
+        str = time.to_i.to_s
        return str
      end
 
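With the hunk above, serialized timestamps switch from a formatted date string ('%Y/%m/%d %H:%M:%S.%L') to integer seconds since the epoch, and parsing now raises on non-numeric input because Integer() is used instead of to_i. A round-trip sketch with an illustrative value:

    t = Time.at(1296527039)
    BBFS::ContentData::ContentData.format_time(t)             # => "1296527039"
    BBFS::ContentData::ContentData.parse_time("1296527039")   # => Time at the same second
    BBFS::ContentData::ContentData.parse_time("2012/09/02 10:00:00.000")
    # raises ArgumentError: the old date format is no longer accepted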
@@ -362,7 +286,7 @@ module BBFS
        ret.add_content(content) unless a.content_exists(content.checksum)
      }
 
-      #
+      #Log.info "kaka"
 
      b.instances.values.each { |instance|
        #print "%s - %s\n" % [instance.checksum, a.content_exists(instance.checksum).to_s]
@@ -374,6 +298,36 @@ module BBFS
        return ret
      end
 
+      def self.remove_instances(a, b)
+        return nil unless a.instance_of?ContentData
+        return nil unless b.instance_of?ContentData
+
+        ret = ContentData.new
+        b.instances.values.each do |instance|
+          if !a.instances.key?(instance.global_path)
+            ret.add_content(b.contents[instance.checksum])
+            ret.add_instance(instance)
+          end
+        end
+        return ret
+      end
+
+      def self.remove_directory(cd, global_dir_path)
+        return nil unless cd.instance_of?ContentData
+
+        ret = ContentData.new
+        cd.instances.values.each do |instance|
+          Log.debug3("global path to check: #{global_dir_path}")
+          Log.debug3("instance global path: #{instance.global_path}")
+          if instance.global_path.scan(global_dir_path).size == 0
+            Log.debug3("Adding instance.")
+            ret.add_content(cd.contents[instance.checksum])
+            ret.add_instance(instance)
+          end
+        end
+        return ret
+      end
+
      # returns the common content in both a and b
      def self.intersect(a, b)
        b_minus_a = ContentData.remove(a, b)
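Two new class-level set operations are added above: remove_instances keeps the instances of b whose global path does not appear in a, and remove_directory keeps the instances whose global path does not match the given pattern (String#scan is used, so the argument acts as a substring or regexp against the "server:path" key). A hedged usage sketch, assuming a and b were built elsewhere:

    only_in_b = BBFS::ContentData::ContentData.remove_instances(a, b)

    # Everything outside a given server/directory prefix:
    outside_dir = BBFS::ContentData::ContentData.remove_directory(
        b, "large_server_1:/home/kuku")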
@@ -391,12 +345,12 @@ module BBFS
          checksum = instance.checksum
          time = instance.modification_time
 
-          unless (checksum2instances.has_key?checksum)
+          unless (checksum2instances.has_key? checksum)
            checksum2instances[checksum] = []
          end
          checksum2instances[checksum] << instance
 
-          if (not checksum2time.has_key?checksum)
+          if (not checksum2time.has_key? checksum)
            checksum2time[checksum] = time
          elsif ((checksum2time[checksum] <=> time) > 0)
            checksum2time[checksum] = time
@@ -406,7 +360,7 @@ module BBFS
        # update min time table with time information from contents
        db.contents.each do |checksum, content|
          time = content.first_appearance_time
-          if (not checksum2time.has_key?checksum)
+          if (not checksum2time.has_key? checksum)
            checksum2time[checksum] = time
          elsif ((checksum2time[checksum] <=> time) > 0)
            checksum2time[checksum] = time
@@ -430,7 +384,9 @@ module BBFS
          if ((instance.modification_time <=> time) == 0)
            mod_db.add_instance(instance)
          else # must be bigger then found min time
-            mod_instance = ContentInstance.new(instance.checksum, instance.size,
+            mod_instance = ContentInstance.new(instance.checksum, instance.size,
+                                               instance.server_name, instance.device,
+                                               instance.full_path, time)
            mod_db.add_instance(mod_instance)
          end
        end
data/lib/content_data/dynamic_content_data.rb
ADDED
@@ -0,0 +1,43 @@
+require 'thread'
+
+module BBFS
+  module ContentData
+
+    # TODO(kolman): When content data is immutable, remove the clones (waste).
+    class DynamicContentData
+      def initialize()
+        @last_content_data = nil
+        @last_content_data_available_mutex = Mutex.new
+      end
+
+      def update(content_data)
+        ref = ContentData.new(content_data)
+        @last_content_data_available_mutex.synchronize {
+          @last_content_data = ref
+        }
+      end
+
+      def exists?(checksum)
+        ref = nil
+        @last_content_data_available_mutex.synchronize {
+          ref = @last_content_data
+        }
+        #Log.debug3("@last_content_data is nil? #{@last_content_data.nil?}")
+        #Log.debug3(@last_content_data.to_s) unless @last_content_data.nil?
+        #Log.debug3("Exists?:#{@last_content_data.content_exists(checksum)}") \
+        #    unless @last_content_data.nil?
+        return ref.content_exists(checksum) if ref != nil
+        false
+      end
+
+      def last_content_data
+        ref = nil
+        @last_content_data_available_mutex.synchronize {
+          ref = @last_content_data
+        }
+        return ContentData.new(ref)
+      end
+    end
+
+  end # module ContentData
+end # module BBFS
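The new DynamicContentData class wraps the latest ContentData snapshot behind a mutex, so one thread can publish updates while others query; readers get an independent copy through the ContentData copy constructor introduced in this release. A minimal threaded sketch:

    require 'content_data'

    dyn = BBFS::ContentData::DynamicContentData.new

    writer = Thread.new do
      cd = BBFS::ContentData::ContentData.new
      # ... fill cd with add_content / add_instance ...
      dyn.update(cd)                     # publish a fresh snapshot
    end
    writer.join

    dyn.exists?("D12A1C98A3")            # => true or false, against the latest snapshot
    snapshot = dyn.last_content_data     # an independent ContentData copy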
data/test/content_data/content_data_test.rb
CHANGED
@@ -9,102 +9,102 @@ module BBFS
  class TestContentData < Test::Unit::TestCase
    def test_content
      content_data = ContentData.new
-      content_data.add_content(Content.new("D12A1C98A3", 765, ContentData.parse_time("
-      content_data.add_content(Content.new("B12A1C98A3", 123123, ContentData.parse_time("
-      content_data.add_content(Content.new("D1234C98A3", 12444, ContentData.parse_time("
-      content_data.add_content(Content.new("DB12A1C233", 2, ContentData.parse_time("
-      content_data.add_content(Content.new("DB12A4338A", 12412, ContentData.parse_time("
-      content_data.add_content(Content.new("232A1C98A3", 124424, ContentData.parse_time("
-      content_data.add_content(Content.new("AC12A1C983", 1242, ContentData.parse_time("
-      content_data.add_content(Content.new("AAC12A1C983", 1242,ContentData.parse_time("
+      content_data.add_content(Content.new("D12A1C98A3", 765, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("B12A1C98A3", 123123, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("D1234C98A3", 12444, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("DB12A1C233", 2, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("DB12A4338A", 12412, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("232A1C98A3", 124424, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("AC12A1C983", 1242, ContentData.parse_time("1296527039")))
+      content_data.add_content(Content.new("AAC12A1C983", 1242,ContentData.parse_time("1296527039")))
 
      content_data.add_instance(ContentInstance.new("DB12A1C233", 765, "large_server_1", "dev1",
-          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("DB12A4338A", 765, "large_server_1", "dev2",
-          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("232A1C98A3", 765, "large_server_1", "dev3",
-          "/home/kuku/dev/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("DB12A4338A", 765, "large_server_2", "dev2",
-          "/home/lala/k.txt", ContentData.parse_time("
+          "/home/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("D1234C98A3", 765, "large_server_2", "dev1",
-          "/home/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("D12A1C98A3", 765, "large_server_2", "dev1",
-          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("AC12A1C983", 765, "large_server_2", "dev2",
-          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("232A1C98A3", 765, "large_server_2", "dev3",
-          "/home/kuku/dev/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("D12A1C98A3", 765, "large_server_2", "dev2",
-          "/home/lala/k.txt", ContentData.parse_time("
+          "/home/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("D1234C98A3", 12412, "large_server_2", "dev1",
-          "/home/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("DB12A4338A", 12412, "large_server_2", "dev1",
-          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("AC12A1C983", 12412, "large_server_2", "dev2",
-          "/home/kuku/kuku/dev/lala/k1.txt", ContentData.parse_time("
+          "/home/kuku/kuku/dev/lala/k1.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("232A1C98A3", 12412, "large_server_2", "dev3",
-          "/home/kuku/kuku/dev/k.txt", ContentData.parse_time("
+          "/home/kuku/kuku/dev/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("DB12A4338A", 12412, "large_server_1", "dev2",
-          "/home/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data.add_instance(ContentInstance.new("D1234C98A3", 12412, "large_server_1", "dev1",
-          "/home/kuku/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
 
      #print content_data.to_s
 
-      assert_equal("8\nD12A1C98A3,765,
+      assert_equal("8\nD12A1C98A3,765,1296527039\nB12A1C98A3,123123,1296527039\nD1234C98A3,12444,1296527039\nDB12A1C233,2,1296527039\nDB12A4338A,12412,1296527039\n232A1C98A3,124424,1296527039\nAC12A1C983,1242,1296527039\nAAC12A1C983,1242,1296527039\n3\nDB12A4338A,12412,large_server_2,dev1,/home/kuku/dev/lala/k.txt,1296527039\nD12A1C98A3,765,large_server_2,dev2,/home/lala/k.txt,1296527039\nDB12A4338A,12412,large_server_1,dev2,/home/kuku/lala/k.txt,1296527039\n",
                   content_data.to_s)
-      content_data.to_file("content_data_test.data")
+      content_data.to_file(File.join File.dirname(__FILE__), "/content_data_test.data")
      new_content_data = ContentData.new()
-      new_content_data.from_file("content_data_test.data")
+      new_content_data.from_file(File.join File.dirname(__FILE__), "/content_data_test.data")
      assert_equal(new_content_data, content_data)
 
      content_data2 = ContentData.new
-      content_data2.add_content(Content.new("AD12A1C98A3", 765, ContentData.parse_time("
-      content_data2.add_content(Content.new("AB12A1C98A3", 123123, ContentData.parse_time("
-      content_data2.add_content(Content.new("AD1234C98A3", 12444, ContentData.parse_time("
-      content_data2.add_content(Content.new("ADB12A1C233", 2, ContentData.parse_time("
-      content_data2.add_content(Content.new("ADB12A4338A", 12412, ContentData.parse_time("
-      content_data2.add_content(Content.new("A232A1C98A3", 124424, ContentData.parse_time("
-      content_data2.add_content(Content.new("AAC12A1C983", 1242, ContentData.parse_time("
+      content_data2.add_content(Content.new("AD12A1C98A3", 765, ContentData.parse_time("1296527039")))
+      content_data2.add_content(Content.new("AB12A1C98A3", 123123, ContentData.parse_time("1296527039")))
+      content_data2.add_content(Content.new("AD1234C98A3", 12444, ContentData.parse_time("1296527039")))
+      content_data2.add_content(Content.new("ADB12A1C233", 2, ContentData.parse_time("1296527039")))
+      content_data2.add_content(Content.new("ADB12A4338A", 12412, ContentData.parse_time("1296527039")))
+      content_data2.add_content(Content.new("A232A1C98A3", 124424, ContentData.parse_time("1296527039")))
+      content_data2.add_content(Content.new("AAC12A1C983", 1242, ContentData.parse_time("1296527039")))
 
      content_data2.add_instance(ContentInstance.new("ADB12A1C233", 765, "large_server_11", "dev1",
-          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("ADB12A4338A", 765, "large_server_11", "dev2",
-          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("A232A1C98A3", 765, "large_server_11", "dev3",
-          "/home/kuku/dev/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("ADB12A4338A", 765, "large_server_12", "dev2",
-          "/home/lala/k.txt", ContentData.parse_time("
+          "/home/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AD1234C98A3", 765, "large_server_12", "dev1",
-          "/home/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AD12A1C98A3", 765, "large_server_12", "dev1",
-          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AAC12A1C983", 765, "large_server_12", "dev2",
-          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k1.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("A232A1C98A3", 765, "large_server_12", "dev3",
-          "/home/kuku/dev/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AD12A1C98A3", 765, "large_server_12", "dev2",
-          "/home/lala/k.txt", ContentData.parse_time("
+          "/home/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AD1234C98A3", 12412, "large_server_12", "dev1",
-          "/home/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("ADB12A4338A", 12412, "large_server_12", "dev1",
-          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AAC12A1C983", 12412, "large_server_12", "dev2",
-          "/home/kuku/kuku/dev/lala/k1.txt", ContentData.parse_time("
+          "/home/kuku/kuku/dev/lala/k1.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("A232A1C98A3", 12412, "large_server_12", "dev3",
-          "/home/kuku/kuku/dev/k.txt", ContentData.parse_time("
+          "/home/kuku/kuku/dev/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("ADB12A4338A", 12412, "large_server_11", "dev2",
-          "/home/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
      content_data2.add_instance(ContentInstance.new("AD1234C98A3", 12412, "large_server_11", "dev1",
-          "/home/kuku/kuku/lala/k.txt", ContentData.parse_time("
+          "/home/kuku/kuku/lala/k.txt", ContentData.parse_time("1296527039")))
 
      old_content_data = ContentData.new
      old_content_data.merge(content_data)
      assert_equal(true, old_content_data == content_data)
      content_data.merge(content_data2)
-      content_data.to_file("content_data_test2.data")
+      content_data.to_file(File.join File.dirname(__FILE__), "/content_data_test2.data")
      new_content_data2 = ContentData.new()
-      new_content_data2.from_file("content_data_test2.data")
+      new_content_data2.from_file(File.join File.dirname(__FILE__), "/content_data_test2.data")
      assert_equal(true, new_content_data2 == content_data)
      assert_equal(false, new_content_data2 == old_content_data)
 
@@ -112,8 +112,8 @@ module BBFS
      assert_equal(false, old_content_data == cd3)
      cd4 = ContentData.remove(cd3, content_data)
      #assert_equal(content_data.to_s, "")
-      assert_equal(cd3.to_s, "7\nD12A1C98A3,765,
-      assert_equal(cd4.to_s, "7\nAAC12A1C983,1242,
+      assert_equal(cd3.to_s, "7\nD12A1C98A3,765,1296527039\nB12A1C98A3,123123,1296527039\nD1234C98A3,12444,1296527039\nDB12A1C233,2,1296527039\nDB12A4338A,12412,1296527039\n232A1C98A3,124424,1296527039\nAC12A1C983,1242,1296527039\n3\nDB12A4338A,12412,large_server_2,dev1,/home/kuku/dev/lala/k.txt,1296527039\nD12A1C98A3,765,large_server_2,dev2,/home/lala/k.txt,1296527039\nDB12A4338A,12412,large_server_1,dev2,/home/kuku/lala/k.txt,1296527039\n")
+      assert_equal(cd4.to_s, "7\nAAC12A1C983,1242,1296527039\nAD12A1C98A3,765,1296527039\nAB12A1C98A3,123123,1296527039\nAD1234C98A3,12444,1296527039\nADB12A1C233,2,1296527039\nADB12A4338A,12412,1296527039\nA232A1C98A3,124424,1296527039\n3\nADB12A4338A,12412,large_server_12,dev1,/home/kuku/dev/lala/k.txt,1296527039\nAD12A1C98A3,765,large_server_12,dev2,/home/lala/k.txt,1296527039\nADB12A4338A,12412,large_server_11,dev2,/home/kuku/lala/k.txt,1296527039\n")
      cd5 = ContentData.merge(cd3, cd4)
      assert_equal(cd5, content_data)
 
@@ -123,14 +123,14 @@ module BBFS
      assert_equal(cd4, intersect)
 
      # Content serialization test
-      #content = Content.new("D12A1C98A3", 765, ContentData.parse_time("
+      #content = Content.new("D12A1C98A3", 765, ContentData.parse_time("1296527039"))
      #content_serializer = content.serialize()
      #content_copy = Content.new(nil, nil, nil, content_serializer)
      #assert_equal(content, content_copy)
 
      # ContentInstance serialization test
      #instance = ContentInstance.new("DB12A1C233", 765, "large_server_1", "dev1",
-      #    "/home/kuku/dev/lala/k.txt", ContentData.parse_time("
+      #    "/home/kuku/dev/lala/k.txt", ContentData.parse_time("1296527039"))
      #instance_serializer = instance.serialize()
      #instance_copy = ContentInstance.new(nil, nil, nil, nil, nil, nil, instance_serializer)
      #assert_equal(instance, instance_copy)
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: content_data
 version: !ruby/object:Gem::Version
-  version: 0.0.2
+  version: 0.0.8
 prerelease:
 platform: ruby
 authors:
@@ -9,10 +9,42 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2012-
-dependencies:
+date: 2012-09-02 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: log
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: params
+  requirement: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    none: false
+    requirements:
+    - - ! '>='
+      - !ruby/object:Gem::Version
+        version: '0'
 description: ! 'Data structure for an abstract layer over files. Each binary sequence
-  is a
+  is a '
 email: kolmanv@gmail.com
 executables: []
 extensions: []
@@ -20,6 +52,8 @@ extra_rdoc_files: []
 files:
 - lib/content_data.rb
 - lib/content_data/content_data.rb
+- lib/content_data/dynamic_content_data.rb
+- lib/content_data/version.rb
 - test/content_data/content_data_test.rb
 homepage: http://github.com/kolmanv/bbfs
 licenses: []
@@ -41,7 +75,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
      version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 1.8.
+rubygems_version: 1.8.23
 signing_key:
 specification_version: 3
 summary: Data structure for an abstract layer over files.