ruby-msg 1.2.17.3 → 1.3.1

Sign up to get free protection for your applications and to get access to all the features.
require 'date'

require 'ole/base'

module Ole # :nodoc:
	# FIXME
	module Types
		# Iconv was removed from the ruby standard library in 2.0. Use it when
		# available, otherwise fall back to a thin String#encode based shim that
		# keeps the same #iconv call interface, so existing users of
		# FROM_UTF16.iconv / TO_UTF16.iconv keep working on old and new rubies.
		begin
			require 'iconv'
			FROM_UTF16 = Iconv.new 'utf-8', 'utf-16le'
			TO_UTF16 = Iconv.new 'utf-16le', 'utf-8'
		rescue LoadError
			# drop-in replacement exposing Iconv's #iconv method on top of
			# String#encode (available wherever iconv is absent).
			class EncodingConverter # :nodoc:
				def initialize to, from
					@to, @from = to, from
				end

				def iconv str
					str.encode @to, @from
				end
			end

			FROM_UTF16 = EncodingConverter.new 'utf-8', 'utf-16le'
			TO_UTF16 = EncodingConverter.new 'utf-16le', 'utf-8'
		end

		# FILETIME values count 100ns intervals from this epoch.
		EPOCH = DateTime.parse '1601-01-01'

		# Create a +DateTime+ object from a struct +FILETIME+
		# (http://msdn2.microsoft.com/en-us/library/ms724284.aspx).
		#
		# Converts +str+ to two 32 bit time values, comprising the high and low 32 bits of
		# the 100's of nanoseconds since 1st january 1601 (Epoch).
		#
		# Returns +nil+ for the all-zero "no value" marker, for unparseable
		# input (via the rescue), and for implausible years.
		def self.load_time str
			low, high = str.unpack 'L2'
			# we ignore these, without even warning about it
			return nil if low == 0 and high == 0
			# 1e-7 converts 100ns ticks to seconds, / 86400 to days for DateTime
			time = EPOCH + (high * (1 << 32) + low) * 1e-7 / 86400 rescue return
			# extra sanity check...
			unless (1800...2100) === time.year
				Log.warn "ignoring unlikely time value #{time.to_s}"
				return nil
			end
			time
		end

		# Convert a binary guid into a plain string (will move to proper class later).
		def self.load_guid str
			"{%08x-%04x-%04x-%02x%02x-#{'%02x' * 6}}" % str.unpack('L S S CC C6')
		end
	end
end
@@ -1,51 +0,0 @@
1
-
2
- #
3
- # A file with general support functions used by most files in the project.
4
- #
5
-
6
- require 'logger'
7
-
8
class File # :nodoc:
	# for consistency with StringIO and others. makes more sense than forcing
	# them to provide a #stat. only defined when the runtime doesn't already
	# supply File#size (it does from ruby 1.9 on) - don't clobber the builtin.
	unless method_defined? :size
		def size
			stat.size
		end
	end
end
15
-
16
class Symbol # :nodoc:
	# 1.8.7 backport. only defined when the runtime lacks Symbol#to_proc -
	# the native version also forwards extra block arguments (eg inject(&:+)),
	# which this simple single-argument fallback does not, so it must never
	# clobber the builtin.
	unless method_defined? :to_proc
		def to_proc
			proc { |a| a.send self }
		end
	end
end
21
-
22
module Enumerable # :nodoc:
	# 1.9 backport. guarded so the native implementation is preferred when
	# present, rather than being silently redefined.
	unless method_defined? :group_by
		# Returns a hash mapping each block result to the array of items that
		# produced it.
		def group_by
			groups = Hash.new { |h, k| h[k] = [] }
			each { |item| groups[yield(item)] << item }
			groups
		end
	end

	# 2.4 backport, same guard. +initial+ is the starting accumulator (0 by
	# default), matching the native Enumerable#sum signature.
	unless method_defined? :sum
		def sum initial=0
			inject(initial) { |a, b| a + b }
		end
	end
end
34
-
35
class Logger # :nodoc:
	# Build a +Logger+ (level WARN, writing to +logdev+) whose records include
	# the call site - file:line:method of the first frame outside logger.rb -
	# on a header line above the severity and message.
	def self.new_with_callstack logdev=STDERR
		logger = Logger.new logdev
		logger.level = WARN
		logger.formatter = lambda do |severity, time, progname, msg|
			# walk down past logger.rb's own frames to reach our caller
			stack = caller.dup
			stack.shift while stack.first =~ /\/logger\.rb:\d+:in/
			# turn "file:line:in `meth'" into the terser "file:line:meth"
			origin = stack.first.sub(/:in `(.*?)'/, ":\\1")
			"[%s %s]\n%-7s%s\n" % [time.strftime('%H:%M:%S'), origin, severity, msg.to_s]
		end
		logger
	end
end
51
-
@@ -1,139 +0,0 @@
1
- #! /usr/bin/ruby -w
2
-
3
- TEST_DIR = File.dirname __FILE__
4
- $: << "#{TEST_DIR}/../lib"
5
-
6
- require 'test/unit'
7
- require 'ole/storage'
8
- require 'digest/sha1'
9
- require 'stringio'
10
-
11
- #
12
- # = TODO
13
- #
14
- # These tests could be a lot more complete.
15
- #
16
-
17
# Exercises RangesIO, which presents a list of byte ranges of an underlying
# IO as one contiguous seekable stream. This very file doubles as the
# backing data.
class TestRangesIO < Test::Unit::TestCase
	def setup
		# why not :) ?
		# repeats too
		# mapped sizes are 100 + 10 + 50 = 160 bytes in total; :close_parent
		# makes @io.close also close the underlying file.
		ranges = [100..200, 0..10, 100..150]
		@io = RangesIO.new open("#{TEST_DIR}/test_storage.rb"), ranges, :close_parent => true
	end

	def teardown
		@io.close
	end

	def test_basic
		assert_equal 160, @io.size
		# this will map to the start of the file:
		@io.pos = 100
		assert_equal '#! /usr/bi', @io.read(10)
	end

	# should test range_and_offset specifically

	def test_reading
		# test selection of initial range, offset within that range
		pos = 100
		@io.seek pos
		# test advancing of pos properly, by...
		chunked = (0...10).map { @io.read 10 }.join
		# given the file is 160 long:
		assert_equal 60, chunked.length
		@io.seek pos
		# comparing with a flat read
		assert_equal chunked, @io.read(60)
	end
end
51
-
52
- # should test resizeable and migrateable IO.
53
-
54
# Read-only checks against the test_word_6.doc fixture: header fields, FAT
# tables, the directory tree, utf-16 name decoding and stream contents.
class TestStorageRead < Test::Unit::TestCase
	def setup
		@ole = Ole::Storage.open "#{TEST_DIR}/test_word_6.doc", 'rb'
	end

	def teardown
		@ole.close
	end

	def test_header
		# should have further header tests, testing the validation etc.
		assert_equal 17, @ole.header.to_a.length
		assert_equal 117, @ole.header.dirent_start
		assert_equal 1, @ole.header.num_bat
		assert_equal 1, @ole.header.num_sbat
		assert_equal 0, @ole.header.num_mbat
	end

	def test_fat
		# the fat block has all the numbers from 5..118 bar 117
		# values >= 2**32 - 3 are special markers, hence stripped by the
		# reject before comparing against the expected chains.
		bbat_table = [112] + ((5..118).to_a - [112, 117])
		assert_equal bbat_table, @ole.bbat.table.reject { |i| i >= (1 << 32) - 3 }, 'bbat'
		sbat_table = (1..43).to_a - [2, 3]
		assert_equal sbat_table, @ole.sbat.table.reject { |i| i >= (1 << 32) - 3 }, 'sbat'
	end

	def test_directories
		assert_equal 5, @ole.dirents.length, 'have all directories'
		# a more complicated one would be good for this
		assert_equal 4, @ole.root.children.length, 'properly nested directories'
	end

	def test_utf16_conversion
		# names are stored as utf-16 in the container; they should round-trip
		# back to plain strings here
		assert_equal 'Root Entry', @ole.root.name
		assert_equal 'WordDocument', @ole.root.children[2].name
	end

	def test_data
		# test the ole storage type
		type = 'Microsoft Word 6.0-Dokument'
		assert_equal type, @ole.root["\001CompObj"].read[/^.{32}([^\x00]+)/m, 1]
		# i was actually not loading data correctly before, so carefully check everything here
		# FIXME(review): String#hash is seed-randomized from ruby 1.9 onwards,
		# so these literal values only hold on ruby 1.8 - this should compare
		# Digest::SHA1 digests of each child's data instead.
		hashes = [-482597081, 285782478, 134862598, -863988921]
		assert_equal hashes, @ole.root.children.map { |child| child.read.hash }
	end
end
100
-
101
# Write/flush/repack round-trip tests, pinned down by whole-container SHA1
# digests of the resulting byte stream.
class TestStorageWrite < Test::Unit::TestCase
	# shorthand: hex SHA1 digest of +str+
	def sha1 str
		Digest::SHA1.hexdigest str
	end

	# FIXME
	# don't really want to lock down the actual internal api's yet. this will just
	# ensure for the time being that #flush continues to work properly. need a host
	# of checks involving writes that resize their file bigger/smaller, that resize
	# the bats to more blocks, that resizes the sb_blocks, that has migration etc.
	def test_write_hash
		io = StringIO.open File.read("#{TEST_DIR}/test_word_6.doc")
		assert_equal '9974e354def8471225f548f82b8d81c701221af7', sha1(io.string)
		# even a no-op open/close rewrites the container (digest changes)
		Ole::Storage.open(io) { }
		assert_equal 'efa8cfaf833b30b1d1d9381771ddaafdfc95305c', sha1(io.string)
		# add a repack test here
		Ole::Storage.open io, &:repack
		assert_equal 'c8bb9ccacf0aaad33677e1b2a661ee6e66a48b5a', sha1(io.string)
	end

	def test_plain_repack
		io = StringIO.open File.read("#{TEST_DIR}/test_word_6.doc")
		assert_equal '9974e354def8471225f548f82b8d81c701221af7', sha1(io.string)
		Ole::Storage.open io, &:repack
		# note equivalence to the above flush, repack, flush
		assert_equal 'c8bb9ccacf0aaad33677e1b2a661ee6e66a48b5a', sha1(io.string)
	end

	def test_create_from_scratch_hash
		io = StringIO.new
		Ole::Storage.new(io) { }
		assert_equal '6bb9d6c1cdf1656375e30991948d70c5fff63d57', sha1(io.string)
		# more repack test, note invariance
		Ole::Storage.open io, &:repack
		assert_equal '6bb9d6c1cdf1656375e30991948d70c5fff63d57', sha1(io.string)
	end
end
138
-
139
-
Binary file
Binary file
Binary file