concord_cacher 0.0.5 → 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Rakefile +1 -1
- data/concord_cacher.gemspec +2 -2
- data/lib/concord/cacher.rb +40 -215
- data/lib/concord/diy_local_cacher.rb +9 -29
- data/lib/concord/java_proxy_cacher.rb +31 -7
- data/lib/concord_cacher.rb +1 -1
- data/spec/data/element_reference.otml +1 -1
- data/spec/diy_local_cacher_spec.rb +60 -19
- data/spec/helpers/cache_helper.rb +10 -2
- data/spec/java_proxy_cacher_spec.rb +22 -11
- data.tar.gz.sig +0 -0
- metadata +3 -3
- metadata.gz.sig +0 -0
data/Rakefile
CHANGED
@@ -6,7 +6,7 @@ require 'spec/rake/spectask'
 require './lib/concord_cacher.rb'
 
 require 'echoe'
-Echoe.new('concord_cacher', '0.0.5') do |p|
+Echoe.new('concord_cacher', '0.1.0') do |p|
   p.description = "concord_cacher provides support for locally caching a resource and all referenced resources in multiple different ways. It is intended for using with other Concord Consortium projects and not necessarily for outside projects."
   p.summary = "Support for locally caching a resource and all referenced resources in multiple different ways"
   p.url = "http://github.com/psndcsrv/concord_cacher"
data/concord_cacher.gemspec
CHANGED
@@ -2,12 +2,12 @@
 
 Gem::Specification.new do |s|
   s.name = %q{concord_cacher}
-  s.version = "0.0.5"
+  s.version = "0.1.0"
 
   s.required_rubygems_version = Gem::Requirement.new(">= 1.2") if s.respond_to? :required_rubygems_version=
   s.authors = ["Aaron Unger"]
   s.cert_chain = ["/Users/aunger/gem-public_cert.pem"]
-  s.date = %q{2010-
+  s.date = %q{2010-04-05}
   s.description = %q{concord_cacher provides support for locally caching a resource and all referenced resources in multiple different ways. It is intended for using with other Concord Consortium projects and not necessarily for outside projects.}
   s.email = %q{aunger @nospam@ concord.org}
   s.extra_rdoc_files = ["README.textile", "lib/concord_cacher.rb", "lib/concord/cacher.rb", "lib/concord/diy_local_cacher.rb", "lib/concord/java_proxy_cacher.rb"]
data/lib/concord/cacher.rb
CHANGED
@@ -3,240 +3,65 @@ class ::Concord::Cacher
   require 'open-uri'
   require 'cgi'
   require 'rexml/document'
-
-  DEBUG = false
 
-
-  URL_REGEX = /(http[s]?:\/\/[^'"]+)/i
-  # the imageBytes can be referenced by a OTImage object
-  SRC_REGEX = /(?:src|href|imageBytes|authoredDataURL)[ ]?=[ ]?['"]([^'"]+)/i
-  NLOGO_REGEX = /import-drawing "([^"]+)"/i
-  MW_REGEX = /<resource>(.*?mml)<\/resource>/
-  ALWAYS_SKIP_REGEX = /^(mailto|jres)/i
-  RECURSE_ONCE_REGEX = /html$/i # (resourceFile =~ /otml$/ || resourceFile =~ /html/)
-  RECURSE_FOREVER_REGEX = /(otml|cml|mml|nlogo)$/i
-
-  attr_reader :otml_url, :cache_dir, :uuid, :errors
+  attr_reader :main_resource, :errors
 
   def initialize(opts = {})
-    defaults = {:rewrite_urls => false, :verbose => false, :cache_headers => true, :create_map => true}
-    opts = defaults.merge(opts)
     raise ArgumentError, "Must include :url, and :cache_dir in the options hash." unless opts[:url] && opts[:cache_dir]
-
-    @
-    @
-    @
-    @
-
-    @
-
-
-
-
-
-
-
-
-    @
+
+    @main_resource = Concord::Resource.new
+    @main_resource.url = opts.delete(:url)
+    @main_resource.cache_dir = opts.delete(:cache_dir)
+    @main_resource.extras = opts
+    @main_resource.uri = URI.parse(@main_resource.url)
+    @main_resource.load
+
+    calculate_main_file_absolute_url
+  end
+
+  def calculate_main_file_absolute_url
+    orig_uri = @main_resource.uri
+    codebase = ''
+    if ((orig_uri.kind_of?(URI::HTTP) || orig_uri.kind_of?(URI::HTTPS)) && orig_uri.absolute?)
+      @main_resource.uri = orig_uri
     else
       # this probably references something on the local fs. we need to extract the document's codebase, if there is ony
-      if @content =~ /<otrunk[^>]+codebase[ ]?=[ ]?['"]([^'"]+)/
-
-        @
-
+      if @main_resource.content =~ /<otrunk[^>]+codebase[ ]?=[ ]?['"]([^'"]+)/
+        codebase = "#{$1}"
+        @main_resource.content.sub!(/codebase[ ]?=[ ]?['"][^'"]+['"]/,"")
+        codebase.sub!(/\/$/,'')
+        codebase = "#{codebase}/#{@main_resource.remote_filename}" unless codebase =~ /otml$/
+        @main_resource.uri = URI.parse(codebase)
       else
-        @
+        @main_resource.uri = orig_uri
       end
     end
 
-    @
-
-
-
-
-
+    if @main_resource.uri.relative?
+      # we need the main URI to be absolute so that we can use it to resolve references
+      file_root = URI.parse("file:///")
+      @main_resource.uri = file_root.merge(@main_resource.uri)
+    end
+  end
 
   def cache
     copy_otml_to_local_cache
-
-    write_url_to_hash_map if @create_map
-  end
-
-  def generate_main_filename
-    raise NotImplementedError, "You should be using this class through one of its sub-classes!"
+    print_errors if ::Concord::Resource.verbose
   end
-
-  def generate_filename(opts = {})
-    raise NotImplementedError, "You should be using this class through one of its sub-classes!"
-  end
-
-  def generate_uuid
-    raise NotImplementedError, "You should be using this class through one of its sub-classes!"
-  end
 
   def copy_otml_to_local_cache
     # save the file in the local server directories
-
-
-
-    uri = URI.parse(@otml_url)
-    if uri.relative?
-      # we need the main URI to be absolute so that we can use it to resolve references
-      file_root = URI.parse("file:///")
-      uri = file_root.merge(uri)
-    end
-    @content = parse_file("#{@cache_dir}#{@filename}", @content, @cache_dir, uri, true)
-
-    write_resource(@cache_dir + filename, @content)
-    write_property_map(@cache_dir + filename + ".hdrs", @content_headers) if @cache_headers
-    @url_to_hash_map[@otml_url + @filename + ".otml"] = filename
-
-    puts "\nThere were #{@errors.length} artifacts with errors.\n" if @verbose
-    @errors.each do |k,v|
-      puts "In #{k}:" if @verbose
-      v.uniq.each do |e|
-        puts "  #{e}" if @verbose
-      end
-    end
-  end
-
-  def parse_file(orig_filename, content, cache_dir, parent_url, recurse)
-    short_filename = /\/([^\/]+)$/.match(orig_filename)[1]
-    print "\n#{short_filename}: " if @verbose
-    processed_lines = []
-    lines = content.split("\n")
-    lines.each do |line|
-      line = CGI.unescapeHTML(line)
-      match_indexes = []
-      while (
-        ( match = (
-          URL_REGEX.match(line) ||
-          SRC_REGEX.match(line) ||
-          (/.*\.nlogo/.match(short_filename) ? NLOGO_REGEX.match(line) : nil) ||
-          (/.*\.(:?cml|mml)/.match(short_filename) ? MW_REGEX.match(line) : nil)
-          )
-        ) && (! match_indexes.include?(match.begin(1)))
-      )
-        print "\nMatched url: #{match[1]}: " if DEBUG
-        match_indexes << match.begin(1)
-        # get the resource from that location, save it locally
-        # match_url = match[1].gsub(/\s+/,"").gsub(/[\?\#&;=\+,<>"\{\}\|\\\^\[\]].*$/,"")
-        match_url = match[1]
-        # puts("pre: #{match[1]}, post: #{match_url}") if DEBUG
-        begin
-          resource_url = URI.parse(CGI.unescapeHTML(match_url))
-        rescue
-          @errors[parent_url] ||= []
-          @errors[parent_url] << "Bad URL: '#{CGI.unescapeHTML(match_url)}', skipping."
-          print 'x' if @verbose
-          next
-        end
-        if (resource_url.relative?)
-          # relative URL's need to have their parent document's codebase appended before trying to download
-          resource_url = parent_url.merge(resource_url.to_s)
-        end
-        resourceFile = match_url
-        resourceFile = resourceFile.gsub(/http[s]?:\/\//,"")
-        resourceFile = resourceFile.gsub(/\/$/,"")
-
-        if (resourceFile.length < 1) || ALWAYS_SKIP_REGEX.match(resourceFile)
-          print "S" if @verbose
-          next
-        end
-
-        begin
-          resource_content = ""
-          resource_headers = {}
-          open(resource_url.scheme == 'file' ? resource_url.path : resource_url.to_s) do |r|
-            resource_headers = r.respond_to?("meta") ? r.meta : {}
-            resource_headers['_http_version'] = "HTTP/1.1 #{r.respond_to?("status") ? r.status.join(" ") : "200 OK"}"
-            resource_content = r.read
-          end
-        rescue OpenURI::HTTPError, Timeout::Error, Errno::ENOENT => e
-          @errors[parent_url] ||= []
-          @errors[parent_url] << "Problem getting file: #{resource_url.to_s}, Error: #{e}"
-          print 'X' if @verbose
-          next
-        end
-
-        localFile = generate_filename(:content => resource_content, :url => resource_url)
-        @url_to_hash_map[resource_url.to_s] = localFile
-        line.sub!(match_url.to_s,localFile.to_s) if @rewrite_urls
-
-
-        # skip downloading already existing files.
-        # because we're working with sha1 hashes we can be reasonably certain the content is a complete match
-        if File.exists?(cache_dir + localFile)
-          print 's' if @verbose
-        else
-          # if it's an otml/html file, we should parse it too (only one level down)
-          if (recurse && (RECURSE_ONCE_REGEX.match(resourceFile) || RECURSE_FOREVER_REGEX.match(resourceFile)))
-            puts "recursively parsing '#{resource_url.to_s}'" if DEBUG
-            recurse_further = false
-            if RECURSE_FOREVER_REGEX.match(resourceFile)
-              recurse_further = true
-            end
-            begin
-              write_resource(cache_dir + localFile, "") # touch the file so that we avoid recursion
-              resource_content = parse_file(cache_dir + resourceFile, resource_content, cache_dir, resource_url, recurse_further)
-            rescue OpenURI::HTTPError => e
-              @errors[parent_url] ||= []
-              @errors[parent_url] << "Problem getting or writing file: #{resource_url.to_s}, Error: #{e}"
-              print 'X' if @verbose
-              next
-            end
-          end
-          begin
-            write_resource(cache_dir + localFile, resource_content)
-            write_property_map(cache_dir + localFile + ".hdrs", resource_headers) if @cache_headers
-            print "." if @verbose
-          rescue Exception => e
-            @errors[parent_url] ||= []
-            @errors[parent_url] << "Problem getting or writing file: #{resource_url.to_s}, Error: #{e}"
-            print 'X' if @verbose
-          end
-        end
-      end
-      processed_lines << line
-    end
-
-    print ".\n" if @verbose
-    return processed_lines.join("\n")
-  end
-
-  def write_resource(filename, content)
-    f = File.new(filename, "w")
-    f.write(content)
-    f.flush
-    f.close
+    @main_resource.should_recurse = true
+    @main_resource.process
+    @main_resource.write
   end
 
-  def
-
-
-
-
-
-    File.open(filename, "w") do |f|
-      f.write('<?xml version="1.0" encoding="UTF-8"?>' + "\n")
-      f.write('<!DOCTYPE properties SYSTEM "http://java.sun.com/dtd/properties.dtd">' + "\n")
-      f.write('<properties>' + "\n")
-      hash_map.each do |url,hash|
-        f.write("<entry key='#{CGI.escapeHTML(url)}'>#{hash}</entry>\n")
-      end
-      f.write('</properties>' + "\n")
-      f.flush
-    end
-  end
-
-  def load_existing_map
-    map_content = ::REXML::Document.new(File.new(@cache_dir + "url_map.xml")).root
-    map_content.elements.each("entry") do |entry|
-      k = entry.attributes["key"]
-      if ! (@url_to_hash_map.include? k)
-        val = entry.text
-        @url_to_hash_map[k] = val
-        # puts "Adding previously defined url: #{k} => #{val}" if DEBUG
+  def print_errors
+    puts "\nThere were #{@errors.length} artifacts with errors.\n"
+    ::Concord::Resource.errors.each do |k,v|
+      puts "In #{k}:"
+      v.uniq.each do |e|
+        puts "  #{e}"
       end
     end
   end
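The removals above fold the old download-and-rewrite machinery (the regex constants, parse_file, write_resource and the properties-XML writer) into a Concord::Resource class that does not appear in this excerpt of the diff. The pattern the two subclasses below now follow is sketched here as a reading aid, not documented API; MyCacher is a hypothetical name and the generator choice is illustrative only:

require 'concord/cacher'
require 'concord/resource'
require 'concord/filename_generators/java_proxy_generator'

# Hypothetical subclass mirroring JavaProxyCacher (below): configure the
# class-level switches on Concord::Resource, pick a filename generator, and
# let Cacher#initialize build and load the main resource.
class MyCacher < ::Concord::Cacher
  def initialize(opts = {})
    ::Concord::Resource.create_map         = true
    ::Concord::Resource.cache_headers      = true
    ::Concord::Resource.rewrite_urls       = false
    ::Concord::Resource.filename_generator = ::Concord::FilenameGenerators::JavaProxyGenerator
    super   # still requires :url and :cache_dir in the options hash
  end
end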
data/lib/concord/diy_local_cacher.rb
CHANGED
@@ -1,35 +1,15 @@
+require 'concord/cacher'
+
 class ::Concord::DiyLocalCacher < ::Concord::Cacher
-  require '
-  require '
-  require 'fileutils'
+  require 'concord/resource'
+  require 'concord/filename_generators/diy_generator'
 
   def initialize(opts = {})
-    raise
-
-
-
-
+    raise ::ArgumentError, "Must include :activity in the options hash." unless opts[:activity]
+    ::Concord::Resource.cache_headers = false
+    ::Concord::Resource.rewrite_urls = true
+    ::Concord::Resource.create_map = false
+    ::Concord::Resource.filename_generator = ::Concord::FilenameGenerators::DiyGenerator
     super
   end
-
-  def generate_main_filename
-    "#{generate_uuid}.otml"
-  end
-
-  def generate_uuid
-    @activity.uuid
-  end
-
-  def generate_filename(opts = {})
-    raise InvalidArgumentError, "Must include :url key in opts" unless opts[:url]
-    raise InvalidArgumentError, ":url value must be an instance of URI" unless opts[:url].kind_of?(::URI)
-    uri = opts[:url]
-    uri_path = uri.path.split('/')
-    uri_path = ["","index.html"] if uri_path.size == 0
-    uri_path.unshift("") if uri_path.size == 1
-    file_ext = uri_path[-1].split('.')[-1]
-    file = ::Digest::SHA1.hexdigest(uri.to_s)
-    file += ".#{file_ext}" if file_ext
-    return file
-  end
 end
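DiyLocalCacher now only validates :activity and configures Concord::Resource before deferring to the base class; the uuid-based naming and SHA1 filename logic removed above moves into FilenameGenerators::DiyGenerator. A minimal usage sketch, assuming (as the removed 0.0.5 code and the specs' mockup suggest) that the activity object only needs to respond to #uuid; the paths are hypothetical:

require 'concord_cacher'

activity = Struct.new(:uuid).new('0000-1111-2222')   # stand-in for a DIY activity record
cacher = Concord::DiyLocalCacher.new(
  :url       => '/path/to/activity.otml',            # hypothetical local otml file
  :cache_dir => '/tmp/diy_local/',                    # the specs pass a trailing slash
  :activity  => activity
)
cacher.cache   # rewrites references in the otml and stores SHA1-named copies in :cache_dir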
data/lib/concord/java_proxy_cacher.rb
CHANGED
@@ -1,16 +1,40 @@
+require 'concord/cacher'
+
 class ::Concord::JavaProxyCacher < ::Concord::Cacher
   require 'digest/sha1'
+  require 'concord/helper'
+  require 'concord/resource'
+  require 'concord/filename_generators/java_proxy_generator'
 
-
-
+  include ::Concord::Helper
+
+  def initialize(opts = {})
+    ::Concord::Resource.create_map = true
+    ::Concord::Resource.cache_headers = true
+    ::Concord::Resource.rewrite_urls = false
+    ::Concord::Resource.filename_generator = ::Concord::FilenameGenerators::JavaProxyGenerator
+    super
+  end
+
+  def cache
+    super
+    write_url_to_hash_map
   end
 
-  def
-
+  def write_url_to_hash_map
+    load_existing_map if (File.exists?(@main_resource.cache_dir + "url_map.xml"))
+    write_property_map(@main_resource.cache_dir + "url_map.xml", ::Concord::Resource.url_map)
   end
 
-  def
-
-
+  def load_existing_map
+    map_content = ::REXML::Document.new(File.new(@main_resource.cache_dir + "url_map.xml")).root
+    map_content.elements.each("entry") do |entry|
+      k = entry.attributes["key"]
+      if ! (::Concord::Resource.url_map.include? k)
+        val = entry.text
+        ::Concord::Resource.url_map[k] = val
+        # puts "Adding previously defined url: #{k} => #{val}" if DEBUG
+      end
+    end
   end
 end
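JavaProxyCacher keeps header caching and the URL map switched on and adds the url_map.xml bookkeeping shown above. A usage sketch; the URL and directory are hypothetical:

require 'concord_cacher'

cacher = Concord::JavaProxyCacher.new(
  :url       => 'http://example.com/activity.otml',  # hypothetical remote otml
  :cache_dir => '/tmp/java_proxy/'
)
cacher.cache
# The cache directory now holds SHA1-named copies of each resource, their
# ".hdrs" header files, and url_map.xml, a Java properties-XML map from original
# URL to cached filename (the same format as the properties writer removed from
# cacher.rb above). Any existing map is merged in first via load_existing_map.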
data/lib/concord_cacher.rb
CHANGED
@@ -2,7 +2,7 @@ $:.unshift(File.dirname(__FILE__)) unless
 $:.include?(File.dirname(__FILE__)) || $:.include?(File.expand_path(File.dirname(__FILE__)))
 
 module Concord
-  require 'concord/
+  require 'concord/filename_generators'
   require 'concord/diy_local_cacher'
   require 'concord/java_proxy_cacher'
 end
data/spec/data/element_reference.otml
CHANGED
@@ -7,7 +7,7 @@
 <root>
   <OTText>
     <!-- Absolute references -->
-    <a href="http://
+    <a href="http://www.concord.org/~aunger/">Visit the Concord Website</a>
     <img src="http://portal.concord.org/images/icons/chart_bar.png" />
     <OTBlob imageBytes="http://portal.concord.org/images/icons/chart_pie.png" />
 
data/spec/diy_local_cacher_spec.rb
CHANGED
@@ -10,31 +10,20 @@ require 'fileutils'
 
 include FileUtils
 
-require 'openssl'
-module OpenSSL
-  module SSL
-    remove_const :VERIFY_PEER
-  end
-end
-OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
-
 SPEC_ROOT = File.expand_path(File.dirname(__FILE__))
 
-$\ = "<br/>\n"
-
 describe 'DIY Local Cacher' do
   include CacheHelper
 
   before(:each) do
     @klass = Concord::DiyLocalCacher
     @cache = File.join(SPEC_ROOT, '..', 'tmp','diy_local')
-    rm_rf(@cache)
     mkdir_p(@cache)
     @cache += '/'
   end
 
   after(:each) do
-
+    rm_rf(@cache)
   end
 
   def mockup(file)
@@ -68,7 +57,6 @@ describe 'DIY Local Cacher' do
     end
 
     it 'should create a cached file of the original url' do
-      url = File.join(SPEC_ROOT,'data','empty.otml')
       cache('empty.otml', :activity => mockup('empty.otml'))
       exists?('hash.otml')
     end
@@ -79,6 +67,13 @@ describe 'DIY Local Cacher' do
      cache('empty.otml', :activity => mockup('empty.otml'))
      does_not_exist?("#{expected_filename}.hdrs")
    end
+
+    it 'should strip the codebase from the otrunk element' do
+      cache('codebase.otml', :activity => mockup('codebase.otml'))
+
+      file_content = File.read(File.join(@cache,'hash.otml'))
+      file_content.should_not match(/<otrunk.*?codebase=.*?>/)
+    end
   end
 
   describe 'standard uri syntax' do
@@ -113,7 +108,7 @@ describe 'DIY Local Cacher' do
    it 'should cache 6 referenced files' do
      expected_files = []
      expected_files << 'hash.otml' # element_reference.otml
-      expected_files << filename_for('http://
+      expected_files << filename_for('http://www.concord.org/~aunger/')
      expected_files << filename_for('http://portal.concord.org/images/icons/chart_bar.png')
      expected_files << filename_for('http://portal.concord.org/images/icons/chart_pie.png')
      expected_files << filename_for('resources/text.txt', File.join(SPEC_ROOT,'data','element_reference.otml'))
@@ -131,7 +126,7 @@ describe 'DIY Local Cacher' do
    it 'should rewrite the urls in the main otml file' do
      expected_urls = []
      unexpected_urls = []
-      unexpected_urls << 'http://
+      unexpected_urls << 'http://www.concord.org/~aunger/'
      unexpected_urls << 'http://portal.concord.org/images/icons/chart_bar.png'
      unexpected_urls << 'http://portal.concord.org/images/icons/chart_pie.png'
      unexpected_urls << File.join('resources','text.txt')
@@ -248,18 +243,59 @@ describe 'DIY Local Cacher' do
   end
 
   describe 'embedded nlogo files' do
+    it 'should download absolute referenced nlogo files' do
+      expected_files = []
+      expected_files << filename_for('http://otrunk.concord.org/examples/LOOPS/models/Spaceship.1D.docking.nlogo')
+
+      cache('nlogo_absolute.otml', :activity => mockup('nlogo_absolute.otml'))
+
+      expected_files.each do |f|
+        exists?(f)
+      end
+    end
+
+    it 'should download relative referenced nlogo files' do
+      expected_files = []
+      expected_files << filename_for('resources/nlogo/SpaceRescue.Practice1.nlogo', File.join(SPEC_ROOT,'data','nlogo_relative.otml'))
+
+      cache('nlogo_relative.otml', :activity => mockup('nlogo_relative.otml'))
+
+      expected_files.each do |f|
+        exists?(f)
+      end
+    end
+
    it 'should correctly download resources referenced from within netlogo model files'
  end
 
  describe 'embedded mw files' do
-    it 'should download absolute referenced cml files'
-
+    it 'should download absolute referenced cml files' do
+      expected_files = []
+      expected_files << filename_for('http://otrunk.concord.org/examples/LOOPS/models/statesofmatter/statesOfMatterPage1.cml')
+
+      cache('mw_model_absolute.otml', :activity => mockup('mw_model_absolute.otml'))
+
+      expected_files.each do |f|
+        exists?(f)
+      end
+    end
+
+    it 'should download relative referenced cml files' do
+      expected_files = []
+      expected_files << filename_for('resources/statesofmatter/statesOfMatterPage1.cml', File.join(SPEC_ROOT,'data','mw_model_relative.otml'))
+
+      cache('mw_model_relative.otml', :activity => mockup('mw_model_relative.otml'))
+
+      expected_files.each do |f|
+        exists?(f)
+      end
+    end
 
    it 'should correctly download mmls referenced from within mw cml files' do
      expected_files = []
      expected_files << filename_for('http://otrunk.concord.org/examples/LOOPS/models/statesofmatter/statesOfMatterPage1$0.mml')
 
-      cache('
+      cache('mw_model_absolute.otml', :activity => mockup('mw_model_absolute.otml'))
 
      expected_files.each do |f|
        exists?(f)
@@ -271,7 +307,12 @@ describe 'DIY Local Cacher' do
   end
 
   describe 'never cache' do
-    it 'should always skip
+    it 'should always skip some references' do
+      url = File.join(SPEC_ROOT,'data','always_skip.otml')
+      expected_filename = 'hash.otml'
+      cache('always_skip.otml', :activity => mockup('always_skip.otml'))
+      cache_size.should == 1
+    end
   end
 
   describe 'recursion limits' do
data/spec/helpers/cache_helper.rb
CHANGED
@@ -1,6 +1,6 @@
 module CacheHelper
   def cache(file, opts = {})
-    options = {:url => File.join(SPEC_ROOT,'data',file), :cache_dir => @cache
+    options = {:url => File.join(SPEC_ROOT,'data',file), :cache_dir => @cache}.merge(opts)
     cacher = @klass.new(options)
     cacher.cache
   end
@@ -19,4 +19,12 @@ module CacheHelper
   def cache_size
     Dir.glob(@cache + "/**/*").select{|f| File.file?(f) }.size
   end
-end
+end
+
+require 'openssl'
+module OpenSSL
+  module SSL
+    remove_const :VERIFY_PEER
+  end
+end
+OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
data/spec/java_proxy_cacher_spec.rb
CHANGED
@@ -10,14 +10,6 @@ require 'fileutils'
 
 include FileUtils
 
-require 'openssl'
-module OpenSSL
-  module SSL
-    remove_const :VERIFY_PEER
-  end
-end
-OpenSSL::SSL::VERIFY_PEER = OpenSSL::SSL::VERIFY_NONE
-
 SPEC_ROOT = File.expand_path(File.dirname(__FILE__))
 
 
@@ -74,6 +66,20 @@ describe 'Java Proxy Cacher' do
      does_not_exist?('8f0ebcb45d7ba71a541d4781329f4a6900c7ee65') # http://portal.concord.org/images/icons/delete.png
    end
 
+    it 'should handle a url with trailing spaces gracefully' do
+      url = File.join(SPEC_ROOT,'data','url_with_space.otml')
+      expected_filename = ::Digest::SHA1.hexdigest(File.read(url))
+
+      lambda {
+        cache('url_with_space.otml')
+      }.should_not raise_error
+
+      cache_size.should == 5
+
+      exists?(expected_filename)
+      exists?('d1cea238486aeeba9215d56bf71efc243754fe48') # http://portal.concord.org/images/icons/chart_line.png
+    end
+
    it 'should handle an empty url gracefully' do
      url = File.join(SPEC_ROOT,'data','empty_url.otml')
      expected_filename = ::Digest::SHA1.hexdigest(File.read(url))
@@ -120,8 +126,8 @@ describe 'Java Proxy Cacher' do
  describe 'element references syntax' do
    it 'should cache 6 referenced files' do
      expected_files = []
-      expected_files << '
-      expected_files << '
+      expected_files << '836ba09d9d7288cf735f555e7a9b9b314ad2f6ef' # element_reference.otml
+      expected_files << '20e89b62dda582d80e1832050f4998d64c801c03' # http://www.concord.org/~aunger/
      expected_files << '4e9576a56db3d142113b8905d7aa93e31c9f441b' # http://portal.concord.org/images/icons/chart_bar.png
      expected_files << '41f082b7e69a399679a47acfdcd7e7a204e49745' # http://portal.concord.org/images/icons/chart_pie.png
      expected_files << 'cbe7ac86926fd3b8aa8659842a1d8c299d8966a7' # resources/text.txt
@@ -191,7 +197,12 @@ describe 'Java Proxy Cacher' do
  end
 
  describe 'never cache' do
-    it 'should always skip
+    it 'should always skip some references' do
+      url = File.join(SPEC_ROOT,'data','always_skip.otml')
+      expected_filename = ::Digest::SHA1.hexdigest(File.read(url))
+      cache('always_skip.otml')
+      cache_size.should == 3
+    end
  end
 
  describe 'recursion limits' do
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -4,9 +4,9 @@ version: !ruby/object:Gem::Version
   prerelease: false
   segments:
   - 0
+  - 1
   - 0
-  - 5
-  version: 0.0.5
+  version: 0.1.0
 platform: ruby
 authors:
 - Aaron Unger
@@ -35,7 +35,7 @@ cert_chain:
   8kT2T2VF
   -----END CERTIFICATE-----
 
-date: 2010-
+date: 2010-04-05 00:00:00 -04:00
 default_executable:
 dependencies: []
 
metadata.gz.sig
CHANGED
Binary file