wonko_the_sane 0.1.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/bin/wonko_the_sane +91 -0
- data/data/minecraft.json +668 -0
- data/data/mods.json +2475 -0
- data/data/sources.json +427 -0
- data/data/timestamps.json +13 -0
- data/lib/wonko_the_sane.rb +66 -0
- data/lib/wonko_the_sane/input/base_input.rb +55 -0
- data/lib/wonko_the_sane/input/forge_installer_profile_input.rb +134 -0
- data/lib/wonko_the_sane/input/forgefiles_mods_input.rb +30 -0
- data/lib/wonko_the_sane/input/jenkins_input.rb +46 -0
- data/lib/wonko_the_sane/input/mojang_input.rb +214 -0
- data/lib/wonko_the_sane/reader_writer.rb +163 -0
- data/lib/wonko_the_sane/registry.rb +61 -0
- data/lib/wonko_the_sane/rules.rb +69 -0
- data/lib/wonko_the_sane/timestamps.rb +30 -0
- data/lib/wonko_the_sane/tools/update_nem.rb +85 -0
- data/lib/wonko_the_sane/util/configuration.rb +35 -0
- data/lib/wonko_the_sane/util/deep_storage_cache.rb +65 -0
- data/lib/wonko_the_sane/util/extraction_cache.rb +30 -0
- data/lib/wonko_the_sane/util/file_hash_cache.rb +42 -0
- data/lib/wonko_the_sane/util/http_cache.rb +143 -0
- data/lib/wonko_the_sane/util/maven_identifier.rb +39 -0
- data/lib/wonko_the_sane/util/task_stack.rb +21 -0
- data/lib/wonko_the_sane/version.rb +3 -0
- data/lib/wonko_the_sane/version_index.rb +33 -0
- data/lib/wonko_the_sane/version_parser.rb +115 -0
- data/lib/wonko_the_sane/versionlists/base_version_list.rb +103 -0
- data/lib/wonko_the_sane/versionlists/curse_version_list.rb +52 -0
- data/lib/wonko_the_sane/versionlists/forge_version_list.rb +142 -0
- data/lib/wonko_the_sane/versionlists/forgefiles_mods_list.rb +24 -0
- data/lib/wonko_the_sane/versionlists/jenkins_version_list.rb +29 -0
- data/lib/wonko_the_sane/versionlists/liteloader_version_list.rb +66 -0
- data/lib/wonko_the_sane/versionlists/vanilla_legacy_version_list.rb +39 -0
- data/lib/wonko_the_sane/versionlists/vanilla_version_list.rb +35 -0
- data/lib/wonko_the_sane/wonko_version.rb +184 -0
- metadata +324 -0
@@ -0,0 +1,65 @@
|
|
1
|
+
require 'aws-sdk-resources'
|
2
|
+
require 'wonko_the_sane/util/http_cache'
|
3
|
+
|
4
|
+
module WonkoTheSane
  module Util
    # Backs up files fetched through HTTPCache to S3 ("deep storage") and
    # maintains a manifest of url => { url:, file:, size:, md5:, sha256: }
    # entries alongside the backups.
    class DeepStorageCache
      def initialize(manifest_file)
        # NOTE(review): manifest_file is currently unused — the manifest is
        # stored in the S3 bucket itself. Kept for interface compatibility.
        lines = File.read('.aws-credentials').split "\n"
        @resource = Aws::S3::Resource.new region: 'eu-west-1', credentials: Aws::Credentials.new(lines[0], lines[1])
        @bucket = @resource.bucket 'wonkoweb-02jandal-xyz'

        @manifest = @bucket.object 'manifest.json'
        # Fixes: `symbolize_keys:` is not a JSON.parse option (silently
        # ignored), and `get.body` is an IO that must be read before parsing.
        # Keys at the top level stay strings (urls); nested info keys become
        # symbols to match what info_for_file builds.
        @entries =
          if @manifest.exists?
            raw = JSON.parse @manifest.get.body.read
            raw.each_with_object({}) do |(url, info), acc|
              acc[url] = info.map { |k, v| [k.to_sym, v] }.to_h
            end
          else
            {}
          end
      end

      # Returns the manifest entry for +url+, downloading the file and
      # scheduling a backup upload to S3 the first time it is seen.
      def get_info(url, options = {})
        return @entries[url] if @entries.key? url

        ctxt = options[:ctxt] || 'DeepStorageCache'

        # was passing options[:ctxt] directly, discarding the default above
        file = HTTPCache.file url, check_stale: false, ctxt: ctxt
        info = self.class.info_for_file file, url

        @entries[url] = info
        @manifest.put body: JSON.pretty_generate(@entries)

        object = @bucket.object info[:file]
        unless object.exists? && object.size == info[:size]
          TaskStack.in_background do
            # convert the hex-encoded md5 to a base64-encoded md5, which is what S3 expects
            # http://anthonylewis.com/2011/02/09/to-hex-and-back-with-ruby/
            md5 = [info[:md5].scan(/../).map { |x| x.hex.chr }.join].pack 'm0'

            Logging.logger[ctxt].debug "Uploading backup of #{url} to S3..."
            object.put body: file, content_md5: md5, metadata: Hash[info.map { |k, v| [k.to_s, v.to_s] }]
            Logging.logger[ctxt].debug 'Backup successfully uploaded to S3'
          end
        end

        info
      end

      # Singleton entry point: uses the S3-backed cache when credentials are
      # available, otherwise just hashes the locally cached download.
      def self.get_info(url, options = {})
        if File.exist? '.aws-credentials' # File.exists? was removed in Ruby 3.2
          @@instance ||= DeepStorageCache.new 'cache/deep_storage.json'
          @@instance.get_info url, options
        else
          # was calling the private *instance* method info_for_file from class
          # context, which raised NoMethodError on this code path
          info_for_file HTTPCache.file(url, check_stale: false, ctxt: options[:ctxt]), url
        end
      end

      # Builds a manifest entry for a locally cached +file+ downloaded from
      # +url+. Class method so both instance and class code paths can use it.
      def self.info_for_file(file, url)
        {
          url: url,
          # sanitize the url into a character set S3 accepts as an object key
          file: url.gsub(/[&:$@=+,?\\^`><\{\}\[\]#%'"~|]/, '_'),
          size: file.size,
          md5: FileHashCache.get_md5(file),
          sha256: FileHashCache.get(file)
        }
      end
    end
  end
end
|
@@ -0,0 +1,30 @@
|
|
1
|
+
# Caches individual files extracted from archives under a base directory so
# the same archive member is only unpacked once.
class ExtractionCache
  def initialize(basedir)
    @basedir = basedir
    FileUtils.mkdir_p @basedir unless Dir.exist? @basedir
  end

  # Returns the contents of +file+ inside +archive+, extracting it into the
  # cache on first access. Only +type+ :zip is currently handled; for other
  # types a never-cached file raises Errno::ENOENT on the final read.
  def get(archive, type, file)
    out = path(archive, type, file)
    FileUtils.mkdir_p File.dirname(out) unless Dir.exist? File.dirname(out)
    unless File.exist? out
      if type == :zip
        Zip::File.open archive do |arch|
          File.write out, arch.glob(file).first.get_input_stream.read
        end
      end
    end

    File.read out
  end

  @@defaultCache = ExtractionCache.new 'cache/extraction'
  # Convenience accessor backed by the default cache directory.
  def self.get(archive, type, file)
    @@defaultCache.get archive, type, file
  end

  private

  # On-disk cache location for an archive member.
  # NOTE(review): +type+ is unused here, and two different archives with the
  # same basename will collide — confirm this is acceptable for callers.
  def path(archive, type, file)
    @basedir + '/' + File.basename(archive) + '/' + file
  end
end
|
@@ -0,0 +1,42 @@
|
|
1
|
+
# Caches content hashes of files keyed by path, invalidating an entry when
# the file's mtime or size changes. The cache persists to disk as JSON.
class FileHashCache
  def initialize(file, algorithm)
    @file = file
    @algorithm = algorithm
    # File.exists? was removed in Ruby 3.2; this ran at class load (see the
    # class variables below) and crashed the whole file on modern Ruby.
    @data = JSON.parse File.read(@file), symbolize_names: true if File.exist? @file
    @data ||= {}
  end

  # Returns the hash of +file+ (a File object or a path String), recomputing
  # and persisting it when the file changed since it was last hashed.
  def get(file)
    name = (file.is_a?(File) ? file.path : file).to_sym
    timestamp = (file.is_a?(File) ? file.mtime : File.mtime(file)).to_i
    size = file.is_a?(File) ? file.size : File.size(file)
    entry = @data[name]
    unless entry && entry[:timestamp] == timestamp && entry[:size] == size
      hash = digest(file.is_a?(File) ? file.read : File.read(file))
      @data[name] = {
        timestamp: timestamp,
        size: size,
        hash: hash
      }
      File.write @file, JSON.pretty_generate(@data)
    end
    @data[name][:hash]
  end

  # Hex digest of +data+ using the configured algorithm (nil if unknown,
  # matching the original silent behavior).
  def digest(data)
    case @algorithm
    when :sha256 then Digest::SHA256.hexdigest data
    when :md5 then Digest::MD5.hexdigest data
    end
  end

  @@defaultCache = FileHashCache.new 'cache/filehashes', :sha256
  def self.get(file)
    @@defaultCache.get file
  end

  @@md5Cache = FileHashCache.new 'cache/filehashes.md5', :md5
  def self.get_md5(file)
    @@md5Cache.get file
  end
end
|
@@ -0,0 +1,143 @@
|
|
1
|
+
# http://www.ericson.net/content/2011/04/caching-http-requests-with-ruby/
# TODO proper etags and other caching stuff
#
# Disk-backed HTTP cache: files live under +basedir+ keyed by caller-chosen
# keys, and freshness is revalidated with a HEAD request (If-Modified-Since /
# If-None-Match) at most once per process run.
class HTTPCache
  def initialize(basedir)
    @basedir = basedir
    FileUtils.mkdir_p @basedir unless Dir.exist? @basedir
    @etags = {}
    # File.exists? was removed in Ruby 3.2; use File.exist?
    @etags = JSON.parse File.read(@basedir + '/etags.json') if File.exist? @basedir + '/etags.json'
  end

  # HTTP GETs a url if it doesn't exist locally, returning the body as a String
  def get(ctxt, url, key, check_stale = true)
    fetch ctxt, url, key, check_stale
    IO.read @basedir + '/' + key
  end

  # Same as #get but returns the cached file opened read-only.
  def file(ctxt, url, key, check_stale = true)
    fetch ctxt, url, key, check_stale
    File.new @basedir + '/' + key, 'r'
  end

  private

  # Ensures the cached copy for +key+ exists (and is fresh when +check_stale+).
  def fetch(ctxt, url, key, check_stale)
    cached_path = @basedir + '/' + key
    cached_dir = File.dirname cached_path
    FileUtils.mkdir_p cached_dir unless Dir.exist? cached_dir

    TaskStack.in_background do
      if should_check cached_path, check_stale
        Logging.logger[ctxt.to_s].debug "DL: #{url}"
        resp = http_get ctxt.to_s, url, cached_path
        unless resp.nil?
          # 'wb' so binary downloads aren't corrupted by newline translation
          File.open(cached_path, 'wb') do |f|
            f.write resp.body
          end
        end
      end
    end
  end

  # get a file, using the local cached file modified timestamp to make sure we don't re-download stuff pointlessly
  # this also *should* handle redirection properly
  def http_get(ctxt, url, cached_path, limit = 10, http = nil)
    # too many redirects...
    raise ArgumentError, 'too many HTTP redirects' if limit == 0

    uri = url.is_a?(URI) ? url : URI.parse(url)

    # epoch-ish sentinel so a missing local file always looks stale
    local_date = Time.parse('1985-10-28')
    local_date = File.mtime cached_path if File.exist? cached_path

    if http.nil?
      # block param renamed: it used to shadow the `http` method argument
      Net::HTTP.start uri.hostname, uri.port, use_ssl: uri.scheme == 'https' do |conn|
        return http_get_internal ctxt, uri, cached_path, limit, conn, local_date
      end
    else
      return http_get_internal ctxt, uri, cached_path, limit, http, local_date
    end
  end

  # HEAD-revalidates +uri+ over the open +http+ connection, issuing a full GET
  # only when the remote copy looks newer/different. Returns the GET response,
  # or nil on a cache hit / failure.
  def http_get_internal(ctxt, uri, cached_path, limit = 10, http = nil, local_date = nil)
    # etags are persisted as JSON with String keys, so key by uri.to_s —
    # keying by the URI object meant lookups after a reload never matched
    existing_etag = @etags[uri.to_s]

    # start by doing a HEAD request
    head_req = Net::HTTP::Head.new uri
    head_req.add_field 'If-None-Match', existing_etag if existing_etag
    head_req.add_field 'If-Modified-Since', local_date.httpdate
    head_resp = http.request head_req

    case head_resp
    when Net::HTTPSuccess
      # don't re-check this
      checked cached_path

      remote_date = head_resp['Last-Modified'] ? Time.httpdate(head_resp['Last-Modified']) : Time.now
      new_etag = head_resp['ETag']

      # if the remote resource has been modified later than the local file, grab it and return it
      if remote_date > local_date || existing_etag != new_etag || !file_valid?(head_resp, cached_path)
        resp = http.request Net::HTTP::Get.new(uri)
        Logging.logger[ctxt].debug 'GOT FULL FILE'

        @etags[uri.to_s] = new_etag if new_etag
        File.write @basedir + '/etags.json', JSON.pretty_generate(@etags)

        return resp
      else
        Logging.logger[ctxt].debug 'CACHE HIT'
        return nil
      end
    when Net::HTTPRedirection
      # 304 Not Modified is a subclass of HTTPRedirection in net/http
      if head_resp.code == "304"
        Logging.logger[ctxt].debug 'CACHE HIT'
        checked cached_path
        return nil
      end

      location = head_resp['Location']
      Logging.logger[ctxt].debug "Redirected to #{location} - code #{head_resp.code}"
      newurl = URI.parse location
      newurl = URI.join uri.to_s, location if newurl.relative?
      return http_get ctxt, newurl, cached_path, limit - 1, http
    else
      # was interpolating the undefined local `location` here (NameError on
      # every non-2xx/3xx response); log the uri instead
      Logging.logger[ctxt].warn "#{uri} failed: #{head_resp.code}"
      checked cached_path
      return nil
    end
  end

  # True when the cached file still matches the response's size / MD5 headers.
  def file_valid?(response, path)
    if response['Content-Length']
      return false if response['Content-Length'].to_i != File.size(path)
    end
    if response['Content-MD5']
      return false if response['Content-MD5'] != FileHashCache.get_md5(path)
    end
    true
  end

  @@checked_paths = Set.new
  # Whether the cached file should be (re-)validated: always when missing,
  # otherwise only when +check_stale+ — and at most once per run either way.
  def should_check(cached_path, check_stale)
    # if the file doesn't exist locally, or we should check for stale cache
    if !File.exist?(cached_path) || check_stale
      # but only once per run
      return !@@checked_paths.include?(cached_path)
    end
    # otherwise don't check
    false
  end

  def checked(cached_path)
    @@checked_paths.add cached_path
  end

  public

  @@defaultCatcher = HTTPCache.new 'cache/network'
  def self.get(url, options = {})
    @@defaultCatcher.get(options[:ctxt] || 'Download', url, (options.key?(:key) ? options[:key] : url), options[:check_stale] || false)
  end

  def self.file(url, options = {})
    @@defaultCatcher.file(options[:ctxt] || 'Download', url, (options.key?(:key) ? options[:key] : url), options[:check_stale] || false)
  end
end
|
@@ -0,0 +1,39 @@
|
|
1
|
+
module WonkoTheSane
  module Util
    # Parses maven coordinates of the form
    #   group:artifact:version[:classifier][@extension]
    # and renders them back as repository paths or coordinate strings.
    class MavenIdentifier
      attr_accessor :group
      attr_accessor :artifact
      attr_accessor :version
      attr_accessor :classifier
      attr_accessor :extension

      def initialize(string)
        parts = string.match /(?<group>[^:@]+):(?<artifact>[^:@]+):(?<version>[^:@]+)(:(?<classifier>[^:@]+))?(@(?<extension>[^:@]+))?/
        @group = parts[:group]
        @artifact = parts[:artifact]
        @version = parts[:version]
        @classifier = parts[:classifier]
        # extension defaults to jar when the @ext suffix is absent
        @extension = parts[:extension] || 'jar'
      end

      # Repository-relative path, e.g. com/foo/bar/1.0/bar-1.0.jar
      def to_path
        filename = "#{@artifact}-#{@version}"
        filename += "-#{@classifier}" if @classifier
        "#{@group.tr('.', '/')}/#{@artifact}/#{@version}/#{filename}.#{@extension}"
      end

      # Canonical coordinate string; classifier and "@jar" are omitted when
      # absent / default.
      def to_name
        result = "#{@group}:#{@artifact}:#{@version}"
        result += ":#{@classifier}" if @classifier
        result += "@#{@extension}" if @extension != 'jar'
        result
      end
    end
  end
end
|
@@ -0,0 +1,21 @@
|
|
1
|
+
# A global LIFO stack of callables. Tasks are queued with .push (or
# .push_defered to enqueue at the bottom) and drained with .pop_all.
class TaskStack
  @@queue = []

  # Push a task onto the top of the stack.
  def self.push(task)
    @@queue.push task
  end

  # Enqueue a task at the bottom so it runs after everything currently queued.
  def self.push_defered(task)
    @@queue.unshift task
  end

  # Remove and invoke the topmost task.
  def self.pop
    @@queue.pop.call
  end

  # Drain the stack, running every queued task in LIFO order.
  def self.pop_all
    pop until @@queue.empty?
  end

  # Run +block+ on a worker thread while draining the stack on the current
  # thread, then wait for the worker to finish.
  def self.in_background(&block)
    worker = Thread.new(&block)
    pop_all
    worker.join
  end
end
|
@@ -0,0 +1,33 @@
|
|
1
|
+
# Holds every known version for a single uid, keeping entries unique by
# their version string.
class VersionIndex
  attr_accessor :uid
  attr_accessor :name
  attr_reader :versions

  def initialize(uid)
    @uid = uid
    @versions = []
  end

  # Adds +version+, replacing any existing entry with the same version
  # string. Objects that are not WonkoVersion are silently ignored.
  def add_version(version)
    return unless version.is_a? WonkoVersion
    remove_version version # remove any previous versions
    @versions << version
  end

  # Drops every stored entry whose version string equals +version+'s.
  def remove_version(version)
    @versions.select! { |ver| version.version != ver.version }
  end

  # Loads the full version file from disk; nil when it was never fetched.
  def self.get_full_version(version)
    return nil unless File.exist? version.local_filename
    Reader.read_version File.read(version.local_filename)
  end

  # On-disk index file location for +uid+.
  def self.local_filename(uid)
    'files/' + uid + '.json'
  end
end
|
@@ -0,0 +1,115 @@
|
|
1
|
+
# Parses and compares dotted version strings with an optional "-appendix"
# suffix (e.g. "1.7.10-pre4"): numeric sections compare numerically, mixed
# sections lexically, and a version without an appendix sorts after the same
# version with one (releases beat pre-releases).
class VersionParser
  private
  # NOTE(review): `private`/`public` markers have no effect on `def self.`
  # singleton methods; kept only as documentation of intent.

  @@cache = {}

  # Splits +string+ into { appendix:, sections: }, with purely numeric
  # sections converted to Integers. Memoized per input string.
  def self.parse(string)
    return @@cache[string] if @@cache.key? string

    appendix = string.scan(/\-.*$/).first
    # Keep the original string for the cache key: the old code cached under
    # the *stripped* string, so "1.0" returned the cached entry of
    # "1.0-pre1" (wrong appendix) and vice versa.
    stripped = string.sub(/\-.*$/, '')
    sections = stripped.split('.').map do |sec|
      begin
        Integer(sec)
      rescue ArgumentError, TypeError
        sec
      end
    end

    result = {
      appendix: appendix,
      sections: sections
    }
    @@cache[string] = result
    result
  end

  # Maps an appendix to [rank, number]: a/alpha < b/beta < rc/pre, with
  # unrecognized appendixes ranking lowest (-1).
  def self.appendix_values(appendix)
    # The old code matched the Array returned by scan against String `when`
    # clauses, which never succeeds — every appendix ranked as unknown.
    str = appendix.scan(/[a-zA-Z]+/).join
    digits = appendix.scan(/\d*/).join.to_i
    ret = case str
          when 'a', 'alpha' then [0, digits]
          when 'b', 'beta' then [1, digits]
          when 'rc', 'pre' then [2, digits]
          end
    ret || [-1, digits]
  end

  # Three-way comparison helper (-1, 0, 1) for two same-type values.
  def self.compare_values(first, second)
    first <=> second
  end

  public

  # Returns -1, 0, or 1 comparing two version strings.
  def self.compare(string1, string2)
    par1 = parse string1
    par2 = parse string2
    length = [par1[:sections].length, par2[:sections].length].max
    ret = 0
    length.times do |index|
      # missing sections compare as 0, so "1.2" == "1.2.0"
      val1 = par1[:sections].fetch index, 0
      val2 = par2[:sections].fetch index, 0
      ret = if val1.is_a?(Integer) && val2.is_a?(Integer)
              compare_values val1, val2
            else
              # mixed types fall back to string comparison
              compare_values val1.to_s, val2.to_s
            end
      break unless ret == 0
    end
    if ret == 0
      if par1[:appendix] && par2[:appendix]
        appendix1 = appendix_values par1[:appendix]
        appendix2 = appendix_values par2[:appendix]
        ret = compare_values appendix1[0], appendix2[0]
        ret = compare_values(appendix1[1], appendix2[1]) if ret == 0
      elsif par1[:appendix]
        # only the first has an appendix => it is the pre-release, thus older
        ret = -1
      elsif par2[:appendix]
        ret = 1
      end
    end
    ret
  end

  def self.less?(string1, string2)
    compare(string1, string2) == -1
  end

  def self.greater?(string1, string2)
    compare(string1, string2) == 1
  end

  def self.equal?(string1, string2)
    compare(string1, string2) == 0
  end

  def self.less_or_equal?(string1, string2)
    compare(string1, string2) <= 0
  end

  def self.greater_or_equal?(string1, string2)
    compare(string1, string2) >= 0
  end

  def self.not_equal?(string1, string2)
    compare(string1, string2) != 0
  end
end
|