wonko_the_sane 0.1.2 → 0.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. checksums.yaml +4 -4
  2. data/bin/wonko_the_sane +12 -28
  3. data/data/sources.json +0 -5
  4. data/lib/wonko_the_sane.rb +6 -26
  5. data/lib/wonko_the_sane/input/base_input.rb +5 -10
  6. data/lib/wonko_the_sane/input/forge_installer_profile_input.rb +29 -47
  7. data/lib/wonko_the_sane/input/forgefiles_mods_input.rb +4 -7
  8. data/lib/wonko_the_sane/input/jenkins_input.rb +11 -15
  9. data/lib/wonko_the_sane/input/mojang_input.rb +52 -61
  10. data/lib/wonko_the_sane/{reader_writer.rb → new_format.rb} +31 -35
  11. data/lib/wonko_the_sane/old_format.rb +123 -0
  12. data/lib/wonko_the_sane/registry.rb +38 -23
  13. data/lib/wonko_the_sane/rules.rb +6 -6
  14. data/lib/wonko_the_sane/timestamps.rb +4 -4
  15. data/lib/wonko_the_sane/tools/update_nem.rb +20 -18
  16. data/lib/wonko_the_sane/util/configuration.rb +54 -2
  17. data/lib/wonko_the_sane/util/deep_storage_cache.rb +10 -8
  18. data/lib/wonko_the_sane/util/extraction_cache.rb +5 -4
  19. data/lib/wonko_the_sane/util/file_hash_cache.rb +6 -6
  20. data/lib/wonko_the_sane/util/http_cache.rb +50 -105
  21. data/lib/wonko_the_sane/util/maven_identifier.rb +8 -14
  22. data/lib/wonko_the_sane/util/task_stack.rb +7 -6
  23. data/lib/wonko_the_sane/{version_parser.rb → util/version_parser.rb} +10 -14
  24. data/lib/wonko_the_sane/version.rb +1 -1
  25. data/lib/wonko_the_sane/versionlists/base_version_list.rb +22 -15
  26. data/lib/wonko_the_sane/versionlists/curse_version_list.rb +15 -16
  27. data/lib/wonko_the_sane/versionlists/forge_version_list.rb +34 -37
  28. data/lib/wonko_the_sane/versionlists/forgefiles_mods_list.rb +3 -7
  29. data/lib/wonko_the_sane/versionlists/jenkins_version_list.rb +6 -15
  30. data/lib/wonko_the_sane/versionlists/liteloader_version_list.rb +18 -28
  31. data/lib/wonko_the_sane/versionlists/vanilla_legacy_version_list.rb +6 -6
  32. data/lib/wonko_the_sane/versionlists/vanilla_version_list.rb +6 -8
  33. data/lib/wonko_the_sane/wonko_version.rb +32 -12
  34. metadata +71 -15
  35. data/lib/wonko_the_sane/wonkoweb_uploader.rb +0 -162
data/lib/wonko_the_sane/util/configuration.rb

@@ -1,11 +1,63 @@
+require 'yaml'
+
 module WonkoTheSane
   module Util
     class Configuration
+      class Aws
+        attr_accessor :client_id
+        attr_accessor :client_secret
+        attr_accessor :bucket
+      end
+
+      class WonkoWeb
+        attr_accessor :host
+        attr_accessor :email
+        attr_accessor :token
+        attr_accessor :name
+      end
+
+      attr_reader :aws
+      attr_reader :wonkoweb
       attr_reader :lists
       attr_accessor :data_path
+      attr_accessor :out_dir
+
+      def initialize
+        @lists = []
+        @aws = Aws.new
+        @wonkoweb = WonkoWeb.new
+      end
+
+      def load_from_env
+        @aws.client_id = ENV['WTS_AWS_CLIENT_ID'] if ENV['WTS_AWS_CLIENT_ID']
+        @aws.client_secret = ENV['WTS_AWS_CLIENT_SECRET'] if ENV['WTS_AWS_CLIENT_SECRET']
+        @aws.bucket = ENV['WTS_AWS_BUCKET'] if ENV['WTS_AWS_BUCKET']
+
+        @wonkoweb.host = ENV['WTS_WONKOWEB_HOST'] if ENV['WTS_WONKOWEB_HOST']
+        @wonkoweb.email = ENV['WTS_WONKOWEB_EMAIL'] if ENV['WTS_WONKOWEB_EMAIL']
+        @wonkoweb.token = ENV['WTS_WONKOWEB_TOKEN'] if ENV['WTS_WONKOWEB_TOKEN']
+        @wonkoweb.token = ENV['WTS_WONKOWEB_NAME'] if ENV['WTS_WONKOWEB_NAME']
+
+        @data_path = ENV['WTS_DATA_PATH'] if ENV['WTS_DATA_PATH']
+        @out_dir = ENV['WTS_OUT_DIR'] if ENV['WTS_OUT_DIR']
+      end
+
+      def load_from_file(filename)
+        raw = YAML.load_file filename
+        @aws.client_id = raw['aws']['client_id']
+        @aws.client_secret = raw['aws']['client_secret']
+        @aws.bucket = raw['aws']['bucket']
+
+        @wonkoweb.host = raw['wonkoweb']['host']
+        @wonkoweb.email = raw['wonkoweb']['email']
+        @wonkoweb.token = raw['wonkoweb']['token']
+        @wonkoweb.name = raw['wonkoweb']['name']
+
+        @data_path = raw['data_path']
+        @out_dir = raw['out_dir']
+      end
 
       def register_list(list)
-        @lists ||= []
         case list
         when String
          register_list list.to_sym
@@ -24,7 +76,7 @@ module WonkoTheSane
           register_list ForgeFilesModsList.new(uid.to_s, urlId)
         end if sources[:forgefiles]
         sources[:jenkins].each do |obj|
-          register_list JenkinsVersionList.new(obj[:uid], obj[:url], obj[:artifact], obj[:fileRegex])
+          register_list JenkinsVersionList.new(obj[:uid], obj[:url], obj[:artifact], obj[:@file_regex])
         end if sources[:jenkins]
         sources[:curse].each do |obj|
           register_list CurseVersionList.new(obj[:uid], obj[:id], obj[:fileregex])
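
The new Configuration class gives the gem two ways to be configured: `load_from_env` reads `WTS_*` environment variables, and `load_from_file` reads a YAML file with `aws` and `wonkoweb` sections plus top-level `data_path` and `out_dir` keys. Below is a minimal usage sketch; the file name is hypothetical, and `WonkoTheSane.configuration` is assumed to return the shared Configuration instance (it is not defined in this hunk, but the deep_storage_cache.rb changes below call it). Note that, as released, `load_from_env` assigns `WTS_WONKOWEB_NAME` to `wonkoweb.token` rather than `wonkoweb.name`.

    # wonko_the_sane.yml (hypothetical name), matching load_from_file above:
    #   aws:
    #     client_id: AKIA...
    #     client_secret: ...
    #     bucket: my-bucket
    #   wonkoweb:
    #     host: https://wonkoweb.example.org
    #     email: me@example.org
    #     token: ...
    #     name: me
    #   data_path: data
    #   out_dir: out
    config = WonkoTheSane.configuration
    config.load_from_file 'wonko_the_sane.yml'
    config.load_from_env   # overrides file values for any WTS_* variable that is set
    config.aws.bucket      # => "my-bucket", or ENV['WTS_AWS_BUCKET'] if set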
data/lib/wonko_the_sane/util/deep_storage_cache.rb

@@ -1,25 +1,27 @@
 require 'aws-sdk-resources'
 require 'wonko_the_sane/util/http_cache'
+require 'wonko_the_sane'
 
 module WonkoTheSane
   module Util
     class DeepStorageCache
       def initialize
         @resource = Aws::S3::Resource.new region: 'eu-west-1',
-                                          credentials: Aws::Credentials.new(Settings[:aws][:client_id],
-                                                                            Settings[:aws][:client_secret])
-        @bucket = @resource.bucket 'wonkoweb-02jandal-xyz'
+                                          credentials: Aws::Credentials.new(WonkoTheSane.configuration.aws.client_id,
+                                                                            WonkoTheSane.configuration.aws.client_secret)
+        @bucket = @resource.bucket WonkoTheSane.configuration.aws.bucket
 
         @manifest = @bucket.object 'manifest.json'
         @entries = @manifest.exists? ? JSON.parse(@manifest.get.body.read, symbolize_keys: true) : {}
       end
 
+      # fetch file (if not available), get info hash, upload metadata and file to S3, return info hash
       def get_info(url, options = {})
         return @entries[url] if @entries.key? url
 
         ctxt = options[:ctxt] || 'DeepStorageCache'
 
-        file = HTTPCache.file url, check_stale: false, ctxt: options[:ctxt]
+        file = HTTPCache.file url, check_stale: false, ctxt: ctxt
         info = self.class.info_for_file file, url
 
         @entries[url] = info
@@ -43,7 +45,7 @@ module WonkoTheSane
           object.put body: file,
                      content_md5: md5,
                      content_type: content_type,
-                     metadata: Hash[info.map { |k,v| [k.to_s, v.to_s]}]
+                     metadata: Hash[info.map { |k, v| [k.to_s, v.to_s] }]
           Logging.logger[ctxt].debug 'Backup successfully uploaded to S3'
         end
       end
@@ -52,9 +54,9 @@ module WonkoTheSane
       end
 
       def self.get_info(url, options = {})
-        if Settings[:aws][:client_id]
-          @@instance ||= DeepStorageCache.new
-          @@instance.get_info url, options
+        if WonkoTheSane.configuration.aws.client_id
+          @instance ||= DeepStorageCache.new
+          @instance.get_info url, options
         else
           info_for_file HTTPCache.file(url, check_stale: false, ctxt: options[:ctxt]), url
         end
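
DeepStorageCache now takes its S3 credentials and bucket from the new configuration object instead of the removed `Settings` hash, and the class-level entry point memoizes a single instance in `@instance`. A short sketch of the behavior this hunk implies; the URL is hypothetical, and the exact fields of the info hash come from `info_for_file`, which is not shown here:

    # with configuration.aws.client_id set: downloads via HTTPCache once,
    # uploads a backup copy plus metadata to the configured bucket, and
    # caches the info hash in manifest.json
    # without credentials: skips S3 entirely and just hashes the download
    info = WonkoTheSane::Util::DeepStorageCache.get_info(
      'https://example.org/some/artifact.jar', ctxt: 'Example')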
data/lib/wonko_the_sane/util/extraction_cache.rb

@@ -7,7 +7,7 @@ class ExtractionCache
   def get(archive, type, file)
     out = path(archive, type, file)
     FileUtils.mkdir_p File.dirname(out) unless Dir.exist? File.dirname(out)
-    if not File.exist? out
+    unless File.exist? out
       if type == :zip
         Zip::File.open archive do |arch|
           File.write out, arch.glob(file).first.get_input_stream.read
@@ -15,15 +15,16 @@ class ExtractionCache
       end
     end
 
-    return File.read out
+    File.read out
   end
 
-  @@defaultCache = ExtractionCache.new 'cache/extraction'
   def self.get(archive, type, file)
-    @@defaultCache.get archive, type, file
+    @cache ||= ExtractionCache.new 'cache/extraction'
+    @cache.get archive, type, file
   end
 
   private
+
   def path(archive, type, file)
     @basedir + '/' + File.basename(archive) + '/' + file
   end
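
The default cache is no longer built at class-load time via a `@@` class variable; `self.get` now creates it lazily in a class-level instance variable. Usage stays a one-liner; the archive and member names here are hypothetical:

    # reads mcmod.info out of a mod jar, storing the extracted copy under
    # cache/extraction/<archive basename>/<member> for reuse
    json = ExtractionCache.get 'cache/network/SomeMod-1.0.jar', :zip, 'mcmod.info'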
data/lib/wonko_the_sane/util/file_hash_cache.rb

@@ -10,7 +10,7 @@ class FileHashCache
     name = (file.is_a?(File) ? file.path : file).to_sym
     timestamp = (file.is_a?(File) ? file.mtime : File.mtime(file)).to_i
     size = file.is_a?(File) ? file.size : File.size(file)
-    if not @data[name] or not @data[name][:timestamp] == timestamp or not @data[name][:size] == size
+    if !@data[name] || !@data[name][:timestamp] == timestamp || !@data[name][:size] == size
       hash = digest(file.is_a?(File) ? file.read : File.read(file))
       @data[name] = {
         timestamp: timestamp,
@@ -19,7 +19,7 @@ class FileHashCache
       }
       File.write @file, JSON.pretty_generate(@data)
     end
-    return @data[name][:hash]
+    @data[name][:hash]
   end
 
   def digest(data)
@@ -30,13 +30,13 @@ class FileHashCache
     end
   end
 
-  @@defaultCache = FileHashCache.new 'cache/filehashes', :sha256
  def self.get(file)
-    @@defaultCache.get file
+    @sha256_cache ||= FileHashCache.new 'cache/filehashes.json', :sha256
+    @sha256_cache.get file
  end
 
-  @@md5Cache = FileHashCache.new 'cache/filehashes.md5', :md5
  def self.get_md5(file)
-    @@md5Cache.get file
+    @md5_cache ||= FileHashCache.new 'cache/filehashes.md5.json', :md5
+    @md5_cache.get file
  end
 end
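
The same lazy-instantiation pattern replaces the `@@defaultCache`/`@@md5Cache` class variables, and the cache files gain a `.json` extension. One subtlety in the rewritten condition: Ruby's low-precedence `not a == b` negates the comparison, while `!a == b` parses as `(!a) == b`, so the new timestamp and size checks behave differently from the old ones for existing entries. A usage sketch:

    # both helpers hash lazily and persist their indexes as JSON
    sha256 = FileHashCache.get 'Gemfile'       # backed by cache/filehashes.json
    md5    = FileHashCache.get_md5 'Gemfile'   # backed by cache/filehashes.md5.json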
data/lib/wonko_the_sane/util/http_cache.rb

@@ -1,17 +1,26 @@
-# http://www.ericson.net/content/2011/04/caching-http-requests-with-ruby/
-# TODO proper etags and other caching stuff
+# :cookie_jar
+# :timer, response.env[:duration]
+
+require 'faraday'
+require 'faraday_connection_pool'
+require 'faraday/http_cache'
+require 'faraday_middleware'
+require 'faraday-cookie_jar'
+require 'active_support/cache'
+require 'uri'
+
 class HTTPCache
   def initialize(basedir)
     @basedir = basedir
     FileUtils.mkdir_p @basedir unless Dir.exist? @basedir
-    @etags = {}
-    @etags = JSON.parse File.read(@basedir + '/etags.json') if File.exists? @basedir + '/etags.json'
+    @mutex = Mutex.new
+    @connections = {}
+    @store = ActiveSupport::Cache.lookup_store(:file_store, @basedir + '/faraday_cache/')
   end
 
   # HTTP GETs a url if it doesn't exist locally
   def get(ctxt, url, key, check_stale = true)
     fetch ctxt, url, key, check_stale
-    IO.read @basedir + '/' + key
   end
 
   def file(ctxt, url, key, check_stale = true)
@@ -20,124 +29,60 @@ class HTTPCache
   end
 
   private
+
   def fetch(ctxt, url, key, check_stale)
     cached_path = @basedir + '/' + key
     cached_dir = File.dirname cached_path
     FileUtils.mkdir_p cached_dir unless Dir.exist? cached_dir
 
-    TaskStack.in_background do
-      if should_check cached_path, check_stale
-        Logging.logger[ctxt.to_s].debug "DL: #{url}"
-        resp = http_get ctxt.to_s, url, cached_path
-        unless resp == nil
-          File.open(cached_path, 'w') do |f|
-            f.write resp.body
-          end
-        end
-      end
-    end
-  end
-
-  # get a file, using the local cached file modified timestamp to make sture we don't re-download stuff pointlessly
-  # this also *should* handle redirection properly
-  def http_get(ctxt, url, cached_path, limit = 10, http = nil)
-    # too many redirects...
-    raise ArgumentError, 'too many HTTP redirects' if limit == 0
-
-    uri = url.is_a?(URI) ? url : URI.parse(url)
+    return if File.exists?(cached_path) && !check_stale
 
-    local_date = Time.parse("1985-10-28")
-    local_date = File.mtime cached_path if File.exists? cached_path
-
-    if http.nil?
-      Net::HTTP.start uri.hostname, uri.port, :use_ssl => uri.scheme == 'https' do |http|
-        return http_get_internal ctxt, uri, cached_path, limit, http, local_date
-      end
-    else
-      return http_get_internal ctxt, uri, cached_path, limit, http, local_date
-    end
-  end
-  def http_get_internal(ctxt, uri, cached_path, limit = 10, http = nil, local_date = nil)
-    existing_etag = @etags[uri]
-
-    # start by doing a HEAD request
-    head_req = Net::HTTP::Head.new uri
-    head_req.add_field 'If-None-Match', existing_etag if existing_etag
-    head_req.add_field 'If-Modified-Since', local_date.httpdate
-    head_resp = http.request head_req
-
-    case head_resp
-    when Net::HTTPSuccess
-      # don't re-check this
-      checked cached_path
-
-      remote_date = head_resp['Last-Modified'] ? Time.httpdate(head_resp['Last-Modified']) : Time.now
-      new_etag = head_resp['ETag']
-
-      # if the remote resource has been modified later than the local file, grab it and return it
-      if remote_date > local_date || existing_etag != new_etag || !file_valid?(head_resp, cached_path)
-        req = Net::HTTP::Get.new(uri)
-        resp = http.request Net::HTTP::Get.new(uri)
-        Logging.logger[ctxt].debug 'GOT FULL FILE'
-
-        @etags[uri] = new_etag if new_etag
-        File.write @basedir + '/etags.json', JSON.pretty_generate(@etags)
-
-        return resp
-      else
-        Logging.logger[ctxt].debug 'CACHE HIT'
-        return nil
-      end
-    when Net::HTTPRedirection
-      if head_resp.code == "304"
-        Logging.logger[ctxt].debug 'CACHE HIT'
-        checked cached_path
-        return nil
+    TaskStack.in_background do
+      uri = URI.parse url
+      host = URI::HTTP.new(uri.scheme, uri.userinfo, uri.host, uri.port, nil, nil, nil, nil, nil).to_s
+
+      connection = nil
+      @mutex.synchronize do
+        connection_id = host + ctxt.to_s
+        @connections[connection_id] ||= create_faraday host, ctxt.to_s
+        connection = @connections[connection_id]
      end
+      Logging.logger[ctxt.to_s].debug "DL: #{url}"
 
-      location = head_resp['Location']
-      Logging.logger[ctxt].debug "Redirected to #{location} - code #{head_resp.code}"
-      newurl = URI.parse location
-      newurl = URI.join uri.to_s, location if newurl.relative?
-      return http_get ctxt, newurl, cached_path, limit - 1, http
-    else
-      Logging.logger[ctxt].warn "#{location} failed: #{head_resp.code}"
-      checked cached_path
-      return nil
+      response = connection.get uri.path
+      File.write cached_path, response.body
+      response.body
     end
   end
 
-  def file_valid?(response, path)
-    if response['Content-Length']
-      return false if response['Content-Length'].to_i != File.size(path)
-    end
-    if response['Content-MD5']
-      return false if response['Content-MD5'] != FileHashCache.get_md5(path)
+  def create_faraday(host, ctxt)
+    Faraday.new url: host do |faraday|
+      faraday.use :cookie_jar
+      faraday.response :raise_error
+      faraday.response :chunked
+      faraday.use :http_cache,
+                  logger: Logging.logger[ctxt],
+                  shared_cache: true,
+                  serializer: Marshal,
+                  store: @store
+      faraday.response :follow_redirects
+      faraday.request :retry, max: 2, interval: 0.05, interval_randomness: 0.5, backoff_factor: 2,
+                      exceptions: [Faraday::Error::ConnectionFailed]
+
+      faraday.adapter :net_http_pooled
     end
-    return true
   end
 
-  @@checked_paths = Set.new
-  def should_check(cached_path, check_stale)
-    # if the file doesn't exist locally, or we should check for stale cache
-    if !File.exist? cached_path or check_stale
-      # but only once per run
-      return !@@checked_paths.include?(cached_path)
-    end
-    # otherwise don't check
-    return false
-  end
+  public
 
-  def checked(cached_path)
-    @@checked_paths.add cached_path
-  end
+  class << self; attr_accessor :cache; end
+  self.cache = HTTPCache.new 'cache/network'
 
-  public
-  @@defaultCatcher = HTTPCache.new 'cache/network'
   def self.get(url, options = {})
-    @@defaultCatcher.get(options[:ctxt] || 'Download', url, (options.key?(:key) ? options[:key] : url), options[:check_stale] || false)
+    self.cache.get(options[:ctxt] || 'Download', url, (options.key?(:key) ? options[:key] : url), options[:check_stale] || false)
   end
+
   def self.file(url, options = {})
-    @@defaultCatcher.file(options[:ctxt] || 'Download', url, (options.key?(:key) ? options[:key] : url), options[:check_stale] || false)
+    self.cache.file(options[:ctxt] || 'Download', url, (options.key?(:key) ? options[:key] : url), options[:check_stale] || false)
   end
 end
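
This is the largest change in the release: the hand-rolled Net::HTTP logic (HEAD probes, manual ETag bookkeeping in etags.json, manual redirect chasing, Content-Length/Content-MD5 validation) is replaced by a per-host Faraday connection whose middleware stack handles cookies, error raising, chunked responses, HTTP caching (faraday-http-cache over an ActiveSupport file store), redirects, and retries on a pooled adapter. Connections are memoized per host+context under a mutex because fetches run through `TaskStack.in_background`. A sketch of the class-level API; URL and key are hypothetical, and note that `get` now returns `fetch`'s value, i.e. the response body after a download but nil when the early cached-file return fires:

    body = HTTPCache.get 'http://example.org/index.json',
                         ctxt: 'Example', key: 'example/index.json', check_stale: true
    file = HTTPCache.file 'http://example.org/file.jar', ctxt: 'Example'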
data/lib/wonko_the_sane/util/maven_identifier.rb

@@ -13,26 +13,20 @@ module WonkoTheSane
      @artifact = parts[:artifact]
      @version = parts[:version]
      @classifier = parts[:classifier]
-      @extension = parts[:extension] ? parts[:extension] : 'jar'
+      @extension = parts[:extension] || 'jar'
    end
 
    def to_path
-      path = @group.gsub(/\./, '/') + '/' + @artifact + '/' + @version + '/' + @artifact + '-' + @version
-      if @classifier
-        path = path + '-' + @classifier
-      end
-      return path + '.' + @extension
+      path = "#{@group.gsub /\./, '/'}/#{@artifact}/#{@version}/#{@artifact}-#{@version}"
+      path = "#{path}-#{@classifier}" if @classifier
+      "#{path}.#{@extension}"
    end
 
    def to_name
-      name = @group + ':' + @artifact + ':' + @version
-      if @classifier
-        name = name + ':' + @classifier
-      end
-      if @extension != 'jar'
-        name = name + '@' + @extension
-      end
-      return name
+      name = "#{@group}:#{@artifact}:#{@version}"
+      name = "#{name}:#{@classifier}" if @classifier
+      name = "#{name}@#{@extension}" if @extension != 'jar'
+      name
    end
  end
 end
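
The string building in `to_path` and `to_name` is condensed into interpolation with guard modifiers. As a worked example, assuming the initializer takes a parts hash as the visible assignments suggest (the constructor's full signature and the class's exact namespace are not shown in this hunk):

    id = MavenIdentifier.new group: 'org.ow2.asm', artifact: 'asm-all', version: '5.0.3'
    id.to_path  # => "org/ow2/asm/asm-all/5.0.3/asm-all-5.0.3.jar"
    id.to_name  # => "org.ow2.asm:asm-all:5.0.3"
    # with classifier: 'sources' the path gains "-sources" and the name ":sources";
    # a non-jar extension is appended to the name as "@<extension>"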
data/lib/wonko_the_sane/util/task_stack.rb

@@ -1,21 +1,22 @@
 class TaskStack
-  @@queue = []
+  class << self; attr_accessor :queue; end
+  self.queue = []
   def self.push(task)
-    @@queue.push task
+    self.queue.push task
   end
   def self.push_defered(task)
-    @@queue.unshift task
+    self.queue.unshift task
   end
   def self.pop
-    task = @@queue.pop
+    task = self.queue.pop
     task.call
   end
   def self.pop_all
-    self.pop until @@queue.empty?
+    self.pop until self.queue.empty?
   end
   def self.in_background(&block)
     thread = Thread.new &block
     TaskStack.pop_all
-    thread.join
+    thread.join.value
   end
 end
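
Replacing `@@queue` with a singleton-class accessor follows the same pattern as the other utility classes, and `thread.join.value` makes `in_background` return the block's result (and re-raise anything the background thread raised); the rewritten `HTTPCache#fetch` relies on exactly that to hand back `response.body`. A small sketch:

    TaskStack.push -> { puts 'queued work' }
    result = TaskStack.in_background { 6 * 7 }
    # pop_all drains the queue on the calling thread while the block runs,
    # then join.value returns 42 (or raises the block's exception)
    result  # => 42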