myprecious 0.0.5 → 0.2.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +5 -5
- data/bin/myprecious +5 -1
- data/lib/myprecious.rb +619 -95
- data/lib/myprecious/cves.rb +239 -0
- data/lib/myprecious/data_caches.rb +71 -0
- data/lib/myprecious/python_packages.rb +1190 -0
- data/lib/myprecious/ruby_gems.rb +291 -0
- metadata +141 -7
@@ -0,0 +1,239 @@
|
|
1
|
+
require 'date'
|
2
|
+
require 'digest'
|
3
|
+
require 'json'
|
4
|
+
require 'myprecious/data_caches'
|
5
|
+
require 'open3'
|
6
|
+
require 'pathname'
|
7
|
+
require 'rest-client'
|
8
|
+
require 'set'
|
9
|
+
|
10
|
+
module MyPrecious
|
11
|
+
module CVEs
|
12
|
+
extend DataCaching
|
13
|
+
|
14
|
+
MIN_GAP_SECONDS = 5
|
15
|
+
CONFIG_FILE = '.myprecious-cves.rb'
|
16
|
+
|
17
|
+
CVE_DATA_CACHE_DIR = MyPrecious.data_cache(DATA_DIR / "cve-data")
|
18
|
+
|
19
|
+
class << self
  ##
  # Directory in which to look for the CVE configuration script (a Pathname)
  #
  attr_reader :config_dir

  ##
  # Assign the configuration directory; any value acceptable to Pathname()
  # may be given
  #
  def config_dir=(value)
    @config_dir = Pathname(value)
  end
end
|
26
|
+
|
27
|
+
##
# Time of the most recent NVD query
#
# Defaults to one day in the past so the first real query is never delayed.
#
def self.last_query_time
  @last_query_time || (@last_query_time = DateTime.now - 1)
end

##
# Record that an NVD query was just made
#
def self.queried!
  @last_query_time = DateTime.now
end
|
34
|
+
|
35
|
+
##
# If you don't specify version, you get to match against the applicable
# configurations on your own to determine which CVEs returned apply to
# the versions of the named package in which you are interested
#
def self.get_for(package_name, version='*')
  # Query the (legacy) NVD CVE 1.0 REST API by CPE match string
  nvd_url = URI("https://services.nvd.nist.gov/rest/json/cves/1.0")
  nvd_url.query = URI.encode_www_form(
    cpeMatchString: "cpe:2.3:a:*:#{package_name.downcase}:#{version}:*:*:*:*:*:*:*",
  )

  # Cache file keyed by the full query URL, so distinct name/version
  # combinations cache separately
  cache = CVE_DATA_CACHE_DIR / "#{Digest::SHA256.hexdigest(nvd_url.to_s)}.json"
  cve_data = apply_cache(cache) do
    # Use last_query_time to sleep if necessary
    # (DateTime subtraction yields days, hence the *24*3600 to seconds)
    wait_time = MIN_GAP_SECONDS - (DateTime.now - last_query_time) * 24 * 3600
    if wait_time > 0
      sleep(wait_time)
    end

    response = RestClient.get(nvd_url.to_s)
    queried!

    JSON.parse(response.body)
  end

  begin
    # Pair each CVE record with its applicability object; drop entries the
    # applicability object positively rules out for +version+
    return cve_data['result']['CVE_Items'].map do |e|
      applicability = objectify_configurations(package_name, e['configurations'])
      score = (((e['impact'] || {})['baseMetricV3'] || {})['cvssV3'] || {})['baseScore']
      cve = CVERecord.new(
        e['cve']['CVE_data_meta']['ID'],
        applicability.respond_to?(:vendors) ? applicability.vendors : nil,
        score
      )

      [cve, applicability]
    end.reject {|cve, a| a.respond_to?(:applies_to?) && !a.applies_to?(version)}
  rescue StandardError => e
    # Malformed/unexpected response shape: warn (with payload) and report none
    $stderr.puts "[WARN] #{e}\n\n#{JSON.dump(cve_data)}\n\n"
    []
  end
end
|
77
|
+
|
78
|
+
##
# Lazily load the user configuration Hash
#
# Runs the +.myprecious-cves.rb+ script from #config_dir (if both exist)
# with the current Ruby interpreter and parses its stdout as JSON.  Always
# returns a Hash; any failure (non-zero exit, bad JSON, script error)
# results in an empty Hash with a message on stderr.
#
def self.config
  if !@config && config_dir
    if (config_path = config_dir / CONFIG_FILE).exist?
      @config = begin
        config_prog_output, status = Open3.capture2(RbConfig.ruby, config_path.to_s)
        if status.success?
          JSON.parse(config_prog_output)
        else
          # BUG FIX: previously used shell-style "${status.exitstatus}",
          # which printed literally instead of interpolating the exit code
          $stderr.puts "#{config_path} did not exit cleanly (code #{status.exitstatus})"
          {}
        end
      rescue StandardError
        # Swallow here; the kind_of?(Hash) check below reports and recovers
      end

      unless @config.kind_of?(Hash)
        $stderr.puts "#{config_path} did not output a JSON configuration"
        @config = {}
      end
    else
      @config = {}
    end
  end
  @config ||= {}
end
|
102
|
+
|
103
|
+
##
# Wrap a CVE "configurations" sub-document in an applicability object
# when the data version is one we understand; otherwise hand back the
# raw configurations unchanged
#
def self.objectify_configurations(package_name, configs)
  return configs unless configs.kind_of?(Hash) && configs['CVE_data_version'] == "4.0"
  Applicability_V4_0.new(package_name, configs)
end
|
110
|
+
|
111
|
+
##
# A CVE identifier string that also carries vendor and CVSS score metadata
#
class CVERecord < String
  # Vendor set (or nil) and CVSSv3 base score (or nil) for this CVE
  attr_accessor :vendors, :score

  # +id+ becomes the String value of this record
  def initialize(id, vendors, score)
    super(id)
    @vendors = vendors
    @score = score
  end
end
|
121
|
+
|
122
|
+
##
# Wraps a CVE "configurations" object (CVE_data_version 4.0) to answer
# applicability questions for a particular package
#
class Applicability_V4_0 < Hash
  def initialize(package, configs)
    super()
    self.update(configs)
    # Package names are matched case-insensitively against CPE entries
    @package = package.downcase
  end
  attr_reader :package

  # Raw configuration node list from the CVE entry
  def nodes
    self['nodes']
  end

  ##
  # Does any configuration node indicate +version+ of #package is affected?
  #
  def applies_to?(version)
    package_nodes(nodes).any? do |node|
      version_matches_node?(version, node)
    end
  end

  ##
  # Set of vendor strings from vulnerable CPE URIs naming #package
  #
  def vendors
    Set.new(each_vulnerable_cpe.map do |cpe|
      cpe.split(':')[3]
    end)
  end

  # Nodes that mention #package directly (or have children, which might)
  def package_nodes(node_list)
    node_list.select do |node|
      node['children'] || node['cpe_match'].any? do |pattern|
        pattern['cpe23Uri'] =~ package_cpe_regexp
      end
    end
  end

  ##
  # Yield each vulnerable CPE URI matching #package, walking child nodes
  # depth-first
  #
  def each_vulnerable_cpe
    return enum_for(:each_vulnerable_cpe) unless block_given?

    remaining = nodes.to_a.dup
    while (node = remaining.shift)
      if node['children']
        remaining.insert(0, *node['children'])
      else
        node['cpe_match'].each do |pattern|
          next unless pattern['vulnerable']
          cpe = pattern['cpe23Uri']
          if package_cpe_regexp =~ cpe
            yield cpe
          end
        end
      end
    end
  end

  # Anchored pattern matching a CPE 2.3 application URI for #package
  def package_cpe_regexp
    # BUG FIX: the dots in "2.3" were unescaped (matched any character),
    # and #package was interpolated raw, so a name containing regexp
    # metacharacters produced a wrong or invalid pattern.
    /^cpe:2\.3:a:[^:]*:#{Regexp.escape(package)}(:|$)/
  end

  def version_matches_node?(version, node)
    # AND nodes require all children/matches; any other operator needs one
    test = (node['operator'] == 'AND') ? :all? : :any?
    if node['children']
      return node['children'].send(test) {|child| version_matches_node?(version, child)}
    end

    return node['cpe_match'].any? do |pattern|
      cpe_entry_indicates_vulnerable_version?(version, pattern)
    end
  end

  def cpe_entry_indicates_vulnerable_version?(version, pattern)
    return false unless pattern['vulnerable']

    # Fields 3..6 of a CPE 2.3 URI: vendor, product, version, update
    cpe_vendor, cpe_product, cpe_version, cpe_update = pattern['cpe23Uri'].split(':')[3,4]
    return false if (CVEs.config['blockedProducts'] ||= []).include?([cpe_vendor, cpe_product].join(':'))
    return false if cpe_product != @package
    if version == '*'
      return true
    end
    return false unless [nil, '*', '-'].include?(cpe_update) # We'll ignore prerelease versions
    if cpe_version != '*' && cpe_version == version
      return true
    end

    # Lower bound (inclusive or exclusive)
    if (range_start = pattern['versionStartIncluding'])
      range_test = :<=
    elsif (range_start = pattern['versionStartExcluding'])
      range_test = :<
    else
      range_test = nil
    end
    if range_test && !version_compare(range_start, version).send(range_test, 0)
      return false
    end

    # Upper bound (inclusive or exclusive)
    if (range_end = pattern['versionEndIncluding'])
      range_test = :<=
    elsif (range_end = pattern['versionEndExcluding'])
      range_test = :<
    else
      range_test = nil
    end
    if range_test && !version_compare(version, range_end).send(range_test, 0)
      return false
    end

    # Truthy only when at least one range bound was present
    return range_start || range_end
  end

  ##
  # Return a <=> b for version strings a and b
  #
  def version_compare(a, b)
    make_comparable(a) <=> make_comparable(b)
  end

  # Numeric-only comparison key; non-numeric segments become 0 via to_i
  def make_comparable(ver_str)
    ver_str.split('.').map {|p| p.to_i}
  end
end
|
238
|
+
end
|
239
|
+
end
|
@@ -0,0 +1,71 @@
|
|
1
|
+
require 'myprecious'
|
2
|
+
require 'pathname'
|
3
|
+
|
4
|
+
class << MyPrecious
  # When truthy, apply_cache always refetches instead of reading caches
  attr_accessor :caching_disabled

  ##
  # Declare a path as a data cache
  #
  # Returns the +fpath+ it was given, for convenient inline use when
  # assigning cache-path constants.
  #
  def data_cache(fpath)
    @data_caches ||= []
    @data_caches << fpath
    fpath
  end

  ##
  # Retrieve an Array of all known data caches (possibly empty)
  #
  def data_caches
    @data_caches || []
  end
end
|
24
|
+
|
25
|
+
module MyPrecious
  module DataCaching
    ##
    # Use cached data in or write data to a file cache
    #
    # +cache+ should be a Pathname to a file in which JSON data can be
    # cached.
    #
    # The block given will only be invoked if the cache does not exist or
    # is stale (older than ONE_DAY).  The block must return JSON.dump -able
    # data.
    #
    def apply_cache(cache, &get_data)
      cache = Pathname(cache)
      if !MyPrecious.caching_disabled && cache.exist? && cache.stat.mtime > Time.now - ONE_DAY
        return cache.open('r') {|inf| JSON.load(inf)}
      else
        # Short-circuit to error if we've already received one for filling this cache
        if @data_cache_errors_fetching && @data_cache_errors_fetching[cache]
          raise @data_cache_errors_fetching[cache]
        end

        result = begin
          DataCaching.print_error_info(cache.basename('.json'), &get_data)
        rescue StandardError => e
          # Remember this error in case there is another attempt to fill this cache
          (@data_cache_errors_fetching ||= {})[cache] = e
          raise
        end

        # Only reached on success: persist the fresh data for next time
        cache.dirname.mkpath
        cache.open('w') {|outf| JSON.dump(result, outf)}
        return result
      end
    end
    private :apply_cache

    ##
    # Run the block, printing (but re-raising) any StandardError so the
    # failing +target+ is identified on stderr; Interrupt passes through
    # untouched.
    #
    def self.print_error_info(target)
      yield
    rescue Interrupt
      raise
    rescue StandardError => e
      $stderr.puts "Error fetching data for #{target}: #{e.message}"
      raise
    end
  end
end
|
@@ -0,0 +1,1190 @@
|
|
1
|
+
require 'date'
|
2
|
+
require 'json'
|
3
|
+
require 'myprecious'
|
4
|
+
require 'myprecious/cves'
|
5
|
+
require 'myprecious/data_caches'
|
6
|
+
require 'open-uri'
|
7
|
+
require 'open3'
|
8
|
+
require 'parslet'
|
9
|
+
require 'rest-client'
|
10
|
+
require 'rubygems/package'
|
11
|
+
require 'shellwords'
|
12
|
+
require 'tmpdir'
|
13
|
+
require 'zip'
|
14
|
+
|
15
|
+
module MyPrecious
|
16
|
+
class PyPackageInfo
|
17
|
+
include DataCaching
|
18
|
+
|
19
|
+
# File names commonly used for pip requirement listings
COMMON_REQ_FILE_NAMES = %w[requirements.txt Packages]
# A release must be at least this many days old before being recommended
MIN_RELEASED_DAYS = 90
# Days of "quiet" expected after a newer release (see recommended_version)
MIN_STABLE_DAYS = 14

# Registered data caches for PyPI package metadata and downloaded code
PACKAGE_CACHE_DIR = MyPrecious.data_cache(DATA_DIR / "py-package-cache")
CODE_CACHE_DIR = MyPrecious.data_cache(DATA_DIR / "py-code-cache")

# URI schemes accepted for direct (URL) package references
ACCEPTED_URI_SCHEMES = %w[
  http
  https
  git
  git+git
  git+http
  git+https
  git+ssh
]
|
35
|
+
|
36
|
+
##
# Guess the name of the requirements file in the given directory
#
# Best effort (currently, consulting a static list of likely file names for
# existence), and may return +nil+.
#
def self.guess_req_file(fpath)
  COMMON_REQ_FILE_NAMES.find {|candidate| fpath.join(candidate).exist?}
end
|
47
|
+
|
48
|
+
##
# Get an appropriate, human friendly column title for an attribute
#
# :name is special-cased; everything else delegates to the shared
# Reporting helper.
#
def self.col_title(attr)
  return 'Package' if attr == :name
  Reporting.common_col_title(attr)
end
|
57
|
+
|
58
|
+
##
|
59
|
+
# Construct an instance
|
60
|
+
#
|
61
|
+
# At least one of the keywords +name:+ or +url:+ _MUST_ be provided.
|
62
|
+
#
|
63
|
+
def initialize(name: nil, version_reqs: [], url: nil, install: false)
|
64
|
+
super()
|
65
|
+
if name.nil? and url.nil?
|
66
|
+
raise ArgumentError, "At least one of name: or url: must be specified"
|
67
|
+
end
|
68
|
+
@name = name
|
69
|
+
@version_reqs = version_reqs
|
70
|
+
@url = url && URI(url)
|
71
|
+
@install = install
|
72
|
+
if pinning_req = self.version_reqs.find(&:determinative?)
|
73
|
+
current_version = pinning_req.vernum
|
74
|
+
end
|
75
|
+
end
|
76
|
+
attr_reader :name, :version_reqs, :url
|
77
|
+
attr_accessor :install
|
78
|
+
alias_method :install?, :install
|
79
|
+
|
80
|
+
##
# Was this requirement specified as a direct reference to a URL providing
# the package?
#
def direct_reference?
  !url.nil?
end

##
# For packages specified without a name, do what is necessary to find the
# name
#
# Consults the archive's setup metadata via #setup_data (defined elsewhere
# in this class) and warns when it disagrees with the name given in the
# requirements file, keeping the requirements-file name in that case.
#
def resolve_name!
  return unless direct_reference?

  name_from_setup = setup_data['name']
  if !@name.nil? && @name != name_from_setup
    warn("Requirement file entry for #{@name} points to archive for #{name_from_setup}")
  else
    @name = name_from_setup
  end
end
|
102
|
+
|
103
|
+
##
# For requirements not deterministically specifying a version, determine
# which version would be installed
#
# Sources, in priority order: the referenced archive's setup metadata,
# a determinative ("=="/"===") requirement clause, or the newest PyPI
# release satisfying all requirement clauses.
#
def resolve_version!
  return @current_version if @current_version

  if direct_reference?
    # Use setup_data; '0a0.dev0' is a minimal prerelease placeholder when
    # the archive metadata lacks a version
    @current_version = parse_version_str(setup_data['version'] || '0a0.dev0')
  elsif pinning_req = self.version_reqs.find(&:determinative?)
    @current_version = parse_version_str(pinning_req.vernum)
  else
    # Use data from pypi
    puts "Resolving current version of #{name}..."
    if inferred_ver = latest_version_satisfying_reqs
      self.current_version = inferred_ver
      puts " -> #{inferred_ver}"
    else
      puts " (unknown)"
    end
  end
end
|
126
|
+
|
127
|
+
##
# Test if the version constraints on this package are satisfied by the
# given version
#
# All current version requirements are in #version_reqs; every clause
# must accept +version+.
#
def satisfied_by?(version)
  version_reqs.each do |req|
    return false unless req.satisfied_by?(version)
  end
  true
end
|
136
|
+
|
137
|
+
##
# Incorporate the requirements for this package specified in another object
# into this instance
#
# Raises ArgumentError when +other_req+ names a different package.
#
def incorporate(other_req)
  if other_req.name != self.name
    raise ArgumentError, "Cannot incorporate requirements for #{other_req.name} into #{self.name}"
  end

  self.version_reqs.concat(other_req.version_reqs)
  self.install ||= other_req.install
  # BUG FIX: previously assigned a throwaway *local* named current_version,
  # silently discarding the pin; use the accessor so a newly-incorporated
  # determinative requirement actually records the version.
  if current_version.nil? && (pinning_req = self.version_reqs.find(&:determinative?))
    self.current_version = pinning_req.vernum
  end
end
|
152
|
+
|
153
|
+
# Currently pinned/resolved version: a Version, a String when the version
# text is not PEP 440 conformant, or nil until resolved
def current_version
  @current_version
end

# Accepts a Version directly, or anything parse_version_str can handle
def current_version=(val)
  @current_version = val.kind_of?(Version) ? val : parse_version_str(val)
end
|
160
|
+
|
161
|
+
##
# An Array of Arrays containing version (MyPrecious::PyPackageInfo::Version
# or String) and release date (Time)
#
# The returned Array is sorted in order of descending version number, with
# strings not conforming to PEP-440 sorted lexicographically following all
# PEP-440 conformant versions, the latter presented as
# MyPrecious::PyPackageInfo::Version objects.
#
def versions_with_release
  @versions ||= begin
    all_releases = get_package_info.fetch('releases', {})
    ver_release_pairs = all_releases.each_pair.map do |ver, info|
      [
        parse_version_str(ver),
        # Release date = earliest sdist upload time (nil when no sdist)
        info.select {|f| f['packagetype'] == 'sdist'}.map do |f|
          Time.parse(f['upload_time_iso_8601'])
        end.min
      ].freeze
    end
    # Drop prereleases and releases with no usable release date
    ver_release_pairs.reject! do |vn, rd|
      (vn.kind_of?(Version) && vn.prerelease?) || rd.nil?
    end
    # Non-conformant (String) versions sort low here; the reverse! below
    # therefore puts them after all Version entries
    ver_release_pairs.sort! do |l, r|
      case
      when l[0].kind_of?(String) && r[0].kind_of?(Version) then -1
      when l[0].kind_of?(Version) && r[0].kind_of?(String) then 1
      else l <=> r
      end
    end
    ver_release_pairs.reverse!
    ver_release_pairs.freeze
  end
end
|
195
|
+
|
196
|
+
##
# Latest (highest) released version satisfying every requirement clause
#
# Returns nil when no released version satisfies the constraints.
#
def latest_version_satisfying_reqs
  # versions_with_release is sorted descending, so the first satisfying
  # entry is the latest.  (A former second check here was an exact
  # duplicate of #satisfied_by? and has been removed.)
  versions_with_release.each do |ver, _rel_date|
    return ver if satisfied_by?(ver.to_s)
  end
  nil
end
|
203
|
+
|
204
|
+
##
# Age in days of the current version
#
def age
  return @age if defined? @age
  # get_age is defined elsewhere in this class; memoize its (possibly nil)
  # result so it is computed at most once
  @age = get_age
end

# Highest known released version, as a String
# NOTE(review): assumes at least one release exists; raises on [0] otherwise
def latest_version
  versions_with_release[0][0].to_s
end

# Release date of the latest version, as an ISO date String
def latest_released
  Date.parse(versions_with_release[0][1].to_s).to_s
end
|
219
|
+
|
220
|
+
##
# Version number recommended based on stability criteria
#
# May return +nil+ if no version meets the established criteria
#
def recommended_version
  return nil if versions_with_release.empty?
  return @recommended_version if defined? @recommended_version

  # Candidates must have been released at least MIN_RELEASED_DAYS ago
  orig_time_horizon = time_horizon = \
    Time.now - (MIN_RELEASED_DAYS * ONE_DAY)
  horizon_versegs = nil
  # Seed the patch-series tracker from the newest PEP-440 version
  versions_with_release.each do |vn, rd|
    if vn.kind_of?(Version)
      horizon_versegs = nonpatch_versegs(vn)
      break
    end
  end

  # Walk releases newest-to-oldest
  versions_with_release.each do |ver, released|
    next if ver.kind_of?(String) || ver.prerelease?
    # Never recommend downgrading below the current version
    return (@recommended_version = current_version) if current_version && current_version >= ver

    # Reset the time-horizon clock if moving back into previous patch-series
    if (nonpatch_versegs(ver) <=> horizon_versegs) < 0
      time_horizon = orig_time_horizon
    end

    if released < time_horizon && version_reqs.all? {|r| r.satisfied_by?(ver, strict: false)}
      return (@recommended_version = ver)
    end
    # Each newer release pushes the horizon back, demanding MIN_STABLE_DAYS
    # of quiet before an older release is considered stable
    time_horizon = [time_horizon, released - (MIN_STABLE_DAYS * ONE_DAY)].min
  end
  return (@recommended_version = nil)
end
|
255
|
+
|
256
|
+
# Project home page URL from PyPI metadata (may be nil/empty)
def homepage_uri
  get_package_info['info']['home_page']
end

# License string from PyPI metadata, wrapped in a LicenseDescription
def license
  # TODO: Implement better, showing difference between current and recommended
  LicenseDescription.new(get_package_info['info']['license'])
end

# CVE records applicable to the resolved name and version of this package
def cves
  resolve_name!
  resolve_version!

  CVEs.get_for(name, current_version.to_s).map do |cve, applicability|
    cve
  end
end

# Best-available "changelog" link
def changelog
  # This is wrong
  # NOTE(review): 'project_url' is the PyPI project page, not a changelog;
  # needs a better source
  info = get_package_info['info']
  return info['project_url']
end

# PyPI release-history page for this package
def release_history_url
  "https://pypi.org/project/#{name}/#history"
end
|
283
|
+
|
284
|
+
##
# Days elapsed between the current version's release and the recommended
# version's release
#
# Returns nil when either release date cannot be determined — including
# when the current version is unresolved (nil) or not PEP 440 conformant
# (a String).
#
def days_between_current_and_recommended
  # BUG FIX: #prerelease? exists only on Version; the previous code raised
  # NoMethodError when current_version was nil or a plain String
  return nil if current_version.nil?
  _, cv_rel = versions_with_release.find do |v, r|
    if current_version.kind_of?(Version) && current_version.prerelease?
      # Prereleases are filtered out of versions_with_release; use the
      # newest release older than the current prerelease instead
      v < current_version
    else
      v == current_version
    end
  end || []
  _, rv_rel = versions_with_release.find {|v, r| v == recommended_version} || []
  return nil if cv_rel.nil? || rv_rel.nil?

  return ((rv_rel - cv_rel) / ONE_DAY).to_i
end
|
298
|
+
|
299
|
+
# Obsolescence classification of the current version relative to the
# recommended one (e.g. :severe, or whatever Reporting.obsolescence_by_age
# returns); nil when no comparison is possible
def obsolescence
  at_least_moderate = false
  if current_version.kind_of?(Version) && recommended_version
    # [epoch, leading final segment] pairs for coarse major-version comparison
    cv_major = [current_version.epoch, current_version.final.first]
    rv_major = [recommended_version.epoch, recommended_version.final.first]

    case
    when rv_major[0] < cv_major[0]
      # Recommended epoch is behind current epoch: nothing sensible to say
      return nil
    when cv_major[0] < rv_major[0]
      # Can't compare, rely on days_between_current_and_recommended
    when cv_major[1] + 1 < rv_major[1]
      # Two or more major versions behind
      return :severe
    when cv_major[1] < rv_major[1]
      # Exactly one major version behind
      at_least_moderate = true
    end

    days_between = days_between_current_and_recommended

    return Reporting.obsolescence_by_age(
      days_between,
      at_least_moderate: at_least_moderate,
    )
  end
end
|
324
|
+
|
325
|
+
##
# Parses requirement line based on grammar in PEP 508
# (https://www.python.org/dev/peps/pep-0508/#complete-grammar)
#
class ReqSpecParser < Parslet::Parser
  # Longer operators listed before their prefixes ('<=' before '<',
  # '===' before '==') so alternation matches greedily
  COMPARATORS = %w[<= < != === == >= > ~=]
  # Environment marker variables defined by PEP 508
  ENVVARS = %w[
    python_version python_full_version
    os_name sys_platform platform_release
    platform_system platform_version
    platform_machine platform_python_implementation
    implementation_name implementation_version
    extra
  ]
  root :specification

  rule(:wsp) { match[' \t'] }
  rule(:wsp_r) { wsp.repeat }
  rule(:version_cmp) { wsp_r >> COMPARATORS.map {|o| str(o)}.inject(&:|) }
  rule(:version) { wsp_r >> (match['[:alnum:]_.*+!-']).repeat(1) }
  rule(:version_one) { (version_cmp.as(:op) >> version.as(:ver)).as(:verreq) }
  rule(:version_many) { version_one.repeat(1,1) >> (wsp_r >> str(',') >> version_one).repeat }
  rule(:versionspec) { (str('(') >> version_many >> str(')')) | version_many }
  rule(:urlspec) { str('@') >> wsp_r >> uri_reference.as(:url) }
  rule(:marker_op) { version_cmp | (wsp_r >> str('in')) | (wsp_r >> str('not') >> wsp.repeat(1) >> str('in')) }
  rule(:python_str_c) { (wsp | match['A-Za-z0-9().{}_*#:;,/?\[\]!~`@$%^&=+|<>-']) }
  rule(:dquote) { str('"') }
  rule(:squote) { str("'") }
  rule(:python_str) {
    (squote >> (python_str_c | dquote).repeat.as(:str) >> squote) | \
    (dquote >> (python_str_c | squote).repeat.as(:str) >> dquote)
  }
  rule(:env_var) { ENVVARS.map {|n| str(n)}.inject(&:|) }
  rule(:marker_var) { wsp_r >> (env_var | python_str)}
  rule(:marker_expr) { marker_var.as(:l) >> marker_op.as(:o) >> marker_var.as(:r) | wsp_r >> str('(') >> marker >> wsp_r >> str(')') }
  rule(:marker_and) { marker_expr.as(:l) >> wsp_r >> str('and').as(:o) >> marker_expr.as(:r) | marker_expr }
  rule(:marker_or) { marker_and.as(:l) >> wsp_r >> str('or').as(:o) >> marker_and.as(:r) | marker_and }
  rule(:marker) { marker_or }
  rule(:quoted_marker) { str(';') >> wsp_r >> marker.as(:markers) }
  rule(:identifier_end) { match['[:alnum:]'] | match['_.-'].repeat >> match['[:alnum:]'] }
  rule(:identifier) { match['[:alnum:]'] >> identifier_end.repeat }
  rule(:name) { identifier }
  rule(:extras_list) { identifier.as(:id).repeat(1,1) >> (wsp_r >> str(',') >> wsp_r >> identifier.as(:id)).repeat }
  rule(:extras) { str('[') >> wsp_r >> extras_list >> wsp_r >> str(']') }
  rule(:name_req) { name.as(:package) >> wsp_r >> extras.as(:extras).maybe >> wsp_r >> versionspec.as(:verreqs).maybe >> wsp_r >> quoted_marker.maybe }
  rule(:url_req) { name.as(:package) >> wsp_r >> extras.as(:extras).maybe >> wsp_r >> urlspec >> (wsp.repeat(1) | any.absent?) >> quoted_marker.maybe }
  rule(:specification) { wsp_r >> (url_req | name_req) >> wsp_r }

  # URI rules below follow the RFC 3986 reference grammar
  rule(:uri_reference) { uri | relative_ref }
  rule(:query_maybe) { (str('?') >> query).maybe }
  rule(:fragment_maybe) { (str('#') >> fragment).maybe }
  rule(:uri) { scheme >> str(':') >> hier_part >> query_maybe >> fragment_maybe }
  rule(:hier_part) { (str('//') >> authority >> path_abempty) | path_absolute | path_rootless | path_empty }
  rule(:absolute_uri) { scheme >> str(':') >> hier_part >> query_maybe }
  rule(:relative_ref) { relative_part >> query_maybe >> fragment_maybe }
  rule(:relative_part) { str('//') >> authority >> path_abempty | path_absolute | path_noscheme | path_empty }
  rule(:scheme) { match['[:alpha:]'] >> match['[:alnum:]+.-'].repeat }
  rule(:authority) { (userinfo >> str('@')).maybe >> host >> (str(':') >> port).maybe }
  rule(:userinfo) { (unreserved | pct_encoded | sub_delims | str(':')).repeat }
  rule(:host) { ip_literal | ipv4address | reg_name }
  rule(:port) { match['0-9'].repeat }
  rule(:ip_literal) { str('[') >> (ipv6address | ipvfuture) >> str(']') }
  rule(:ipvfuture) { str('v') >> match['[:xdigit:]'].repeat(1) >> str('.') >> (unreserved | sub_delims | str(':')).repeat(1) }
  rule(:ipv6address) {
    c = str(':')
    cc = str('::')

    (h16 >> c).repeat(6,6) >> ls32 |
    cc >> (h16 >> c).repeat(5,5) >> ls32 |
    h16.maybe >> cc >> (h16 >> c).repeat(4,4) >> ls32 |
    ((h16 >> c).maybe >> h16).maybe >> cc >> (h16 >> c).repeat(3,3) >> ls32 |
    ((h16 >> c).repeat(0,2) >> h16).maybe >> cc >> (h16 >> c).repeat(2,2) >> ls32 |
    ((h16 >> c).repeat(0,3) >> h16).maybe >> cc >> h16 >> c >> ls32 |
    ((h16 >> c).repeat(0,4) >> h16).maybe >> cc >> ls32 |
    ((h16 >> c).repeat(0,5) >> h16).maybe >> cc >> h16 |
    ((h16 >> c).repeat(0,6) >> h16).maybe >> cc
  }
  rule(:h16) { match['[:xdigit:]'].repeat(1,4) }
  rule(:ls32) { h16 >> str(':') >> h16 | ipv4address }
  rule(:ipv4address) { dec_octet >> (str('.') >> dec_octet).repeat(3,3) }
  rule(:dec_octet) {
    d = match['0-9']
    nz = match['1-9']

    d |
    nz >> d |
    str('1') >> d.repeat(2,2) |
    str('2') >> match['0-4'] >> d |
    str('25') >> match['0-5']
  }
  rule(:reg_name) { (unreserved | pct_encoded | sub_delims).repeat }
  rule(:path) { path_abempty | path_absolute | path_noscheme | path_rootless | path_empty }
  # Share a single str('/') atom among the path rules
  Parslet.str('/').tap do |sl|
    rule(:path_abempty) { (sl >> segment).repeat }
    rule(:path_absolute) { sl >> (segment_nz >> (sl >> segment).repeat).maybe }
    rule(:path_noscheme) { segment_nz_nc >> (sl >> segment).repeat }
    rule(:path_rootless) { segment_nz >> (sl >> segment).repeat }
  end
  rule(:path_empty) { pchar.absent? }
  rule(:segment) { pchar.repeat }
  rule(:segment_nz) { pchar.repeat(1) }
  rule(:segment_nz_nc) { (unreserved | pct_encoded | sub_delims | str('@')).repeat(1) }
  rule(:pchar) { unreserved | pct_encoded | sub_delims | match[':@'] }
  rule(:query) { (pchar | match['/?']).repeat }
  rule(:fragment) { (pchar | match['/?']).repeat }
  rule(:pct_encoded) { str('%') >> match['[:xdigit:]'].repeat(2,2) }
  rule(:unreserved) { match['[:alnum:]._~-'] }
  rule(:reserved) { gen_delims | sub_delims }
  rule(:gen_delims) { match[':/?#()@'] }
  rule(:sub_delims) { match["!$&'()*+,;="] }
end
|
437
|
+
|
438
|
+
##
# Transforms parse tree from ReqSpecParser to usable objects
#
# Version-requirement subtrees become Requirement instances; the package
# subtree becomes a PyPackageInfo (with version requirements or URL as
# available).
#
class ReqSpecTransform < Parslet::Transform
  rule(:verreq => {op: simple(:o), ver: simple(:v)}) {Requirement.new(o.to_s, v.to_s)}
  rule(package: simple(:n)) {|c| PyPackageInfo.new(name: c[:n].to_s)}
  rule(package: simple(:n), verreqs: sequence(:rs)) {|c| PyPackageInfo.new(
    name: c[:n].to_s,
    version_reqs: c[:rs],
  )}
  rule(package: simple(:n), url: simple(:url)) {|c| PyPackageInfo.new(
    name: c[:n].to_s,
    url: c[:url].to_s,
  )}

  ##
  # Apply transform after normalizing a parse tree
  #
  # This method should be applied only to a parse tree expected to come
  # from a requirement specification.
  #
  def apply_spec(ptree)
    # Keep only the keys the rules above can consume
    norm_ptree = {}
    # TODO: :extras should be in this list, and we should default them to []
    %i[package verreqs url].each do |c|
      norm_ptree[c] = ptree[c] if ptree.has_key?(c)
    end
    apply(norm_ptree)
  end
end
|
468
|
+
|
469
|
+
##
# Representation of a single requirement clause
#
class Requirement
  # +op+ is a PEP 440 operator string (or an already-mapped Symbol);
  # +vernum+ is the version text it compares against
  def initialize(op, vernum)
    super()
    @op = case op
    when '<' then :<
    when '<=' then :<=
    when '==' then :==
    when '>=' then :>=
    when '>' then :>
    when '!=' then :!=
    when '~=' then :compatible
    when '===' then :str_equal
    when Symbol then op
    else
      raise "Unknown requirement operator #{op.inspect}"
    end
    @vernum = vernum
  end
  attr_reader :op, :vernum

  ##
  # True when this clause pins an exact version (== or ===)
  #
  def determinative?
    [:==, :str_equal].include?(op)
  end

  ##
  # Query if this requirement is satisfied by a particular version
  #
  # When +strict:+ is false and the instance is an equality-type requirement
  # (i.e. the +op+ is +:==+ or +:str_equal+), the result is always +true+.
  #
  def satisfied_by?(version, strict: true)
    # FIX: previously both keys were parsed up front and then immediately
    # re-computed (or ignored) by every branch below — dead work removed.
    return true if !strict && %i[== str_equal].include?(op)

    case op
    when :compatible
      # PEP 440 "~=": at least req version, within the same release series
      req_key, cand_key = comp_keys(version)
      (cand_key <=> req_key) >= 0 && (cand_key <=> series(req_key)) == 0
    when :str_equal
      self.vernum == version.to_s
    else
      req_key, cand_key = comp_keys(version)
      if comp_result = (cand_key <=> req_key)
        comp_result.send(op, 0)
      else
        warn("Cannot test #{cand_key.inspect} #{op} #{req_key} (<=> returned nil)")
      end
    end
  end

  private
  # Parse [own version, other version] into comparable keys
  def comp_keys(other)
    [self.vernum, other].map {|v| PyPackageInfo.parse_version_str(v)}
  end

  # Comparison key truncated to its release series (for ~= matching)
  def series(comp_key)
    comp_key.dup.tap do |result|
      result.final.to_series
    end
  end
end
|
535
|
+
|
536
|
+
VERSION_PATTERN = /^
|
537
|
+
((?<epoch> \d+ ) ! )?
|
538
|
+
(?<final> \d+ (\.\d+)* (\.\*)? )
|
539
|
+
( # Pre-release (a | b | rc) group
|
540
|
+
[._-]?
|
541
|
+
(?<pre_group> a(lpha)? | b(eta)? | c | pre(view)? | rc )
|
542
|
+
[._-]?
|
543
|
+
(?<pre_n> \d* )
|
544
|
+
)?
|
545
|
+
( # Post-release group
|
546
|
+
(
|
547
|
+
[._-]? (post|r(ev)?) [._-]?
|
548
|
+
|
|
549
|
+
- # Implicit post release
|
550
|
+
)
|
551
|
+
(?<post> ((?<![._-]) | \d) \d* )
|
552
|
+
)?
|
553
|
+
( # Development release group
|
554
|
+
[._-]?
|
555
|
+
dev
|
556
|
+
(?<dev> \d* )
|
557
|
+
)?
|
558
|
+
( # Local version segment
|
559
|
+
\+
|
560
|
+
(?<local>.*)
|
561
|
+
)?
|
562
|
+
$/x
|
563
|
+
|
564
|
+
# Mixin providing PEP 440 version-string parsing; mixed into PyPackageInfo
# at both class and instance level (see extend/include below)
module VersionParsing
  ##
  # Parse +s+ into a Version
  #
  # Returns +s+ unchanged when it is already a Version or when it does not
  # match VERSION_PATTERN (i.e. non-conformant versions stay Strings).
  #
  def parse_version_str(s)
    return s if s.kind_of?(Version)
    return s unless parts = VERSION_PATTERN.match(s.downcase)

    # Normalization of spelled-out pre-release markers per PEP 440
    pre_group = case parts[:pre_group]
    when 'alpha' then 'a'
    when 'beta' then 'b'
    when 'c', 'pre', 'preview' then 'rc'
    else parts[:pre_group]
    end

    return Version.new(
      FinalVersion.new(parts[:final]),
      epoch: parts[:epoch],
      pre: [pre_group, parts[:pre_n]],
      post: parts[:post],
      dev: parts[:dev],
      local: parts[:local],
    )
  end
end
extend VersionParsing
include VersionParsing
|
589
|
+
|
590
|
+
##
|
591
|
+
# Represents a full PEP-440 version
|
592
|
+
#
|
593
|
+
##
# Represents a full PEP-440 version
#
class Version
  # Pre-release comparison stand-in for "no pre-release segment": sorts
  # after any real ('a'|'b'|'rc', n) pair, so plain releases compare later
  # than their pre-releases.
  NOT_PRE = ['z', 0]

  # +final+:: FinalVersion (or anything FinalVersion.new accepts)
  # +epoch+:: integer-ish or nil (nil -> 0)
  # +pre+::   [group_string, number_string] pair from the parser
  # +post+, +dev+:: number strings or nil
  # +local+:: pre-split Array, a separator-delimited string, or nil
  def initialize(final, epoch: 0, pre: [], post: nil, dev: nil, local: nil)
    @epoch = (epoch || 0).to_i
    @final = final.kind_of?(FinalVersion) ? final : FinalVersion.new(final)
    @pre = normalize_part(pre[1]) {|n| n && [pre[0], n]}
    @post = normalize_part(post) {|n| n && [n] }
    @dev = normalize_part(dev) {|n| n}
    @local = case local
    when nil then nil
    when Array then local
    # Local segments compare numerically when they look numeric
    else local.to_s.split(/[._-]/).map {|part| try_to_i(part)}
    end
  end
  attr_reader *%i[epoch final local]

  def inspect
    "#<#{self.class.name} #{to_s.inspect}>"
  end

  ##
  # Render in normalized PEP 440 form (e.g. "1!1.2.3rc1.post4.dev5+abc.6")
  #
  def to_s
    [].tap do |parts|
      parts << "#{epoch}!" unless epoch == 0
      parts << final.to_s
      parts << "#{@pre[0]}#{@pre[1]}" if @pre
      # BUG FIX: @post is a one-element Array; interpolating it directly
      # rendered Ruby array syntax (".post[4]")
      parts << ".post#{@post[0]}" if @post
      parts << ".dev#{@dev}" if @dev
      # BUG FIX: @local is an Array of segments; join with '.' to get the
      # PEP 440 local form ("+abc.5") instead of array inspect syntax
      parts << "+#{local.join('.')}" if local
    end.join('')
  end

  def pre_group
    @pre && @pre[0]
  end

  def pre_num
    @pre && @pre[1]
  end

  ##
  # PEP 440 ordering: epoch, then final, then pre/post/dev segments (with
  # sentinel values for absent segments), then local version.  Returns nil
  # for non-Version operands.
  #
  def <=>(rhs)
    return nil unless rhs.kind_of?(self.class)
    steps = Enumerator.new do |comps|
      %i[epoch final pre_comp post_comp dev_comp].each do |attr|
        comps << (send(attr) <=> rhs.send(attr))
      end

      # A version without a local segment sorts before one with a local
      # segment; otherwise compare segment Arrays
      case [local, rhs.local].count(&:nil?)
      when 2 then comps << 0
      when 1 then comps << (local.nil? ? -1 : 1)
      else comps << (local <=> rhs.local)
      end
    end
    # First decisive (non-zero) comparison wins; all-zero means equal
    steps.find {|v| v != 0} || 0
  end
  include Comparable

  # True when this is a pre-release or development release
  def prerelease?
    !!(@pre || @dev)
  end

  private
  # '' -> 0 (implicit number), nil -> nil, otherwise Integer; the block
  # wraps the normalized number into the segment's comparison shape
  def normalize_part(value)
    yield case value
    when '' then 0
    when nil then nil
    else value.to_i
    end
  end

  def try_to_i(s)
    if /^\d+$/ =~ s
      s.to_i
    else
      s
    end
  end

  # Comparison stand-ins for absent segments:
  def pre_comp
    @pre || NOT_PRE
  end

  def post_comp
    @post || []
  end

  # Absent dev segment sorts after any dev number
  def dev_comp
    @dev || Float::INFINITY
  end
end
|
683
|
+
|
684
|
+
##
|
685
|
+
# Represents the "final" part of a PEP-440 version string
|
686
|
+
#
|
687
|
+
##
# Represents the "final" part of a PEP-440 version string
#
class FinalVersion
  # +final_ver+ may be an Array of segments (Integers and/or :*) or a
  # dotted string like "1.4.*"
  def initialize(final_ver)
    @value = case final_ver
    when Array then final_ver
    else final_ver.split('.').map {|s| seg_value(s)}
    end
  end

  def [](n)
    @value[n]
  end

  def length
    @value.length
  end

  def each(&blk)
    @value.each(&blk)
  end
  include Enumerable

  def to_s
    @value.join('.')
  end

  def inspect
    "#<#{self.class.name} #{to_s}>"
  end

  ##
  # Segment-wise comparison.  Missing trailing segments count as 0 (so
  # "1.2" == "1.2.0") and a :* wildcard matches everything from its
  # position on.  Returns nil for non-FinalVersion operands.
  #
  def <=>(rhs)
    # BUG FIX: this guard previously read `nil unless ...` -- without
    # `return` the nil was discarded and comparison proceeded against
    # incompatible objects
    return nil unless rhs.kind_of?(FinalVersion)
    (0..Float::INFINITY).lazy.each do |i|
      return 0 if self[i].nil? && rhs[i].nil?
      return 0 if [self[i], rhs[i]].include?(:*)
      diff = (self[i] || 0) <=> (rhs[i] || 0)
      return diff if diff != 0
    end
  end
  include Comparable

  ##
  # Wildcard the last segment (e.g. 1.4.3 -> 1.4.*) to represent a
  # release series; returns a new FinalVersion
  #
  def to_series
    self.class.new(@value.dup.tap do |mver|
      mver[-1] = :*
    end.join('.'))
  end

  private
  def seg_value(s)
    if s == '*'
      :*
    else
      s.to_i
    end
  end
end
|
742
|
+
|
743
|
+
##
|
744
|
+
# Reads package requirements from a file
|
745
|
+
#
|
746
|
+
##
# Reads package requirements from a file
#
class Reader
  # +packages_fpath+:: path to a pip requirements (or constraints) file
  # +only_constrain:+:: when true, entries only constrain versions and are
  #   not themselves targeted for installation
  def initialize(packages_fpath, only_constrain: false)
    super()
    # Stack of files; -r/-c directives push onto it (see #in_file)
    @files = [Pathname(packages_fpath)]
    @only_constrain = only_constrain
  end

  ##
  # Enumerate packages described by requirements targeted by this instance
  #
  # Each invocation of the block receives a PyPackageInfo object, which
  # will have, at minimum, either a #name or #url not +nil+.  It is
  # possible that multiple iterations will process separate PyPackageInfo
  # for the same package, in which case PyPackageInfo#incorporate is useful.
  #
  # An Enumerator is returned if no block is given.
  #
  def each_package_constrained
    generator = Enumerator.new do |items|
      continued_line = ''
      current_file.each_line do |pkg_line|
        pkg_line = pkg_line.chomp
        next if /^#/ =~ pkg_line
        # Strip a trailing comment (i.e. '#' preceded by whitespace)
        if /(?<=\s)#.*$/ =~ pkg_line
          pkg_line = pkg_line[0...-$&.length]
        end

        # Yes, this _does_ happen after comment lines are skipped :facepalm:
        if /\\$/ =~ pkg_line
          continued_line += pkg_line[0..-2]
          next
        end
        pkg_line, continued_line = (continued_line + pkg_line).strip, ''
        next if pkg_line.empty?

        process_line_into(items, pkg_line)
      end
    end

    if block_given?
      generator.each {|item| yield item}
    else
      generator
    end
  end

  ##
  # Enumerate packages targeted for installation by this instance
  #
  # Each invocation of the block receives a PyPackageInfo object targeted
  # for installation.  Each of these PyPackageInfo object will have a
  # resolved #name and #current_version (if possible).
  #
  # An Enumerator is returned if no block is given.
  #
  def each_installed_package
    generator = Enumerator.new do |items|
      packages = {}

      # Merge multiple mentions of the same package into one record
      each_package_constrained do |pkg|
        pkg.resolve_name!
        if packages.has_key?(pkg.name)
          packages[pkg.name].incorporate(pkg)
        else
          packages[pkg.name] = pkg
        end
      end

      to_install = []
      packages.each_value do |pkg|
        next unless pkg.install?
        to_install << pkg.name
      end

      while pkg_name = to_install.shift
        pkg = packages[pkg_name]
        pkg.resolve_version!
        items << pkg
      end
    end

    if block_given?
      generator.each {|item| yield item}
    else
      generator
    end
  end

  private
  def current_file
    @files.last
  end

  # Make +fpath+ the current file for the duration of the block
  def in_file(fpath)
    @files << Pathname(fpath)
    begin
      yield
    ensure
      @files.pop
    end
  end

  def only_constrain?
    @only_constrain
  end

  # Treat everything read inside the block as constraints only
  def reading_constraints
    prev_val, @only_constrain = @only_constrain, true
    begin
      yield
    ensure
      @only_constrain = prev_val
    end
  end

  # Dispatch one logical requirements-file line: -r/-c include directives,
  # -e (editable, unsupported), or a package requirement
  def process_line_into(items, pkg_line)
    case pkg_line
    # BUG FIX: these patterns previously captured only a SINGLE character
    # of the referenced file path ("(.)"); "(.+)" captures the whole path
    when /^-r (.+)$/
      if only_constrain?
        warn("-r directive appears in constraints file #{current_file}")
      end
      in_file(current_file.dirname / $1) do
        each_package_constrained {|pkg| items << pkg}
      end
    when /^-c (.+)$/
      in_file(current_file.dirname / $1) do
        reading_constraints do
          each_package_constrained {|pkg| items << pkg}
        end
      end
    when /^-e/
      warn %Q{#{current_file} lists "editable" package: #{pkg_line}}
    else
      insert_package_from_line_into(items, pkg_line)
    end
  end

  # Parse a single requirement line; fall back to URL handling when the
  # requirement-spec grammar does not match
  def insert_package_from_line_into(items, pkg_line)
    parse_tree = begin
      ReqSpecParser.new.parse(pkg_line)
    rescue Parslet::ParseFailed
      if (uri = URI.try_parse(pkg_line)) && ACCEPTED_URI_SCHEMES.include?(uri.scheme)
        if only_constrain?
          warn("#{current_file} is a constraints file but specifies URL #{uri}")
        else
          items << PyPackageInfo.new(url: uri, install: true)
        end
        return
      end
      warn("Unreportable line in #{current_file}: #{pkg_line}")
      return
    end

    # Transform parse tree into a spec
    spec = ReqSpecTransform.new.apply_spec(parse_tree)
    if spec.kind_of?(PyPackageInfo)
      spec.install ||= !only_constrain?
      items << spec
    else
      warn("Unhandled requirement parse tree: #{explain_parse_tree parse_tree}")
    end
  end

  # Render only the SHAPE of an unexpected parse tree (class names, not
  # contents) for diagnostics
  def explain_parse_tree(parse_tree)
    case parse_tree
    when Array
      "[#{parse_tree.map {|i| "#<#{i.class.name}>"}.join(', ')}]"
    when Hash
      "{#{parse_tree.map {|k, v| "#{k.inspect} => #<#{v.class.name}>"}.join(', ')}}"
    else
      "#<#{parse_tree.class.name}>"
    end
  end
end
|
920
|
+
|
921
|
+
##
# PyPI JSON metadata endpoint for this package
#
def pypi_url
  format("https://pypi.org/pypi/%s/json", name)
end
|
924
|
+
|
925
|
+
##
# PyPI JSON metadata endpoint for one specific release of this package
#
def pypi_release_url(release)
  format("https://pypi.org/pypi/%s/%s/json", name, release)
end
|
928
|
+
|
929
|
+
private
##
# Fetch (and locally cache) the PyPI JSON metadata for this package
#
def get_package_info
  cache_file = PACKAGE_CACHE_DIR.join("#{name}.json")
  apply_cache(cache_file) do
    JSON.parse(RestClient.get(pypi_url))
  end
end
|
937
|
+
|
938
|
+
##
# Fetch (and locally cache) the PyPI JSON metadata for one release
#
def get_release_info(release)
  cache_file = PACKAGE_CACHE_DIR.join(name, "#{release}.json")
  apply_cache(cache_file) do
    JSON.parse(RestClient.get(pypi_release_url(release)))
  end
end
|
945
|
+
|
946
|
+
##
# Age in whole days of the currently-pinned release, or nil when the
# current version is not among the known releases
#
def get_age
  entry = versions_with_release.find {|vnum, _released| vnum == current_version}
  return nil if entry.nil?
  ((Time.now - entry[1]) / ONE_DAY).to_i
end
|
952
|
+
|
953
|
+
##
|
954
|
+
# Given a version, return the parts that we expect to define the
|
955
|
+
# major/minor release series
|
956
|
+
#
|
957
|
+
# Returns an Array
|
958
|
+
#
|
959
|
+
##
# Given a version, return the parts that we expect to define the
# major/minor release series
#
# Returns an Array of [epoch, major, minor] (shorter when the final
# version has fewer than two segments), or nil for a nil version.
#
def nonpatch_versegs(ver)
  return nil if ver.nil?
  [ver.epoch, *ver.final.take(2)]
end
|
963
|
+
|
964
|
+
##
|
965
|
+
# Get data from the setup.py file of the package
|
966
|
+
#
|
967
|
+
##
# Get data from the setup.py file of the package
#
# Runs the package's setup.py under python3 with setuptools.setup patched
# out, capturing the keyword arguments the script passes to setup() as a
# JSON object.  Returns a Hash -- empty on any failure -- and memoizes the
# result in @setup_data.  Raises when this package was not specified by
# URL, since only then are the package files available.
#
def setup_data
  return @setup_data if defined? @setup_data
  unless self.url
    raise "#setup_data called for #{name}, may only be called for packages specified by URL"
  end

  # Python shim fed to python3 on stdin: intercepts the setuptools.setup()
  # call made by "import setup" and dumps its kwargs to stdout as JSON.
  # Values JSON cannot serialize are rendered as "<module.QualName>"
  # placeholder strings via the default= hook.
  python_code = <<~END_OF_PYTHON
    import json, sys
    from unittest.mock import patch

    sys.path[0:0] = ['.']

    def capture_setup(**kwargs):
        capture_setup.captured = kwargs

    with patch('setuptools.setup', capture_setup):
        import setup

    json.dump(
        capture_setup.captured,
        sys.stdout,
        default=lambda o: "<{}.{}>".format(type(o).__module__, type(o).__qualname__),
    )
  END_OF_PYTHON

  # with_package_files may not run the block at all (returns nil in that
  # case) -- the `|| []` leaves output and status both nil below
  output, status = with_package_files do |workdir|
    Dir.chdir(workdir) do
      Open3.capture2('python3', stdin_data: python_code)
    end
  end || []

  @setup_data = begin
    case status
    when nil
      warn("Package files unavailable, could not read setup.py")
      {}
    # Proc#=== invokes the proc, so this branch matches when
    # status.success? is truthy
    when :success?.to_proc
      JSON.parse(output)
    else
      warn("Failed to read setup.py in for #{self.url}")
      {}
    end
  rescue StandardError => ex
    # e.g. JSON::ParserError on malformed shim output
    warn("Failed to read setup.py in for #{self.url}: #{ex}")
    {}
  end
end
|
1014
|
+
|
1015
|
+
##
|
1016
|
+
# Yield a Pathname for the directory containing the package files
|
1017
|
+
#
|
1018
|
+
# Returns the result of the block, or +nil+ if the block is not
|
1019
|
+
# executed. The directory with the package files may be removed when
|
1020
|
+
# the block exits.
|
1021
|
+
#
|
1022
|
+
##
# Yield a Pathname for the directory containing the package files
#
# Returns the result of the block, or +nil+ if the block is not
# executed.  The directory with the package files may be removed when
# the block exits.
#
def with_package_files(&blk)
  scheme = self.url.scheme
  if scheme == 'git'
    with_git_worktree(self.url, &blk)
  elsif scheme =~ /^git\+/
    # "git+<transport>" URL: strip the "git+" prefix and treat the rest
    # as the actual git remote URL
    bare_git_uri = self.url.dup
    bare_git_uri.scheme = scheme[4..-1]
    with_git_worktree(bare_git_uri, &blk)
  elsif %w[http https].include?(scheme)
    if zip_url?
      with_unzipped_files(&blk)
    elsif tgz_url?
      with_untarred_files(&blk)
    else
      warn("Unknown archive type for URL: #{self.url}")
      nil
    end
  else
    warn("Unable to process URI package requirement: #{self.url}")
  end
end
|
1044
|
+
|
1045
|
+
##
|
1046
|
+
# Implementation of #with_package_files for git URIs
|
1047
|
+
#
|
1048
|
+
##
# Implementation of #with_package_files for git URIs
#
# Clones (or refreshes) a bare cache of the repository, exports the
# requested committish into a temporary directory via `git archive | tar`,
# and yields the package directory (honoring a "subdirectory=" URL
# fragment).  Returns the block's result, or nil on any git failure.
#
def with_git_worktree(uri)
  git_url = uri.dup
  # "path@committish" selects a specific ref; default resolved below
  git_url.path, committish = uri.path.split('@', 2)
  # The fragment (e.g. "subdirectory=...") is not part of the remote URL
  git_url.fragment = nil
  repo_path = CODE_CACHE_DIR.join("git_#{Digest::MD5.hexdigest(git_url.to_s)}.git")

  CODE_CACHE_DIR.mkpath

  in_dir_git_cmd = ['git', '-C', repo_path.to_s]

  if repo_path.exist?
    puts "Fetching #{git_url} to #{repo_path}..."
    cmd = in_dir_git_cmd + ['fetch', '--tags', 'origin', '+refs/heads/*:refs/heads/*']
    output, status = Open3.capture2(*cmd)
    unless status.success?
      warn("Failed to fetch 'origin' in #{repo_path}")
      return
    end
  else
    cmd = ['git', 'clone', '--bare', git_url.to_s, repo_path.to_s]
    output, status = Open3.capture2(*cmd)
    unless status.success?
      warn("Failed to clone #{git_url}")
      return
    end
  end

  # No explicit committish: use whatever the remote's HEAD points at
  committish ||= (
    cmd = in_dir_git_cmd + ['ls-remote', 'origin', 'HEAD']
    output, status = Open3.capture2(*cmd)
    unless status.success?
      raise "Unable to read the HEAD of origin"
    end
    output.split("\t")[0]
  )
  Dir.mktmpdir("myprecious-git-") do |workdir|
    cmds = [
      in_dir_git_cmd + ['archive', committish],
      ['tar', '-x', '-C', workdir.to_s],
    ]
    statuses = Open3.pipeline(*cmds, in: :close)
    # BUG FIX: previously used Enumerable#find, which returns the status
    # OBJECT, then indexed statuses/cmds with it; find_index gives the
    # integer index this code needs.  Also fixed the reference to the
    # undefined local `failed_cmd` (NameError in this error path).
    if failed_i = statuses.find_index {|s| s.exited? && !s.success?}
      exitstatus = statuses[failed_i].exitstatus
      failed_cmd_str = cmds[failed_i].shelljoin
      warn(
        "Failed to create temporary folder at command:\n" +
        "  #{failed_cmd_str.light_red} (exited with code #{exitstatus})"
      )
      return
    end

    fragment_parts = Hash[URI.decode_www_form(uri.fragment || '')]
    package_dir = Pathname(workdir).join(
      fragment_parts.fetch('subdirectory', '.')
    )
    return (yield package_dir)
  end
end
|
1106
|
+
|
1107
|
+
##
# Content type of the package URL, used to pick an extraction strategy
#
def get_url_content_type
  # TODO: Make a HEAD request to the URL to find out the content type;
  # until then, every URL is treated as an opaque byte stream
  'application/octet-stream'
end
|
1111
|
+
|
1112
|
+
##
# Does this package's URL point at a ZIP archive?
#
# Decides by content type when it is specific; falls back to the URL
# path's extension for the generic octet-stream type.
#
def zip_url?
  content_type = get_url_content_type
  return true if content_type == 'application/zip'
  return false unless content_type == 'application/octet-stream'
  self.url.path.downcase.end_with?('.zip')
end
|
1120
|
+
|
1121
|
+
##
|
1122
|
+
# Implementation of #with_package_files for ZIP file URLs
|
1123
|
+
#
|
1124
|
+
##
# Implementation of #with_package_files for ZIP file URLs
#
# Downloads self.url, extracts its entries -- with the archive's leading
# path component stripped -- into a cache directory, then yields that
# directory's Pathname.  Returns the block's result.
#
def with_unzipped_files
  # NOTE(review): the block parameter shadows the outer zip_path local;
  # both refer to the same extraction directory (extracted_url yields the
  # path it later returns)
  zip_path = extracted_url("zip") do |url_f, zip_path|
    Zip::File.open_buffer(url_f) do |zip_file|
      zip_file.each do |entry|
        # name_safe? rejects absolute and directory-escaping entry names
        if entry.name_safe?
          # Drop the archive's top-level directory from the entry path
          dest_file = zip_path.join(entry.name.split('/', 2)[1])
          dest_file.dirname.mkpath
          # The block's truthy result (:overwrite) tells rubyzip to
          # replace an already-existing destination file
          entry.extract(dest_file.to_s) {:overwrite}
        else
          warn("Did not extract #{entry.name} from #{self.url}")
        end
      end
    end
  end

  return (yield zip_path)
end
|
1141
|
+
|
1142
|
+
##
# Does this package's URL point at a gzipped tar archive?
#
# Decides by content type when it is specific; falls back to the URL
# path's extension for the generic octet-stream type.
#
def tgz_url?
  content_type = get_url_content_type
  return true if %r{^application/(x-tar(\+gzip)?|gzip)$} =~ content_type
  return false unless content_type == 'application/octet-stream'
  !!(self.url.path.downcase =~ /\.(tar\.gz|tgz)$/)
end
|
1150
|
+
|
1151
|
+
##
|
1152
|
+
# Implementation of #with_package_files for TGZ file URLs
|
1153
|
+
#
|
1154
|
+
##
# Implementation of #with_package_files for TGZ file URLs
#
# Downloads self.url, un-gzips and un-tars its file entries -- with the
# archive's leading path component stripped -- into a cache directory,
# then yields that directory's Pathname.  Returns the block's result.
#
def with_untarred_files
  # NOTE(review): the block parameter shadows the outer tar_path local;
  # both refer to the same extraction directory (extracted_url yields the
  # path it later returns)
  tar_path = extracted_url("tar") do |url_f, tar_path|
    Gem::Package::TarReader.new(Zlib::GzipReader.new(url_f)) do |tar_file|
      tar_file.each do |entry|
        # Refuse entries whose path tries to escape the extraction root
        if entry.full_name =~ %r{(^|/)\.\./}
          warn("Did not extract #{entry.name} from #{self.url}")
        elsif entry.file?
          # Drop the archive's top-level directory from the entry path;
          # non-file entries (directories, links) are skipped -- parent
          # directories are created on demand below
          dest_file = tar_path.join(entry.full_name.split('/', 2)[1])
          dest_file.dirname.mkpath
          dest_file.open('wb') do |df|
            IO.copy_stream(entry, df)
          end
        end
      end
    end
  end

  return (yield tar_path)
end
|
1173
|
+
|
1174
|
+
##
# Open this package's URL and hand the stream plus a cache-local
# extraction directory to the block
#
# The extraction directory is derived from +archive_type+ and a digest of
# the URL, so repeated calls for the same URL reuse the same path.
# Returns that path (as a Pathname under CODE_CACHE_DIR).
#
def extracted_url(archive_type, &blk)
  puts "Downloading #{self.url}"
  dest_path = CODE_CACHE_DIR.join(
    "#{archive_type}_#{Digest::MD5.hexdigest(self.url.to_s)}"
  )
  CODE_CACHE_DIR.mkpath

  if %w[http https].include?(self.url.scheme)
    # TODO: Make a HEAD request to see if re-download is necessary
  end

  self.url.open('rb') {|url_f| yield url_f, dest_path}

  dest_path
end
|
1189
|
+
end
|
1190
|
+
end
|