myprecious 0.0.8 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/bin/myprecious +5 -1
- data/lib/myprecious.rb +536 -107
- data/lib/myprecious/data_caches.rb +71 -0
- data/lib/myprecious/python_packages.rb +1175 -0
- data/lib/myprecious/ruby_gems.rb +273 -0
- metadata +134 -12
@@ -0,0 +1,71 @@
|
|
1
|
+
require 'myprecious'
|
2
|
+
require 'pathname'
|
3
|
+
|
4
|
+
# Singleton-class methods on the MyPrecious module for registering and
# enumerating data cache paths.
class << MyPrecious
  # When truthy, DataCaching#apply_cache bypasses reading existing caches.
  attr_accessor :caching_disabled

  ##
  # Declare a path as a data cache
  #
  # Returns the path it was given in +fpath+.
  #
  def data_cache(fpath)
    @data_caches ||= []
    @data_caches.push(fpath)
    fpath
  end

  ##
  # Retrieve an Array of all known data caches (empty if none declared)
  #
  def data_caches
    @data_caches || []
  end
end
|
24
|
+
|
25
|
+
module MyPrecious
  module DataCaching
    ##
    # Use cached data from, or write fresh data to, a file cache
    #
    # +cache+ should be a Pathname to a file in which JSON data can be
    # cached.  The cache is considered fresh when the file exists, its
    # mtime is within the last ONE_DAY, and MyPrecious.caching_disabled
    # is falsy.
    #
    # The block given will only be invoked if the cache does not exist or
    # is stale.  The block must return JSON.dump -able data.
    #
    # A block failure for a given cache path is remembered on this object
    # and re-raised on subsequent attempts without re-invoking the block.
    #
    def apply_cache(cache, &get_data)
      cache = Pathname(cache)
      if !MyPrecious.caching_disabled && cache.exist? && cache.stat.mtime > Time.now - ONE_DAY
        return cache.open('r') {|inf| JSON.load(inf)}
      else
        # Short-circuit to error if we've already received one for filling this cache
        if @data_cache_errors_fetching && @data_cache_errors_fetching[cache]
          raise @data_cache_errors_fetching[cache]
        end

        result = begin
          DataCaching.print_error_info(cache.basename('.json'), &get_data)
        rescue StandardError => e
          # Remember this error in case there is another attempt to fill this cache
          (@data_cache_errors_fetching ||= {})[cache] = e
          raise
        end

        # Write-through: persist the fresh result for later runs
        cache.dirname.mkpath
        cache.open('w') {|outf| JSON.dump(result, outf)}
        return result
      end
    end
    private :apply_cache

    ##
    # Run the given block, reporting (then re-raising) any StandardError
    # on $stderr with +target+ named; Interrupt passes through untouched.
    #
    def self.print_error_info(target)
      yield
    rescue Interrupt
      raise
    rescue StandardError => e
      $stderr.puts "Error fetching data for #{target}: #{e.message}"
      raise
    end
  end
end
|
@@ -0,0 +1,1175 @@
|
|
1
|
+
require 'json'
|
2
|
+
require 'myprecious'
|
3
|
+
require 'myprecious/data_caches'
|
4
|
+
require 'open-uri'
|
5
|
+
require 'open3'
|
6
|
+
require 'parslet'
|
7
|
+
require 'rest-client'
|
8
|
+
require 'rubygems/package'
|
9
|
+
require 'shellwords'
|
10
|
+
require 'tmpdir'
|
11
|
+
require 'zip'
|
12
|
+
|
13
|
+
module MyPrecious
|
14
|
+
class PyPackageInfo
|
15
|
+
include DataCaching
|
16
|
+
|
17
|
+
COMMON_REQ_FILE_NAMES = %w[requirements.txt Packages]
|
18
|
+
MIN_RELEASED_DAYS = 90
|
19
|
+
MIN_STABLE_DAYS = 14
|
20
|
+
|
21
|
+
PACKAGE_CACHE_DIR = MyPrecious.data_cache(DATA_DIR / "py-package-cache")
|
22
|
+
CODE_CACHE_DIR = MyPrecious.data_cache(DATA_DIR / "py-code-cache")
|
23
|
+
|
24
|
+
ACCEPTED_URI_SCHEMES = %w[
|
25
|
+
http
|
26
|
+
https
|
27
|
+
git
|
28
|
+
git+git
|
29
|
+
git+http
|
30
|
+
git+https
|
31
|
+
git+ssh
|
32
|
+
]
|
33
|
+
|
34
|
+
##
|
35
|
+
# Guess the name of the requirements file in the given directory
|
36
|
+
#
|
37
|
+
# Best effort (currently, consulting a static list of likely file names for
|
38
|
+
# existence), and may return +nil+.
|
39
|
+
#
|
40
|
+
##
# Guess the name of the requirements file in the given directory
#
# Best effort: checks a static list of likely file names for existence
# and returns the first that exists, or +nil+ when none does.
#
def self.guess_req_file(fpath)
  COMMON_REQ_FILE_NAMES.find {|candidate| fpath.join(candidate).exist?}
end
|
45
|
+
|
46
|
+
##
|
47
|
+
# Get an appropriate, human friendly column title for an attribute
|
48
|
+
#
|
49
|
+
##
# Get an appropriate, human friendly column title for an attribute
#
def self.col_title(attr)
  return 'Package' if attr == :name
  Reporting.common_col_title(attr)
end
|
55
|
+
|
56
|
+
##
|
57
|
+
# Construct an instance
|
58
|
+
#
|
59
|
+
# At least one of the keywords +name:+ or +url:+ _MUST_ be provided.
|
60
|
+
#
|
61
|
+
##
# Construct an instance
#
# At least one of the keywords +name:+ or +url:+ _MUST_ be provided.
#
# +version_reqs+ is an Array of Requirement clauses; +install+ flags
# whether the package is targeted for installation (vs. merely
# constrained).  Raises ArgumentError when both name: and url: are nil.
#
def initialize(name: nil, version_reqs: [], url: nil, install: false)
  super()
  if name.nil? && url.nil?
    raise ArgumentError, "At least one of name: or url: must be specified"
  end
  @name = name
  @version_reqs = version_reqs
  @url = url && URI(url)
  @install = install
  # BUGFIX: the original assigned to a *local* variable `current_version`,
  # silently discarding the pinned value; use the attribute writer so a
  # determinative requirement (== / ===) actually sets @current_version.
  if pinning_req = self.version_reqs.find(&:determinative?)
    self.current_version = pinning_req.vernum
  end
end
attr_reader :name, :version_reqs, :url
attr_accessor :install
alias_method :install?, :install
|
77
|
+
|
78
|
+
##
|
79
|
+
# Was this requirement specified as a direct reference to a URL providing
|
80
|
+
# the package?
|
81
|
+
#
|
82
|
+
##
# Was this requirement specified as a direct reference to a URL providing
# the package?
#
def direct_reference?
  !!url
end
|
85
|
+
|
86
|
+
##
|
87
|
+
# For packages specified without a name, do what is necessary to find the
|
88
|
+
# name
|
89
|
+
#
|
90
|
+
##
# For packages specified without a name, do what is necessary to find the
# name (consults the archive's setup metadata)
#
def resolve_name!
  return unless direct_reference?

  archive_name = setup_data['name']
  if @name.nil? || @name == archive_name
    @name = archive_name
  else
    warn("Requirement file entry for #{@name} points to archive for #{archive_name}")
  end
end
|
100
|
+
|
101
|
+
##
|
102
|
+
# For requirements not deterministically specifying a version, determine
|
103
|
+
# which version would be installed
|
104
|
+
#
|
105
|
+
##
# For requirements not deterministically specifying a version, determine
# which version would be installed
#
# Resolution order: archive metadata for direct URL references, then a
# determinative (== / ===) requirement, then the newest PyPI release
# satisfying all requirements.  Memoized in @current_version.
#
def resolve_version!
  return @current_version if @current_version

  if direct_reference?
    # Use setup_data; '0a0.dev0' is a minimal pre-release placeholder when
    # the archive metadata carries no version
    @current_version = parse_version_str(setup_data['version'] || '0a0.dev0')
  elsif pinning_req = self.version_reqs.find(&:determinative?)
    @current_version = parse_version_str(pinning_req.vernum)
  else
    # Use data from pypi
    puts "Resolving current version of #{name}..."
    if inferred_ver = latest_version_satisfying_reqs
      self.current_version = inferred_ver
      puts " -> #{inferred_ver}"
    else
      puts " (unknown)"
    end
  end
end
|
124
|
+
|
125
|
+
##
|
126
|
+
# Test if the version constraints on this package are satisfied by the
|
127
|
+
# given version
|
128
|
+
#
|
129
|
+
# All current version requirements are in #version_reqs.
|
130
|
+
#
|
131
|
+
##
# Test if the version constraints on this package are satisfied by the
# given version
#
# All current version requirements are in #version_reqs.
#
def satisfied_by?(version)
  version_reqs.each do |req|
    return false unless req.satisfied_by?(version)
  end
  true
end
|
134
|
+
|
135
|
+
##
|
136
|
+
# Incorporate the requirements for this package specified in another object
|
137
|
+
# into this instance
|
138
|
+
#
|
139
|
+
##
# Incorporate the requirements for this package specified in another object
# into this instance
#
# Concatenates +other_req+'s version requirements onto ours, merges the
# install flag, and pins the current version if a determinative clause is
# now present.  Raises ArgumentError when +other_req+ names a different
# package.
#
def incorporate(other_req)
  if other_req.name != self.name
    raise ArgumentError, "Cannot incorporate requirements for #{other_req.name} into #{self.name}"
  end

  self.version_reqs.concat(other_req.version_reqs)
  self.install ||= other_req.install
  # BUGFIX: the original assigned to a *local* variable `current_version`,
  # discarding the pin; use the attribute writer so @current_version is set.
  if current_version.nil? && (pinning_req = self.version_reqs.find(&:determinative?))
    self.current_version = pinning_req.vernum
  end
end
|
150
|
+
|
151
|
+
# Resolved version of this package (Version, or nil when unknown)
attr_reader :current_version

##
# Assign the current version, coercing non-Version values through
# parse_version_str
#
def current_version=(val)
  @current_version = if val.kind_of?(Version)
    val
  else
    parse_version_str(val)
  end
end
|
158
|
+
|
159
|
+
##
|
160
|
+
# An Array of Arrays containing version (MyPrecious::PyPackageInfo::Version
|
161
|
+
# or String) and release date (Time)
|
162
|
+
#
|
163
|
+
# The returned Array is sorted in order of descending version number, with
|
164
|
+
# strings not conforming to PEP-440 sorted lexicographically following all
|
165
|
+
# PEP-440 conformant versions, the latter presented as
|
166
|
+
# MyPrecious::PyPackageInfo::Version objects.
|
167
|
+
#
|
168
|
+
##
# An Array of Arrays containing version (MyPrecious::PyPackageInfo::Version
# or String) and release date (Time)
#
# The returned Array is sorted in order of descending version number, with
# strings not conforming to PEP-440 sorted lexicographically following all
# PEP-440 conformant versions, the latter presented as
# MyPrecious::PyPackageInfo::Version objects.
#
# Pre-releases and releases lacking a dated sdist are excluded.  Memoized.
#
def versions_with_release
  @versions ||= begin
    all_releases = get_package_info.fetch('releases', {})
    ver_release_pairs = all_releases.each_pair.map do |ver, info|
      [
        parse_version_str(ver),
        # Release date = earliest sdist upload time (nil when no sdist)
        info.select {|f| f['packagetype'] == 'sdist'}.map do |f|
          Time.parse(f['upload_time_iso_8601'])
        end.min
      ].freeze
    end
    # Drop pre-releases and undated releases
    ver_release_pairs.reject! do |vn, rd|
      (vn.kind_of?(Version) && vn.prerelease?) || rd.nil?
    end
    # Strings sort below Versions here; the final #reverse! leaves them
    # after all PEP-440 conformant versions in the result
    ver_release_pairs.sort! do |l, r|
      case
      when l[0].kind_of?(String) && r[0].kind_of?(Version) then -1
      when l[0].kind_of?(Version) && r[0].kind_of?(String) then 1
      else l <=> r
      end
    end
    ver_release_pairs.reverse!
    ver_release_pairs.freeze
  end
end
|
193
|
+
|
194
|
+
##
# Newest released version (per #versions_with_release ordering) that
# satisfies every requirement in #version_reqs, or +nil+ when none does
#
def latest_version_satisfying_reqs
  versions_with_release.each do |ver, _rel_date|
    # NOTE: the original tested the same condition twice — #satisfied_by?
    # is defined as `version_reqs.all? {...}` — so one check suffices.
    return ver if satisfied_by?(ver.to_s)
  end
  return nil
end
|
201
|
+
|
202
|
+
##
|
203
|
+
# Age in days of the current version
|
204
|
+
#
|
205
|
+
##
# Age in days of the current version (memoized; the computed value may
# legitimately be nil)
#
def age
  @age = get_age unless defined? @age
  @age
end
|
209
|
+
|
210
|
+
##
# Version string of the newest known release
#
def latest_version
  newest = versions_with_release.first
  newest[0].to_s
end

##
# Release time (Time) of the newest known release
#
def latest_released
  versions_with_release.first[1]
end
|
217
|
+
|
218
|
+
##
|
219
|
+
# Version number recommended based on stability criteria
|
220
|
+
#
|
221
|
+
# May return +nil+ if no version meets the established criteria
|
222
|
+
#
|
223
|
+
def recommended_version
|
224
|
+
return nil if versions_with_release.empty?
|
225
|
+
return @recommended_version if defined? @recommended_version
|
226
|
+
|
227
|
+
orig_time_horizon = time_horizon = \
|
228
|
+
Time.now - (MIN_RELEASED_DAYS * ONE_DAY)
|
229
|
+
horizon_versegs = nil
|
230
|
+
versions_with_release.each do |vn, rd|
|
231
|
+
if vn.kind_of?(Version)
|
232
|
+
horizon_versegs = nonpatch_versegs(vn)
|
233
|
+
break
|
234
|
+
end
|
235
|
+
end
|
236
|
+
|
237
|
+
versions_with_release.each do |ver, released|
|
238
|
+
next if ver.kind_of?(String) || ver.prerelease?
|
239
|
+
return (@recommended_version = current_version) if current_version && current_version >= ver
|
240
|
+
|
241
|
+
# Reset the time-horizon clock if moving back into previous patch-series
|
242
|
+
if (nonpatch_versegs(ver) <=> horizon_versegs) < 0
|
243
|
+
time_horizon = orig_time_horizon
|
244
|
+
end
|
245
|
+
|
246
|
+
if released < time_horizon && version_reqs.all? {|r| r.satisfied_by?(ver, strict: false)}
|
247
|
+
return (@recommended_version = ver)
|
248
|
+
end
|
249
|
+
time_horizon = [time_horizon, released - (MIN_STABLE_DAYS * ONE_DAY)].min
|
250
|
+
end
|
251
|
+
return (@recommended_version = nil)
|
252
|
+
end
|
253
|
+
|
254
|
+
##
# Home page URL as reported by PyPI metadata
#
def homepage_uri
  info = get_package_info['info']
  info['home_page']
end

##
# License as reported by PyPI metadata
#
def license
  # TODO: Implement better, showing difference between current and recommended
  info = get_package_info['info']
  LicenseDescription.new(info['license'])
end

##
# Best-effort changelog link
#
def changelog
  # TODO: this actually returns the project URL, not a changelog
  get_package_info['info']['project_url']
end
|
268
|
+
|
269
|
+
##
# Days between the current version's release and the recommended
# version's release, or +nil+ when either release date is unknown
#
def days_between_current_and_recommended
  # For a pre-release current version (pre-releases are filtered out of
  # #versions_with_release), use the newest release older than it instead
  v, cv_rel = versions_with_release.find do |v, r|
    case
    when current_version.prerelease?
      # NOTE(review): current_version may be a non-PEP-440 String, which
      # has no #prerelease? — confirm callers guarantee a Version here
      v < current_version
    else
      v == current_version
    end
  end || []
  v, rv_rel = versions_with_release.find {|v, r| v == recommended_version} || []
  return nil if cv_rel.nil? || rv_rel.nil?

  # Time difference is in seconds; convert to whole days
  return ((rv_rel - cv_rel) / ONE_DAY).to_i
end
|
283
|
+
|
284
|
+
##
# Obsolescence rating of the current version relative to the recommended
# version (e.g. :severe, or whatever Reporting.obsolescence_by_age yields)
#
# Returns nil (the value of the unentered +if+) when the current version
# is not a parsed Version or there is no recommended version.
#
def obsolescence
  at_least_moderate = false
  if current_version.kind_of?(Version) && recommended_version
    # [epoch, major] pairs for coarse comparison
    cv_major = [current_version.epoch, current_version.final.first]
    rv_major = [recommended_version.epoch, recommended_version.final.first]

    case
    when rv_major[0] < cv_major[0]
      # Current epoch is newer than the recommendation's: not obsolete
      return nil
    when cv_major[0] < rv_major[0]
      # Can't compare, rely on days_between_current_and_recommended
    when cv_major[1] + 1 < rv_major[1]
      # Two or more major versions behind
      return :severe
    when cv_major[1] < rv_major[1]
      # Exactly one major version behind; age decides the final rating
      at_least_moderate = true
    end

    days_between = days_between_current_and_recommended

    return Reporting.obsolescence_by_age(
      days_between,
      at_least_moderate: at_least_moderate,
    )
  end
end
|
309
|
+
|
310
|
+
##
|
311
|
+
# Parses requirement line based on grammar in PEP 508
|
312
|
+
# (https://www.python.org/dev/peps/pep-0508/#complete-grammar)
|
313
|
+
#
|
314
|
+
##
# Parses requirement line based on grammar in PEP 508
# (https://www.python.org/dev/peps/pep-0508/#complete-grammar)
#
class ReqSpecParser < Parslet::Parser
  # Ordered so multi-character operators match before their prefixes
  COMPARATORS = %w[<= < != === == >= > ~=]
  # Environment marker variables defined by PEP 508
  ENVVARS = %w[
    python_version python_full_version
    os_name sys_platform platform_release
    platform_system platform_version
    platform_machine platform_python_implementation
    implementation_name implementation_version
    extra
  ]
  root :specification

  rule(:wsp) { match[' \t'] }
  rule(:wsp_r) { wsp.repeat }
  rule(:version_cmp) { wsp_r >> COMPARATORS.map {|o| str(o)}.inject(&:|) }
  rule(:version) { wsp_r >> (match['[:alnum:]_.*+!-']).repeat(1) }
  rule(:version_one) { (version_cmp.as(:op) >> version.as(:ver)).as(:verreq) }
  rule(:version_many) { version_one.repeat(1,1) >> (wsp_r >> str(',') >> version_one).repeat }
  rule(:versionspec) { (str('(') >> version_many >> str(')')) | version_many }
  rule(:urlspec) { str('@') >> wsp_r >> uri_reference.as(:url) }
  rule(:marker_op) { version_cmp | (wsp_r >> str('in')) | (wsp_r >> str('not') >> wsp.repeat(1) >> str('in')) }
  rule(:python_str_c) { (wsp | match['A-Za-z0-9().{}_*#:;,/?\[\]!~`@$%^&=+|<>-']) }
  rule(:dquote) { str('"') }
  rule(:squote) { str("'") }
  rule(:python_str) {
    (squote >> (python_str_c | dquote).repeat.as(:str) >> squote) | \
    (dquote >> (python_str_c | squote).repeat.as(:str) >> dquote)
  }
  rule(:env_var) { ENVVARS.map {|n| str(n)}.inject(&:|) }
  rule(:marker_var) { wsp_r >> (env_var | python_str)}
  rule(:marker_expr) { marker_var.as(:l) >> marker_op.as(:o) >> marker_var.as(:r) | wsp_r >> str('(') >> marker >> wsp_r >> str(')') }
  rule(:marker_and) { marker_expr.as(:l) >> wsp_r >> str('and').as(:o) >> marker_expr.as(:r) | marker_expr }
  rule(:marker_or) { marker_and.as(:l) >> wsp_r >> str('or').as(:o) >> marker_and.as(:r) | marker_and }
  rule(:marker) { marker_or }
  rule(:quoted_marker) { str(';') >> wsp_r >> marker.as(:markers) }
  rule(:identifier_end) { match['[:alnum:]'] | match['_.-'].repeat >> match['[:alnum:]'] }
  rule(:identifier) { match['[:alnum:]'] >> identifier_end.repeat }
  rule(:name) { identifier }
  rule(:extras_list) { identifier.as(:id).repeat(1,1) >> (wsp_r >> str(',') >> wsp_r >> identifier.as(:id)).repeat }
  rule(:extras) { str('[') >> wsp_r >> extras_list >> wsp_r >> str(']') }
  rule(:name_req) { name.as(:package) >> wsp_r >> extras.as(:extras).maybe >> wsp_r >> versionspec.as(:verreqs).maybe >> wsp_r >> quoted_marker.maybe }
  rule(:url_req) { name.as(:package) >> wsp_r >> extras.as(:extras).maybe >> wsp_r >> urlspec >> (wsp.repeat(1) | any.absent?) >> quoted_marker.maybe }
  rule(:specification) { wsp_r >> (url_req | name_req) >> wsp_r }

  # URI grammar (RFC 3986 style, as referenced by PEP 508)
  rule(:uri_reference) { uri | relative_ref }
  rule(:query_maybe) { (str('?') >> query).maybe }
  rule(:fragment_maybe) { (str('#') >> fragment).maybe }
  rule(:uri) { scheme >> str(':') >> hier_part >> query_maybe >> fragment_maybe }
  rule(:hier_part) { (str('//') >> authority >> path_abempty) | path_absolute | path_rootless | path_empty }
  rule(:absolute_uri) { scheme >> str(':') >> hier_part >> query_maybe }
  rule(:relative_ref) { relative_part >> query_maybe >> fragment_maybe }
  rule(:relative_part) { str('//') >> authority >> path_abempty | path_absolute | path_noscheme | path_empty }
  rule(:scheme) { match['[:alpha:]'] >> match['[:alnum:]+.-'].repeat }
  rule(:authority) { (userinfo >> str('@')).maybe >> host >> (str(':') >> port).maybe }
  rule(:userinfo) { (unreserved | pct_encoded | sub_delims | str(':')).repeat }
  rule(:host) { ip_literal | ipv4address | reg_name }
  rule(:port) { match['0-9'].repeat }
  rule(:ip_literal) { str('[') >> (ipv6address | ipvfuture) >> str(']') }
  rule(:ipvfuture) { str('v') >> match['[:xdigit:]'].repeat(1) >> str('.') >> (unreserved | sub_delims | str(':')).repeat(1) }
  rule(:ipv6address) {
    c = str(':')
    cc = str('::')

    (h16 >> c).repeat(6,6) >> ls32 |
    cc >> (h16 >> c).repeat(5,5) >> ls32 |
    h16.maybe >> cc >> (h16 >> c).repeat(4,4) >> ls32 |
    ((h16 >> c).maybe >> h16).maybe >> cc >> (h16 >> c).repeat(3,3) >> ls32 |
    ((h16 >> c).repeat(0,2) >> h16).maybe >> cc >> (h16 >> c).repeat(2,2) >> ls32 |
    ((h16 >> c).repeat(0,3) >> h16).maybe >> cc >> h16 >> c >> ls32 |
    ((h16 >> c).repeat(0,4) >> h16).maybe >> cc >> ls32 |
    ((h16 >> c).repeat(0,5) >> h16).maybe >> cc >> h16 |
    ((h16 >> c).repeat(0,6) >> h16).maybe >> cc
  }
  rule(:h16) { match['[:xdigit:]'].repeat(1,4) }
  rule(:ls32) { h16 >> str(':') >> h16 | ipv4address }
  rule(:ipv4address) { dec_octet >> (str('.') >> dec_octet).repeat(3,3) }
  # NOTE(review): alternatives are ordered shortest-first, unlike RFC 3986's
  # longest-first ordering — confirm Parslet's backtracking makes this
  # equivalent for inputs like "255"
  rule(:dec_octet) {
    d = match['0-9']
    nz = match['1-9']

    d |
    nz >> d |
    str('1') >> d.repeat(2,2) |
    str('2') >> match['0-4'] >> d |
    str('25') >> match['0-5']
  }
  rule(:reg_name) { (unreserved | pct_encoded | sub_delims).repeat }
  rule(:path) { path_abempty | path_absolute | path_noscheme | path_rootless | path_empty }
  # Share one '/' atom among all path rules
  Parslet.str('/').tap do |sl|
    rule(:path_abempty) { (sl >> segment).repeat }
    rule(:path_absolute) { sl >> (segment_nz >> (sl >> segment).repeat).maybe }
    rule(:path_noscheme) { segment_nz_nc >> (sl >> segment).repeat }
    rule(:path_rootless) { segment_nz >> (sl >> segment).repeat }
  end
  rule(:path_empty) { pchar.absent? }
  rule(:segment) { pchar.repeat }
  rule(:segment_nz) { pchar.repeat(1) }
  rule(:segment_nz_nc) { (unreserved | pct_encoded | sub_delims | str('@')).repeat(1) }
  rule(:pchar) { unreserved | pct_encoded | sub_delims | match[':@'] }
  rule(:query) { (pchar | match['/?']).repeat }
  rule(:fragment) { (pchar | match['/?']).repeat }
  rule(:pct_encoded) { str('%') >> match['[:xdigit:]'].repeat(2,2) }
  rule(:unreserved) { match['[:alnum:]._~-'] }
  rule(:reserved) { gen_delims | sub_delims }
  rule(:gen_delims) { match[':/?#()@'] }
  rule(:sub_delims) { match["!$&'()*+,;="] }
end
|
422
|
+
|
423
|
+
##
|
424
|
+
# Transforms parse tree from ReqSpecParser to usable objects
|
425
|
+
#
|
426
|
+
##
# Transforms parse tree from ReqSpecParser to usable objects
#
class ReqSpecTransform < Parslet::Transform
  # A single "<op> <version>" clause becomes a Requirement
  rule(:verreq => {op: simple(:o), ver: simple(:v)}) {Requirement.new(o.to_s, v.to_s)}
  # Bare package name
  rule(package: simple(:n)) {|c| PyPackageInfo.new(name: c[:n].to_s)}
  # Package name plus version requirement clauses
  rule(package: simple(:n), verreqs: sequence(:rs)) {|c| PyPackageInfo.new(
    name: c[:n].to_s,
    version_reqs: c[:rs],
  )}
  # Package name plus direct URL reference
  rule(package: simple(:n), url: simple(:url)) {|c| PyPackageInfo.new(
    name: c[:n].to_s,
    url: c[:url].to_s,
  )}

  ##
  # Apply transform after normalizing a parse tree
  #
  # This method should be applied only to a parse tree expected to come
  # from a requirement specification.
  #
  # Normalization keeps only the keys the rules above understand, so stray
  # captures (e.g. markers) do not prevent a rule from matching.
  #
  def apply_spec(ptree)
    norm_ptree = {}
    # TODO: :extras should be in this list, and we should default them to []
    %i[package verreqs url].each do |c|
      norm_ptree[c] = ptree[c] if ptree.has_key?(c)
    end
    apply(norm_ptree)
  end
end
|
453
|
+
|
454
|
+
##
|
455
|
+
# Representation of a single requirement clause
|
456
|
+
#
|
457
|
+
##
# Representation of a single requirement clause
#
class Requirement
  ##
  # +op+ is a PEP 508 comparison operator String (or an already-mapped
  # Symbol); +vernum+ is the clause's version string.  Raises for unknown
  # operator strings.
  #
  def initialize(op, vernum)
    super()
    @op = case op
    when '<' then :<
    when '<=' then :<=
    when '==' then :==
    when '>=' then :>=
    when '>' then :>
    when '!=' then :!=
    when '~=' then :compatible
    when '===' then :str_equal
    when Symbol then op
    else
      raise "Unknown requirement operator #{op.inspect}"
    end
    @vernum = vernum
  end
  attr_reader :op, :vernum

  ##
  # Does this clause pin an exact version (== or ===)?
  #
  def determinative?
    [:==, :str_equal].include?(op)
  end

  ##
  # Query if this requirement is satisfied by a particular version
  #
  # When +strict:+ is false and the instance is an equality-type requirement
  # (i.e. the +op+ is +:==+ or +:str_equal+), the result is always +true+.
  #
  # Returns nil (with a warning) when the two versions are not comparable.
  #
  def satisfied_by?(version, strict: true)
    return true if !strict && %i[== str_equal].include?(op)

    # NOTE: the original also parsed both version strings here before the
    # case and then discarded the results; that dead work is removed.
    return case op
    when :compatible
      req_key, cand_key = comp_keys(version)
      (cand_key <=> req_key) >= 0 && (cand_key <=> series(req_key)) == 0
    when :str_equal
      self.vernum == version.to_s
    else
      req_key, cand_key = comp_keys(version)
      if comp_result = (cand_key <=> req_key)
        comp_result.send(op, 0)
      else
        warn("Cannot test #{cand_key.inspect} #{op} #{req_key} (<=> returned nil)")
      end
    end
  end

  private
  ##
  # Parse this clause's version and +other+ into comparison keys
  #
  def comp_keys(other)
    [self.vernum, other].map {|v| PyPackageInfo.parse_version_str(v)}
  end

  ##
  # The ~= "series" of +comp_key+: same version with its final part
  # wildcarded (e.g. 1.4.2 -> 1.4.*)
  #
  def series(comp_key)
    comp_key.dup.tap do |result|
      # BUGFIX: FinalVersion#to_series returns a *new* object; the original
      # called it and discarded the result, leaving the key unchanged (so
      # the series check in ~= matching was a no-op).  Install the
      # wildcarded final on the duplicate.
      result.instance_variable_set(:@final, result.final.to_series)
    end
  end
end
|
520
|
+
|
521
|
+
VERSION_PATTERN = /^
|
522
|
+
((?<epoch> \d+ ) ! )?
|
523
|
+
(?<final> \d+ (\.\d+)* (\.\*)? )
|
524
|
+
( # Pre-release (a | b | rc) group
|
525
|
+
[._-]?
|
526
|
+
(?<pre_group> a(lpha)? | b(eta)? | c | pre(view)? | rc )
|
527
|
+
[._-]?
|
528
|
+
(?<pre_n> \d* )
|
529
|
+
)?
|
530
|
+
( # Post-release group
|
531
|
+
(
|
532
|
+
[._-]? (post|r(ev)?) [._-]?
|
533
|
+
|
|
534
|
+
- # Implicit post release
|
535
|
+
)
|
536
|
+
(?<post> ((?<![._-]) | \d) \d* )
|
537
|
+
)?
|
538
|
+
( # Development release group
|
539
|
+
[._-]?
|
540
|
+
dev
|
541
|
+
(?<dev> \d* )
|
542
|
+
)?
|
543
|
+
( # Local version segment
|
544
|
+
\+
|
545
|
+
(?<local>.*)
|
546
|
+
)?
|
547
|
+
$/x
|
548
|
+
|
549
|
+
##
# Mixin providing PEP 440 version string parsing
#
module VersionParsing
  ##
  # Parse +s+ into a Version
  #
  # Returns +s+ unchanged when it is already a Version or does not match
  # VERSION_PATTERN (non-PEP-440 strings pass through as Strings).
  #
  def parse_version_str(s)
    return s if s.kind_of?(Version)
    return s unless parts = VERSION_PATTERN.match(s.downcase)

    # Normalization: PEP 440 canonical spellings for pre-release groups
    pre_group = case parts[:pre_group]
    when 'alpha' then 'a'
    when 'beta' then 'b'
    when 'c', 'pre', 'preview' then 'rc'
    else parts[:pre_group]
    end

    return Version.new(
      FinalVersion.new(parts[:final]),
      epoch: parts[:epoch],
      pre: [pre_group, parts[:pre_n]],
      post: parts[:post],
      dev: parts[:dev],
      local: parts[:local],
    )
  end
end
# Available both as PyPackageInfo.parse_version_str and as an instance method
extend VersionParsing
include VersionParsing
|
574
|
+
|
575
|
+
##
|
576
|
+
# Represents a full PEP-440 version
|
577
|
+
#
|
578
|
+
##
# Represents a full PEP-440 version
#
class Version
  # Pre-release sort key for releases with no pre part: 'z' sorts after
  # every real group letter (a/b/rc)
  NOT_PRE = ['z', 0]

  ##
  # +final+ is a FinalVersion (or something FinalVersion.new accepts);
  # remaining parts follow PEP 440: epoch, pre as [group_letter, number],
  # post/dev numbers, and the local segment (String, Array, or nil).
  #
  def initialize(final, epoch: 0, pre: [], post: nil, dev: nil, local: nil)
    @epoch = (epoch || 0).to_i
    @final = final.kind_of?(FinalVersion) ? final : FinalVersion.new(final)
    @pre = normalize_part(pre[1]) {|n| n && [pre[0], n]}
    @post = normalize_part(post) {|n| n && [n] }
    @dev = normalize_part(dev) {|n| n}
    # Local segment is stored as an Array of Integer/String parts
    @local = case local
    when nil then nil
    when Array then local
    else local.to_s.split(/[._-]/).map {|part| try_to_i(part)}
    end
  end
  attr_reader :epoch, :final, :local

  def inspect
    "#<#{self.class.name} #{to_s.inspect}>"
  end

  # Canonical-ish PEP 440 rendering of the version
  def to_s
    [].tap do |parts|
      parts << "#{epoch}!" unless epoch == 0
      parts << final.to_s
      parts << "#{@pre[0]}#{@pre[1]}" if @pre
      # BUGFIX: @post is stored as a one-element Array; interpolating it
      # directly produced Array#to_s output like ".post[3]"
      parts << ".post#{@post[0]}" if @post
      parts << ".dev#{@dev}" if @dev
      # BUGFIX: @local is an Array of segments; interpolating it directly
      # produced e.g. "+[1, \"ubuntu\"]" — join with '.' per PEP 440
      parts << "+#{local.join('.')}" if local
    end.join('')
  end

  # Pre-release group letter ('a'/'b'/'rc'), or nil
  def pre_group
    @pre && @pre[0]
  end

  # Pre-release number, or nil
  def pre_num
    @pre && @pre[1]
  end

  ##
  # PEP 440 ordering: epoch, final, then pre/post/dev comparison keys,
  # with local versions sorting after their unlabeled counterpart.
  # Returns nil for non-Version operands.
  #
  def <=>(rhs)
    return nil unless rhs.kind_of?(self.class)
    steps = Enumerator.new do |comps|
      %i[epoch final pre_comp post_comp dev_comp].each do |attr|
        comps << (send(attr) <=> rhs.send(attr))
      end

      case [local, rhs.local].count(&:nil?)
      when 2 then comps << 0
      when 1 then comps << (local.nil? ? -1 : 1)
      else comps << (local <=> rhs.local)
      end
    end
    # First non-zero comparison decides; all-equal means equal
    steps.find {|v| v != 0} || 0
  end
  include Comparable

  # True for pre-releases and dev releases
  def prerelease?
    !!(@pre || @dev)
  end

  private
  ##
  # Coerce a captured part: '' means implicit 0, nil means absent;
  # the Integer (or nil) is passed to the block for final shaping.
  #
  def normalize_part(value)
    yield case value
    when '' then 0
    when nil then nil
    else value.to_i
    end
  end

  # Convert purely numeric strings to Integer; leave others as-is
  def try_to_i(s)
    if /^\d+$/ =~ s
      s.to_i
    else
      s
    end
  end

  # Comparison key for the pre part (NOT_PRE when absent)
  def pre_comp
    @pre || NOT_PRE
  end

  # Comparison key for the post part ([] sorts before any [n])
  def post_comp
    @post || []
  end

  # Comparison key for the dev part (absence sorts after any dev number)
  def dev_comp
    @dev || Float::INFINITY
  end
end
|
668
|
+
|
669
|
+
##
|
670
|
+
# Represents the "final" part of a PEP-440 version string
|
671
|
+
#
|
672
|
+
##
# Represents the "final" part of a PEP-440 version string — the dotted
# numeric segments, possibly ending in a '*' wildcard
#
class FinalVersion
  # +final_ver+ may be a pre-split Array of segments or a dotted String
  def initialize(final_ver)
    @value = case final_ver
    when Array then final_ver
    else final_ver.split('.').map {|s| seg_value(s)}
    end
  end

  # n-th segment (Integer or :*); nil past the end
  def [](n)
    @value[n]
  end

  def length
    @value.length
  end

  def each(&blk)
    @value.each(&blk)
  end
  include Enumerable

  def to_s
    @value.join('.')
  end

  def inspect
    "#<#{self.class.name} #{to_s}>"
  end

  ##
  # Segment-wise comparison: missing segments count as 0 (1.0 == 1.0.0),
  # and a :* wildcard matches anything from that point on.
  # Returns nil for non-FinalVersion operands.
  #
  def <=>(rhs)
    # BUGFIX: the original lacked `return`, so this guard evaluated to nil
    # and fell through to segment comparison against arbitrary objects.
    return nil unless rhs.kind_of?(FinalVersion)
    (0..Float::INFINITY).lazy.each do |i|
      return 0 if self[i].nil? && rhs[i].nil?
      return 0 if [self[i], rhs[i]].include?(:*)
      diff = (self[i] || 0) <=> (rhs[i] || 0)
      return diff if diff != 0
    end
  end
  include Comparable

  ##
  # Same version with the last segment wildcarded (e.g. "1.2.3" ->
  # "1.2.*"), as used for ~= series matching
  #
  def to_series
    self.class.new(@value.dup.tap do |mver|
      mver[-1] = :*
    end.join('.'))
  end

  private
  # '*' -> :*, anything else -> Integer
  def seg_value(s)
    if s == '*'
      :*
    else
      s.to_i
    end
  end
end
|
727
|
+
|
728
|
+
##
|
729
|
+
# Reads package requirements from a file
|
730
|
+
#
|
731
|
+
class Reader
|
732
|
+
##
# Reads package requirements from the file at +packages_fpath+
#
# When +only_constrain:+ is true, entries are treated as version
# constraints only, not as packages targeted for installation.
#
def initialize(packages_fpath, only_constrain: false)
  super()
  @only_constrain = only_constrain
  @files = [Pathname(packages_fpath)]
end
|
737
|
+
|
738
|
+
##
|
739
|
+
# Enumerate packages described by requirements targeted by this instance
|
740
|
+
#
|
741
|
+
# Each invocation of the block receives a PyPackageInfo object, which
|
742
|
+
# will have, at minimum, either a #name or #url not +nil+. It is
|
743
|
+
# possible that multiple iterations will process separate PyPackageInfo
|
744
|
+
# for the same package, in which case PyPackageInfo#incorporate is useful.
|
745
|
+
#
|
746
|
+
# An Enumerator is returned if no block is given.
|
747
|
+
#
|
748
|
+
##
# Enumerate packages described by requirements targeted by this instance
#
# Each invocation of the block receives a PyPackageInfo object, which
# will have, at minimum, either a #name or #url not +nil+.  It is
# possible that multiple iterations will process separate PyPackageInfo
# for the same package, in which case PyPackageInfo#incorporate is useful.
#
# An Enumerator is returned if no block is given.
#
def each_package_constrained
  generator = Enumerator.new do |items|
    continued_line = ''
    current_file.each_line do |pkg_line|
      pkg_line = pkg_line.chomp
      # Skip whole-line comments, then strip trailing comments (which must
      # be preceded by whitespace)
      next if /^#/ =~ pkg_line
      if /(?<=\s)#.*$/ =~ pkg_line
        pkg_line = pkg_line[0...-$&.length]
      end

      # Yes, this _does_ happen after comment lines are skipped :facepalm:
      # (matching pip's behavior: backslash continuation joins lines)
      if /\\$/ =~ pkg_line
        continued_line += pkg_line[0..-2]
        next
      end
      # Flush any accumulated continuation and reset the accumulator
      pkg_line, continued_line = (continued_line + pkg_line).strip, ''
      next if pkg_line.empty?

      process_line_into(items, pkg_line)
    end
  end

  if block_given?
    generator.each {|item| yield item}
  else
    generator
  end
end
|
776
|
+
|
777
|
+
##
|
778
|
+
# Enumerate packages targeted for installation by this instance
|
779
|
+
#
|
780
|
+
# Each invocation of the block receives a PyPackageInfo object targeted
|
781
|
+
# for installation. Each of these PyPackageInfo object will have a
|
782
|
+
# resolved #name and #current_version (if possible).
|
783
|
+
#
|
784
|
+
# An Enumerator is returned if no block is given.
|
785
|
+
#
|
786
|
+
##
# Enumerate packages targeted for installation by this instance
#
# Each invocation of the block receives a PyPackageInfo object targeted
# for installation.  Each of these PyPackageInfo object will have a
# resolved #name and #current_version (if possible).
#
# An Enumerator is returned if no block is given.
#
def each_installed_package
  generator = Enumerator.new do |items|
    packages = {}

    # First pass: merge all constraint entries by resolved package name
    each_package_constrained do |pkg|
      pkg.resolve_name!
      if packages.has_key?(pkg.name)
        packages[pkg.name].incorporate(pkg)
      else
        packages[pkg.name] = pkg
      end
    end

    # Second pass: collect names of packages actually marked for install
    to_install = []
    packages.each_value do |pkg|
      next unless pkg.install?
      to_install << pkg.name
    end

    # Resolve versions and emit in collection order
    while pkg_name = to_install.shift
      pkg = packages[pkg_name]
      pkg.resolve_version!
      items << pkg
    end
  end

  if block_given?
    generator.each {|item| yield item}
  else
    generator
  end
end
|
818
|
+
|
819
|
+
private
|
820
|
+
# The file currently being read (top of the -r/-c inclusion stack)
def current_file
  @files[-1]
end
|
823
|
+
|
824
|
+
##
# Make +fpath+ the current file for the duration of the block, restoring
# the previous file afterward
#
def in_file(fpath)
  @files.push(Pathname(fpath))
  begin
    yield
  ensure
    @files.pop
  end
end
|
832
|
+
|
833
|
+
# True when entries from the current file should only constrain versions
# (pip -c semantics), not mark packages for install
def only_constrain?
  @only_constrain
end
|
836
|
+
|
837
|
+
# Run the block with @only_constrain forced to true, restoring the
# previous value afterwards (even if the block raises).
def reading_constraints
  saved = @only_constrain
  @only_constrain = true
  begin
    yield
  ensure
    @only_constrain = saved
  end
end
|
845
|
+
|
846
|
+
##
# Dispatch one logical requirements-file line
#
# Understands the pip directives "-r FILE" (include another requirements
# file), "-c FILE" (include a constraints file), and "-e" (editable
# install -- reported and skipped).  Any other line is treated as a
# package requirement and handed to #insert_package_from_line_into.
#
def process_line_into(items, pkg_line)
  case pkg_line
  when /^-r (.+)$/
    # BUGFIX: was /^-r (.)$/, which captured only the FIRST CHARACTER of
    # the referenced file name, so the include could never resolve.
    if only_constrain?
      warn("-r directive appears in constraints file #{current_file}")
    end
    in_file(current_file.dirname / $1) do
      each_package_constrained {|pkg| items << pkg}
    end
  when /^-c (.+)$/
    # BUGFIX: was /^-c (.)$/ -- same single-character capture defect.
    in_file(current_file.dirname / $1) do
      reading_constraints do
        each_package_constrained {|pkg| items << pkg}
      end
    end
  when /^-e/
    warn %Q{#{current_file} lists "editable" package: #{pkg_line}}
  else
    insert_package_from_line_into(items, pkg_line)
  end
end
|
867
|
+
|
868
|
+
# Parse one requirement line and append the resulting spec to +items+
#
# +pkg_line+ is first run through ReqSpecParser.  If that fails, the
# line is tried as a bare URL requirement (accepted schemes only); URL
# requirements are rejected with a warning inside constraints files.
# Unparseable lines are reported and skipped.
def insert_package_from_line_into(items, pkg_line)
  parse_tree = begin
    ReqSpecParser.new.parse(pkg_line)
  rescue Parslet::ParseFailed
    # Not parseable as a requirement spec -- maybe it is a plain URL
    if (uri = URI.try_parse(pkg_line)) && ACCEPTED_URI_SCHEMES.include?(uri.scheme)
      if only_constrain?
        warn("#{current_file} is a constraints file but specifies URL #{uri}")
      else
        items << PyPackageInfo.new(url: uri, install: true)
      end
      return
    end
    warn("Unreportable line in #{current_file}: #{pkg_line}")
    return
  end

  # Transform parse tree into a spec
  spec = ReqSpecTransform.new.apply_spec(parse_tree)
  if spec.kind_of?(PyPackageInfo)
    # ||= preserves an install flag the transform may already have set;
    # constraints-file entries are not themselves marked for install
    spec.install ||= !only_constrain?
    items << spec
  else
    warn("Unhandled requirement parse tree: #{explain_parse_tree parse_tree}")
  end
end
|
893
|
+
|
894
|
+
# Render a compact, types-only summary of a parse tree for diagnostic
# messages: element values are elided, only their classes are shown
# (hash keys are kept via #inspect).
def explain_parse_tree(parse_tree)
  type_tag = lambda {|obj| "#<#{obj.class.name}>"}

  if parse_tree.kind_of?(Array)
    '[' + parse_tree.map(&type_tag).join(', ') + ']'
  elsif parse_tree.kind_of?(Hash)
    pairs = parse_tree.map {|key, val| "#{key.inspect} => #{type_tag.call(val)}"}
    '{' + pairs.join(', ') + '}'
  else
    type_tag.call(parse_tree)
  end
end
|
904
|
+
end
|
905
|
+
|
906
|
+
# URL of the PyPI JSON metadata document for this package
def pypi_url
  format('https://pypi.org/pypi/%s/json', name)
end
|
909
|
+
|
910
|
+
# URL of the PyPI JSON metadata document for one specific +release+ of
# this package
def pypi_release_url(release)
  format('https://pypi.org/pypi/%s/%s/json', name, release)
end
|
913
|
+
|
914
|
+
private
|
915
|
+
# Fetch (or reuse cached) package-level metadata from PyPI
#
# Cache policy is delegated to #apply_cache; the download block only
# runs on a cache miss.
def get_package_info
  apply_cache(PACKAGE_CACHE_DIR.join("#{name}.json")) do
    JSON.parse(RestClient.get(pypi_url))
  end
end
|
922
|
+
|
923
|
+
# Fetch (or reuse cached) metadata for one specific +release+ from PyPI
#
# Cached per-release under PACKAGE_CACHE_DIR/<name>/<release>.json; the
# download block only runs on a cache miss.
def get_release_info(release)
  release_cache = PACKAGE_CACHE_DIR.join(name, "#{release}.json")
  apply_cache(release_cache) do
    JSON.parse(RestClient.get(pypi_release_url(release)))
  end
end
|
930
|
+
|
931
|
+
# Age of the current version in whole days, or nil when the current
# version does not appear among the released versions.
def get_age
  entry = versions_with_release.find {|vnum, _| vnum == current_version}
  return nil if entry.nil?
  ((Time.now - entry[1]) / ONE_DAY).to_i
end
|
937
|
+
|
938
|
+
##
|
939
|
+
# Given a version, return the parts that we expect to define the
|
940
|
+
# major/minor release series
|
941
|
+
#
|
942
|
+
# Returns an Array
|
943
|
+
#
|
944
|
+
def nonpatch_versegs(ver)
  # nil version has no series
  return nil if ver.nil?
  series = [ver.epoch]
  series.concat(ver.final.take(2))
  series
end
|
948
|
+
|
949
|
+
##
|
950
|
+
# Get data from the setup.py file of the package
|
951
|
+
#
|
952
|
+
def setup_data
  # Memoized; `defined?` (not ||=) so a legitimately-empty result caches
  return @setup_data if defined? @setup_data
  unless self.url
    raise "#setup_data called for #{name}, may only be called for packages specified by URL"
  end

  # Python shim: intercept the setuptools.setup() call made by the
  # package's setup.py and dump its keyword arguments as JSON on stdout.
  # Objects JSON cannot encode become "<module.qualname>" placeholders.
  python_code = <<~END_OF_PYTHON
    import json, sys
    from unittest.mock import patch

    sys.path[0:0] = ['.']

    def capture_setup(**kwargs):
        capture_setup.captured = kwargs

    with patch('setuptools.setup', capture_setup):
        import setup

    json.dump(
        capture_setup.captured,
        sys.stdout,
        default=lambda o: "<{}.{}>".format(type(o).__module__, type(o).__qualname__),
    )
  END_OF_PYTHON

  # with_package_files may return nil (files unobtainable); `|| []`
  # then leaves both output and status as nil
  output, status = with_package_files do |workdir|
    Dir.chdir(workdir) do
      Open3.capture2('python3', stdin_data: python_code)
    end
  end || []

  @setup_data = begin
    case status
    when nil
      warn("Package files unavailable, could not read setup.py")
      {}
    when :success?.to_proc
      # Proc#=== invokes the proc: this branch matches when
      # status.success? is truthy
      JSON.parse(output)
    else
      warn("Failed to read setup.py in for #{self.url}")
      {}
    end
  rescue StandardError => ex
    # e.g. JSON::ParserError from malformed shim output
    warn("Failed to read setup.py in for #{self.url}: #{ex}")
    {}
  end
end
|
999
|
+
|
1000
|
+
##
|
1001
|
+
# Yield a Pathname for the directory containing the package files
|
1002
|
+
#
|
1003
|
+
# Returns the result of the block, or +nil+ if the block is not
|
1004
|
+
# executed. The directory with the package files may be removed when
|
1005
|
+
# the block exits.
|
1006
|
+
#
|
1007
|
+
def with_package_files(&blk)
  scheme = self.url.scheme

  if scheme == 'git'
    with_git_worktree(self.url, &blk)
  elsif scheme.start_with?('git+')
    # Strip the "git+" prefix to get the underlying transport scheme
    vcs_url = self.url.dup
    vcs_url.scheme = scheme[4..-1]
    with_git_worktree(vcs_url, &blk)
  elsif %w[http https].include?(scheme)
    if zip_url?
      with_unzipped_files(&blk)
    elsif tgz_url?
      with_untarred_files(&blk)
    else
      warn("Unknown archive type for URL: #{self.url}")
      nil
    end
  else
    warn("Unable to process URI package requirement: #{self.url}")
  end
end
|
1029
|
+
|
1030
|
+
##
|
1031
|
+
# Implementation of #with_package_files for git URIs
|
1032
|
+
#
|
1033
|
+
def with_git_worktree(uri)
  git_url = uri.dup
  # "path@committish" selects a specific ref; resolved to origin's HEAD
  # below when absent
  git_url.path, committish = uri.path.split('@', 2)
  git_url.fragment = nil
  # Bare clone cached under CODE_CACHE_DIR, keyed by MD5 of the URL
  repo_path = CODE_CACHE_DIR.join("git_#{Digest::MD5.hexdigest(git_url.to_s)}.git")

  CODE_CACHE_DIR.mkpath

  in_dir_git_cmd = ['git', '-C', repo_path.to_s]

  if repo_path.exist?
    puts "Fetching #{git_url} to #{repo_path}..."
    cmd = in_dir_git_cmd + ['fetch', '--tags', 'origin', '+refs/heads/*:refs/heads/*']
    output, status = Open3.capture2(*cmd)
    unless status.success?
      warn("Failed to fetch 'origin' in #{repo_path}")
      return
    end
  else
    cmd = ['git', 'clone', '--bare', git_url.to_s, repo_path.to_s]
    output, status = Open3.capture2(*cmd)
    unless status.success?
      warn("Failed to clone #{git_url}")
      return
    end
  end

  committish ||= (
    cmd = in_dir_git_cmd + ['ls-remote', 'origin', 'HEAD']
    output, status = Open3.capture2(*cmd)
    unless status.success?
      # BUGFIX: message previously misspelled "origin" as "orgin"
      raise "Unable to read the HEAD of origin"
    end
    output.split("\t")[0]
  )
  Dir.mktmpdir("myprecious-git-") do |workdir|
    cmds = [
      in_dir_git_cmd + ['archive', committish],
      ['tar', '-x', '-C', workdir.to_s],
    ]
    statuses = Open3.pipeline(*cmds, in: :close)
    # BUGFIX: was Array#find, which returns the failing *status object*,
    # not its index -- statuses[failed_i] and cmds[failed_i] then raised
    if failed_i = statuses.find_index {|s| s.exited? && !s.success?}
      exitstatus = statuses[failed_i].exitstatus
      failed_cmd_str = cmds[failed_i].shelljoin
      warn(
        "Failed to create temporary folder at command:\n" +
        # BUGFIX: referenced undefined local `failed_cmd` (NameError)
        "  #{failed_cmd_str.light_red} (exited with code #{exitstatus})"
      )
      return
    end

    # A "subdirectory=..." URL fragment selects a subfolder of the tree
    fragment_parts = Hash[URI.decode_www_form(uri.fragment || '')]
    package_dir = Pathname(workdir).join(
      fragment_parts.fetch('subdirectory', '.')
    )
    return (yield package_dir)
  end
end
|
1091
|
+
|
1092
|
+
# Content type of self.url; currently a constant placeholder.
def get_url_content_type
  # TODO: Make a HEAD request to the URL to find out the content type
  'application/octet-stream'
end
|
1096
|
+
|
1097
|
+
# True when the package URL appears to point at a ZIP archive, judged
# by content type, or by the ".zip" path suffix when the content type
# is the generic application/octet-stream.
def zip_url?
  content_type = get_url_content_type
  return true if content_type == 'application/zip'
  return false unless content_type == 'application/octet-stream'
  self.url.path.downcase.end_with?('.zip')
end
|
1105
|
+
|
1106
|
+
##
|
1107
|
+
# Implementation of #with_package_files for ZIP file URLs
|
1108
|
+
#
|
1109
|
+
def with_unzipped_files
  # NOTE: the block parameter deliberately shadows the outer zip_path --
  # extracted_url yields the open download stream plus the extraction
  # directory, then returns that same directory.
  zip_path = extracted_url("zip") do |url_f, zip_path|
    Zip::File.open_buffer(url_f) do |zip_file|
      zip_file.each do |entry|
        if entry.name_safe?
          # Drop the archive's top-level directory component
          dest_file = zip_path.join(entry.name.split('/', 2)[1])
          dest_file.dirname.mkpath
          # NOTE(review): truthy block result presumably authorizes
          # overwriting an existing file (rubyzip on-exists hook) --
          # confirm against the rubyzip version in use
          entry.extract(dest_file.to_s) {:overwrite}
        else
          # Skip entries rubyzip flags as unsafe to extract
          warn("Did not extract #{entry.name} from #{self.url}")
        end
      end
    end
  end

  return (yield zip_path)
end
|
1126
|
+
|
1127
|
+
# True when the package URL appears to point at a gzipped tarball,
# judged by content type, or by a ".tar.gz"/".tgz" path suffix when the
# content type is the generic application/octet-stream.
def tgz_url?
  content_type = get_url_content_type
  return true if content_type =~ %r{^application/(x-tar(\+gzip)?|gzip)$}
  return false unless content_type == 'application/octet-stream'
  !!(self.url.path.downcase =~ /\.(tar\.gz|tgz)$/)
end
|
1135
|
+
|
1136
|
+
##
|
1137
|
+
# Implementation of #with_package_files for TGZ file URLs
|
1138
|
+
#
|
1139
|
+
def with_untarred_files
  # NOTE: the block parameter deliberately shadows the outer tar_path --
  # extracted_url yields the open download stream plus the extraction
  # directory, then returns that same directory.
  tar_path = extracted_url("tar") do |url_f, tar_path|
    Gem::Package::TarReader.new(Zlib::GzipReader.new(url_f)) do |tar_file|
      tar_file.each do |entry|
        if entry.full_name =~ %r{(^|/)\.\./}
          # Path traversal guard: skip entries with ".." path segments
          warn("Did not extract #{entry.name} from #{self.url}")
        elsif entry.file?
          # Drop the archive's top-level directory component
          dest_file = tar_path.join(entry.full_name.split('/', 2)[1])
          dest_file.dirname.mkpath
          dest_file.open('wb') do |df|
            IO.copy_stream(entry, df)
          end
        end
      end
    end
  end

  return (yield tar_path)
end
|
1158
|
+
|
1159
|
+
# Download self.url and hand an open binary stream plus a cache-local
# extraction directory to the block; returns the extraction directory.
def extracted_url(archive_type, &blk)
  puts "Downloading #{self.url}"
  digest = Digest::MD5.hexdigest(self.url.to_s)
  dest_dir = CODE_CACHE_DIR.join("#{archive_type}_#{digest}")
  CODE_CACHE_DIR.mkpath

  if %w[http https].include?(self.url.scheme)
    # TODO: Make a HEAD request to see if re-download is necessary
  end

  self.url.open('rb') {|stream| yield stream, dest_dir}

  dest_dir
end
|
1174
|
+
end
|
1175
|
+
end
|