dependabot-python 0.364.0 → 0.365.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/dependabot/python/dependency_grapher.rb +293 -0
- data/lib/dependabot/python/pip_compile_file_matcher.rb +15 -3
- data/lib/dependabot/python/pipenv_runner.rb +39 -7
- data/lib/dependabot/python/update_checker/pip_version_resolver/marker_evaluator.rb +241 -0
- data/lib/dependabot/python/update_checker/pip_version_resolver.rb +475 -10
- data/lib/dependabot/python.rb +1 -0
- metadata +6 -4
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: 4c8198c7872836b593620355006c05b2c8c4e7c2d01ff504055abdbb48060bcd
|
|
4
|
+
data.tar.gz: 1ecac99ad26554ca0ae29881083da1bd0b40698fe459594cdb37e253c6388492
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: bf1ee8da823a08a786319eb38a842620311d672d8ea157570f936bb7ff9abb6f3ecc44815f048cf547bca3dbfe22a6f07523d3c23b43ae3352c3b7da046fc673
|
|
7
|
+
data.tar.gz: 313756cb337d95a513c7ff4a628d0d91b4a15eac11cc06946be1ea98dd724106b77622140cb4abd3c9cbecbf48359978976d6d426e70b4d680bb38820c78c72b
|
|
@@ -0,0 +1,293 @@
|
|
|
1
|
+
# typed: strict
|
|
2
|
+
# frozen_string_literal: true
|
|
3
|
+
|
|
4
|
+
require "json"
|
|
5
|
+
require "sorbet-runtime"
|
|
6
|
+
|
|
7
|
+
require "dependabot/dependency_graphers"
|
|
8
|
+
require "dependabot/dependency_graphers/base"
|
|
9
|
+
require "dependabot/python/file_parser"
|
|
10
|
+
require "dependabot/python/language_version_manager"
|
|
11
|
+
require "dependabot/python/name_normaliser"
|
|
12
|
+
require "dependabot/python/pip_compile_file_matcher"
|
|
13
|
+
require "dependabot/python/pipenv_runner"
|
|
14
|
+
require "toml-rb"
|
|
15
|
+
|
|
16
|
+
module Dependabot
|
|
17
|
+
module Python
|
|
18
|
+
class DependencyGrapher < Dependabot::DependencyGraphers::Base
|
|
19
|
+
sig { override.returns(Dependabot::DependencyFile) }
|
|
20
|
+
def relevant_dependency_file
|
|
21
|
+
dependency_files_by_package_manager = T.let(
|
|
22
|
+
{
|
|
23
|
+
PipenvPackageManager::NAME => [pipfile_lock, pipfile],
|
|
24
|
+
PoetryPackageManager::NAME => [poetry_lock, pyproject_toml],
|
|
25
|
+
PipCompilePackageManager::NAME => [pip_compile_lockfile, pip_compile_manifest, pyproject_toml],
|
|
26
|
+
PipPackageManager::NAME => [pip_requirements_file, pyproject_toml, pipfile_lock, pipfile, setup_file,
|
|
27
|
+
setup_cfg_file]
|
|
28
|
+
},
|
|
29
|
+
T::Hash[String, T::Array[T.nilable(Dependabot::DependencyFile)]]
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
candidates = dependency_files_by_package_manager.fetch(python_package_manager, [])
|
|
33
|
+
relevant_file = candidates.compact.first
|
|
34
|
+
return relevant_file if relevant_file
|
|
35
|
+
|
|
36
|
+
raise DependabotError, "No supported dependency file present."
|
|
37
|
+
end
|
|
38
|
+
|
|
39
|
+
private
|
|
40
|
+
|
|
41
|
+
sig { returns(String) }
|
|
42
|
+
def python_package_manager
|
|
43
|
+
T.must(file_parser.ecosystem).package_manager.name
|
|
44
|
+
end
|
|
45
|
+
|
|
46
|
+
sig { override.params(dependency: Dependabot::Dependency).returns(T::Array[String]) }
|
|
47
|
+
def fetch_subdependencies(dependency)
|
|
48
|
+
package_relationships.fetch(dependency.name, []).select { |child| dependency_name_set.include?(child) }
|
|
49
|
+
end
|
|
50
|
+
|
|
51
|
+
sig { override.params(_dependency: Dependabot::Dependency).returns(String) }
|
|
52
|
+
def purl_pkg_for(_dependency)
|
|
53
|
+
"pypi"
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
sig { returns(T::Hash[String, T::Array[String]]) }
|
|
57
|
+
def package_relationships
|
|
58
|
+
@package_relationships ||= T.let(
|
|
59
|
+
fetch_package_relationships,
|
|
60
|
+
T.nilable(T::Hash[String, T::Array[String]])
|
|
61
|
+
)
|
|
62
|
+
end
|
|
63
|
+
|
|
64
|
+
sig { returns(T::Hash[String, T::Array[String]]) }
|
|
65
|
+
def fetch_package_relationships
|
|
66
|
+
case python_package_manager
|
|
67
|
+
when PoetryPackageManager::NAME
|
|
68
|
+
poetry_lock ? fetch_poetry_lock_relationships : {}
|
|
69
|
+
when PipenvPackageManager::NAME
|
|
70
|
+
pipfile_lock ? fetch_pipfile_lock_relationships : {}
|
|
71
|
+
else
|
|
72
|
+
{}
|
|
73
|
+
end
|
|
74
|
+
end
|
|
75
|
+
|
|
76
|
+
sig { returns(T::Hash[String, T::Array[String]]) }
|
|
77
|
+
def fetch_poetry_lock_relationships
|
|
78
|
+
TomlRB.parse(T.must(poetry_lock).content).fetch("package", []).each_with_object({}) do |pkg, rels|
|
|
79
|
+
next unless pkg.is_a?(Hash) && pkg["name"].is_a?(String)
|
|
80
|
+
|
|
81
|
+
parent = NameNormaliser.normalise(pkg["name"])
|
|
82
|
+
deps = pkg["dependencies"]
|
|
83
|
+
deps = {} unless deps.is_a?(Hash)
|
|
84
|
+
children = deps.keys.map { |name| NameNormaliser.normalise(name) }
|
|
85
|
+
rels[parent] = children
|
|
86
|
+
end
|
|
87
|
+
rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
|
|
88
|
+
raise Dependabot::DependencyFileNotParseable, T.must(poetry_lock).name
|
|
89
|
+
end
|
|
90
|
+
|
|
91
|
+
sig { returns(T::Hash[String, T::Array[String]]) }
|
|
92
|
+
def fetch_pipfile_lock_relationships
|
|
93
|
+
json_output = pipenv_runner.run_pipenv_graph
|
|
94
|
+
parse_pipenv_graph_output(json_output)
|
|
95
|
+
end
|
|
96
|
+
|
|
97
|
+
# Parses the JSON output from `pipenv graph --json`.
|
|
98
|
+
#
|
|
99
|
+
# The format is a flat list where each entry has a "package" object and a "dependencies" array:
|
|
100
|
+
# [
|
|
101
|
+
# {
|
|
102
|
+
# "package": { "package_name": "requests", "installed_version": "2.32.5", ... },
|
|
103
|
+
# "dependencies": [
|
|
104
|
+
# { "package_name": "certifi", "installed_version": "2024.2.2", ... },
|
|
105
|
+
# ...
|
|
106
|
+
# ]
|
|
107
|
+
# },
|
|
108
|
+
# ...
|
|
109
|
+
# ]
|
|
110
|
+
sig { params(json_output: String).returns(T::Hash[String, T::Array[String]]) }
|
|
111
|
+
def parse_pipenv_graph_output(json_output)
|
|
112
|
+
graph = JSON.parse(json_output)
|
|
113
|
+
return {} unless valid_pipenv_graph_array?(graph)
|
|
114
|
+
|
|
115
|
+
graph.each_with_object({}) do |entry, rels|
|
|
116
|
+
parent = pipenv_parent_name(entry)
|
|
117
|
+
next unless parent
|
|
118
|
+
|
|
119
|
+
rels[parent] = pipenv_child_names(entry)
|
|
120
|
+
end
|
|
121
|
+
rescue JSON::ParserError
|
|
122
|
+
Dependabot.logger.warn("Unexpected output from 'pipenv graph --json': could not parse as JSON")
|
|
123
|
+
{}
|
|
124
|
+
end
|
|
125
|
+
|
|
126
|
+
sig { params(graph: T.untyped).returns(T::Boolean) }
|
|
127
|
+
def valid_pipenv_graph_array?(graph)
|
|
128
|
+
return true if graph.is_a?(Array)
|
|
129
|
+
|
|
130
|
+
Dependabot.logger.warn("Unexpected output from 'pipenv graph --json': expected a JSON array")
|
|
131
|
+
false
|
|
132
|
+
end
|
|
133
|
+
|
|
134
|
+
sig { params(entry: T.untyped).returns(T.nilable(String)) }
|
|
135
|
+
def pipenv_parent_name(entry)
|
|
136
|
+
return nil unless entry.is_a?(Hash)
|
|
137
|
+
|
|
138
|
+
pkg = entry["package"]
|
|
139
|
+
return nil unless pkg.is_a?(Hash)
|
|
140
|
+
|
|
141
|
+
package_name = pkg["package_name"]
|
|
142
|
+
return nil unless package_name.is_a?(String)
|
|
143
|
+
|
|
144
|
+
NameNormaliser.normalise(package_name)
|
|
145
|
+
end
|
|
146
|
+
|
|
147
|
+
sig { params(entry: T.untyped).returns(T::Array[String]) }
|
|
148
|
+
def pipenv_child_names(entry)
|
|
149
|
+
deps = entry.is_a?(Hash) ? entry["dependencies"] : nil
|
|
150
|
+
return [] unless deps.is_a?(Array)
|
|
151
|
+
|
|
152
|
+
deps.filter_map do |dep|
|
|
153
|
+
next unless dep.is_a?(Hash)
|
|
154
|
+
|
|
155
|
+
package_name = dep["package_name"]
|
|
156
|
+
next unless package_name.is_a?(String)
|
|
157
|
+
|
|
158
|
+
NameNormaliser.normalise(package_name)
|
|
159
|
+
end
|
|
160
|
+
end
|
|
161
|
+
|
|
162
|
+
sig { returns(T::Set[String]) }
|
|
163
|
+
def dependency_name_set
|
|
164
|
+
@dependency_name_set ||= T.let(
|
|
165
|
+
Set.new(@dependencies.map(&:name)),
|
|
166
|
+
T.nilable(T::Set[String])
|
|
167
|
+
)
|
|
168
|
+
end
|
|
169
|
+
|
|
170
|
+
sig { returns(PipenvRunner) }
|
|
171
|
+
def pipenv_runner
|
|
172
|
+
@pipenv_runner ||= T.let(
|
|
173
|
+
PipenvRunner.new(
|
|
174
|
+
dependency: nil,
|
|
175
|
+
lockfile: pipfile_lock,
|
|
176
|
+
language_version_manager: language_version_manager,
|
|
177
|
+
dependency_files: dependency_files
|
|
178
|
+
),
|
|
179
|
+
T.nilable(PipenvRunner)
|
|
180
|
+
)
|
|
181
|
+
end
|
|
182
|
+
|
|
183
|
+
sig { returns(LanguageVersionManager) }
|
|
184
|
+
def language_version_manager
|
|
185
|
+
@language_version_manager ||= T.let(
|
|
186
|
+
LanguageVersionManager.new(
|
|
187
|
+
python_requirement_parser: python_requirement_parser
|
|
188
|
+
),
|
|
189
|
+
T.nilable(LanguageVersionManager)
|
|
190
|
+
)
|
|
191
|
+
end
|
|
192
|
+
|
|
193
|
+
sig { returns(FileParser::PythonRequirementParser) }
|
|
194
|
+
def python_requirement_parser
|
|
195
|
+
@python_requirement_parser ||= T.let(
|
|
196
|
+
FileParser::PythonRequirementParser.new(
|
|
197
|
+
dependency_files: dependency_files
|
|
198
|
+
),
|
|
199
|
+
T.nilable(FileParser::PythonRequirementParser)
|
|
200
|
+
)
|
|
201
|
+
end
|
|
202
|
+
|
|
203
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
204
|
+
def pyproject_toml
|
|
205
|
+
dependency_file("pyproject.toml")
|
|
206
|
+
end
|
|
207
|
+
|
|
208
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
209
|
+
def poetry_lock
|
|
210
|
+
dependency_file(PoetryPackageManager::LOCKFILE_NAME)
|
|
211
|
+
end
|
|
212
|
+
|
|
213
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
214
|
+
def pipfile
|
|
215
|
+
dependency_file(PipenvPackageManager::MANIFEST_FILENAME)
|
|
216
|
+
end
|
|
217
|
+
|
|
218
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
219
|
+
def pipfile_lock
|
|
220
|
+
dependency_file(PipenvPackageManager::LOCKFILE_FILENAME)
|
|
221
|
+
end
|
|
222
|
+
|
|
223
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
224
|
+
def setup_file
|
|
225
|
+
dependency_file("setup.py")
|
|
226
|
+
end
|
|
227
|
+
|
|
228
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
229
|
+
def setup_cfg_file
|
|
230
|
+
dependency_file("setup.cfg")
|
|
231
|
+
end
|
|
232
|
+
|
|
233
|
+
sig { returns(T::Array[Dependabot::DependencyFile]) }
|
|
234
|
+
def requirements_in_files
|
|
235
|
+
@requirements_in_files ||= T.let(
|
|
236
|
+
dependency_files.select { |f| f.name.end_with?(".in") },
|
|
237
|
+
T.nilable(T::Array[Dependabot::DependencyFile])
|
|
238
|
+
)
|
|
239
|
+
end
|
|
240
|
+
|
|
241
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
242
|
+
def pip_compile_lockfile
|
|
243
|
+
return @pip_compile_lockfile if defined?(@pip_compile_lockfile)
|
|
244
|
+
|
|
245
|
+
@pip_compile_lockfile = T.let(
|
|
246
|
+
dependency_files.find { |f| pip_compile_file_matcher.lockfile_for_pip_compile_file?(f) },
|
|
247
|
+
T.nilable(Dependabot::DependencyFile)
|
|
248
|
+
)
|
|
249
|
+
end
|
|
250
|
+
|
|
251
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
252
|
+
def pip_compile_manifest
|
|
253
|
+
return @pip_compile_manifest if defined?(@pip_compile_manifest)
|
|
254
|
+
|
|
255
|
+
lockfile = pip_compile_lockfile
|
|
256
|
+
@pip_compile_manifest = T.let(
|
|
257
|
+
if lockfile
|
|
258
|
+
pip_compile_file_matcher.manifest_for_pip_compile_lockfile(lockfile)
|
|
259
|
+
else
|
|
260
|
+
requirements_in_files.first
|
|
261
|
+
end,
|
|
262
|
+
T.nilable(Dependabot::DependencyFile)
|
|
263
|
+
)
|
|
264
|
+
end
|
|
265
|
+
|
|
266
|
+
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
267
|
+
def pip_requirements_file
|
|
268
|
+
return @pip_requirements_file if defined?(@pip_requirements_file)
|
|
269
|
+
|
|
270
|
+
@pip_requirements_file = T.let(
|
|
271
|
+
dependency_files.find { |f| f.name == "requirements.txt" } ||
|
|
272
|
+
dependency_files.find { |f| f.name.end_with?(".txt") },
|
|
273
|
+
T.nilable(Dependabot::DependencyFile)
|
|
274
|
+
)
|
|
275
|
+
end
|
|
276
|
+
|
|
277
|
+
sig { params(filename: String).returns(T.nilable(Dependabot::DependencyFile)) }
|
|
278
|
+
def dependency_file(filename)
|
|
279
|
+
dependency_files.find { |file| file.name == filename }
|
|
280
|
+
end
|
|
281
|
+
|
|
282
|
+
sig { returns(PipCompileFileMatcher) }
|
|
283
|
+
def pip_compile_file_matcher
|
|
284
|
+
@pip_compile_file_matcher ||= T.let(
|
|
285
|
+
PipCompileFileMatcher.new(requirements_in_files),
|
|
286
|
+
T.nilable(PipCompileFileMatcher)
|
|
287
|
+
)
|
|
288
|
+
end
|
|
289
|
+
end
|
|
290
|
+
end
|
|
291
|
+
end
|
|
292
|
+
|
|
293
|
+
Dependabot::DependencyGraphers.register("pip", Dependabot::Python::DependencyGrapher)
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
# typed:
|
|
1
|
+
# typed: strong
|
|
2
2
|
# frozen_string_literal: true
|
|
3
3
|
|
|
4
4
|
module Dependabot
|
|
@@ -20,8 +20,14 @@ module Dependabot
|
|
|
20
20
|
|
|
21
21
|
return true if file.content&.match?(output_file_regex(name))
|
|
22
22
|
|
|
23
|
-
|
|
24
|
-
|
|
23
|
+
!!manifest_for_lockfile_name(name)
|
|
24
|
+
end
|
|
25
|
+
|
|
26
|
+
sig { params(file: Dependabot::DependencyFile).returns(T.nilable(Dependabot::DependencyFile)) }
|
|
27
|
+
def manifest_for_pip_compile_lockfile(file)
|
|
28
|
+
return nil unless lockfile_for_pip_compile_file?(file)
|
|
29
|
+
|
|
30
|
+
manifest_for_lockfile_name(file.name) || requirements_in_files.first
|
|
25
31
|
end
|
|
26
32
|
|
|
27
33
|
private
|
|
@@ -33,6 +39,12 @@ module Dependabot
|
|
|
33
39
|
def output_file_regex(filename)
|
|
34
40
|
"--output-file[=\s]+#{Regexp.escape(filename)}(?:\s|$)"
|
|
35
41
|
end
|
|
42
|
+
|
|
43
|
+
sig { params(lockfile_name: String).returns(T.nilable(Dependabot::DependencyFile)) }
|
|
44
|
+
def manifest_for_lockfile_name(lockfile_name)
|
|
45
|
+
basename = lockfile_name.gsub(/\.txt$/, "")
|
|
46
|
+
requirements_in_files.find { |f| f.name == basename + ".in" }
|
|
47
|
+
end
|
|
36
48
|
end
|
|
37
49
|
end
|
|
38
50
|
end
|
|
@@ -13,16 +13,18 @@ module Dependabot
|
|
|
13
13
|
|
|
14
14
|
sig do
|
|
15
15
|
params(
|
|
16
|
-
dependency: Dependabot::Dependency,
|
|
16
|
+
dependency: T.nilable(Dependabot::Dependency),
|
|
17
17
|
lockfile: T.nilable(Dependabot::DependencyFile),
|
|
18
|
-
language_version_manager: LanguageVersionManager
|
|
18
|
+
language_version_manager: LanguageVersionManager,
|
|
19
|
+
dependency_files: T.nilable(T::Array[Dependabot::DependencyFile])
|
|
19
20
|
)
|
|
20
21
|
.void
|
|
21
22
|
end
|
|
22
|
-
def initialize(dependency:, lockfile:, language_version_manager:)
|
|
23
|
+
def initialize(dependency:, lockfile:, language_version_manager:, dependency_files: nil)
|
|
23
24
|
@dependency = dependency
|
|
24
25
|
@lockfile = lockfile
|
|
25
26
|
@language_version_manager = language_version_manager
|
|
27
|
+
@dependency_files = dependency_files
|
|
26
28
|
end
|
|
27
29
|
|
|
28
30
|
sig { params(constraint: T.nilable(String)).returns(String) }
|
|
@@ -48,6 +50,17 @@ module Dependabot
|
|
|
48
50
|
fetch_version_from_parsed_lockfile(updated_lockfile)
|
|
49
51
|
end
|
|
50
52
|
|
|
53
|
+
# Called by Python::DependencyGrapher.
|
|
54
|
+
sig { returns(String) }
|
|
55
|
+
def run_pipenv_graph
|
|
56
|
+
SharedHelpers.in_a_temporary_directory do
|
|
57
|
+
write_temporary_dependency_files
|
|
58
|
+
language_version_manager.install_required_python
|
|
59
|
+
run_command("pyenv exec pipenv sync --dev", fingerprint: "pyenv exec pipenv sync --dev")
|
|
60
|
+
run_command("pyenv exec pipenv graph --json", fingerprint: "pyenv exec pipenv graph --json")
|
|
61
|
+
end
|
|
62
|
+
end
|
|
63
|
+
|
|
51
64
|
sig { params(command: String, fingerprint: T.nilable(String)).returns(String) }
|
|
52
65
|
def run(command, fingerprint: nil)
|
|
53
66
|
run_command(
|
|
@@ -60,7 +73,7 @@ module Dependabot
|
|
|
60
73
|
|
|
61
74
|
private
|
|
62
75
|
|
|
63
|
-
sig { returns(Dependabot::Dependency) }
|
|
76
|
+
sig { returns(T.nilable(Dependabot::Dependency)) }
|
|
64
77
|
attr_reader :dependency
|
|
65
78
|
|
|
66
79
|
sig { returns(T.nilable(Dependabot::DependencyFile)) }
|
|
@@ -69,6 +82,25 @@ module Dependabot
|
|
|
69
82
|
sig { returns(LanguageVersionManager) }
|
|
70
83
|
attr_reader :language_version_manager
|
|
71
84
|
|
|
85
|
+
sig { returns(T.nilable(T::Array[Dependabot::DependencyFile])) }
|
|
86
|
+
attr_reader :dependency_files
|
|
87
|
+
|
|
88
|
+
sig { returns(Dependabot::Dependency) }
|
|
89
|
+
def current_dependency
|
|
90
|
+
T.must(dependency)
|
|
91
|
+
end
|
|
92
|
+
|
|
93
|
+
sig { returns(T::Array[Dependabot::DependencyFile]) }
|
|
94
|
+
def write_temporary_dependency_files
|
|
95
|
+
T.must(dependency_files)
|
|
96
|
+
.reject { |f| f.name == ".python-version" }
|
|
97
|
+
.each do |file|
|
|
98
|
+
path = file.name
|
|
99
|
+
FileUtils.mkdir_p(Pathname.new(path).dirname)
|
|
100
|
+
File.write(path, file.content)
|
|
101
|
+
end
|
|
102
|
+
end
|
|
103
|
+
|
|
72
104
|
sig { returns(String) }
|
|
73
105
|
def extras_specification
|
|
74
106
|
extras = dependency_extras
|
|
@@ -108,8 +140,8 @@ module Dependabot
|
|
|
108
140
|
|
|
109
141
|
sig { returns(String) }
|
|
110
142
|
def lockfile_section
|
|
111
|
-
if
|
|
112
|
-
T.must(
|
|
143
|
+
if current_dependency.requirements.any?
|
|
144
|
+
T.must(current_dependency.requirements.first)[:groups].first
|
|
113
145
|
else
|
|
114
146
|
Python::FileParser::DEPENDENCY_GROUP_KEYS.each do |keys|
|
|
115
147
|
section = keys.fetch(:lockfile)
|
|
@@ -120,7 +152,7 @@ module Dependabot
|
|
|
120
152
|
|
|
121
153
|
sig { returns(String) }
|
|
122
154
|
def dependency_name
|
|
123
|
-
|
|
155
|
+
current_dependency.metadata[:original_name] || current_dependency.name
|
|
124
156
|
end
|
|
125
157
|
|
|
126
158
|
sig { returns(T::Hash[String, String]) }
|
|
@@ -0,0 +1,241 @@
|
|
|
1
|
+
# typed: strong
|
|
2
|
+
# frozen_string_literal: true
|
|
3
|
+
|
|
4
|
+
require "dependabot/python/update_checker/pip_version_resolver"
|
|
5
|
+
|
|
6
|
+
module Dependabot
|
|
7
|
+
module Python
|
|
8
|
+
class UpdateChecker
|
|
9
|
+
class PipVersionResolver
|
|
10
|
+
class MarkerEvaluator
|
|
11
|
+
extend T::Sig
|
|
12
|
+
|
|
13
|
+
sig { params(requirement_string: String).returns([T.nilable(String), T.nilable(String)]) }
|
|
14
|
+
def split_requirement_and_marker(requirement_string)
|
|
15
|
+
separator_index = T.let(nil, T.nilable(Integer))
|
|
16
|
+
in_single_quote = T.let(false, T::Boolean)
|
|
17
|
+
in_double_quote = T.let(false, T::Boolean)
|
|
18
|
+
|
|
19
|
+
requirement_string.each_char.with_index do |char, index|
|
|
20
|
+
in_single_quote, in_double_quote, quote_toggled =
|
|
21
|
+
toggle_quote_state(char, in_single_quote, in_double_quote)
|
|
22
|
+
next if quote_toggled
|
|
23
|
+
next if in_single_quote || in_double_quote
|
|
24
|
+
next unless char == ";"
|
|
25
|
+
|
|
26
|
+
separator_index = index
|
|
27
|
+
break
|
|
28
|
+
end
|
|
29
|
+
|
|
30
|
+
return [requirement_string.strip, nil] if separator_index.nil?
|
|
31
|
+
|
|
32
|
+
requirement_part = requirement_string[0...separator_index]
|
|
33
|
+
marker_part = requirement_string[(separator_index + 1)..]
|
|
34
|
+
|
|
35
|
+
[requirement_part&.strip, marker_part&.strip]
|
|
36
|
+
end
|
|
37
|
+
|
|
38
|
+
sig { params(marker: String, python_version: String).returns(T::Boolean) }
|
|
39
|
+
def marker_satisfied?(marker:, python_version:)
|
|
40
|
+
evaluate_marker_expression(marker, python_version)
|
|
41
|
+
rescue ArgumentError
|
|
42
|
+
# If we cannot safely parse a python marker, treat it as applicable.
|
|
43
|
+
# This avoids silently skipping transitive constraints that may break installs.
|
|
44
|
+
true
|
|
45
|
+
end
|
|
46
|
+
|
|
47
|
+
private
|
|
48
|
+
|
|
49
|
+
sig { params(expression: String, python_version: String).returns(T::Boolean) }
|
|
50
|
+
def evaluate_marker_expression(expression, python_version)
|
|
51
|
+
expr = strip_wrapping_parentheses(expression.strip)
|
|
52
|
+
|
|
53
|
+
or_parts = split_top_level(expr, "or")
|
|
54
|
+
return or_parts.any? { |part| evaluate_marker_expression(part, python_version) } if or_parts.length > 1
|
|
55
|
+
|
|
56
|
+
and_parts = split_top_level(expr, "and")
|
|
57
|
+
if and_parts.length > 1
|
|
58
|
+
return and_parts.all? do |part|
|
|
59
|
+
evaluate_marker_expression(part, python_version) || !python_marker?(part)
|
|
60
|
+
end
|
|
61
|
+
end
|
|
62
|
+
|
|
63
|
+
not_expression = strip_top_level_not(expr)
|
|
64
|
+
if not_expression
|
|
65
|
+
return false unless python_marker?(not_expression)
|
|
66
|
+
|
|
67
|
+
return !evaluate_marker_expression(not_expression, python_version)
|
|
68
|
+
end
|
|
69
|
+
|
|
70
|
+
evaluate_python_version_condition(expr, python_version, default: python_marker?(expr))
|
|
71
|
+
end
|
|
72
|
+
|
|
73
|
+
sig { params(expression: String).returns(T.nilable(String)) }
|
|
74
|
+
def strip_top_level_not(expression)
|
|
75
|
+
return nil unless expression.start_with?("not")
|
|
76
|
+
return nil unless word_at?(expression, 0, "not")
|
|
77
|
+
|
|
78
|
+
remaining = expression[3..]&.strip
|
|
79
|
+
return nil if remaining.nil? || remaining.empty?
|
|
80
|
+
|
|
81
|
+
remaining
|
|
82
|
+
end
|
|
83
|
+
|
|
84
|
+
sig { params(expression: String).returns(T::Boolean) }
|
|
85
|
+
def python_marker?(expression)
|
|
86
|
+
expression.match?(/\bpython(?:_full)?_version\b/)
|
|
87
|
+
end
|
|
88
|
+
|
|
89
|
+
sig { params(expression: String).returns(String) }
|
|
90
|
+
def strip_wrapping_parentheses(expression)
|
|
91
|
+
expr = expression
|
|
92
|
+
while expr.start_with?("(") && expr.end_with?(")")
|
|
93
|
+
inner = expr[1...-1].to_s.strip
|
|
94
|
+
break unless balanced_parentheses?(inner)
|
|
95
|
+
|
|
96
|
+
expr = inner
|
|
97
|
+
end
|
|
98
|
+
|
|
99
|
+
expr
|
|
100
|
+
end
|
|
101
|
+
|
|
102
|
+
sig { params(expression: String).returns(T::Boolean) }
|
|
103
|
+
def balanced_parentheses?(expression)
|
|
104
|
+
depth = T.let(0, Integer)
|
|
105
|
+
in_single_quote = T.let(false, T::Boolean)
|
|
106
|
+
in_double_quote = T.let(false, T::Boolean)
|
|
107
|
+
|
|
108
|
+
expression.each_char do |char|
|
|
109
|
+
in_single_quote, in_double_quote, quote_toggled =
|
|
110
|
+
toggle_quote_state(char, in_single_quote, in_double_quote)
|
|
111
|
+
next if quote_toggled
|
|
112
|
+
next if in_single_quote || in_double_quote
|
|
113
|
+
|
|
114
|
+
depth += 1 if char == "("
|
|
115
|
+
depth -= 1 if char == ")"
|
|
116
|
+
return false if depth.negative?
|
|
117
|
+
end
|
|
118
|
+
|
|
119
|
+
depth.zero? && !in_single_quote && !in_double_quote
|
|
120
|
+
end
|
|
121
|
+
|
|
122
|
+
sig do
|
|
123
|
+
params(
|
|
124
|
+
char: String,
|
|
125
|
+
in_single_quote: T::Boolean,
|
|
126
|
+
in_double_quote: T::Boolean
|
|
127
|
+
).returns([T::Boolean, T::Boolean, T::Boolean])
|
|
128
|
+
end
|
|
129
|
+
def toggle_quote_state(char, in_single_quote, in_double_quote)
|
|
130
|
+
return [!in_single_quote, in_double_quote, true] if char == "'" && !in_double_quote
|
|
131
|
+
|
|
132
|
+
return [in_single_quote, !in_double_quote, true] if char == '"' && !in_single_quote
|
|
133
|
+
|
|
134
|
+
[in_single_quote, in_double_quote, false]
|
|
135
|
+
end
|
|
136
|
+
|
|
137
|
+
sig { params(expression: String, operator: String).returns(T::Array[String]) }
|
|
138
|
+
def split_top_level(expression, operator)
|
|
139
|
+
parts = T.let([], T::Array[String])
|
|
140
|
+
token = T.let(+"", String)
|
|
141
|
+
depth = T.let(0, Integer)
|
|
142
|
+
in_single_quote = T.let(false, T::Boolean)
|
|
143
|
+
in_double_quote = T.let(false, T::Boolean)
|
|
144
|
+
i = T.let(0, Integer)
|
|
145
|
+
|
|
146
|
+
while i < expression.length
|
|
147
|
+
char = T.must(expression[i])
|
|
148
|
+
|
|
149
|
+
in_single_quote, in_double_quote, quote_toggled =
|
|
150
|
+
toggle_quote_state(char, in_single_quote, in_double_quote)
|
|
151
|
+
if quote_toggled
|
|
152
|
+
token << char
|
|
153
|
+
i += 1
|
|
154
|
+
next
|
|
155
|
+
end
|
|
156
|
+
|
|
157
|
+
depth = update_depth_for_unquoted_char(char, depth, in_single_quote, in_double_quote)
|
|
158
|
+
|
|
159
|
+
if depth.zero? && !in_single_quote && !in_double_quote && word_at?(expression, i, operator)
|
|
160
|
+
parts << token.strip
|
|
161
|
+
token = +""
|
|
162
|
+
i += operator.length
|
|
163
|
+
next
|
|
164
|
+
end
|
|
165
|
+
|
|
166
|
+
token << char
|
|
167
|
+
i += 1
|
|
168
|
+
end
|
|
169
|
+
|
|
170
|
+
parts << token.strip
|
|
171
|
+
parts
|
|
172
|
+
end
|
|
173
|
+
|
|
174
|
+
sig do
|
|
175
|
+
params(
|
|
176
|
+
char: String,
|
|
177
|
+
depth: Integer,
|
|
178
|
+
in_single_quote: T::Boolean,
|
|
179
|
+
in_double_quote: T::Boolean
|
|
180
|
+
).returns(Integer)
|
|
181
|
+
end
|
|
182
|
+
def update_depth_for_unquoted_char(char, depth, in_single_quote, in_double_quote)
|
|
183
|
+
return depth if in_single_quote || in_double_quote
|
|
184
|
+
|
|
185
|
+
depth += 1 if char == "("
|
|
186
|
+
depth -= 1 if char == ")"
|
|
187
|
+
depth
|
|
188
|
+
end
|
|
189
|
+
|
|
190
|
+
sig { params(expression: String, index: Integer, word: String).returns(T::Boolean) }
|
|
191
|
+
def word_at?(expression, index, word)
|
|
192
|
+
return false unless expression[index, word.length] == word
|
|
193
|
+
|
|
194
|
+
before = index.zero? ? " " : T.must(expression[index - 1])
|
|
195
|
+
after_index = index + word.length
|
|
196
|
+
after = after_index >= expression.length ? " " : T.must(expression[after_index])
|
|
197
|
+
|
|
198
|
+
word_boundary?(before) && word_boundary?(after)
|
|
199
|
+
end
|
|
200
|
+
|
|
201
|
+
sig { params(char: String).returns(T::Boolean) }
|
|
202
|
+
def word_boundary?(char)
|
|
203
|
+
!!(char =~ /[^A-Za-z0-9_]/)
|
|
204
|
+
end
|
|
205
|
+
|
|
206
|
+
sig do
|
|
207
|
+
params(
|
|
208
|
+
condition: T.nilable(String),
|
|
209
|
+
python_version: String,
|
|
210
|
+
default: T::Boolean
|
|
211
|
+
).returns(T::Boolean)
|
|
212
|
+
end
|
|
213
|
+
def evaluate_python_version_condition(condition, python_version, default:)
|
|
214
|
+
return default if condition.nil?
|
|
215
|
+
|
|
216
|
+
candidate = strip_wrapping_parentheses(condition.strip)
|
|
217
|
+
match = candidate.match(/\Apython(?:_full)?_version\s*(<=|>=|<|>|==|!=)\s*['\"]([^'\"]+)['\"]\z/)
|
|
218
|
+
return default unless match
|
|
219
|
+
|
|
220
|
+
operator = T.must(match[1])
|
|
221
|
+
version = T.must(match[2])
|
|
222
|
+
lhs = Dependabot::Python::Version.new(python_version)
|
|
223
|
+
rhs = Dependabot::Python::Version.new(version)
|
|
224
|
+
|
|
225
|
+
case operator
|
|
226
|
+
when "<" then lhs < rhs
|
|
227
|
+
when "<=" then lhs <= rhs
|
|
228
|
+
when ">" then lhs > rhs
|
|
229
|
+
when ">=" then lhs >= rhs
|
|
230
|
+
when "==" then lhs == rhs
|
|
231
|
+
when "!=" then lhs != rhs
|
|
232
|
+
else false
|
|
233
|
+
end
|
|
234
|
+
rescue ArgumentError
|
|
235
|
+
true
|
|
236
|
+
end
|
|
237
|
+
end
|
|
238
|
+
end
|
|
239
|
+
end
|
|
240
|
+
end
|
|
241
|
+
end
|
|
@@ -1,8 +1,15 @@
|
|
|
1
1
|
# typed: strong
|
|
2
2
|
# frozen_string_literal: true
|
|
3
3
|
|
|
4
|
+
require "json"
|
|
5
|
+
require "pathname"
|
|
6
|
+
require "toml-rb"
|
|
4
7
|
require "sorbet-runtime"
|
|
8
|
+
require "excon"
|
|
9
|
+
require "dependabot/registry_client"
|
|
5
10
|
require "dependabot/python/language_version_manager"
|
|
11
|
+
require "dependabot/python/name_normaliser"
|
|
12
|
+
require "dependabot/python/package/package_registry_finder"
|
|
6
13
|
require "dependabot/python/update_checker"
|
|
7
14
|
require "dependabot/python/update_checker/latest_version_finder"
|
|
8
15
|
require "dependabot/python/file_parser/python_requirement_parser"
|
|
@@ -10,9 +17,14 @@ require "dependabot/python/file_parser/python_requirement_parser"
|
|
|
10
17
|
module Dependabot
|
|
11
18
|
module Python
|
|
12
19
|
class UpdateChecker
|
|
20
|
+
# This resolver intentionally co-locates resolution, marker handling, and
|
|
21
|
+
# constraints matching to keep compatibility decisions in one place.
|
|
22
|
+
# rubocop:disable Metrics/ClassLength
|
|
13
23
|
class PipVersionResolver
|
|
14
24
|
extend T::Sig
|
|
15
25
|
|
|
26
|
+
require_relative "pip_version_resolver/marker_evaluator"
|
|
27
|
+
|
|
16
28
|
sig do
|
|
17
29
|
params(
|
|
18
30
|
dependency: Dependabot::Dependency,
|
|
@@ -33,21 +45,33 @@ module Dependabot
|
|
|
33
45
|
update_cooldown: nil,
|
|
34
46
|
raise_on_ignored: false
|
|
35
47
|
)
|
|
36
|
-
@dependency
|
|
37
|
-
@dependency_files
|
|
38
|
-
@credentials
|
|
39
|
-
@ignored_versions
|
|
40
|
-
@security_advisories =
|
|
41
|
-
@update_cooldown =
|
|
42
|
-
@raise_on_ignored =
|
|
48
|
+
@dependency = dependency
|
|
49
|
+
@dependency_files = dependency_files
|
|
50
|
+
@credentials = credentials
|
|
51
|
+
@ignored_versions = ignored_versions
|
|
52
|
+
@security_advisories = security_advisories
|
|
53
|
+
@update_cooldown = update_cooldown
|
|
54
|
+
@raise_on_ignored = raise_on_ignored
|
|
43
55
|
@latest_version_finder = T.let(nil, T.nilable(LatestVersionFinder))
|
|
44
56
|
@python_requirement_parser = T.let(nil, T.nilable(FileParser::PythonRequirementParser))
|
|
45
57
|
@language_version_manager = T.let(nil, T.nilable(LanguageVersionManager))
|
|
58
|
+
@marker_evaluator = T.let(nil, T.nilable(MarkerEvaluator))
|
|
59
|
+
@registry_json_urls = T.let(nil, T.nilable(T::Array[String]))
|
|
60
|
+
@transitive_requirements_cache = T.let({}, T::Hash[String, T::Array[String]])
|
|
61
|
+
@transitive_requirement_available_cache = T.let({}, T::Hash[String, T::Boolean])
|
|
62
|
+
@constraints_files = T.let(nil, T.nilable(T::Array[String]))
|
|
63
|
+
@constraints_file_basenames = T.let(nil, T.nilable(T::Array[String]))
|
|
64
|
+
@requirement_file_directories = T.let(nil, T.nilable(T::Array[String]))
|
|
65
|
+
@pyproject_content_cache = T.let({}, T::Hash[String, T::Hash[String, T.untyped]])
|
|
46
66
|
end
|
|
47
67
|
|
|
48
68
|
sig { returns(T.nilable(Dependabot::Version)) }
|
|
49
69
|
def latest_resolvable_version
|
|
50
|
-
latest_version_finder.latest_version(language_version: language_version_manager.python_version)
|
|
70
|
+
candidate = latest_version_finder.latest_version(language_version: language_version_manager.python_version)
|
|
71
|
+
return candidate if candidate.nil?
|
|
72
|
+
return candidate if compatible_with_pinned_pyproject_dependencies?(candidate)
|
|
73
|
+
|
|
74
|
+
nil
|
|
51
75
|
end
|
|
52
76
|
|
|
53
77
|
sig { returns(T.nilable(Dependabot::Version)) }
|
|
@@ -58,8 +82,12 @@ module Dependabot
|
|
|
58
82
|
|
|
59
83
|
sig { returns(T.nilable(Dependabot::Version)) }
|
|
60
84
|
def lowest_resolvable_security_fix_version
|
|
61
|
-
latest_version_finder
|
|
62
|
-
|
|
85
|
+
candidate = latest_version_finder
|
|
86
|
+
.lowest_security_fix_version(language_version: language_version_manager.python_version)
|
|
87
|
+
return candidate if candidate.nil?
|
|
88
|
+
return candidate if compatible_with_pinned_pyproject_dependencies?(candidate)
|
|
89
|
+
|
|
90
|
+
nil
|
|
63
91
|
end
|
|
64
92
|
|
|
65
93
|
private
|
|
@@ -108,7 +136,444 @@ module Dependabot
|
|
|
108
136
|
python_requirement_parser: python_requirement_parser
|
|
109
137
|
)
|
|
110
138
|
end
|
|
139
|
+
|
|
140
|
+
sig { returns(MarkerEvaluator) }
|
|
141
|
+
def marker_evaluator
|
|
142
|
+
@marker_evaluator ||= MarkerEvaluator.new
|
|
143
|
+
end
|
|
144
|
+
|
|
145
|
+
# Whether the candidate version satisfies every transitive requirement
# imposed on this dependency by packages pinned ("pkg==x.y") in the
# relevant pyproject.toml file(s). Only applies when this dependency comes
# from a constraints file; otherwise the candidate is always compatible.
sig { params(candidate: Dependabot::Version).returns(T::Boolean) }
def compatible_with_pinned_pyproject_dependencies?(candidate)
  return true unless constraints_dependency?

  pinned_dependencies = pinned_pyproject_dependencies
  # Multiple pyproject files with no way to tell which applies: be
  # conservative and reject the candidate rather than guess.
  return false if pinned_dependencies.none? && pyproject_scope_ambiguous_for_constraints?
  return true if pinned_dependencies.none?

  pinned_dependencies.all? do |name, version|
    requirements, metadata_available = transitive_requirement_for(name: name, version: version)
    # Missing/unfetchable metadata means compatibility can't be proven.
    next false unless metadata_available
    next true if requirements.empty?

    requirements.all? do |requirement|
      Python::Requirement.new(requirement).satisfied_by?(candidate)
    rescue Gem::Requirement::BadRequirementError
      # If one metadata requirement is unsupported, ignore it but still
      # enforce any other valid constraints for this dependency.
      true
    end
  end
end
|
|
167
|
+
|
|
168
|
+
# True when several pyproject.toml files exist but none could be tied to
# this dependency's requirement files, leaving the pin scope unclear.
sig { returns(T::Boolean) }
def pyproject_scope_ambiguous_for_constraints?
  return false if pyproject_files.length <= 1

  relevant_pyproject_files_for_dependency.empty?
end
|
|
172
|
+
|
|
173
|
+
# Whether this dependency is declared via a constraints file: either one
# of its requirement files is a known constraints file (by path, or by a
# unique basename for root-level files), or its basename looks like a
# constraints file ("constraints*").
sig { returns(T::Boolean) }
def constraints_dependency?
  normalized_requirement_files.any? do |raw, normalized|
    basename = File.basename(normalized)
    next true if constraints_files.include?(normalized)
    next true if File.dirname(raw) == "." && constraints_file_basenames.include?(basename)

    basename.start_with?("constraints")
  end
end
|
|
181
|
+
|
|
182
|
+
# Pairs of [raw, normalised] file paths from the dependency's declared
# requirements.
sig { returns(T::Array[[String, String]]) }
def normalized_requirement_files
  dependency.requirements.map do |requirement|
    file = T.cast(requirement.fetch(:file), String)
    [file, normalize_path(file)]
  end
end
|
|
189
|
+
|
|
190
|
+
# De-duplicated [name, version] pins collected from every pyproject.toml
# relevant to this dependency. Empty when no relevant file exists.
sig { returns(T::Array[[String, String]]) }
def pinned_pyproject_dependencies
  relevant_pyproject_files_for_dependency
    .flat_map { |pyproject| pinned_pyproject_dependencies_for(pyproject) }
    .uniq
end
|
|
199
|
+
|
|
200
|
+
# Extracts exact pins ("name==version") from the [project].dependencies
# array of the given pyproject.toml, skipping entries whose environment
# marker isn't satisfied by the active Python version and skipping the
# dependency currently being updated. Returns [normalised_name, version]
# pairs.
sig { params(pyproject: Dependabot::DependencyFile).returns(T::Array[[String, String]]) }
def pinned_pyproject_dependencies_for(pyproject)
  pyproject_content = pyproject_content_for(pyproject)
  project_obj = T.cast(pyproject_content["project"], T.nilable(Object))
  return [] unless project_obj.is_a?(Hash)

  project_hash = project_obj
  dependencies_obj = T.cast(project_hash["dependencies"], T.nilable(Object))
  return [] unless dependencies_obj.is_a?(Array)

  dependencies_obj.filter_map do |entry|
    entry_obj = T.cast(entry, T.nilable(Object))
    next unless entry_obj.is_a?(String)

    requirement_string, marker = split_requirement_and_marker(entry_obj)
    next unless marker_satisfied_for_python?(marker)
    next if requirement_string.nil? || requirement_string.empty?

    # Only exact "==" pins count (extras like "pkg[extra]==1.0" allowed).
    parsed = requirement_string.match(
      /\A(?<name>[A-Za-z0-9][A-Za-z0-9._\-]*)(?:\[[^\]]+\])?\s*==\s*(?<version>[^\s]+)\z/
    )
    next unless parsed

    dep_name = NameNormaliser.normalise(T.must(parsed[:name]))
    # Don't let the dependency's own (old) pin constrain its update.
    next if dep_name == NameNormaliser.normalise(dependency.name)

    [dep_name, T.must(parsed[:version])]
  end
end
|
|
229
|
+
|
|
230
|
+
# pyproject.toml files whose declared pip constraints reference this
# dependency's requirement files. Falls back to the sole pyproject.toml
# when none match and exactly one exists.
sig { returns(T::Array[Dependabot::DependencyFile]) }
def relevant_pyproject_files_for_dependency
  requirement_files = requirement_files_for_dependency
  relevant_pyprojects = pyproject_files.select do |pyproject|
    pyproject_matches_requirement_files?(pyproject: pyproject, requirement_files: requirement_files)
  end

  return relevant_pyprojects unless relevant_pyprojects.empty?

  fallback_pyproject_for_dependency
end
|
|
241
|
+
|
|
242
|
+
# Unique normalised paths of the files declaring this dependency.
sig { returns(T::Array[String]) }
def requirement_files_for_dependency
  normalized_requirement_files.map { |_raw, normalized| normalized }.uniq
end
|
|
246
|
+
|
|
247
|
+
# Whether any constraint file declared by the given pyproject.toml matches
# one of the dependency's requirement files.
sig { params(pyproject: Dependabot::DependencyFile, requirement_files: T::Array[String]).returns(T::Boolean) }
def pyproject_matches_requirement_files?(pyproject:, requirement_files:)
  constraints_for_pyproject(pyproject).any? do |constraint|
    declared_constraint_matches_requirement_files?(
      declared_constraint: constraint,
      requirement_files: requirement_files
    )
  end
end
|
|
257
|
+
|
|
258
|
+
# A declared constraint matches either by exact (normalised) path, or —
# for root-level, non-URL constraints whose basename is unambiguous —
# by basename alone.
sig { params(declared_constraint: String, requirement_files: T::Array[String]).returns(T::Boolean) }
def declared_constraint_matches_requirement_files?(declared_constraint:, requirement_files:)
  return true if requirement_files.include?(declared_constraint)
  return false if url_path?(declared_constraint)
  return false unless File.dirname(declared_constraint) == "."

  basename = File.basename(declared_constraint)
  constraints_file_basenames.include?(basename) && requirement_files.include?(basename)
end
|
|
267
|
+
|
|
268
|
+
# When no pyproject.toml explicitly matches: use the single pyproject.toml
# if there is exactly one, otherwise give up (empty result).
sig { returns(T::Array[Dependabot::DependencyFile]) }
def fallback_pyproject_for_dependency
  files = pyproject_files
  return [] unless files.length == 1

  [T.must(files.first)]
end
|
|
274
|
+
|
|
275
|
+
# Fetches (and memoises) the version requirements that a pinned package
# "name@version" places on the dependency being updated. Returns
# [requirements, metadata_available]; metadata_available is false when the
# registry lookup failed, so callers can treat the result as inconclusive
# rather than as "no constraints".
sig { params(name: String, version: String).returns([T::Array[String], T::Boolean]) }
def transitive_requirement_for(name:, version:)
  cache_key = "#{name}@#{version}"
  # Both caches are written together, so a hit in one implies the other.
  if @transitive_requirements_cache.key?(cache_key)
    requirements = T.must(@transitive_requirements_cache[cache_key])
    available = T.must(@transitive_requirement_available_cache[cache_key])
    return [requirements, available]
  end

  response, metadata_available = dependency_metadata_response(name: name, version: version)
  unless response
    @transitive_requirements_cache[cache_key] = []
    @transitive_requirement_available_cache[cache_key] = metadata_available
    return [[], metadata_available]
  end

  requirements, metadata_available = requirements_for_target_dependency(response)

  @transitive_requirements_cache[cache_key] = requirements
  @transitive_requirement_available_cache[cache_key] = metadata_available
  [requirements, metadata_available]
end
|
|
297
|
+
|
|
298
|
+
# Queries each configured registry's JSON API for "name@version" metadata.
# Returns [response, metadata_available]: the first 200 response (with
# true), or nil with metadata_available true only when every registry
# returned a clean 404 — i.e. the package genuinely has no metadata, as
# opposed to a transport error or unexpected status.
sig { params(name: String, version: String).returns([T.nilable(Excon::Response), T::Boolean]) }
def dependency_metadata_response(name:, version:)
  had_transport_error = T.let(false, T::Boolean)
  saw_not_found = T.let(false, T::Boolean)

  registry_json_urls.each do |registry_url|
    url = "#{registry_url}#{name}/#{version}/json/"
    response = Dependabot::RegistryClient.get(url: url)
    return [response, true] if response.status == 200

    if response.status == 404
      saw_not_found = true
    else
      had_transport_error = true
      Dependabot.logger.warn(
        "Unexpected python dependency metadata response #{response.status} for #{name}@#{version}"
      )
    end
  rescue Excon::Error::Timeout, Excon::Error::Socket, URI::InvalidURIError
    # A failure against one registry shouldn't abort the others.
    had_transport_error = true
    Dependabot.logger.warn("Failed to fetch python dependency metadata for #{name}@#{version}")
    next
  end

  [nil, saw_not_found && !had_transport_error]
end
|
|
324
|
+
|
|
325
|
+
# Registry base URLs rewritten from the PEP 503 "simple" index form to the
# JSON API form ("/pypi/"), de-duplicated and memoised.
sig { returns(T::Array[String]) }
def registry_json_urls
  return @registry_json_urls if @registry_json_urls

  package_registry_urls = Package::PackageRegistryFinder.new(
    dependency_files: dependency_files,
    credentials: credentials,
    dependency: dependency
  ).registry_urls

  @registry_json_urls =
    package_registry_urls
    .map { |url| url.sub(%r{/simple/?$}i, "/pypi/") }
    .uniq

  @registry_json_urls
end
|
|
342
|
+
|
|
343
|
+
# Parses a registry JSON response and returns the requirement strings that
# the package's requires_dist places on the dependency being updated, plus
# a flag for whether the metadata could be parsed at all.
sig { params(response: Excon::Response).returns([T::Array[String], T::Boolean]) }
def requirements_for_target_dependency(response)
  requires_dist = requires_dist_from_response(response)
  # A well-formed payload with no requires_dist means "no constraints".
  return [[], true] unless requires_dist

  parsed_requirements = requires_dist.filter_map do |requirement_string|
    parse_target_requirement(requirement_string)
  end.uniq

  [parsed_requirements, true]
rescue JSON::ParserError
  # Method-level rescue: JSON.parse happens inside requires_dist_from_response.
  Dependabot.logger.warn("Failed to parse python dependency metadata JSON response")
  [[], false]
end
|
|
357
|
+
|
|
358
|
+
# All constraints-file paths declared either via [tool.pip].constraints in
# pyproject.toml files or via "-c"/"--constraint" lines in requirements
# manifests, normalised and de-duplicated. Memoised.
sig { returns(T::Array[String]) }
def constraints_files
  return @constraints_files if @constraints_files

  pyproject_constraints = pyproject_constraints_files
  requirement_constraints = requirement_constraint_declaration_files.flat_map do |file|
    requirement_constraints_from_file(file)
  end

  @constraints_files = (pyproject_constraints + requirement_constraints).map do |path|
    normalize_path(path)
  end.uniq
  @constraints_files
end
|
|
372
|
+
|
|
373
|
+
# Requirements manifests that may declare constraints relevant to this
# dependency: either named directly among the dependency's requirement
# files, or living in the same directory as one of them.
sig { returns(T::Array[Dependabot::DependencyFile]) }
def requirement_constraint_declaration_files
  requirement_paths = requirement_files_for_dependency
  directories = requirement_file_directories

  dependency_files.select do |file|
    next false unless requirements_manifest_file?(file)

    normalized_name = normalize_path(file.name)

    requirement_paths.include?(normalized_name) ||
      directories.include?(File.dirname(normalized_name))
  end
end
|
|
387
|
+
|
|
388
|
+
# A requirements manifest is any file whose basename starts with
# "requirements" (e.g. requirements.txt, requirements-dev.in).
sig { params(file: Dependabot::DependencyFile).returns(T::Boolean) }
def requirements_manifest_file?(file)
  File.basename(normalize_path(file.name)).start_with?("requirements")
end
|
|
395
|
+
|
|
396
|
+
# Unique directories containing the dependency's (non-URL) requirement
# files. Memoised.
sig { returns(T::Array[String]) }
def requirement_file_directories
  @requirement_file_directories ||=
    requirement_files_for_dependency
    .reject { |path| url_path?(path) }
    .map { |path| File.dirname(path) }
    .uniq
end
|
|
407
|
+
|
|
408
|
+
# Basenames of constraints files that are unambiguous — i.e. occur exactly
# once across all (non-URL) constraints files — so they can safely be
# matched by basename alone. Memoised.
sig { returns(T::Array[String]) }
def constraints_file_basenames
  @constraints_file_basenames ||=
    constraints_files
    .reject { |path| url_path?(path) }
    .map { |path| File.basename(path) }
    .tally
    .filter_map { |basename, count| basename if count == 1 }
end
|
|
423
|
+
|
|
424
|
+
# Constraint paths declared across every pyproject.toml in the repo.
sig { returns(T::Array[String]) }
def pyproject_constraints_files
  pyproject_files.flat_map { |pyproject| constraints_for_pyproject(pyproject) }
end
|
|
430
|
+
|
|
431
|
+
# Constraint file paths declared under [tool.pip].constraints in the given
# pyproject.toml. Accepts a single string or an array of strings; any
# other shape yields no constraints. Paths are resolved relative to the
# declaring file.
sig { params(pyproject: Dependabot::DependencyFile).returns(T::Array[String]) }
def constraints_for_pyproject(pyproject)
  pyproject_content = pyproject_content_for(pyproject)
  tool_obj = T.cast(pyproject_content["tool"], T.nilable(Object))
  return [] unless tool_obj.is_a?(Hash)

  pip_obj = T.cast(tool_obj["pip"], T.nilable(Object))
  return [] unless pip_obj.is_a?(Hash)

  constraints_obj = T.cast(pip_obj["constraints"], T.nilable(Object))
  case constraints_obj
  when String
    [resolve_constraint_path(path: constraints_obj, declaring_file: pyproject)]
  when Array
    # Non-string array entries are silently ignored.
    constraints_obj.grep(String).map do |path|
      resolve_constraint_path(path: path, declaring_file: pyproject)
    end
  else
    []
  end
end
|
|
452
|
+
|
|
453
|
+
# Every pyproject.toml among the fetched dependency files (any directory).
sig { returns(T::Array[Dependabot::DependencyFile]) }
def pyproject_files
  dependency_files.select { |file| File.basename(file.name) == "pyproject.toml" }
end
|
|
459
|
+
|
|
460
|
+
# Constraint paths declared via "-c"/"--constraint" lines in the given
# requirements manifest, resolved relative to that manifest.
sig { params(file: Dependabot::DependencyFile).returns(T::Array[String]) }
def requirement_constraints_from_file(file)
  raw = file.content
  return [] unless raw

  raw.each_line.filter_map do |line|
    constraint = constraint_path_from_line(line)
    resolve_constraint_path(path: constraint.strip, declaring_file: file) if constraint
  end
end
|
|
472
|
+
|
|
473
|
+
# Extracts the path from a "-c"/"--constraint" line in a requirements
# file. Handles "-c path", "-c=path" and single/double-quoted paths;
# returns nil for lines that aren't constraint declarations.
sig { params(line: String).returns(T.nilable(String)) }
def constraint_path_from_line(line)
  match = line.match(
    /^\s*(?:-c|--constraint)(?:\s+|=)(?:"(?<double>[^"]+)"|'(?<single>[^']+)'|(?<plain>[^\s'\"]+))/
  )
  return nil unless match

  # Exactly one of the three named captures is populated per match.
  T.must(match[:double] || match[:single] || match[:plain])
end
|
|
482
|
+
|
|
483
|
+
# Resolves a constraint path relative to the file declaring it. URLs pass
# through untouched; absolute paths are only normalised; relative paths
# are joined onto the declaring file's directory (unless that is the
# repository root).
sig { params(path: String, declaring_file: Dependabot::DependencyFile).returns(String) }
def resolve_constraint_path(path:, declaring_file:)
  return path if url_path?(path)
  return normalize_path(path) if Pathname.new(path).absolute?

  base_dir = File.dirname(declaring_file.name)
  return normalize_path(path) if base_dir == "."

  normalize_path(File.join(base_dir, path))
end
|
|
493
|
+
|
|
494
|
+
# Collapses "."/".." segments in a filesystem path. URLs pass through
# untouched. The method-level rescue returns the input unchanged for
# strings Pathname rejects (ArgumentError, e.g. embedded NUL bytes).
sig { params(path: String).returns(String) }
def normalize_path(path)
  return path if url_path?(path)

  Pathname.new(path).cleanpath.to_s
rescue ArgumentError
  path
end
|
|
502
|
+
|
|
503
|
+
# True when the path begins with a URI scheme (e.g. "https://"), meaning
# it should never be treated as a filesystem path.
sig { params(path: String).returns(T::Boolean) }
def url_path?(path)
  scheme_prefix = %r{\A[a-z][a-z0-9+.-]*://}i
  path.match?(scheme_prefix)
end
|
|
507
|
+
|
|
508
|
+
# Pulls the "info.requires_dist" array of strings out of a registry JSON
# response body; nil when the payload lacks the expected shape. May raise
# JSON::ParserError — handled by the caller.
sig { params(response: Excon::Response).returns(T.nilable(T::Array[String])) }
def requires_dist_from_response(response)
  body = T.cast(JSON.parse(response.body), T::Hash[String, T.untyped])
  info_obj = T.cast(body["info"], T.nilable(Object))
  return nil unless info_obj.is_a?(Hash)

  requires_dist_obj = T.cast(info_obj["requires_dist"], T.nilable(Object))
  return nil unless requires_dist_obj.is_a?(Array)

  # Keep only string entries; anything else in the array is dropped.
  requires_dist_obj.filter_map do |entry|
    entry_obj = T.cast(entry, T.nilable(Object))
    entry_obj if entry_obj.is_a?(String)
  end
end
|
|
522
|
+
|
|
523
|
+
# Given one requires_dist entry (e.g. 'requests (>=2.0) ; python_version >= "3.8"'),
# returns its version requirement when the entry targets the dependency
# being updated and its marker applies; nil otherwise. A matching entry
# with no parenthesised requirement yields nil (filtered by the caller).
sig { params(requirement_string: String).returns(T.nilable(String)) }
def parse_target_requirement(requirement_string)
  package_requirement, marker = split_requirement_and_marker(requirement_string)
  return nil unless marker_satisfied_for_python?(marker)
  return nil if package_requirement.nil?

  # Matches "name", "name[extras]", optionally followed by "(requirement)".
  match = package_requirement.match(
    /\A(?<name>[A-Za-z0-9][A-Za-z0-9._\-]*)(?:\[[^\]]+\])?\s*(?:\((?<requirement>[^)]*)\))?\z/
  )
  return nil unless match

  return nil unless NameNormaliser.normalise(T.must(match[:name])) == NameNormaliser.normalise(dependency.name)

  match[:requirement]&.strip
end
|
|
538
|
+
|
|
539
|
+
# Splits a requirement string into its requirement and environment-marker
# parts (either may be nil); the parsing itself lives in MarkerEvaluator.
sig { params(requirement_string: String).returns([T.nilable(String), T.nilable(String)]) }
def split_requirement_and_marker(requirement_string)
  marker_evaluator.split_requirement_and_marker(requirement_string)
end
|
|
543
|
+
|
|
544
|
+
# Whether an environment marker applies to the active Python version.
# nil/empty markers always apply. Markers that don't mention
# python_version/python_full_version (e.g. extra or platform markers) are
# deliberately treated as NOT satisfied, so their requirements are skipped.
sig { params(marker: T.nilable(String)).returns(T::Boolean) }
def marker_satisfied_for_python?(marker)
  return true if marker.nil? || marker.empty?
  return false unless marker.match?(/\bpython(?:_full)?_version\b/)

  marker_satisfied?(marker, language_version_manager.python_version)
end
|
|
551
|
+
|
|
552
|
+
# Evaluates a python_version marker against the given interpreter version
# via MarkerEvaluator.
sig { params(marker: String, python_version: String).returns(T::Boolean) }
def marker_satisfied?(marker, python_version)
  marker_evaluator.marker_satisfied?(marker: marker, python_version: python_version)
end
|
|
556
|
+
|
|
557
|
+
# Parses (and caches, keyed by file name) a pyproject.toml's TOML content.
# Missing content or unparseable TOML yields an empty hash so callers can
# probe keys without nil checks.
sig { params(pyproject: Dependabot::DependencyFile).returns(T::Hash[String, T.untyped]) }
def pyproject_content_for(pyproject)
  cache_key = pyproject.name
  return T.must(@pyproject_content_cache[cache_key]) if @pyproject_content_cache.key?(cache_key)

  content =
    if pyproject.content
      T.let(TomlRB.parse(pyproject.content), T::Hash[String, T.untyped])
    else
      T.let({}, T::Hash[String, T.untyped])
    end

  @pyproject_content_cache[cache_key] = content
  content
rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
  # Parse failures are cached as {} too, so we don't re-parse bad files.
  @pyproject_content_cache[pyproject.name] = {}
  T.must(@pyproject_content_cache[pyproject.name])
end
|
|
111
575
|
end
|
|
576
|
+
# rubocop:enable Metrics/ClassLength
|
|
112
577
|
end
|
|
113
578
|
end
|
|
114
579
|
end
|
data/lib/dependabot/python.rb
CHANGED
|
@@ -3,6 +3,7 @@
|
|
|
3
3
|
|
|
4
4
|
# These all need to be required so the various classes can be registered in a
|
|
5
5
|
# lookup table of package manager names to concrete classes.
|
|
6
|
+
require "dependabot/python/dependency_grapher"
|
|
6
7
|
require "dependabot/python/file_fetcher"
|
|
7
8
|
require "dependabot/python/file_parser"
|
|
8
9
|
require "dependabot/python/update_checker"
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: dependabot-python
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.
|
|
4
|
+
version: 0.365.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Dependabot
|
|
@@ -15,14 +15,14 @@ dependencies:
|
|
|
15
15
|
requirements:
|
|
16
16
|
- - '='
|
|
17
17
|
- !ruby/object:Gem::Version
|
|
18
|
-
version: 0.
|
|
18
|
+
version: 0.365.0
|
|
19
19
|
type: :runtime
|
|
20
20
|
prerelease: false
|
|
21
21
|
version_requirements: !ruby/object:Gem::Requirement
|
|
22
22
|
requirements:
|
|
23
23
|
- - '='
|
|
24
24
|
- !ruby/object:Gem::Version
|
|
25
|
-
version: 0.
|
|
25
|
+
version: 0.365.0
|
|
26
26
|
- !ruby/object:Gem::Dependency
|
|
27
27
|
name: debug
|
|
28
28
|
requirement: !ruby/object:Gem::Requirement
|
|
@@ -249,6 +249,7 @@ files:
|
|
|
249
249
|
- helpers/run.py
|
|
250
250
|
- lib/dependabot/python.rb
|
|
251
251
|
- lib/dependabot/python/authed_url_builder.rb
|
|
252
|
+
- lib/dependabot/python/dependency_grapher.rb
|
|
252
253
|
- lib/dependabot/python/file_fetcher.rb
|
|
253
254
|
- lib/dependabot/python/file_parser.rb
|
|
254
255
|
- lib/dependabot/python/file_parser/pipfile_files_parser.rb
|
|
@@ -282,6 +283,7 @@ files:
|
|
|
282
283
|
- lib/dependabot/python/update_checker/latest_version_finder.rb
|
|
283
284
|
- lib/dependabot/python/update_checker/pip_compile_version_resolver.rb
|
|
284
285
|
- lib/dependabot/python/update_checker/pip_version_resolver.rb
|
|
286
|
+
- lib/dependabot/python/update_checker/pip_version_resolver/marker_evaluator.rb
|
|
285
287
|
- lib/dependabot/python/update_checker/pipenv_version_resolver.rb
|
|
286
288
|
- lib/dependabot/python/update_checker/poetry_version_resolver.rb
|
|
287
289
|
- lib/dependabot/python/update_checker/requirements_updater.rb
|
|
@@ -291,7 +293,7 @@ licenses:
|
|
|
291
293
|
- MIT
|
|
292
294
|
metadata:
|
|
293
295
|
bug_tracker_uri: https://github.com/dependabot/dependabot-core/issues
|
|
294
|
-
changelog_uri: https://github.com/dependabot/dependabot-core/releases/tag/v0.
|
|
296
|
+
changelog_uri: https://github.com/dependabot/dependabot-core/releases/tag/v0.365.0
|
|
295
297
|
rdoc_options: []
|
|
296
298
|
require_paths:
|
|
297
299
|
- lib
|