dependabot-python 0.355.0 → 0.356.0
This diff shows the changes between publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/lib/dependabot/python/file_fetcher.rb +59 -379
- data/lib/dependabot/python/file_updater/pip_compile_file_updater.rb +1 -1
- data/lib/dependabot/python/shared_file_fetcher.rb +383 -0
- data/lib/dependabot/python/update_checker/pip_compile_version_resolver.rb +1 -1
- data/lib/dependabot/python/update_checker.rb +1 -1
- metadata +5 -4
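The file list above summarises the main change in this release: most of FileFetcher's generic requirements/pyproject handling moves into a new abstract base class, Dependabot::Python::SharedFileFetcher, while FileFetcher keeps only the pip/Pipenv-specific hooks (ecosystem_specific_required_files, ecosystem_specific_files, pyproject_files, path_dependencies, additional_path_dependencies, lockfile_for_compile_file?, fetch_project_file). The sketch below is illustrative only, with hypothetical class names rather than the gem's actual code, and shows the template-method split the hunks that follow implement.

# Illustrative sketch only -- hypothetical names, not the gem's classes --
# of the shared-base / concrete-fetcher split introduced in this release.
class SharedFetcherSketch
  # Generic logic that used to live in FileFetcher now sits in the shared base.
  def fetch_files
    ecosystem_specific_files + ["requirements.txt"]
  end

  # Hook the concrete fetcher must provide (declared abstract via Sorbet in the real gem).
  def ecosystem_specific_files
    raise NotImplementedError
  end
end

class PipFetcherSketch < SharedFetcherSketch
  # Pipenv/setup.py specifics stay in the subclass.
  def ecosystem_specific_files
    ["Pipfile", "setup.py", "setup.cfg"]
  end
end

puts PipFetcherSketch.new.fetch_files.inspect
# => ["Pipfile", "setup.py", "setup.cfg", "requirements.txt"]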
checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 9647a9b15a4a90b744336e7a0884615856ad7f7d74fc143249c102987f226332
+  data.tar.gz: 5bb14421ad8c437294aba6d0d5a0bc0bca2c6dd61e8f7d998773c13f73750f5b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 98bbb44c7e1132f8892a9f386b8a9dedf0ef4b7e473c061b46001f9a8c2fdcad7a0d046c47e693993399876382383b8fe39cd2b0c2acaa051836b42cfbe37136
+  data.tar.gz: b4be22afa1102a99dd8e3dc4a8935d28fd839d6917e029a1db19400dfd380b0f21c3c92e97b5aedf06c9b7eb5e6c46a1a90d0550cf37c1c6b80b158b863d8d53
data/lib/dependabot/python/file_fetcher.rb
CHANGED

@@ -5,42 +5,21 @@ require "toml-rb"
 require "sorbet-runtime"
 
 require "dependabot/file_fetchers"
-require "dependabot/
-require "dependabot/python/language_version_manager"
+require "dependabot/python/shared_file_fetcher"
 require "dependabot/python/pip_compile_file_matcher"
-require "dependabot/python/requirement_parser"
 require "dependabot/python/file_parser/pyproject_files_parser"
-require "dependabot/python/file_parser/python_requirement_parser"
 require "dependabot/errors"
-require "dependabot/file_filtering"
 
 module Dependabot
   module Python
-
-    class FileFetcher < Dependabot::FileFetchers::Base
+    class FileFetcher < Dependabot::Python::SharedFileFetcher
       extend T::Sig
-      extend T::Helpers
 
-
-      CONSTRAINT_REGEX = /^-c\s?(?<path>.*\.(?:txt|in))/
-      DEPENDENCY_TYPES = %w(packages dev-packages).freeze
+      ECOSYSTEM_SPECIFIC_FILES = T.let(%w(Pipfile setup.py setup.cfg).freeze, T::Array[String])
 
-      sig { override.
-      def self.
-
-
-        # If there is a directory of requirements return true
-        return true if filenames.include?("requirements")
-
-        # If this repo is using a Pipfile return true
-        return true if filenames.include?("Pipfile")
-
-        # If this repo is using pyproject.toml return true
-        return true if filenames.include?("pyproject.toml")
-
-        return true if filenames.include?("setup.py")
-
-        filenames.include?("setup.cfg")
+      sig { override.returns(T::Array[String]) }
+      def self.ecosystem_specific_required_files
+        ECOSYSTEM_SPECIFIC_FILES
       end
 
       sig { override.returns(String) }
@@ -49,80 +28,73 @@ module Dependabot
               "or a Pipfile."
       end
 
-
-
-
-
-
-
-
-
-
-
-          {
-            languages: {
-              python: {
-                # TODO: alternatively this could use `python_requirement_parser.user_specified_requirements` which
-                # returns an array... which we could flip to return a hash of manifest name => version
-                # string and then check for min/max versions... today it simply defaults to
-                # array.first which seems rather arbitrary.
-                "raw" => language_version_manager.user_specified_python_version || "unknown",
-                "max" => language_version_manager.python_major_minor || "unknown"
-              }
-            }
-          }
+      private
+
+      sig { override.returns(T::Array[Dependabot::DependencyFile]) }
+      def ecosystem_specific_files
+        files = []
+        files += pipenv_files
+        files << setup_file if setup_file
+        files << setup_cfg_file if setup_cfg_file
+        files << pip_conf if pip_conf
+        files
       end
 
-      sig { override.returns(T::Array[DependencyFile]) }
-      def
-
+      sig { override.returns(T::Array[Dependabot::DependencyFile]) }
+      def pyproject_files
+        [pyproject, poetry_lock, pdm_lock].compact
+      end
 
-
-
+      sig { override.returns(T::Array[T::Hash[Symbol, String]]) }
+      def path_dependencies
+        requirement_txt_path_dependencies +
+          requirement_in_path_dependencies +
+          pipfile_path_dependencies
+      end
 
-
-
+      sig { override.returns(T::Array[String]) }
+      def additional_path_dependencies
+        poetry_path_dependencies
+      end
 
-
-
-
-
-        fetched_files << python_version_file if python_version_file
+      sig { override.params(file: Dependabot::DependencyFile).returns(T::Boolean) }
+      def lockfile_for_compile_file?(file)
+        pip_compile_file_matcher.lockfile_for_pip_compile_file?(file)
+      end
 
-
-
-
-      end
+      sig { override.params(path: String).returns(T::Array[Dependabot::DependencyFile]) }
+      def fetch_project_file(path)
+        project_files = []
 
-
-      end
+        path = clean_path(File.join(path, "setup.py")) unless sdist_or_wheel?(path)
 
-
+        return [] if path == "setup.py" && setup_file
 
-
-
-
-
-
-
+        project_files <<
+          begin
+            fetch_file_from_host(
+              path,
+              fetch_submodules: true
+            ).tap { |f| f.support_file = true }
+          rescue Dependabot::DependencyFileNotFound
+            # For projects with pyproject.toml attempt to fetch a pyproject.toml
+            # at the given path instead of a setup.py.
+            fetch_file_from_host(
+              path.gsub("setup.py", "pyproject.toml"),
+              fetch_submodules: true
+            ).tap { |f| f.support_file = true }
+          end
 
-
-      def pipenv_files
-        [pipfile, pipfile_lock].compact
-      end
+        return project_files unless path.end_with?(".py")
 
-
-      def pyproject_files
-        [pyproject, poetry_lock, pdm_lock].compact
+        project_files + cfg_files_for_setup_py(path)
       end
 
+      # Python-specific methods
+
       sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def
-        [
-          *requirements_txt_files,
-          *child_requirement_txt_files,
-          *constraints_files
-        ]
+      def pipenv_files
+        [pipfile, pipfile_lock].compact
       end
 
       sig { returns(T.nilable(Dependabot::DependencyFile)) }
@@ -149,23 +121,6 @@ module Dependabot
         )
       end
 
-      sig { returns(T.nilable(Dependabot::DependencyFile)) }
-      def python_version_file
-        @python_version_file ||= T.let(
-          begin
-            file = fetch_support_file(".python-version")
-            return file if file
-            return if [".", "/"].include?(directory)
-
-            # Check the top-level for a .python-version file, too
-            reverse_path = Pathname.new(directory[0]).relative_path_from(directory)
-            fetch_support_file(File.join(reverse_path, ".python-version"))
-              &.tap { |f| f.name = ".python-version" }
-          end,
-          T.nilable(Dependabot::DependencyFile)
-        )
-      end
-
       sig { returns(T.nilable(Dependabot::DependencyFile)) }
       def pipfile
         @pipfile ||= T.let(
@@ -182,14 +137,6 @@ module Dependabot
        )
      end
 
-      sig { returns(T.nilable(Dependabot::DependencyFile)) }
-      def pyproject
-        @pyproject ||= T.let(
-          fetch_file_if_present("pyproject.toml"),
-          T.nilable(Dependabot::DependencyFile)
-        )
-      end
-
       sig { returns(T.nilable(Dependabot::DependencyFile)) }
       def poetry_lock
         @poetry_lock ||= T.let(
@@ -206,17 +153,6 @@ module Dependabot
        )
      end
 
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def requirements_txt_files
-        req_txt_and_in_files.select { |f| f.name.end_with?(".txt") }
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def requirements_in_files
-        req_txt_and_in_files.select { |f| f.name.end_with?(".in") } +
-          child_requirement_in_files
-      end
-
       sig { returns(T::Hash[String, T.untyped]) }
       def parsed_pipfile
         raise "No Pipfile" unless pipfile
@@ -229,196 +165,6 @@ module Dependabot
         raise Dependabot::DependencyFileNotParseable, T.must(pipfile).path
       end
 
-      sig { returns(T::Hash[String, T.untyped]) }
-      def parsed_pyproject
-        raise "No pyproject.toml" unless pyproject
-
-        @parsed_pyproject ||= T.let(
-          TomlRB.parse(T.must(pyproject).content),
-          T.nilable(T::Hash[String, T.untyped])
-        )
-      rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
-        raise Dependabot::DependencyFileNotParseable, T.must(pyproject).path
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def req_txt_and_in_files
-        @req_txt_and_in_files ||= T.let(
-          begin
-            files = T.let([], T::Array[Dependabot::DependencyFile])
-
-            repo_contents
-              .select { |f| f.type == "file" }
-              .select { |f| f.name.end_with?(".txt", ".in") }
-              .reject { |f| f.size > 500_000 }
-              .map { |f| fetch_file_from_host(f.name) }
-              .select { |f| requirements_file?(f) }
-              .each { |f| files << f }
-
-            repo_contents
-              .select { |f| f.type == "dir" }
-              .each { |f| files.concat(req_files_for_dir(f)) }
-
-            files
-          end,
-          T.nilable(T::Array[Dependabot::DependencyFile])
-        )
-      end
-
-      sig { params(requirements_dir: T.untyped).returns(T::Array[Dependabot::DependencyFile]) }
-      def req_files_for_dir(requirements_dir)
-        dir = directory.gsub(%r{(^/|/$)}, "")
-        relative_reqs_dir =
-          requirements_dir.path.gsub(%r{^/?#{Regexp.escape(dir)}/?}, "")
-
-        repo_contents(dir: relative_reqs_dir)
-          .select { |f| f.type == "file" }
-          .select { |f| f.name.end_with?(".txt", ".in") }
-          .reject { |f| f.size > 500_000 }
-          .map { |f| fetch_file_from_host("#{relative_reqs_dir}/#{f.name}") }
-          .select { |f| requirements_file?(f) }
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def child_requirement_txt_files
-        child_requirement_files.select { |f| f.name.end_with?(".txt") }
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def child_requirement_in_files
-        child_requirement_files.select { |f| f.name.end_with?(".in") }
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def child_requirement_files
-        @child_requirement_files ||= T.let(
-          begin
-            fetched_files = req_txt_and_in_files.dup
-            req_txt_and_in_files.flat_map do |requirement_file|
-              child_files = fetch_child_requirement_files(
-                file: requirement_file,
-                previously_fetched_files: fetched_files
-              )
-
-              fetched_files += child_files
-              child_files
-            end
-          end,
-          T.nilable(T::Array[Dependabot::DependencyFile])
-        )
-      end
-
-      sig do
-        params(
-          file: Dependabot::DependencyFile,
-          previously_fetched_files: T::Array[Dependabot::DependencyFile]
-        )
-          .returns(T::Array[Dependabot::DependencyFile])
-      end
-      def fetch_child_requirement_files(file:, previously_fetched_files:)
-        paths = T.must(file.content).scan(CHILD_REQUIREMENT_REGEX).flatten
-        current_dir = File.dirname(file.name)
-
-        paths.flat_map do |path|
-          path = File.join(current_dir, path) unless current_dir == "."
-          path = cleanpath(path)
-
-          next if previously_fetched_files.map(&:name).include?(path)
-          next if file.name == path
-
-          if Dependabot::Experiments.enabled?(:enable_exclude_paths_subdirectory_manifest_files) &&
-             !@exclude_paths.empty? && Dependabot::FileFiltering.exclude_path?(path, @exclude_paths)
-            raise Dependabot::DependencyFileNotEvaluatable,
-                  "Cannot process requirements: '#{file.name}' references excluded file '#{path}'. " \
-                  "Please either remove the reference from '#{file.name}' " \
-                  "or update your exclude_paths configuration."
-          end
-
-          fetched_file = fetch_file_from_host(path)
-          grandchild_requirement_files = fetch_child_requirement_files(
-            file: fetched_file,
-            previously_fetched_files: previously_fetched_files + [file]
-          )
-          [fetched_file, *grandchild_requirement_files]
-        end.compact
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def constraints_files
-        all_requirement_files = requirements_txt_files +
-                                child_requirement_txt_files
-
-        constraints_paths = all_requirement_files.map do |req_file|
-          current_dir = File.dirname(req_file.name)
-          paths = T.must(req_file.content).scan(CONSTRAINT_REGEX).flatten
-
-          paths.map do |path|
-            path = File.join(current_dir, path) unless current_dir == "."
-            cleanpath(path)
-          end
-        end.flatten.uniq
-
-        constraints_paths.map { |path| fetch_file_from_host(path) }
-      end
-
-      sig { returns(T::Array[Dependabot::DependencyFile]) }
-      def project_files
-        project_files = T.let([], T::Array[Dependabot::DependencyFile])
-        unfetchable_deps = []
-
-        path_dependencies.each do |dep|
-          path = T.must(dep[:path])
-          project_files += fetch_project_file(path)
-        rescue Dependabot::DependencyFileNotFound
-          next if sdist_or_wheel?(T.must(path))
-
-          unfetchable_deps << "\"#{dep[:name]}\" at #{cleanpath(File.join(directory, dep[:file]))}"
-        end
-
-        poetry_path_dependencies.each do |path|
-          project_files += fetch_project_file(path)
-        rescue Dependabot::DependencyFileNotFound => e
-          unfetchable_deps << e.file_path&.gsub(%r{^/}, "")
-        end
-
-        raise Dependabot::PathDependenciesNotReachable, unfetchable_deps if unfetchable_deps.any?
-
-        project_files
-      end
-
-      sig { params(path: String).returns(T::Array[Dependabot::DependencyFile]) }
-      def fetch_project_file(path)
-        project_files = []
-
-        path = cleanpath(File.join(path, "setup.py")) unless sdist_or_wheel?(path)
-
-        return [] if path == "setup.py" && setup_file
-
-        project_files <<
-          begin
-            fetch_file_from_host(
-              path,
-              fetch_submodules: true
-            ).tap { |f| f.support_file = true }
-          rescue Dependabot::DependencyFileNotFound
-            # For projects with pyproject.toml attempt to fetch a pyproject.toml
-            # at the given path instead of a setup.py.
-            fetch_file_from_host(
-              path.gsub("setup.py", "pyproject.toml"),
-              fetch_submodules: true
-            ).tap { |f| f.support_file = true }
-          end
-
-        return project_files unless path.end_with?(".py")
-
-        project_files + cfg_files_for_setup_py(path)
-      end
-
-      sig { params(path: String).returns(T::Boolean) }
-      def sdist_or_wheel?(path)
-        path.end_with?(".tar.gz", ".whl", ".zip")
-      end
-
       sig { params(path: String).returns(T::Array[Dependabot::DependencyFile]) }
       def cfg_files_for_setup_py(path)
         cfg_path = path.gsub(/\.py$/, ".cfg")
@@ -434,66 +180,6 @@ module Dependabot
        end
      end
 
-      sig { params(file: Dependabot::DependencyFile).returns(T::Boolean) }
-      def requirements_file?(file)
-        return false unless T.must(file.content).valid_encoding?
-        return true if file.name.match?(/requirements/x)
-
-        T.must(file.content).lines.all? do |line|
-          next true if line.strip.empty?
-          next true if line.strip.start_with?("#", "-r ", "-c ", "-e ", "--")
-
-          line.match?(RequirementParser::VALID_REQ_TXT_REQUIREMENT)
-        end
-      end
-
-      sig { returns(T::Array[T::Hash[Symbol, String]]) }
-      def path_dependencies
-        requirement_txt_path_dependencies +
-          requirement_in_path_dependencies +
-          pipfile_path_dependencies
-      end
-
-      sig { returns(T::Array[T::Hash[Symbol, String]]) }
-      def requirement_txt_path_dependencies
-        (requirements_txt_files + child_requirement_txt_files)
-          .map { |req_file| parse_requirement_path_dependencies(req_file) }
-          .flatten.uniq { |dep| dep[:path] }
-      end
-
-      sig { returns(T::Array[T::Hash[Symbol, String]]) }
-      def requirement_in_path_dependencies
-        requirements_in_files
-          .map { |req_file| parse_requirement_path_dependencies(req_file) }
-          .flatten.uniq { |dep| dep[:path] }
-      end
-
-      sig { params(req_file: Dependabot::DependencyFile).returns(T::Array[T::Hash[Symbol, String]]) }
-      def parse_requirement_path_dependencies(req_file)
-        # If this is a pip-compile lockfile, rely on whatever path dependencies we found in the main manifest
-        return [] if pip_compile_file_matcher.lockfile_for_pip_compile_file?(req_file)
-
-        uneditable_reqs =
-          T.must(req_file.content)
-            .scan(/(?<name>^['"]?(?:file:)?(?<path>\..*?)(?=\[|#|'|"|$))/)
-            .filter_map do |match_array|
-              n, p = match_array
-              { name: n.to_s.strip, path: p.to_s.strip, file: req_file.name } unless p.to_s.include?("://")
-            end
-
-        editable_reqs =
-          T.must(req_file.content)
-            .scan(/(?<name>^(?:-e)\s+['"]?(?:file:)?(?<path>.*?)(?=\[|#|'|"|$))/)
-            .filter_map do |match_array|
-              n, p = match_array
-              unless p.to_s.include?("://") || p.to_s.include?("git@")
-                { name: n.to_s.strip, path: p.to_s.strip, file: req_file.name }
-              end
-            end
-
-        uneditable_reqs + editable_reqs
-      end
-
       sig { returns(T::Array[T::Hash[Symbol, String]]) }
       def pipfile_path_dependencies
         return [] unless pipfile
@@ -530,11 +216,6 @@ module Dependabot
         paths
       end
 
-      sig { params(path: String).returns(String) }
-      def cleanpath(path)
-        Pathname.new(path).cleanpath.to_path
-      end
-
       sig { returns(Dependabot::Python::PipCompileFileMatcher) }
       def pip_compile_file_matcher
         @pip_compile_file_matcher ||= T.let(
@@ -543,7 +224,6 @@ module Dependabot
        )
      end
    end
-    # rubocop:enable Metrics/ClassLength
  end
end
 
@@ -663,7 +663,7 @@ module Dependabot
 
      sig { returns(T::Hash[String, T::Array[String]]) }
      def requirement_map
-        child_req_regex = Python::
+        child_req_regex = Python::SharedFileFetcher::CHILD_REQUIREMENT_REGEX
        @requirement_map ||=
          pip_compile_files.each_with_object({}) do |file, req_map|
            paths = T.must(file.content).scan(child_req_regex).flatten
data/lib/dependabot/python/shared_file_fetcher.rb
ADDED

@@ -0,0 +1,383 @@
+# typed: strict
+# frozen_string_literal: true
+
+require "toml-rb"
+require "sorbet-runtime"
+
+require "dependabot/file_fetchers"
+require "dependabot/file_fetchers/base"
+require "dependabot/python/language_version_manager"
+require "dependabot/python/requirement_parser"
+require "dependabot/python/file_parser/pyproject_files_parser"
+require "dependabot/python/file_parser/python_requirement_parser"
+require "dependabot/errors"
+require "dependabot/file_filtering"
+
+module Dependabot
+  module Python
+    class SharedFileFetcher < Dependabot::FileFetchers::Base
+      extend T::Sig
+      extend T::Helpers
+
+      abstract!
+
+      CHILD_REQUIREMENT_REGEX = T.let(/^-r\s?(?<path>.*\.(?:txt|in))/, Regexp)
+      CONSTRAINT_REGEX = T.let(/^-c\s?(?<path>.*\.(?:txt|in))/, Regexp)
+      DEPENDENCY_TYPES = T.let(%w(packages dev-packages).freeze, T::Array[String])
+      MAX_FILE_SIZE = T.let(500_000, Integer)
+
+      sig { abstract.returns(T::Array[String]) }
+      def self.ecosystem_specific_required_files; end
+
+      sig { override.params(filenames: T::Array[String]).returns(T::Boolean) }
+      def self.required_files_in?(filenames)
+        return true if filenames.any? { |name| name.end_with?(".txt", ".in") }
+        return true if filenames.include?("requirements")
+        return true if filenames.include?("pyproject.toml")
+        return true if filenames.any? { |name| ecosystem_specific_required_files.include?(name) }
+
+        false
+      end
+
+      sig { override.returns(T.nilable(T::Hash[Symbol, T.untyped])) }
+      def ecosystem_versions
+        python_requirement_parser = FileParser::PythonRequirementParser.new(dependency_files: files)
+        language_version_manager = LanguageVersionManager.new(python_requirement_parser: python_requirement_parser)
+        Dependabot.logger.info("Dependabot is using Python version '#{language_version_manager.python_version}'.")
+        {
+          languages: {
+            python: {
+              "raw" => language_version_manager.user_specified_python_version || "unknown",
+              "max" => language_version_manager.python_major_minor || "unknown"
+            }
+          }
+        }
+      end
+
+      sig { override.returns(T::Array[DependencyFile]) }
+      def fetch_files
+        fetched_files = []
+
+        fetched_files += ecosystem_specific_files
+        fetched_files += pyproject_files
+
+        fetched_files += requirements_in_files
+        fetched_files += requirement_files if requirements_txt_files.any?
+
+        fetched_files += project_files
+        fetched_files << python_version_file if python_version_file
+
+        uniques = uniq_files(fetched_files)
+        uniques.reject do |file|
+          Dependabot::FileFiltering.should_exclude_path?(file.name, "file from final collection", @exclude_paths)
+        end
+      end
+
+      private
+
+      sig { abstract.returns(T::Array[Dependabot::DependencyFile]) }
+      def ecosystem_specific_files; end
+
+      sig { abstract.returns(T::Array[Dependabot::DependencyFile]) }
+      def pyproject_files; end
+
+      sig { abstract.returns(T::Array[T::Hash[Symbol, String]]) }
+      def path_dependencies; end
+
+      sig { abstract.returns(T::Array[String]) }
+      def additional_path_dependencies; end
+
+      sig { abstract.params(file: Dependabot::DependencyFile).returns(T::Boolean) }
+      def lockfile_for_compile_file?(file); end
+
+      sig { abstract.params(path: String).returns(T::Array[Dependabot::DependencyFile]) }
+      def fetch_project_file(path); end
+
+      sig { params(fetched_files: T::Array[Dependabot::DependencyFile]).returns(T::Array[Dependabot::DependencyFile]) }
+      def uniq_files(fetched_files)
+        uniq_files = fetched_files.reject(&:support_file?).uniq
+        uniq_files += fetched_files
+                      .reject { |f| uniq_files.map(&:name).include?(f.name) }
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def requirement_files
+        [
+          *requirements_txt_files,
+          *child_requirement_txt_files,
+          *constraints_files
+        ]
+      end
+
+      sig { returns(T.nilable(Dependabot::DependencyFile)) }
+      def python_version_file
+        return @python_version_file if defined?(@python_version_file)
+
+        @python_version_file = T.let(
+          begin
+            file = fetch_support_file(".python-version")
+            return file if file
+            return if [".", "/"].include?(directory)
+
+            # Check the top-level for a .python-version file, too
+            reverse_path = Pathname.new(directory[0]).relative_path_from(directory)
+            fetch_support_file(File.join(reverse_path, ".python-version"))
+              &.tap { |f| f.name = ".python-version" }
+          end,
+          T.nilable(Dependabot::DependencyFile)
+        )
+      end
+
+      sig { returns(T.nilable(Dependabot::DependencyFile)) }
+      def pyproject
+        return @pyproject if defined?(@pyproject)
+
+        @pyproject = T.let(
+          fetch_file_if_present("pyproject.toml"),
+          T.nilable(Dependabot::DependencyFile)
+        )
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def requirements_txt_files
+        req_txt_and_in_files.select { |f| f.name.end_with?(".txt") }
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def requirements_in_files
+        req_txt_and_in_files.select { |f| f.name.end_with?(".in") } +
+          child_requirement_in_files
+      end
+
+      sig { returns(T::Hash[String, T.untyped]) }
+      def parsed_pyproject
+        raise "No pyproject.toml" unless pyproject
+
+        @parsed_pyproject ||= T.let(
+          TomlRB.parse(T.must(pyproject).content),
+          T.nilable(T::Hash[String, T.untyped])
+        )
+      rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
+        raise Dependabot::DependencyFileNotParseable, T.must(pyproject).path
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def req_txt_and_in_files
+        @req_txt_and_in_files ||= T.let(
+          begin
+            files = T.let([], T::Array[Dependabot::DependencyFile])
+
+            repo_contents
+              .select { |f| f.type == "file" }
+              .select { |f| f.name.end_with?(".txt", ".in") }
+              .reject { |f| f.size > MAX_FILE_SIZE }
+              .map { |f| fetch_file_from_host(f.name) }
+              .select { |f| requirements_file?(f) }
+              .each { |f| files << f }
+
+            repo_contents
+              .select { |f| f.type == "dir" }
+              .each { |f| files.concat(req_files_for_dir(f)) }
+
+            files
+          end,
+          T.nilable(T::Array[Dependabot::DependencyFile])
+        )
+      end
+
+      sig { params(requirements_dir: T.untyped).returns(T::Array[Dependabot::DependencyFile]) }
+      def req_files_for_dir(requirements_dir)
+        dir = directory.gsub(%r{(^/|/$)}, "")
+        relative_reqs_dir =
+          requirements_dir.path.gsub(%r{^/?#{Regexp.escape(dir)}/?}, "")
+
+        repo_contents(dir: relative_reqs_dir)
+          .select { |f| f.type == "file" }
+          .select { |f| f.name.end_with?(".txt", ".in") }
+          .reject { |f| f.size > MAX_FILE_SIZE }
+          .map { |f| fetch_file_from_host("#{relative_reqs_dir}/#{f.name}") }
+          .select { |f| requirements_file?(f) }
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def child_requirement_txt_files
+        child_requirement_files.select { |f| f.name.end_with?(".txt") }
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def child_requirement_in_files
+        child_requirement_files.select { |f| f.name.end_with?(".in") }
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def child_requirement_files
+        @child_requirement_files ||= T.let(
+          begin
+            fetched_files = req_txt_and_in_files.dup
+            req_txt_and_in_files.flat_map do |requirement_file|
+              child_files = fetch_child_requirement_files(
+                file: requirement_file,
+                previously_fetched_files: fetched_files
+              )
+
+              fetched_files += child_files
+              child_files
+            end
+          end,
+          T.nilable(T::Array[Dependabot::DependencyFile])
+        )
+      end
+
+      sig do
+        params(
+          file: Dependabot::DependencyFile,
+          previously_fetched_files: T::Array[Dependabot::DependencyFile]
+        ).returns(T::Array[Dependabot::DependencyFile])
+      end
+      def fetch_child_requirement_files(file:, previously_fetched_files:)
+        content = file.content
+        return [] if content.nil?
+
+        paths = content.scan(CHILD_REQUIREMENT_REGEX).flatten
+        current_dir = File.dirname(file.name)
+
+        paths.flat_map do |path|
+          path = File.join(current_dir, path) unless current_dir == "."
+          path = clean_path(path)
+
+          next if previously_fetched_files.map(&:name).include?(path)
+          next if file.name == path
+
+          if Dependabot::Experiments.enabled?(:enable_exclude_paths_subdirectory_manifest_files) &&
+             !@exclude_paths.empty? && Dependabot::FileFiltering.exclude_path?(path, @exclude_paths)
+            raise Dependabot::DependencyFileNotEvaluatable,
+                  "Cannot process requirements: '#{file.name}' references excluded file '#{path}'. " \
+                  "Please either remove the reference from '#{file.name}' " \
+                  "or update your exclude_paths configuration."
+          end
+
+          fetched_file = fetch_file_from_host(path)
+          grandchild_requirement_files = fetch_child_requirement_files(
+            file: fetched_file,
+            previously_fetched_files: previously_fetched_files + [file]
+          )
+          [fetched_file, *grandchild_requirement_files]
+        end.compact
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def constraints_files
+        all_requirement_files = requirements_txt_files +
+                                child_requirement_txt_files
+
+        constraints_paths = all_requirement_files.map do |req_file|
+          current_dir = File.dirname(req_file.name)
+          content = req_file.content
+          next [] if content.nil?
+
+          paths = content.scan(CONSTRAINT_REGEX).flatten
+
+          paths.map do |path|
+            path = File.join(current_dir, path) unless current_dir == "."
+            clean_path(path)
+          end
+        end.flatten.uniq
+
+        constraints_paths.map { |path| fetch_file_from_host(path) }
+      end
+
+      sig { returns(T::Array[Dependabot::DependencyFile]) }
+      def project_files
+        project_files = T.let([], T::Array[Dependabot::DependencyFile])
+        unfetchable_deps = []
+
+        path_dependencies.each do |dep|
+          path = dep[:path]
+          next if path.nil?
+
+          project_files += fetch_project_file(path)
+        rescue Dependabot::DependencyFileNotFound
+          next if sdist_or_wheel?(T.must(path))
+
+          unfetchable_deps << "\"#{dep[:name]}\" at #{clean_path(File.join(directory, dep[:file]))}"
+        end
+
+        additional_path_dependencies.each do |path|
+          project_files += fetch_project_file(path)
+        rescue Dependabot::DependencyFileNotFound => e
+          unfetchable_deps << e.file_path&.gsub(%r{^/}, "")
+        end
+
+        raise Dependabot::PathDependenciesNotReachable, unfetchable_deps if unfetchable_deps.any?
+
+        project_files
+      end
+
+      sig { params(path: String).returns(T::Boolean) }
+      def sdist_or_wheel?(path)
+        path.end_with?(".tar.gz", ".whl", ".zip")
+      end
+
+      sig { params(file: Dependabot::DependencyFile).returns(T::Boolean) }
+      def requirements_file?(file)
+        return false unless file.content&.valid_encoding?
+        return true if file.name.match?(/requirements/x)
+
+        T.must(file.content).lines.all? do |line|
+          next true if line.strip.empty?
+          next true if line.strip.start_with?("#", "-r ", "-c ", "-e ", "--")
+
+          line.match?(RequirementParser::VALID_REQ_TXT_REQUIREMENT)
+        end
+      end
+
+      sig { returns(T::Array[T::Hash[Symbol, String]]) }
+      def requirement_txt_path_dependencies
+        (requirements_txt_files + child_requirement_txt_files)
+          .map { |req_file| parse_requirement_path_dependencies(req_file) }
+          .flatten.uniq { |dep| dep[:path] }
+      end
+
+      sig { returns(T::Array[T::Hash[Symbol, String]]) }
+      def requirement_in_path_dependencies
+        requirements_in_files
+          .map { |req_file| parse_requirement_path_dependencies(req_file) }
+          .flatten.uniq { |dep| dep[:path] }
+      end
+
+      sig { params(req_file: Dependabot::DependencyFile).returns(T::Array[T::Hash[Symbol, String]]) }
+      def parse_requirement_path_dependencies(req_file)
+        # If this is a pip-compile lockfile, rely on whatever path dependencies we found in the main manifest
+        return [] if lockfile_for_compile_file?(req_file)
+
+        content = req_file.content
+        return [] if content.nil?
+
+        uneditable_reqs =
+          content
+            .scan(/(?<name>^['"]?(?:file:)?(?<path>\.[^\[#'"\n]*))/)
+            .filter_map do |match_array|
+              n, p = match_array
+              { name: n.to_s.strip, path: p.to_s.strip, file: req_file.name } unless p.to_s.include?("://")
+            end
+
+        editable_reqs =
+          content
+            .scan(/(?<name>^-e\s+['"]?(?:file:)?(?<path>[^\[#'"\n]*))/)
+            .filter_map do |match_array|
+              n, p = match_array
+              unless p.to_s.include?("://") || p.to_s.include?("git@")
+                { name: n.to_s.strip, path: p.to_s.strip, file: req_file.name }
+              end
+            end
+
+        uneditable_reqs + editable_reqs
+      end
+
+      sig { params(path: String).returns(String) }
+      def clean_path(path)
+        Pathname.new(path).cleanpath.to_path
+      end
+    end
+  end
+end
@@ -500,7 +500,7 @@ module Dependabot
 
      sig { returns(T::Hash[String, T::Array[String]]) }
      def requirement_map
-        child_req_regex = Python::
+        child_req_regex = Python::SharedFileFetcher::CHILD_REQUIREMENT_REGEX
        @requirement_map ||= T.let(
          pip_compile_files.each_with_object({}) do |file, req_map|
            paths = T.must(file.content).scan(child_req_regex).flatten
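Both requirement_map hunks above now take the child-requirement regex from Python::SharedFileFetcher::CHILD_REQUIREMENT_REGEX. The snippet below is a standalone check, with made-up requirements content, of how that regex and the companion CONSTRAINT_REGEX defined in the new shared fetcher pull referenced paths out of a requirements file via scan(...).flatten.

# Standalone check of the shared regex behaviour; the requirements content is invented.
CHILD_REQUIREMENT_REGEX = /^-r\s?(?<path>.*\.(?:txt|in))/
CONSTRAINT_REGEX = /^-c\s?(?<path>.*\.(?:txt|in))/

content = <<~REQS
  requests==2.31.0
  -r shared/base.txt
  -c constraints.txt
REQS

# String#scan with a named capture group returns one capture array per match,
# so flatten yields the referenced paths, mirroring requirement_map and
# fetch_child_requirement_files.
puts content.scan(CHILD_REQUIREMENT_REGEX).flatten.inspect # => ["shared/base.txt"]
puts content.scan(CONSTRAINT_REGEX).flatten.inspect        # => ["constraints.txt"]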
metadata
CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: dependabot-python
 version: !ruby/object:Gem::Version
-  version: 0.355.0
+  version: 0.356.0
 platform: ruby
 authors:
 - Dependabot
@@ -15,14 +15,14 @@ dependencies:
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 0.355.0
+        version: 0.356.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - '='
      - !ruby/object:Gem::Version
-        version: 0.355.0
+        version: 0.356.0
- !ruby/object:Gem::Dependency
  name: debug
  requirement: !ruby/object:Gem::Requirement
@@ -277,6 +277,7 @@ files:
 - lib/dependabot/python/pipenv_runner.rb
 - lib/dependabot/python/requirement.rb
 - lib/dependabot/python/requirement_parser.rb
+- lib/dependabot/python/shared_file_fetcher.rb
 - lib/dependabot/python/update_checker.rb
 - lib/dependabot/python/update_checker/latest_version_finder.rb
 - lib/dependabot/python/update_checker/pip_compile_version_resolver.rb
@@ -290,7 +291,7 @@ licenses:
 - MIT
 metadata:
   bug_tracker_uri: https://github.com/dependabot/dependabot-core/issues
-  changelog_uri: https://github.com/dependabot/dependabot-core/releases/tag/v0.355.0
+  changelog_uri: https://github.com/dependabot/dependabot-core/releases/tag/v0.356.0
 rdoc_options: []
 require_paths:
 - lib