dependabot-uv 0.299.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/helpers/build +34 -0
- data/helpers/lib/__init__.py +0 -0
- data/helpers/lib/hasher.py +36 -0
- data/helpers/lib/parser.py +270 -0
- data/helpers/requirements.txt +13 -0
- data/helpers/run.py +22 -0
- data/lib/dependabot/uv/authed_url_builder.rb +31 -0
- data/lib/dependabot/uv/file_fetcher.rb +328 -0
- data/lib/dependabot/uv/file_parser/pipfile_files_parser.rb +192 -0
- data/lib/dependabot/uv/file_parser/pyproject_files_parser.rb +345 -0
- data/lib/dependabot/uv/file_parser/python_requirement_parser.rb +185 -0
- data/lib/dependabot/uv/file_parser/setup_file_parser.rb +193 -0
- data/lib/dependabot/uv/file_parser.rb +437 -0
- data/lib/dependabot/uv/file_updater/compile_file_updater.rb +576 -0
- data/lib/dependabot/uv/file_updater/pyproject_preparer.rb +124 -0
- data/lib/dependabot/uv/file_updater/requirement_file_updater.rb +73 -0
- data/lib/dependabot/uv/file_updater/requirement_replacer.rb +214 -0
- data/lib/dependabot/uv/file_updater.rb +105 -0
- data/lib/dependabot/uv/language.rb +76 -0
- data/lib/dependabot/uv/language_version_manager.rb +114 -0
- data/lib/dependabot/uv/metadata_finder.rb +186 -0
- data/lib/dependabot/uv/name_normaliser.rb +26 -0
- data/lib/dependabot/uv/native_helpers.rb +38 -0
- data/lib/dependabot/uv/package_manager.rb +54 -0
- data/lib/dependabot/uv/pip_compile_file_matcher.rb +38 -0
- data/lib/dependabot/uv/pipenv_runner.rb +108 -0
- data/lib/dependabot/uv/requirement.rb +163 -0
- data/lib/dependabot/uv/requirement_parser.rb +60 -0
- data/lib/dependabot/uv/update_checker/index_finder.rb +227 -0
- data/lib/dependabot/uv/update_checker/latest_version_finder.rb +297 -0
- data/lib/dependabot/uv/update_checker/pip_compile_version_resolver.rb +506 -0
- data/lib/dependabot/uv/update_checker/pip_version_resolver.rb +73 -0
- data/lib/dependabot/uv/update_checker/requirements_updater.rb +391 -0
- data/lib/dependabot/uv/update_checker.rb +317 -0
- data/lib/dependabot/uv/version.rb +321 -0
- data/lib/dependabot/uv.rb +35 -0
- metadata +306 -0
data/lib/dependabot/uv/file_fetcher.rb
@@ -0,0 +1,328 @@
+# typed: true
+# frozen_string_literal: true
+
+require "toml-rb"
+require "sorbet-runtime"
+
+require "dependabot/file_fetchers"
+require "dependabot/file_fetchers/base"
+require "dependabot/uv/language_version_manager"
+require "dependabot/uv/pip_compile_file_matcher"
+require "dependabot/uv/requirement_parser"
+require "dependabot/uv/file_parser/pyproject_files_parser"
+require "dependabot/uv/file_parser/python_requirement_parser"
+require "dependabot/errors"
+
+module Dependabot
+  module Uv
+    class FileFetcher < Dependabot::FileFetchers::Base
+      extend T::Sig
+      extend T::Helpers
+
+      CHILD_REQUIREMENT_REGEX = /^-r\s?(?<path>.*\.(?:txt|in))/
+      CONSTRAINT_REGEX = /^-c\s?(?<path>.*\.(?:txt|in))/
+      DEPENDENCY_TYPES = %w(packages dev-packages).freeze
+
+      def self.required_files_in?(filenames)
+        return true if filenames.any? { |name| name.end_with?(".txt", ".in") }
+
+        # If there is a directory of requirements return true
+        return true if filenames.include?("requirements")
+
+        # If this repo is using pyproject.toml return true
+        filenames.include?("pyproject.toml")
+      end
+
+      def self.required_files_message
+        "Repo must contain a requirements.txt, requirements.in, or pyproject.toml"
+      end
+
+      def ecosystem_versions
+        # Hmm... it's weird that this calls file parser methods, but here we are in the file fetcher... for all
+        # ecosystems our goal is to extract the user specified versions, so we'll need to do file parsing... so should
+        # we move this `ecosystem_versions` metrics method to run in the file parser for all ecosystems? Downside is if
+        # file parsing blows up, this metric isn't emitted, but reality is we have to parse anyway... as we want to know
+        # the user-specified range of versions, not the version Dependabot chose to run.
+        python_requirement_parser = FileParser::PythonRequirementParser.new(dependency_files: files)
+        language_version_manager = LanguageVersionManager.new(python_requirement_parser: python_requirement_parser)
+        Dependabot.logger.info("Dependabot is using Python version '#{language_version_manager.python_major_minor}'.")
+        {
+          languages: {
+            python: {
+              # TODO: alternatively this could use `python_requirement_parser.user_specified_requirements` which
+              # returns an array... which we could flip to return a hash of manifest name => version
+              # string and then check for min/max versions... today it simply defaults to
+              # array.first which seems rather arbitrary.
+              "raw" => language_version_manager.user_specified_python_version || "unknown",
+              "max" => language_version_manager.python_major_minor || "unknown"
+            }
+          }
+        }
+      end
+
+      sig { override.returns(T::Array[DependencyFile]) }
+      def fetch_files
+        fetched_files = []
+
+        fetched_files += pyproject_files
+
+        fetched_files += requirements_in_files
+        fetched_files += requirement_files if requirements_txt_files.any?
+
+        fetched_files += project_files
+        fetched_files << python_version_file if python_version_file
+
+        uniq_files(fetched_files)
+      end
+
+      private
+
+      def uniq_files(fetched_files)
+        uniq_files = fetched_files.reject(&:support_file?).uniq
+        uniq_files += fetched_files
+                      .reject { |f| uniq_files.map(&:name).include?(f.name) }
+      end
+
+      def pyproject_files
+        [pyproject].compact
+      end
+
+      def requirement_files
+        [
+          *requirements_txt_files,
+          *child_requirement_txt_files,
+          *constraints_files
+        ]
+      end
+
+      def python_version_file
+        return @python_version_file if defined?(@python_version_file)
+
+        @python_version_file = fetch_support_file(".python-version")
+
+        return @python_version_file if @python_version_file
+        return if [".", "/"].include?(directory)
+
+        # Check the top-level for a .python-version file, too
+        reverse_path = Pathname.new(directory[0]).relative_path_from(directory)
+        @python_version_file =
+          fetch_support_file(File.join(reverse_path, ".python-version"))
+          &.tap { |f| f.name = ".python-version" }
+      end
+
+      def pyproject
+        return @pyproject if defined?(@pyproject)
+
+        @pyproject = fetch_file_if_present("pyproject.toml")
+      end
+
+      def requirements_txt_files
+        req_txt_and_in_files.select { |f| f.name.end_with?(".txt") }
+      end
+
+      def requirements_in_files
+        req_txt_and_in_files.select { |f| f.name.end_with?(".in") } +
+          child_requirement_in_files
+      end
+
+      def parsed_pyproject
+        raise "No pyproject.toml" unless pyproject
+
+        @parsed_pyproject ||= TomlRB.parse(pyproject.content)
+      rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
+        raise Dependabot::DependencyFileNotParseable, pyproject.path
+      end
+
+      def req_txt_and_in_files
+        return @req_txt_and_in_files if @req_txt_and_in_files
+
+        @req_txt_and_in_files = []
+
+        repo_contents
+          .select { |f| f.type == "file" }
+          .select { |f| f.name.end_with?(".txt", ".in") }
+          .reject { |f| f.size > 500_000 }
+          .map { |f| fetch_file_from_host(f.name) }
+          .select { |f| requirements_file?(f) }
+          .each { |f| @req_txt_and_in_files << f }
+
+        repo_contents
+          .select { |f| f.type == "dir" }
+          .each { |f| @req_txt_and_in_files += req_files_for_dir(f) }
+
+        @req_txt_and_in_files
+      end
+
+      def req_files_for_dir(requirements_dir)
+        dir = directory.gsub(%r{(^/|/$)}, "")
+        relative_reqs_dir =
+          requirements_dir.path.gsub(%r{^/?#{Regexp.escape(dir)}/?}, "")
+
+        repo_contents(dir: relative_reqs_dir)
+          .select { |f| f.type == "file" }
+          .select { |f| f.name.end_with?(".txt", ".in") }
+          .reject { |f| f.size > 500_000 }
+          .map { |f| fetch_file_from_host("#{relative_reqs_dir}/#{f.name}") }
+          .select { |f| requirements_file?(f) }
+      end
+
+      def child_requirement_txt_files
+        child_requirement_files.select { |f| f.name.end_with?(".txt") }
+      end
+
+      def child_requirement_in_files
+        child_requirement_files.select { |f| f.name.end_with?(".in") }
+      end
+
+      def child_requirement_files
+        @child_requirement_files ||=
+          begin
+            fetched_files = req_txt_and_in_files.dup
+            req_txt_and_in_files.flat_map do |requirement_file|
+              child_files = fetch_child_requirement_files(
+                file: requirement_file,
+                previously_fetched_files: fetched_files
+              )
+
+              fetched_files += child_files
+              child_files
+            end
+          end
+      end
+
+      def fetch_child_requirement_files(file:, previously_fetched_files:)
+        paths = file.content.scan(CHILD_REQUIREMENT_REGEX).flatten
+        current_dir = File.dirname(file.name)
+
+        paths.flat_map do |path|
+          path = File.join(current_dir, path) unless current_dir == "."
+          path = cleanpath(path)
+
+          next if previously_fetched_files.map(&:name).include?(path)
+          next if file.name == path
+
+          fetched_file = fetch_file_from_host(path)
+          grandchild_requirement_files = fetch_child_requirement_files(
+            file: fetched_file,
+            previously_fetched_files: previously_fetched_files + [file]
+          )
+          [fetched_file, *grandchild_requirement_files]
+        end.compact
+      end
+
+      def constraints_files
+        all_requirement_files = requirements_txt_files +
+                                child_requirement_txt_files
+
+        constraints_paths = all_requirement_files.map do |req_file|
+          current_dir = File.dirname(req_file.name)
+          paths = req_file.content.scan(CONSTRAINT_REGEX).flatten
+
+          paths.map do |path|
+            path = File.join(current_dir, path) unless current_dir == "."
+            cleanpath(path)
+          end
+        end.flatten.uniq
+
+        constraints_paths.map { |path| fetch_file_from_host(path) }
+      end
+
+      def project_files
+        project_files = T.let([], T::Array[Dependabot::DependencyFile])
+        unfetchable_deps = []
+
+        path_dependencies.each do |dep|
+          path = dep[:path]
+          project_files += fetch_project_file(path)
+        rescue Dependabot::DependencyFileNotFound
+          unfetchable_deps << "\"#{dep[:name]}\" at #{cleanpath(File.join(directory, dep[:file]))}"
+        end
+
+        raise Dependabot::PathDependenciesNotReachable, unfetchable_deps if unfetchable_deps.any?
+
+        project_files
+      end
+
+      def fetch_project_file(path)
+        project_files = []
+
+        path = cleanpath(File.join(path, "pyproject.toml")) unless sdist_or_wheel?(path)
+
+        return [] if path == "pyproject.toml" && pyproject
+
+        project_files << fetch_file_from_host(
+          path,
+          fetch_submodules: true
+        ).tap { |f| f.support_file = true }
+
+        project_files
+      end
+
+      def sdist_or_wheel?(path)
+        path.end_with?(".tar.gz", ".whl", ".zip")
+      end
+
+      def requirements_file?(file)
+        return false unless file.content.valid_encoding?
+        return true if file.name.match?(/requirements/x)
+
+        file.content.lines.all? do |line|
+          next true if line.strip.empty?
+          next true if line.strip.start_with?("#", "-r ", "-c ", "-e ", "--")
+
+          line.match?(RequirementParser::VALID_REQ_TXT_REQUIREMENT)
+        end
+      end
+
+      def path_dependencies
+        [
+          *requirement_txt_path_dependencies,
+          *requirement_in_path_dependencies
+        ]
+      end
+
+      def requirement_txt_path_dependencies
+        (requirements_txt_files + child_requirement_txt_files)
+          .map { |req_file| parse_requirement_path_dependencies(req_file) }
+          .flatten.uniq { |dep| dep[:path] }
+      end
+
+      def requirement_in_path_dependencies
+        requirements_in_files
+          .map { |req_file| parse_requirement_path_dependencies(req_file) }
+          .flatten.uniq { |dep| dep[:path] }
+      end
+
+      def parse_requirement_path_dependencies(req_file)
+        # If this is a pip-compile lockfile, rely on whatever path dependencies we found in the main manifest
+        return [] if pip_compile_file_matcher.lockfile_for_pip_compile_file?(req_file)
+
+        uneditable_reqs =
+          req_file.content
+                  .scan(/(?<name>^['"]?(?:file:)?(?<path>\..*?)(?=\[|#|'|"|$))/)
+                  .filter_map do |n, p|
+            { name: n.strip, path: p.strip, file: req_file.name } unless p.include?("://")
+          end
+
+        editable_reqs =
+          req_file.content
+                  .scan(/(?<name>^(?:-e)\s+['"]?(?:file:)?(?<path>.*?)(?=\[|#|'|"|$))/)
+                  .filter_map do |n, p|
+            { name: n.strip, path: p.strip, file: req_file.name } unless p.include?("://") || p.include?("git@")
+          end
+
+        uneditable_reqs + editable_reqs
+      end
+
+      def cleanpath(path)
+        Pathname.new(path).cleanpath.to_path
+      end
+
+      def pip_compile_file_matcher
+        @pip_compile_file_matcher ||= PipCompileFileMatcher.new(requirements_in_files)
+      end
+    end
+  end
+end
+
+Dependabot::FileFetchers.register("uv", Dependabot::Uv::FileFetcher)
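The `register` call on the last line wires this fetcher into Dependabot's ecosystem registry under the "uv" name. A rough sketch of how the class is typically driven, assuming dependabot-core's usual entry points (the repository name below is hypothetical, and an empty credentials array is only workable for small unauthenticated experiments before rate limiting kicks in):

    require "dependabot/source"
    require "dependabot/uv"

    # Hypothetical repository; anything satisfying required_files_in? works.
    source = Dependabot::Source.new(
      provider: "github",
      repo: "example-org/example-repo",
      directory: "/"
    )

    # Look up the fetcher class registered for "uv", then fetch the files.
    fetcher = Dependabot::FileFetchers.for_package_manager("uv")
                                      .new(source: source, credentials: [])

    fetcher.files.each { |f| puts f.name }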
data/lib/dependabot/uv/file_parser/pipfile_files_parser.rb
@@ -0,0 +1,192 @@
+# typed: strict
+# frozen_string_literal: true
+
+require "toml-rb"
+
+require "dependabot/dependency"
+require "dependabot/file_parsers/base/dependency_set"
+require "dependabot/uv/file_parser"
+require "dependabot/errors"
+require "dependabot/uv/name_normaliser"
+
+module Dependabot
+  module Uv
+    class FileParser
+      class PipfileFilesParser
+        extend T::Sig
+        DEPENDENCY_GROUP_KEYS = T.let([
+          {
+            pipfile: "packages",
+            lockfile: "default"
+          },
+          {
+            pipfile: "dev-packages",
+            lockfile: "develop"
+          }
+        ].freeze, T::Array[T::Hash[Symbol, String]])
+
+        sig { params(dependency_files: T::Array[Dependabot::DependencyFile]).void }
+        def initialize(dependency_files:)
+          @dependency_files = dependency_files
+        end
+
+        sig { returns(Dependabot::FileParsers::Base::DependencySet) }
+        def dependency_set
+          dependency_set = Dependabot::FileParsers::Base::DependencySet.new
+
+          dependency_set += pipfile_dependencies
+          dependency_set += pipfile_lock_dependencies
+
+          dependency_set
+        end
+
+        private
+
+        sig { returns(T::Array[Dependabot::DependencyFile]) }
+        attr_reader :dependency_files
+
+        sig { returns(Dependabot::FileParsers::Base::DependencySet) }
+        def pipfile_dependencies
+          dependencies = Dependabot::FileParsers::Base::DependencySet.new
+
+          DEPENDENCY_GROUP_KEYS.each do |keys|
+            next unless parsed_pipfile[T.must(keys[:pipfile])]
+
+            parsed_pipfile[T.must(keys[:pipfile])].map do |dep_name, req|
+              group = keys[:lockfile]
+              next unless specifies_version?(req)
+              next if git_or_path_requirement?(req)
+              next if pipfile_lock && !dependency_version(dep_name, req, T.must(group))
+
+              # Empty requirements are not allowed in Dependabot::Dependency and
+              # equivalent to "*" (latest available version)
+              req = "*" if req == ""
+
+              dependencies <<
+                Dependency.new(
+                  name: normalised_name(dep_name),
+                  version: dependency_version(dep_name, req, T.must(group)),
+                  requirements: [{
+                    requirement: req.is_a?(String) ? req : req["version"],
+                    file: T.must(pipfile).name,
+                    source: nil,
+                    groups: [group]
+                  }],
+                  package_manager: "uv",
+                  metadata: { original_name: dep_name }
+                )
+            end
+          end
+
+          dependencies
+        end
+
+        # Create a DependencySet where each element has no requirement. Any
+        # requirements will be added when combining the DependencySet with
+        # other DependencySets.
+        sig { returns(Dependabot::FileParsers::Base::DependencySet) }
+        def pipfile_lock_dependencies
+          dependencies = Dependabot::FileParsers::Base::DependencySet.new
+          return dependencies unless pipfile_lock
+
+          DEPENDENCY_GROUP_KEYS.map { |h| h.fetch(:lockfile) }.each do |key|
+            next unless parsed_pipfile_lock[key]
+
+            parsed_pipfile_lock[key].each do |dep_name, details|
+              version = case details
+                        when String then details
+                        when Hash then details["version"]
+                        end
+              next unless version
+              next if git_or_path_requirement?(details)
+
+              dependencies <<
+                Dependency.new(
+                  name: dep_name,
+                  version: version&.gsub(/^===?/, ""),
+                  requirements: [],
+                  package_manager: "uv",
+                  subdependency_metadata: [{ production: key != "develop" }]
+                )
+            end
+          end
+
+          dependencies
+        end
+
+        sig do
+          params(dep_name: String, requirement: T.any(String, T::Hash[String, T.untyped]),
+                 group: String).returns(T.nilable(String))
+        end
+        def dependency_version(dep_name, requirement, group)
+          req = version_from_hash_or_string(requirement)
+
+          if pipfile_lock
+            details = parsed_pipfile_lock
+                      .dig(group, normalised_name(dep_name))
+
+            version = version_from_hash_or_string(details)
+            version&.gsub(/^===?/, "")
+          elsif T.must(req).start_with?("==") && !T.must(req).include?("*")
+            T.must(req).strip.gsub(/^===?/, "")
+          end
+        end
+
+        sig do
+          params(obj: T.any(String, NilClass, T::Array[String], T::Hash[String, T.untyped])).returns(T.nilable(String))
+        end
+        def version_from_hash_or_string(obj)
+          case obj
+          when String then obj.strip
+          when Hash then obj["version"]
+          end
+        end
+
+        sig { params(req: T.any(String, T::Hash[String, T.untyped])).returns(T.any(T::Boolean, NilClass, String)) }
+        def specifies_version?(req)
+          return true if req.is_a?(String)
+
+          req["version"]
+        end
+
+        sig { params(req: T.any(String, T::Hash[String, T.untyped])).returns(T::Boolean) }
+        def git_or_path_requirement?(req)
+          return false unless req.is_a?(Hash)
+
+          %w(git path).any? { |k| req.key?(k) }
+        end
+
+        sig { params(name: String, extras: T::Array[String]).returns(String) }
+        def normalised_name(name, extras = [])
+          NameNormaliser.normalise_including_extras(name, extras)
+        end
+
+        sig { returns(T::Hash[String, T.untyped]) }
+        def parsed_pipfile
+          @parsed_pipfile ||= T.let(TomlRB.parse(T.must(pipfile).content), T.nilable(T::Hash[String, T.untyped]))
+        rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
+          raise Dependabot::DependencyFileNotParseable, T.must(pipfile).path
+        end
+
+        sig { returns(T::Hash[String, T.untyped]) }
+        def parsed_pipfile_lock
+          @parsed_pipfile_lock ||= T.let(JSON.parse(T.must(T.must(pipfile_lock).content)),
+                                         T.nilable(T::Hash[String, T.untyped]))
+        rescue JSON::ParserError
+          raise Dependabot::DependencyFileNotParseable, T.must(pipfile_lock).path
+        end
+
+        sig { returns(T.nilable(Dependabot::DependencyFile)) }
+        def pipfile
+          @pipfile ||= T.let(dependency_files.find { |f| f.name == "Pipfile" }, T.nilable(Dependabot::DependencyFile))
+        end
+
+        sig { returns(T.nilable(Dependabot::DependencyFile)) }
+        def pipfile_lock
+          @pipfile_lock ||= T.let(dependency_files.find { |f| f.name == "Pipfile.lock" },
+                                  T.nilable(Dependabot::DependencyFile))
+        end
+      end
+    end
+  end
+end
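For context on what this parser produces, here is a rough sketch of exercising it in isolation (the Pipfile content below is hypothetical; in the real flow the FileFetcher supplies the files):

    require "dependabot/dependency_file"
    require "dependabot/uv"

    # Hypothetical in-memory manifest standing in for a fetched file.
    pipfile = Dependabot::DependencyFile.new(
      name: "Pipfile",
      content: <<~TOML
        [packages]
        requests = "==2.31.0"

        [dev-packages]
        pytest = "*"
      TOML
    )

    parser = Dependabot::Uv::FileParser::PipfileFilesParser.new(
      dependency_files: [pipfile]
    )

    # With no Pipfile.lock present, only the pinned requirement yields a
    # concrete version: requests => "2.31.0", pytest => nil (requirement "*").
    parser.dependency_set.dependencies.each do |dep|
      puts "#{dep.name} #{dep.version.inspect}"
    end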