dependabot-uv 0.299.1
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/helpers/build +34 -0
- data/helpers/lib/__init__.py +0 -0
- data/helpers/lib/hasher.py +36 -0
- data/helpers/lib/parser.py +270 -0
- data/helpers/requirements.txt +13 -0
- data/helpers/run.py +22 -0
- data/lib/dependabot/uv/authed_url_builder.rb +31 -0
- data/lib/dependabot/uv/file_fetcher.rb +328 -0
- data/lib/dependabot/uv/file_parser/pipfile_files_parser.rb +192 -0
- data/lib/dependabot/uv/file_parser/pyproject_files_parser.rb +345 -0
- data/lib/dependabot/uv/file_parser/python_requirement_parser.rb +185 -0
- data/lib/dependabot/uv/file_parser/setup_file_parser.rb +193 -0
- data/lib/dependabot/uv/file_parser.rb +437 -0
- data/lib/dependabot/uv/file_updater/compile_file_updater.rb +576 -0
- data/lib/dependabot/uv/file_updater/pyproject_preparer.rb +124 -0
- data/lib/dependabot/uv/file_updater/requirement_file_updater.rb +73 -0
- data/lib/dependabot/uv/file_updater/requirement_replacer.rb +214 -0
- data/lib/dependabot/uv/file_updater.rb +105 -0
- data/lib/dependabot/uv/language.rb +76 -0
- data/lib/dependabot/uv/language_version_manager.rb +114 -0
- data/lib/dependabot/uv/metadata_finder.rb +186 -0
- data/lib/dependabot/uv/name_normaliser.rb +26 -0
- data/lib/dependabot/uv/native_helpers.rb +38 -0
- data/lib/dependabot/uv/package_manager.rb +54 -0
- data/lib/dependabot/uv/pip_compile_file_matcher.rb +38 -0
- data/lib/dependabot/uv/pipenv_runner.rb +108 -0
- data/lib/dependabot/uv/requirement.rb +163 -0
- data/lib/dependabot/uv/requirement_parser.rb +60 -0
- data/lib/dependabot/uv/update_checker/index_finder.rb +227 -0
- data/lib/dependabot/uv/update_checker/latest_version_finder.rb +297 -0
- data/lib/dependabot/uv/update_checker/pip_compile_version_resolver.rb +506 -0
- data/lib/dependabot/uv/update_checker/pip_version_resolver.rb +73 -0
- data/lib/dependabot/uv/update_checker/requirements_updater.rb +391 -0
- data/lib/dependabot/uv/update_checker.rb +317 -0
- data/lib/dependabot/uv/version.rb +321 -0
- data/lib/dependabot/uv.rb +35 -0
- metadata +306 -0
data/lib/dependabot/uv/file_updater/compile_file_updater.rb
@@ -0,0 +1,576 @@

# typed: true
# frozen_string_literal: true

require "open3"
require "dependabot/dependency"
require "dependabot/uv/requirement_parser"
require "dependabot/uv/file_fetcher"
require "dependabot/uv/file_parser/python_requirement_parser"
require "dependabot/uv/file_updater"
require "dependabot/shared_helpers"
require "dependabot/uv/language_version_manager"
require "dependabot/uv/native_helpers"
require "dependabot/uv/name_normaliser"
require "dependabot/uv/authed_url_builder"

module Dependabot
  module Uv
    class FileUpdater
      # rubocop:disable Metrics/ClassLength
      class CompileFileUpdater
        require_relative "requirement_replacer"
        require_relative "requirement_file_updater"

        UNSAFE_PACKAGES = %w(setuptools distribute pip).freeze
        INCOMPATIBLE_VERSIONS_REGEX = /There are incompatible versions in the resolved dependencies:.*\z/m
        WARNINGS = /\s*# WARNING:.*\Z/m
        UNSAFE_NOTE = /\s*# The following packages are considered to be unsafe.*\Z/m
        RESOLVER_REGEX = /(?<=--resolver=)(\w+)/
        NATIVE_COMPILATION_ERROR =
          "pip._internal.exceptions.InstallationSubprocessError: Getting requirements to build wheel exited with 1"

        attr_reader :dependencies
        attr_reader :dependency_files
        attr_reader :credentials

        def initialize(dependencies:, dependency_files:, credentials:, index_urls: nil)
          @dependencies = dependencies
          @dependency_files = dependency_files
          @credentials = credentials
          @index_urls = index_urls
          @build_isolation = true
        end

        def updated_dependency_files
          @updated_dependency_files ||= fetch_updated_dependency_files
        end

        private

        def dependency
          # For now, we'll only ever be updating a single dependency
          dependencies.first
        end

        def fetch_updated_dependency_files
          updated_compiled_files = compile_new_requirement_files
          updated_manifest_files = update_manifest_files

          updated_files = updated_compiled_files + updated_manifest_files
          updated_uncompiled_files = update_uncompiled_files(updated_files)

          [
            *updated_manifest_files,
            *updated_compiled_files,
            *updated_uncompiled_files
          ]
        end

        def compile_new_requirement_files
          SharedHelpers.in_a_temporary_directory do
            write_updated_dependency_files
            language_version_manager.install_required_python

            filenames_to_compile.each do |filename|
              compile_file(filename)
            end

            # Remove any .python-version file before parsing the reqs
            FileUtils.remove_entry(".python-version", true)

            dependency_files.filter_map do |file|
              next unless file.name.end_with?(".txt")

              updated_content = File.read(file.name)

              updated_content =
                post_process_compiled_file(updated_content, file)
              next if updated_content == file.content

              file.dup.tap { |f| f.content = updated_content }
            end
          end
        end

        def compile_file(filename)
          # Shell out to pip-compile, generate a new set of requirements.
          # This is slow, as pip-compile needs to do installs.
          options = compile_options(filename)
          options_fingerprint = compile_options_fingerprint(options)

          name_part = "pyenv exec uv pip compile " \
                      "#{options} -P " \
                      "#{dependency.name}"
          fingerprint_name_part = "pyenv exec uv pip compile " \
                                  "#{options_fingerprint} -P " \
                                  "<dependency_name>"

          version_part = "#{dependency.version} #{filename}"
          fingerprint_version_part = "<dependency_version> <filename>"

          # Don't escape pyenv `dep-name==version` syntax
          run_uv_compile_command(
            "#{SharedHelpers.escape_command(name_part)}==" \
            "#{SharedHelpers.escape_command(version_part)}",
            allow_unsafe_shell_command: true,
            fingerprint: "#{fingerprint_name_part}==#{fingerprint_version_part}"
          )
        rescue SharedHelpers::HelperSubprocessFailed => e
          retry_count ||= 0
          retry_count += 1
          if compilation_error?(e) && retry_count <= 1
            @build_isolation = false
            retry
          end

          raise
        end
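
        # A minimal sketch of the retry-once pattern used in compile_file above:
        # locals assigned inside a rescue clause survive `retry`, so retry_count
        # acts as a one-shot guard. `flaky_build` is a hypothetical stand-in for
        # the real run_uv_compile_command shell-out.
        def compile_with_fallback_sketch
          flaky_build(isolation: @build_isolation)
        rescue StandardError
          retry_count ||= 0           # only initialised on the first rescue
          retry_count += 1
          if retry_count <= 1
            @build_isolation = false  # second attempt: --no-build-isolation
            retry                     # re-runs the method body from the top
          end
          raise
        end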

        def compilation_error?(error)
          error.message.include?(NATIVE_COMPILATION_ERROR)
        end

        def update_manifest_files
          dependency_files.filter_map do |file|
            next unless file.name.end_with?(".in")

            file = file.dup
            updated_content = update_dependency_requirement(file)
            next if updated_content == file.content

            file.content = updated_content
            file
          end
        end

        def update_uncompiled_files(updated_files)
          updated_filenames = updated_files.map(&:name)
          old_reqs = dependency.previous_requirements
                               .reject { |r| updated_filenames.include?(r[:file]) }
          new_reqs = dependency.requirements
                               .reject { |r| updated_filenames.include?(r[:file]) }

          return [] if new_reqs.none?

          files = dependency_files
                  .reject { |file| updated_filenames.include?(file.name) }

          args = dependency.to_h
          args = args.keys.to_h { |k| [k.to_sym, args[k]] }
          args[:requirements] = new_reqs
          args[:previous_requirements] = old_reqs

          RequirementFileUpdater.new(
            dependencies: [Dependency.new(**args)],
            dependency_files: files,
            credentials: credentials
          ).updated_dependency_files
        end

        def run_command(cmd, env: python_env, allow_unsafe_shell_command: false, fingerprint:)
          SharedHelpers.run_shell_command(
            cmd,
            env: env,
            allow_unsafe_shell_command: allow_unsafe_shell_command,
            fingerprint: fingerprint,
            stderr_to_stdout: true
          )
        rescue SharedHelpers::HelperSubprocessFailed => e
          stdout = e.message

          if stdout.match?(INCOMPATIBLE_VERSIONS_REGEX)
            raise DependencyFileNotResolvable, stdout.match(INCOMPATIBLE_VERSIONS_REGEX)
          end

          raise
        end

        def run_uv_compile_command(command, allow_unsafe_shell_command: false, fingerprint:)
          run_command(
            "pyenv local #{language_version_manager.python_major_minor}",
            fingerprint: "pyenv local <python_major_minor>"
          )

          run_command(
            command,
            allow_unsafe_shell_command: allow_unsafe_shell_command,
            fingerprint: fingerprint
          )
        end

        def python_env
          env = {}

          # Handle Apache Airflow 1.10.x installs
          if dependency_files.any? { |f| f.content.include?("apache-airflow") }
            if dependency_files.any? { |f| f.content.include?("unidecode") }
              env["AIRFLOW_GPL_UNIDECODE"] = "yes"
            else
              env["SLUGIFY_USES_TEXT_UNIDECODE"] = "yes"
            end
          end

          env
        end

        def write_updated_dependency_files
          dependency_files.each do |file|
            path = file.name
            FileUtils.mkdir_p(Pathname.new(path).dirname)
            File.write(path, freeze_dependency_requirement(file))
          end

          # Overwrite the .python-version with updated content
          File.write(".python-version", language_version_manager.python_major_minor)
        end

        def freeze_dependency_requirement(file)
          return file.content unless file.name.end_with?(".in")

          old_req = dependency.previous_requirements
                              .find { |r| r[:file] == file.name }

          return file.content unless old_req
          return file.content if old_req == "==#{dependency.version}"

          RequirementReplacer.new(
            content: file.content,
            dependency_name: dependency.name,
            old_requirement: old_req[:requirement],
            new_requirement: "==#{dependency.version}",
            index_urls: @index_urls
          ).updated_content
        end
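
        # Example: if the update target is requests 2.31.0, a line such as
        # `requests>=2.25` in a .in manifest is rewritten to `requests==2.31.0`
        # in the temporary checkout, so the compile that follows resolves
        # against the exact target version (names and versions illustrative).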

        def update_dependency_requirement(file)
          return file.content unless file.name.end_with?(".in")

          old_req = dependency.previous_requirements
                              .find { |r| r[:file] == file.name }
          new_req = dependency.requirements
                              .find { |r| r[:file] == file.name }
          return file.content unless old_req&.fetch(:requirement)
          return file.content if old_req == new_req

          RequirementReplacer.new(
            content: file.content,
            dependency_name: dependency.name,
            old_requirement: old_req[:requirement],
            new_requirement: new_req[:requirement],
            index_urls: @index_urls
          ).updated_content
        end

        def post_process_compiled_file(updated_content, file)
          content = replace_header_with_original(updated_content, file.content)
          content = remove_new_warnings(content, file.content)
          content = update_hashes_if_required(content, file.content)
          replace_absolute_file_paths(content, file.content)
        end

        def replace_header_with_original(updated_content, original_content)
          original_header_lines =
            original_content.lines.take_while { |l| l.start_with?("#") }

          updated_content_lines =
            updated_content.lines.drop_while { |l| l.start_with?("#") }

          [*original_header_lines, *updated_content_lines].join
        end

        def replace_absolute_file_paths(updated_content, original_content)
          content = updated_content

          update_count = 0
          original_content.lines.each do |original_line|
            next unless original_line.start_with?("-e")
            next update_count += 1 if updated_content.include?(original_line)

            line_to_update =
              updated_content.lines
                             .select { |l| l.start_with?("-e") }
                             .at(update_count)
            raise "Mismatch in editable requirements!" unless line_to_update

            content = content.gsub(line_to_update, original_line)
            update_count += 1
          end

          content
        end

        def remove_new_warnings(updated_content, original_content)
          content = updated_content

          content = content.sub(WARNINGS, "\n") if content.match?(WARNINGS) && !original_content.match?(WARNINGS)

          if content.match?(UNSAFE_NOTE) &&
             !original_content.match?(UNSAFE_NOTE)
            content = content.sub(UNSAFE_NOTE, "\n")
          end

          content
        end

        def update_hashes_if_required(updated_content, original_content)
          deps_to_update =
            deps_to_augment_hashes_for(updated_content, original_content)

          updated_content_with_hashes = updated_content
          deps_to_update.each do |mtch|
            updated_string = mtch.to_s.sub(
              RequirementParser::HASHES,
              package_hashes_for(
                name: mtch.named_captures.fetch("name"),
                version: mtch.named_captures.fetch("version"),
                algorithm: mtch.named_captures.fetch("algorithm")
              ).sort.join(hash_separator(mtch.to_s))
            )

            updated_content_with_hashes = updated_content_with_hashes
                                          .gsub(mtch.to_s, updated_string)
          end
          updated_content_with_hashes
        end

        def deps_to_augment_hashes_for(updated_content, original_content)
          regex = /^#{RequirementParser::INSTALL_REQ_WITH_REQUIREMENT}/o

          new_matches = []
          updated_content.scan(regex) { new_matches << Regexp.last_match }

          old_matches = []
          original_content.scan(regex) { old_matches << Regexp.last_match }

          new_deps = []
          changed_hashes_deps = []

          new_matches.each do |mtch|
            nm = mtch.named_captures["name"]
            old_match = old_matches.find { |m| m.named_captures["name"] == nm }

            next new_deps << mtch unless old_match
            next unless old_match.named_captures["hashes"]

            old_count = old_match.named_captures["hashes"].split("--hash").count
            new_count = mtch.named_captures["hashes"].split("--hash").count
            changed_hashes_deps << mtch if new_count < old_count
          end

          return [] if changed_hashes_deps.none?

          [*new_deps, *changed_hashes_deps]
        end

        def package_hashes_for(name:, version:, algorithm:)
          index_urls = @index_urls || [nil]
          hashes = []

          index_urls.each do |index_url|
            args = [name, version, algorithm]
            args << index_url if index_url

            begin
              native_helper_hashes = T.cast(
                SharedHelpers.run_helper_subprocess(
                  command: "pyenv exec python3 #{NativeHelpers.python_helper_path}",
                  function: "get_dependency_hash",
                  args: args
                ),
                T::Array[T::Hash[String, String]]
              ).map { |h| "--hash=#{algorithm}:#{h['hash']}" }

              hashes.concat(native_helper_hashes)
            rescue SharedHelpers::HelperSubprocessFailed => e
              raise unless e.error_class.include?("PackageNotFoundError")

              next
            end
          end

          hashes
        end

        def hash_separator(requirement_string)
          hash_regex = RequirementParser::HASH
          return unless requirement_string.match?(hash_regex)

          current_separator =
            requirement_string
            .match(/#{hash_regex}((?<separator>\s*\\?\s*?)#{hash_regex})*/)
            .named_captures.fetch("separator")

          default_separator =
            requirement_string
            .match(RequirementParser::HASH)
            .pre_match.match(/(?<separator>\s*\\?\s*?)\z/)
            .named_captures.fetch("separator")

          current_separator || default_separator
        end
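
        # Worked example: compiled files usually continue hashes across lines,
        # e.g.
        #
        #   cryptography==42.0.5 \
        #       --hash=sha256:aaaa... \
        #       --hash=sha256:bbbb...
        #
        # hash_separator recovers the separator between consecutive --hash
        # entries (here a backslash, newline and indent), falling back to the
        # whitespace just before the first one, so regenerated hashes are
        # joined in the same style as the rest of the file.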

        def compile_options_fingerprint(options)
          options.sub(
            /--output-file=\S+/, "--output-file=<output_file>"
          ).sub(
            /--index-url=\S+/, "--index-url=<index_url>"
          ).sub(
            /--extra-index-url=\S+/, "--extra-index-url=<extra_index_url>"
          )
        end
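
        # Example: the fingerprint keeps the option shape while redacting the
        # values that vary per repository, so subprocess failures can be
        # aggregated without leaking paths or credentials (input shown is
        # illustrative):
        #
        #   compile_options_fingerprint(
        #     "--build-isolation --output-file=requirements/prod.txt " \
        #     "--index-url=https://user:pass@pypi.example.org/simple"
        #   )
        #   # => "--build-isolation --output-file=<output_file> --index-url=<index_url>"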

        def compile_options(filename)
          options = @build_isolation ? ["--build-isolation"] : ["--no-build-isolation"]
          options += compile_index_options

          if (requirements_file = compiled_file_for_filename(filename))
            options += uv_compile_options_from_compiled_file(requirements_file)
          end

          options.join(" ")
        end

        def uv_compile_options_from_compiled_file(requirements_file)
          options = ["--output-file=#{requirements_file.name}"]
          options << "--emit-index-url" if requirements_file.content.include?("index-url http")
          options << "--generate-hashes" if requirements_file.content.include?("--hash=sha")
          options << "--no-annotate" unless requirements_file.content.include?("# via ")
          options << "--pre" if requirements_file.content.include?("--pre")
          options << "--no-strip-extras" if requirements_file.content.include?("--no-strip-extras")

          if requirements_file.content.include?("--no-binary") || requirements_file.content.include?("--only-binary")
            options << "--emit-build-options"
          end

          options << "--universal" if requirements_file.content.include?("--universal")

          options
        end

        def compile_index_options
          credentials
            .select { |cred| cred["type"] == "python_index" }
            .map do |cred|
              authed_url = AuthedUrlBuilder.authed_url(credential: cred)

              if cred.replaces_base?
                "--index-url=#{authed_url}"
              else
                "--extra-index-url=#{authed_url}"
              end
            end
        end
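
        # Example: a python_index credential that replaces the default index
        # becomes an `--index-url=` flag and any other becomes
        # `--extra-index-url=`, with AuthedUrlBuilder embedding the credential
        # in the URL, roughly:
        #
        #   { "type" => "python_index",
        #     "index-url" => "https://pypi.example.org/simple",
        #     "token" => "deploy:s3cret",
        #     "replaces-base" => true }
        #   # => "--index-url=https://deploy:s3cret@pypi.example.org/simple"
        #
        # (credential and URL are hypothetical.)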

        def includes_unsafe_packages?(content)
          UNSAFE_PACKAGES.any? { |n| content.match?(/^#{Regexp.quote(n)}==/) }
        end

        def filenames_to_compile
          files_from_reqs =
            dependency.requirements
                      .map { |r| r[:file] }
                      .select { |fn| fn.end_with?(".in") }

          files_from_compiled_files =
            compile_files.map(&:name).select do |fn|
              compiled_file = compiled_file_for_filename(fn)
              compiled_file_includes_dependency?(compiled_file)
            end

          filenames = [*files_from_reqs, *files_from_compiled_files].uniq

          order_filenames_for_compilation(filenames)
        end

        def compiled_file_for_filename(filename)
          compiled_file =
            compiled_files
            .find { |f| f.content.match?(output_file_regex(filename)) }

          compiled_file ||=
            compiled_files
            .find { |f| f.name == filename.gsub(/\.in$/, ".txt") }

          compiled_file
        end

        def output_file_regex(filename)
          "--output-file[=\s]+.*\s#{Regexp.escape(filename)}\s*$"
        end

        def compiled_file_includes_dependency?(compiled_file)
          return false unless compiled_file

          regex = RequirementParser::INSTALL_REQ_WITH_REQUIREMENT

          matches = []
          compiled_file.content.scan(regex) { matches << Regexp.last_match }
          matches.any? { |m| normalise(m[:name]) == dependency.name }
        end

        def normalise(name)
          NameNormaliser.normalise(name)
        end

        # If the files we need to update require one another then we need to
        # update them in the right order
        def order_filenames_for_compilation(filenames)
          ordered_filenames = T.let([], T::Array[String])

          while (remaining_filenames = filenames - ordered_filenames).any?
            ordered_filenames +=
              remaining_filenames
              .reject do |fn|
                unupdated_reqs = requirement_map[fn] - ordered_filenames
                unupdated_reqs.intersect?(filenames)
              end
          end

          ordered_filenames
        end

        def requirement_map
          child_req_regex = Uv::FileFetcher::CHILD_REQUIREMENT_REGEX
          @requirement_map ||=
            compile_files.each_with_object({}) do |file, req_map|
              paths = file.content.scan(child_req_regex).flatten
              current_dir = File.dirname(file.name)

              req_map[file.name] =
                paths.map do |path|
                  path = File.join(current_dir, path) if current_dir != "."
                  path = Pathname.new(path).cleanpath.to_path
                  path = path.gsub(/\.txt$/, ".in")
                  next if path == file.name

                  path
                end.uniq.compact
            end
        end
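
        # Worked example of the ordering: if dev.in requires test.in's output
        # and test.in requires base.in's output, requirement_map (after mapping
        # .txt references back to .in) looks like
        #
        #   { "base.in" => [], "test.in" => ["base.in"], "dev.in" => ["test.in"] }
        #
        # and order_filenames_for_compilation(%w(dev.in test.in base.in)) keeps
        # only "base.in" on the first pass (none of its requirements are still
        # waiting), then admits "test.in", then "dev.in":
        #
        #   # => ["base.in", "test.in", "dev.in"]
        #
        # (filenames are hypothetical.)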

        def python_requirement_parser
          @python_requirement_parser ||=
            FileParser::PythonRequirementParser.new(
              dependency_files: dependency_files
            )
        end

        def language_version_manager
          @language_version_manager ||=
            LanguageVersionManager.new(
              python_requirement_parser: python_requirement_parser
            )
        end

        def compile_files
          dependency_files.select { |f| f.name.end_with?(".in") }
        end

        def compiled_files
          dependency_files.select { |f| f.name.end_with?(".txt") }
        end
      end
      # rubocop:enable Metrics/ClassLength
    end
  end
end
data/lib/dependabot/uv/file_updater/pyproject_preparer.rb
@@ -0,0 +1,124 @@

# typed: true
# frozen_string_literal: true

require "toml-rb"

require "dependabot/dependency"
require "dependabot/uv/file_parser"
require "dependabot/uv/file_updater"
require "dependabot/uv/authed_url_builder"
require "dependabot/uv/name_normaliser"
require "securerandom"

module Dependabot
  module Uv
    class FileUpdater
      class PyprojectPreparer
        def initialize(pyproject_content:, lockfile: nil)
          @pyproject_content = pyproject_content
          @lockfile = lockfile
        end

        # For hosted Dependabot token will be nil since the credentials aren't present.
        # This is for those running Dependabot themselves and for dry-run.
        def add_auth_env_vars(credentials)
          TomlRB.parse(@pyproject_content).dig("tool", "poetry", "source")&.each do |source|
            cred = credentials&.find { |c| c["index-url"] == source["url"] }
            next unless cred

            token = cred.fetch("token", nil)
            next unless token && token.count(":") == 1

            arr = token.split(":")
            # https://python-poetry.org/docs/configuration/#using-environment-variables
            name = source["name"]&.upcase&.gsub(/\W/, "_")
            ENV["POETRY_HTTP_BASIC_#{name}_USERNAME"] = arr[0]
            ENV["POETRY_HTTP_BASIC_#{name}_PASSWORD"] = arr[1]
          end
        end
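
        # Example: for a pyproject.toml declaring
        #
        #   [[tool.poetry.source]]
        #   name = "private-repo"
        #   url = "https://pypi.example.org/simple"
        #
        # and a credential { "index-url" => "https://pypi.example.org/simple",
        # "token" => "deploy:s3cret" }, this sets
        #
        #   ENV["POETRY_HTTP_BASIC_PRIVATE_REPO_USERNAME"] = "deploy"
        #   ENV["POETRY_HTTP_BASIC_PRIVATE_REPO_PASSWORD"] = "s3cret"
        #
        # (source name, URL and token are hypothetical.)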

        def update_python_requirement(requirement)
          pyproject_object = TomlRB.parse(@pyproject_content)
          if (python_specification = pyproject_object.dig("tool", "poetry", "dependencies", "python"))
            python_req = Uv::Requirement.new(python_specification)
            unless python_req.satisfied_by?(requirement)
              pyproject_object["tool"]["poetry"]["dependencies"]["python"] = "~#{requirement}"
            end
          end
          TomlRB.dump(pyproject_object)
        end
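
        # Example: with `python = "3.10.*"` in pyproject.toml and a target
        # interpreter of 3.12, the existing constraint is not satisfied, so the
        # dumped TOML carries `python = "~3.12"` instead (versions illustrative,
        # assuming Uv::Requirement treats "3.10.*" as excluding 3.12).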

        def sanitize
          # {{ name }} syntax not allowed
          pyproject_content
            .gsub(/\{\{.*?\}\}/, "something")
            .gsub('#{', "{")
        end
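
        # Example: template placeholders that would otherwise break TOML
        # parsing are replaced with a neutral literal:
        #
        #   PyprojectPreparer.new(pyproject_content: 'name = "{{ cookiecutter.name }}"')
        #                    .sanitize
        #   # => 'name = "something"'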

        # rubocop:disable Metrics/PerceivedComplexity
        # rubocop:disable Metrics/AbcSize
        def freeze_top_level_dependencies_except(dependencies)
          return pyproject_content unless lockfile

          pyproject_object = TomlRB.parse(pyproject_content)
          poetry_object = pyproject_object["tool"]["poetry"]
          excluded_names = dependencies.map(&:name) + ["python"]

          Dependabot::Uv::FileParser::PyprojectFilesParser::POETRY_DEPENDENCY_TYPES.each do |key|
            next unless poetry_object[key]

            source_types = %w(directory file url)
            poetry_object.fetch(key).each do |dep_name, _|
              next if excluded_names.include?(normalise(dep_name))

              locked_details = locked_details(dep_name)

              next unless (locked_version = locked_details&.fetch("version"))

              next if source_types.include?(locked_details&.dig("source", "type"))

              if locked_details&.dig("source", "type") == "git"
                poetry_object[key][dep_name] = {
                  "git" => locked_details&.dig("source", "url"),
                  "rev" => locked_details&.dig("source", "reference")
                }
                subdirectory = locked_details&.dig("source", "subdirectory")
                poetry_object[key][dep_name]["subdirectory"] = subdirectory if subdirectory
              elsif poetry_object[key][dep_name].is_a?(Hash)
                poetry_object[key][dep_name]["version"] = locked_version
              elsif poetry_object[key][dep_name].is_a?(Array)
                # if it has multiple-constraints, locking to a single version is
                # going to result in a bad lockfile, ignore
                next
              else
                poetry_object[key][dep_name] = locked_version
              end
            end
          end

          TomlRB.dump(pyproject_object)
        end
        # rubocop:enable Metrics/AbcSize
        # rubocop:enable Metrics/PerceivedComplexity
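
        # Example: a locked git dependency is pinned to its exact revision. A
        # poetry.lock entry like
        #
        #   [[package]]
        #   name = "internal-lib"
        #   version = "1.2.0"
        #   [package.source]
        #   type = "git"
        #   url = "https://github.com/org/internal-lib"
        #   reference = "abc1234"
        #
        # rewrites the pyproject dependency to
        #
        #   internal-lib = { git = "https://github.com/org/internal-lib", rev = "abc1234" }
        #
        # (package name, URL and revision are hypothetical.)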

        private

        attr_reader :pyproject_content
        attr_reader :lockfile

        def locked_details(dep_name)
          parsed_lockfile.fetch("package")
                         .find { |d| d["name"] == normalise(dep_name) }
        end

        def normalise(name)
          NameNormaliser.normalise(name)
        end

        def parsed_lockfile
          @parsed_lockfile ||= TomlRB.parse(lockfile.content)
        end
      end
    end
  end
end