dependabot-uv 0.355.0 → 0.356.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 181d67bcde74a88513746bbda787661b2209f8846e3cb2be4e25dc103df5d6ed
- data.tar.gz: f19a7c8dbc8208cfdba09e5f5513b04895b6ff4eb32c5cba0690bb21e2a2db6d
+ metadata.gz: d86ed84b5d9b47a421b7274955257417ee58559cbb67eaa40577daf3860a6207
+ data.tar.gz: caaf52d0e3e41c6069068f4ff57dec63bc0875fecbef986c584dc91eb9d4305c
  SHA512:
- metadata.gz: 283cc516dddfee23f781f4ba93cc92d16eeac587f1f018ea13b53c04bc63c47c680dd9cbf4ffeab96b7edfe23ef0d560b355b3b2b8f52b4f0765e02d27d77c23
- data.tar.gz: a94975fe714eae7eadac990e4ce9c8d996ac4478b0016557245fb5fc2e5745591832f2098d1ca0383bcb7de025cf0e85b047aeefed2baaa8b602e90ac4d0431f
+ metadata.gz: bb666f33ce0bb0f7540ca5d071043e6540acae1bd95cb70d3a153ec3c573ce88535e82d352aedd90b2da1a71169fe04cd77a43920f4c2bab271e87e21bdc6069
+ data.tar.gz: 6c7e22a137622e1cbd5a2c0415b973da7d412e812ee785172c9ba2191b91788ff9de087703197a41457b58a67688baf0912f407cb9b5008b43b210020f986452
@@ -7,7 +7,7 @@ plette==2.1.0
  poetry==1.8.5
  # TODO: Replace 3p package `tomli` with 3.11's new stdlib `tomllib` once we drop support for Python 3.10.
  tomli==2.0.1
- uv==0.9.11
+ uv==0.9.18

  # Some dependencies will only install if Cython is present
  Cython==3.0.10
@@ -4,10 +4,11 @@
  require "toml-rb"
  require "sorbet-runtime"
  require "dependabot/dependency_file"
+ require "dependabot/uv/file_fetcher"

  module Dependabot
  module Uv
- class FileFetcher < Dependabot::FileFetchers::Base
+ class FileFetcher < Dependabot::Python::SharedFileFetcher
  class WorkspaceFetcher
  extend T::Sig

@@ -172,7 +173,7 @@ module Dependabot
  # Delegate methods to file_fetcher
  sig { params(path: T.nilable(T.any(Pathname, String))).returns(String) }
  def clean_path(path)
- @file_fetcher.send(:cleanpath, path)
+ @file_fetcher.send(:clean_path, path)
  end

  sig do
@@ -5,26 +5,19 @@ require "toml-rb"
  require "sorbet-runtime"

  require "dependabot/file_fetchers"
- require "dependabot/file_fetchers/base"
+ require "dependabot/python/file_fetcher"
  require "dependabot/uv"
- require "dependabot/uv/language_version_manager"
  require "dependabot/uv/requirements_file_matcher"
- require "dependabot/uv/requirement_parser"
- require "dependabot/uv/file_parser/pyproject_files_parser"
- require "dependabot/uv/file_parser/python_requirement_parser"
  require "dependabot/uv/file_fetcher/workspace_fetcher"
  require "dependabot/errors"
- require "dependabot/file_filtering"

  module Dependabot
  module Uv
- class FileFetcher < Dependabot::FileFetchers::Base # rubocop:disable Metrics/ClassLength
+ class FileFetcher < Dependabot::Python::SharedFileFetcher
  extend T::Sig
- extend T::Helpers

- CHILD_REQUIREMENT_REGEX = /^-r\s?(?<path>.*\.(?:txt|in))/
- CONSTRAINT_REGEX = /^-c\s?(?<path>.*\.(?:txt|in))/
- DEPENDENCY_TYPES = %w(packages dev-packages).freeze
+ ECOSYSTEM_SPECIFIC_FILES = T.let(%w(uv.lock).freeze, T::Array[String])
+
  REQUIREMENT_FILE_PATTERNS = T.let(
  {
  extensions: [".txt", ".in"],
@@ -36,88 +29,69 @@ module Dependabot
  # Projects that use README files for metadata may use any of these common names
  README_FILENAMES = T.let(%w(README.md README.rst README.txt README).freeze, T::Array[String])

- MAX_FILE_SIZE = 500_000
-
- sig { override.params(filenames: T::Array[String]).returns(T::Boolean) }
- def self.required_files_in?(filenames)
- return true if filenames.any? do |name|
- T.must(REQUIREMENT_FILE_PATTERNS[:extensions]).any? do |ext|
- name.end_with?(ext)
- end
- end
-
- # If there is a directory of requirements return true
- return true if filenames.include?("requirements")
+ # Type alias for path dependency hashes
+ PathDependency = T.type_alias { T::Hash[Symbol, String] }

- # If this repo is using pyproject.toml return true (uv.lock files require a pyproject.toml)
- filenames.include?("pyproject.toml")
+ sig { override.returns(T::Array[String]) }
+ def self.ecosystem_specific_required_files
+ # uv.lock is not a standalone required file - it requires pyproject.toml
+ []
  end

  sig { override.returns(String) }
  def self.required_files_message
- "Repo must contain a requirements.txt, uv.lock, requirements.in, or pyproject.toml" \
+ "Repo must contain a requirements.txt, uv.lock, requirements.in, or pyproject.toml"
  end

- sig { override.returns(T.nilable(T::Hash[Symbol, T.untyped])) }
- def ecosystem_versions
- # Hmm... it's weird that this calls file parser methods, but here we are in the file fetcher... for all
- # ecosystems our goal is to extract the user specified versions, so we'll need to do file parsing... so should
- # we move this `ecosystem_versions` metrics method to run in the file parser for all ecosystems? Downside is if
- # file parsing blows up, this metric isn't emitted, but reality is we have to parse anyway... as we want to know
- # the user-specified range of versions, not the version Dependabot chose to run.
- python_requirement_parser = FileParser::PythonRequirementParser.new(dependency_files: files)
- language_version_manager = LanguageVersionManager.new(python_requirement_parser: python_requirement_parser)
- Dependabot.logger.info("Dependabot is using Python version '#{language_version_manager.python_version}'.")
- {
- languages: {
- python: {
- # TODO: alternatively this could use `python_requirement_parser.user_specified_requirements` which
- # returns an array... which we could flip to return a hash of manifest name => version
- # string and then check for min/max versions... today it simply defaults to
- # array.first which seems rather arbitrary.
- "raw" => language_version_manager.user_specified_python_version || "unknown",
- "max" => language_version_manager.python_major_minor || "unknown"
- }
- }
- }
+ private
+
+ sig { override.returns(T::Array[Dependabot::DependencyFile]) }
+ def ecosystem_specific_files
+ files = []
+ files += readme_files
+ files += uv_lock_files
+ files += workspace_member_files
+ files
  end

- sig { override.returns(T::Array[DependencyFile]) }
- def fetch_files
- fetched_files = []
+ sig { override.returns(T::Array[Dependabot::DependencyFile]) }
+ def pyproject_files
+ [pyproject].compact
+ end

- fetched_files += pyproject_files
- # Fetch README support files if referenced in pyproject metadata
- fetched_files += readme_files
+ sig { override.returns(T::Array[T::Hash[Symbol, String]]) }
+ def path_dependencies
+ [
+ *requirement_txt_path_dependencies,
+ *requirement_in_path_dependencies,
+ *uv_sources_path_dependencies
+ ]
+ end

- fetched_files += requirements_in_files
- fetched_files += requirement_files if requirements_txt_files.any?
+ sig { override.returns(T::Array[String]) }
+ def additional_path_dependencies
+ []
+ end

- fetched_files += uv_lock_files
- fetched_files += project_files
- fetched_files += workspace_member_files
- fetched_files << python_version_file if python_version_file
+ sig { override.params(file: Dependabot::DependencyFile).returns(T::Boolean) }
+ def lockfile_for_compile_file?(file)
+ requirements_in_file_matcher.compiled_file?(file)
+ end

- uniques = uniq_files(fetched_files)
- filtered_files = uniques.reject do |file|
- Dependabot::FileFiltering.should_exclude_path?(file.name, "file from final collection", @exclude_paths)
- end
+ sig { override.params(path: String).returns(T::Array[Dependabot::DependencyFile]) }
+ def fetch_project_file(path)
+ project_files = []

- filtered_files
- end
+ path = clean_path(File.join(path, "pyproject.toml")) unless sdist_or_wheel?(path)

- private
+ return [] if path == "pyproject.toml" && pyproject

- sig { params(fetched_files: T::Array[Dependabot::DependencyFile]).returns(T::Array[Dependabot::DependencyFile]) }
- def uniq_files(fetched_files)
- uniq_files = fetched_files.reject(&:support_file?).uniq
- uniq_files += fetched_files
- .reject { |f| uniq_files.map(&:name).include?(f.name) }
- end
+ project_files << fetch_file_from_host(
+ path,
+ fetch_submodules: true
+ ).tap { |f| f.support_file = true }

- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def pyproject_files
- [pyproject].compact
+ project_files
  end

  sig { returns(T::Array[Dependabot::DependencyFile]) }
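The hunk above is the core of this release's refactor: fetching logic shared with the pip ecosystem now lives in Dependabot::Python::SharedFileFetcher, and the uv fetcher only overrides ecosystem-specific hooks such as ecosystem_specific_files, pyproject_files, and path_dependencies. A minimal, self-contained sketch of that template-method shape, in which the class names and stub return values are illustrative only and not the gem's real internals:

# A simplified stand-in for the shared base class: the common flow lives here,
# and ecosystem subclasses override the hooks.
class SharedFileFetcherSketch
  def fetch_files
    pyproject_files + ecosystem_specific_files
  end

  # Hooks overridden per ecosystem.
  def pyproject_files
    ["pyproject.toml"]
  end

  def ecosystem_specific_files
    []
  end
end

class UvFileFetcherSketch < SharedFileFetcherSketch
  # uv layers its own files (lockfile, READMEs, workspace members) on top.
  def ecosystem_specific_files
    ["uv.lock", "README.md"]
  end
end

UvFileFetcherSketch.new.fetch_files
# => ["pyproject.toml", "uv.lock", "README.md"]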
@@ -137,65 +111,18 @@ module Dependabot
  workspace_fetcher.send(:fetch_readme_files_for, directory, T.must(pyproject))
  end

- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def requirement_files
- [
- *requirements_txt_files,
- *child_requirement_txt_files,
- *constraints_files
- ]
- end
-
- sig { returns(T.nilable(Dependabot::DependencyFile)) }
- def python_version_file
- return @python_version_file if defined?(@python_version_file)
-
- @python_version_file = T.let(fetch_support_file(".python-version"), T.nilable(Dependabot::DependencyFile))
-
- return @python_version_file if @python_version_file
- return if [".", "/"].include?(directory)
-
- # Check the top-level for a .python-version file, too
- reverse_path = Pathname.new(directory[0]).relative_path_from(directory)
- @python_version_file =
- fetch_support_file(File.join(reverse_path, ".python-version"))
- &.tap { |f| f.name = ".python-version" }
- end
-
- sig { returns(T.nilable(Dependabot::DependencyFile)) }
- def pyproject
- return @pyproject if defined?(@pyproject)
-
- @pyproject = T.let(fetch_file_if_present("pyproject.toml"), T.nilable(Dependabot::DependencyFile))
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def requirements_txt_files
- req_txt_and_in_files.select { |f| f.name.end_with?(".txt") }
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def requirements_in_files
- req_txt_and_in_files.select { |f| f.name.end_with?(".in") } +
- child_requirement_in_files
- end
-
  sig { returns(T::Array[Dependabot::DependencyFile]) }
  def uv_lock_files
  req_txt_and_in_files.select { |f| f.name.end_with?("uv.lock") } +
  child_uv_lock_files
  end

- sig { returns(TomlContent) }
- def parsed_pyproject
- raise "No pyproject.toml" unless pyproject
-
- @parsed_pyproject ||= T.let(TomlRB.parse(T.must(pyproject).content), T.nilable(TomlContent))
- rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
- raise Dependabot::DependencyFileNotParseable, T.must(pyproject).path
+ sig { returns(T::Array[Dependabot::DependencyFile]) }
+ def child_uv_lock_files
+ child_requirement_files.select { |f| f.name.end_with?("uv.lock") }
  end

- sig { returns(T::Array[Dependabot::DependencyFile]) }
+ sig { override.returns(T::Array[Dependabot::DependencyFile]) }
  def req_txt_and_in_files
  return @req_txt_and_in_files if @req_txt_and_in_files

@@ -215,229 +142,6 @@ module Dependabot
  fetch_requirement_files_from_path(relative_reqs_dir)
  end

- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def child_requirement_txt_files
- child_requirement_files.select { |f| f.name.end_with?(".txt") }
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def child_requirement_in_files
- child_requirement_files.select { |f| f.name.end_with?(".in") }
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def child_uv_lock_files
- child_requirement_files.select { |f| f.name.end_with?("uv.lock") }
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def child_requirement_files
- @child_requirement_files ||= T.let(
- begin
- fetched_files = req_txt_and_in_files.dup
- req_txt_and_in_files.flat_map do |requirement_file|
- child_files = fetch_child_requirement_files(
- file: requirement_file,
- previously_fetched_files: fetched_files
- )
-
- fetched_files += child_files
- child_files
- end
- end,
- T.nilable(T::Array[Dependabot::DependencyFile])
- )
- end
-
- sig do
- params(
- file: Dependabot::DependencyFile,
- previously_fetched_files: T::Array[Dependabot::DependencyFile]
- ).returns(T::Array[Dependabot::DependencyFile])
- end
- def fetch_child_requirement_files(file:, previously_fetched_files:)
- content = file.content
- return [] if content.nil?
-
- paths = content.scan(CHILD_REQUIREMENT_REGEX).flatten
- current_dir = File.dirname(file.name)
-
- paths.flat_map do |path|
- path = File.join(current_dir, path) unless current_dir == "."
- path = cleanpath(path)
-
- next if previously_fetched_files.map(&:name).include?(path)
- next if file.name == path
-
- if Dependabot::Experiments.enabled?(:enable_exclude_paths_subdirectory_manifest_files) &&
- !@exclude_paths.empty? && Dependabot::FileFiltering.exclude_path?(path, @exclude_paths)
- raise Dependabot::DependencyFileNotEvaluatable,
- "Cannot process requirements: '#{file.name}' references excluded file '#{path}'. " \
- "Please either remove the reference from '#{file.name}' " \
- "or update your exclude_paths configuration."
- end
-
- fetched_file = fetch_file_from_host(path)
- grandchild_requirement_files = fetch_child_requirement_files(
- file: fetched_file,
- previously_fetched_files: previously_fetched_files + [file]
- )
- [fetched_file, *grandchild_requirement_files]
- end.compact
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def constraints_files
- all_requirement_files = requirements_txt_files +
- child_requirement_txt_files
-
- constraints_paths = all_requirement_files.map do |req_file|
- current_dir = File.dirname(req_file.name)
- content = req_file.content
- next [] if content.nil?
-
- paths = content.scan(CONSTRAINT_REGEX).flatten
-
- paths.map do |path|
- path = File.join(current_dir, path) unless current_dir == "."
- cleanpath(path)
- end
- end.flatten.uniq
-
- constraints_paths.map { |path| fetch_file_from_host(path) }
- end
-
- sig { returns(T::Array[Dependabot::DependencyFile]) }
- def project_files
- project_files = T.let([], T::Array[Dependabot::DependencyFile])
- unfetchable_deps = []
-
- path_dependencies.each do |dep|
- path = dep[:path]
- next if path.nil?
-
- project_files += fetch_project_file(path)
- rescue Dependabot::DependencyFileNotFound
- unfetchable_deps << "\"#{dep[:name]}\" at #{cleanpath(File.join(directory, dep[:file]))}"
- end
-
- raise Dependabot::PathDependenciesNotReachable, unfetchable_deps if unfetchable_deps.any?
-
- project_files
- end
-
- sig { params(path: String).returns(T::Array[Dependabot::DependencyFile]) }
- def fetch_project_file(path)
- project_files = []
-
- path = cleanpath(File.join(path, "pyproject.toml")) unless sdist_or_wheel?(path)
-
- return [] if path == "pyproject.toml" && pyproject
-
- project_files << fetch_file_from_host(
- path,
- fetch_submodules: true
- ).tap { |f| f.support_file = true }
-
- project_files
- end
-
- sig { params(path: String).returns(T::Boolean) }
- def sdist_or_wheel?(path)
- path.end_with?(".tar.gz", ".whl", ".zip")
- end
-
- sig { params(file: Dependabot::DependencyFile).returns(T::Boolean) }
- def requirements_file?(file)
- return false unless file.content&.valid_encoding?
- return true if file.name.match?(/requirements/x)
-
- T.must(file.content).lines.all? do |line|
- next true if line.strip.empty?
- next true if line.strip.start_with?("#", "-r ", "-c ", "-e ", "--")
-
- line.match?(RequirementParser::VALID_REQ_TXT_REQUIREMENT)
- end
- end
-
- sig { returns(T::Array[PathDependency]) }
- def path_dependencies
- [
- *requirement_txt_path_dependencies,
- *requirement_in_path_dependencies,
- *uv_sources_path_dependencies
- ]
- end
-
- sig { returns(T::Array[PathDependency]) }
- def requirement_txt_path_dependencies
- (requirements_txt_files + child_requirement_txt_files)
- .map { |req_file| parse_requirement_path_dependencies(req_file) }
- .flatten.uniq { |dep| dep[:path] }
- end
-
- sig { returns(T::Array[PathDependency]) }
- def requirement_in_path_dependencies
- requirements_in_files
- .map { |req_file| parse_requirement_path_dependencies(req_file) }
- .flatten.uniq { |dep| dep[:path] }
- end
-
- sig { params(req_file: Dependabot::DependencyFile).returns(T::Array[PathDependency]) }
- def parse_requirement_path_dependencies(req_file)
- # If this is a pip-compile lockfile, rely on whatever path dependencies we found in the main manifest
- return [] if requirements_in_file_matcher.compiled_file?(req_file)
-
- content = T.must(req_file.content)
- uneditable_reqs = parse_uneditable_requirements(content, req_file.name)
- editable_reqs = parse_editable_requirements(content, req_file.name)
-
- uneditable_reqs + editable_reqs
- end
-
- sig { params(content: String, file_name: String).returns(T::Array[PathDependency]) }
- def parse_uneditable_requirements(content, file_name)
- content
- .scan(/(?<name>^['"]?(?:file:)?(?<path>\..*?)(?=\[|#|'|"|$))/)
- .filter_map { |match_data| process_requirement_match(T.cast(match_data, T::Array[String]), file_name, false) }
- end
-
- sig { params(content: String, file_name: String).returns(T::Array[PathDependency]) }
- def parse_editable_requirements(content, file_name)
- content
- .scan(/(?<name>^(?:-e)\s+['"]?(?:file:)?(?<path>.*?)(?=\[|#|'|"|$))/)
- .filter_map { |match_data| process_requirement_match(T.cast(match_data, T::Array[String]), file_name, true) }
- end
-
- sig do
- params(
- match_data: T::Array[String],
- file_name: String,
- editable: T::Boolean
- ).returns(T.nilable(PathDependency))
- end
- def process_requirement_match(match_data, file_name, editable)
- name, path = match_data
- return nil if name.nil? || path.nil?
- return nil if path.include?("://")
- return nil if editable && path.include?("git@")
-
- { name: name.strip, path: path.strip, file: file_name }
- end
-
- sig { params(path: String).returns(String) }
- def cleanpath(path)
- Pathname.new(path).cleanpath.to_path
- end
-
- sig { returns(Dependabot::Uv::RequiremenstFileMatcher) }
- def requirements_in_file_matcher
- @requirements_in_file_matcher ||= T.let(
- RequiremenstFileMatcher.new(requirements_in_files),
- T.nilable(Dependabot::Uv::RequiremenstFileMatcher)
- )
- end
-
  sig { returns(T::Array[PathDependency]) }
  def uv_sources_path_dependencies
  return [] unless pyproject
@@ -461,6 +165,14 @@ module Dependabot
  workspace_fetcher.uv_sources_workspace_dependencies
  end

+ sig { returns(Dependabot::Uv::RequiremenstFileMatcher) }
+ def requirements_in_file_matcher
+ @requirements_in_file_matcher ||= T.let(
+ RequiremenstFileMatcher.new(requirements_in_files),
+ T.nilable(Dependabot::Uv::RequiremenstFileMatcher)
+ )
+ end
+
  sig { params(path: T.nilable(T.any(Pathname, String))).returns(T::Array[Dependabot::DependencyFile]) }
  def fetch_requirement_files_from_path(path = nil)
  contents = path ? repo_contents(dir: path) : repo_contents
@@ -610,7 +610,7 @@ module Dependabot

  sig { returns(T::Hash[String, T::Array[String]]) }
  def requirement_map
- child_req_regex = Uv::FileFetcher::CHILD_REQUIREMENT_REGEX
+ child_req_regex = Python::SharedFileFetcher::CHILD_REQUIREMENT_REGEX
  @requirement_map ||= T.let(
  compile_files.each_with_object({}) do |file, req_map|
  paths = T.must(file.content).scan(child_req_regex).flatten
@@ -314,13 +314,13 @@ module Dependabot
  command = "pyenv exec uv lock --upgrade-package #{package_spec} #{options}"
  fingerprint = "pyenv exec uv lock --upgrade-package <dependency_name> #{options_fingerprint}"

- run_command(command, fingerprint:)
+ run_command(command, fingerprint: fingerprint, env: explicit_index_env_vars)
  end

- sig { params(command: String, fingerprint: T.nilable(String)).returns(String) }
- def run_command(command, fingerprint: nil)
+ sig { params(command: String, fingerprint: T.nilable(String), env: T::Hash[String, String]).returns(String) }
+ def run_command(command, fingerprint: nil, env: {})
  Dependabot.logger.info("Running command: #{command}")
- SharedHelpers.run_shell_command(command, fingerprint: fingerprint)
+ SharedHelpers.run_shell_command(command, fingerprint: fingerprint, env: env)
  end

  sig { params(pyproject_content: String).returns(Integer) }
@@ -373,6 +373,7 @@ module Dependabot
  def lock_index_options
  credentials
  .select { |cred| cred["type"] == "python_index" }
+ .reject { |cred| explicit_index?(cred) }
  .map do |cred|
  authed_url = AuthedUrlBuilder.authed_url(credential: cred)

@@ -384,6 +385,86 @@ module Dependabot
  end
  end

+ sig { params(credential: Dependabot::Credential).returns(T::Boolean) }
+ def explicit_index?(credential)
+ return false if credential.replaces_base?
+
+ cred_url = normalize_index_url(credential["index-url"].to_s)
+ uv_indices.any? do |_name, config|
+ config["explicit"] == true && normalize_index_url(config["url"].to_s) == cred_url
+ end
+ end
+
+ sig { params(url: String).returns(String) }
+ def normalize_index_url(url)
+ url.chomp("/")
+ end
+
+ sig { returns(T::Hash[String, T::Hash[String, T.untyped]]) }
+ def uv_indices
+ @uv_indices ||= T.let(parse_uv_indices, T.nilable(T::Hash[String, T::Hash[String, T.untyped]]))
+ end
+
+ sig { returns(T::Hash[String, T::Hash[String, T.untyped]]) }
+ def parse_uv_indices
+ return {} unless pyproject&.content
+
+ parsed = TomlRB.parse(T.must(pyproject).content)
+ indices = parsed.dig("tool", "uv", "index")
+ return {} unless indices.is_a?(Array)
+
+ indices.each_with_object({}) do |index, result|
+ name = index["name"]
+ next unless name
+
+ result[name] = {
+ "url" => index["url"],
+ "explicit" => index["explicit"] == true
+ }
+ end
+ rescue TomlRB::ParseError
+ {}
+ end
+
+ # For hosted Dependabot, token will be nil since the credentials aren't present
+ # (the proxy handles authentication). This is for those running Dependabot
+ # themselves and for dry-run.
+ sig { returns(T::Hash[String, String]) }
+ def explicit_index_env_vars
+ env_vars = {}
+
+ credentials
+ .select { |cred| cred["type"] == "python_index" }
+ .select { |cred| explicit_index?(cred) }
+ .each do |cred|
+ index_name = find_index_name_for_credential(cred)
+ next unless index_name
+
+ env_name = index_name.upcase.gsub(/[^A-Z0-9]/, "_")
+
+ env_vars["UV_INDEX_#{env_name}_USERNAME"] = cred["username"] if cred["username"]
+
+ if cred["password"]
+ env_vars["UV_INDEX_#{env_name}_PASSWORD"] = cred["password"]
+ elsif cred["token"]
+ env_vars["UV_INDEX_#{env_name}_PASSWORD"] = cred["token"]
+ end
+ end
+
+ env_vars
+ end
+
+ sig { params(credential: Dependabot::Credential).returns(T.nilable(String)) }
+ def find_index_name_for_credential(credential)
+ cred_url = normalize_index_url(credential["index-url"].to_s)
+
+ uv_indices.each do |name, config|
+ return name if normalize_index_url(config["url"].to_s) == cred_url
+ end
+
+ nil
+ end
+
  sig { params(options: String).returns(String) }
  def lock_options_fingerprint(options)
  options.sub(
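
The largest addition in this final hunk teaches the lockfile updater about explicit uv indexes: a [[tool.uv.index]] entry marked explicit = true is dropped from the --index options, and its credential is instead exported as UV_INDEX_<NAME>_USERNAME / UV_INDEX_<NAME>_PASSWORD environment variables for the uv lock subprocess, following uv's convention for per-index authentication. A rough, self-contained sketch of that mapping, using a made-up index name and credential rather than the gem's real classes:

require "toml-rb"

# Hypothetical pyproject.toml declaring an explicit index (illustrative values).
pyproject = <<~TOML
  [[tool.uv.index]]
  name = "internal-pypi"
  url = "https://pypi.example.com/simple"
  explicit = true
TOML

# A python_index credential as Dependabot might receive it (also illustrative).
credential = {
  "type" => "python_index",
  "index-url" => "https://pypi.example.com/simple/",
  "username" => "bot",
  "password" => "s3cret"
}

indices = TomlRB.parse(pyproject).dig("tool", "uv", "index") || []

env_vars = {}
indices.each do |index|
  next unless index["explicit"] == true
  # Match the credential to the index URL, ignoring a trailing slash,
  # as normalize_index_url does above.
  next unless index["url"].to_s.chomp("/") == credential["index-url"].to_s.chomp("/")

  # Upcase the index name and replace non-alphanumerics with underscores,
  # mirroring explicit_index_env_vars above.
  env_name = index["name"].upcase.gsub(/[^A-Z0-9]/, "_")
  env_vars["UV_INDEX_#{env_name}_USERNAME"] = credential["username"]
  env_vars["UV_INDEX_#{env_name}_PASSWORD"] = credential["password"] || credential["token"]
end

env_vars
# => {"UV_INDEX_INTERNAL_PYPI_USERNAME"=>"bot", "UV_INDEX_INTERNAL_PYPI_PASSWORD"=>"s3cret"}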