dependabot-uv 0.332.0 → 0.333.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/dependabot/uv/authed_url_builder.rb +8 -3
- data/lib/dependabot/uv/file_fetcher.rb +16 -2
- data/lib/dependabot/uv/file_parser/pyproject_files_parser.rb +1 -0
- data/lib/dependabot/uv/file_parser/python_requirement_parser.rb +39 -16
- data/lib/dependabot/uv/file_parser/setup_file_parser.rb +1 -0
- data/lib/dependabot/uv/file_updater/compile_file_updater.rb +149 -70
- data/lib/dependabot/uv/file_updater/lock_file_updater.rb +3 -2
- data/lib/dependabot/uv/file_updater/requirement_file_updater.rb +8 -8
- data/lib/dependabot/uv/file_updater/requirement_replacer.rb +61 -24
- data/lib/dependabot/uv/file_updater.rb +2 -2
- data/lib/dependabot/uv/language.rb +1 -0
- data/lib/dependabot/uv/metadata_finder.rb +41 -10
- data/lib/dependabot/uv/package/package_registry_finder.rb +116 -61
- data/lib/dependabot/uv/requirement.rb +28 -19
- data/lib/dependabot/uv/update_checker/lock_file_resolver.rb +26 -2
- data/lib/dependabot/uv/update_checker/pip_compile_version_resolver.rb +133 -54
- data/lib/dependabot/uv/update_checker/pip_version_resolver.rb +58 -22
- data/lib/dependabot/uv/update_checker/requirements_updater.rb +79 -31
- data/lib/dependabot/uv/update_checker.rb +120 -36
- data/lib/dependabot/uv/version.rb +22 -14
- metadata +6 -6
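Nearly every file shown below follows the same refactor: the Sorbet sigil is tightened (typed: strict or typed: strong), sorbet-runtime is required, methods gain sig blocks, and instance variables are declared with T.let. The sketch that follows is a minimal, hypothetical illustration of that pattern; the class and method names are invented and are not taken from the gem.

# typed: strict
# frozen_string_literal: true

require "sorbet-runtime"

# Hypothetical example of the typing pattern applied across this release:
# extend T::Sig, sig blocks on methods, and T.let declarations for ivars.
class ExampleReplacer
  extend T::Sig

  sig { params(content: String, requirement: T.nilable(String)).void }
  def initialize(content:, requirement: nil)
    # Under "# typed: strict", instance variables need declared types.
    @content = T.let(content, String)
    @requirement = T.let(requirement, T.nilable(String))
  end

  sig { returns(String) }
  def updated_content
    # Nilable values are handled explicitly before use.
    @content + (@requirement || "")
  end
end

The per-file hunks below show the same moves applied to the real classes.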
data/lib/dependabot/uv/file_updater/requirement_replacer.rb

@@ -1,6 +1,8 @@
-# typed:
+# typed: strict
 # frozen_string_literal: true
 
+require "sorbet-runtime"
+
 require "dependabot/dependency"
 require "dependabot/uv/requirement_parser"
 require "dependabot/uv/file_updater"
@@ -12,20 +14,33 @@ module Dependabot
   module Uv
     class FileUpdater
       class RequirementReplacer
+        extend T::Sig
+
         PACKAGE_NOT_FOUND_ERROR = "PackageNotFoundError"
 
         CERTIFICATE_VERIFY_FAILED = /CERTIFICATE_VERIFY_FAILED/
 
+        sig do
+          params(
+            content: String,
+            dependency_name: String,
+            old_requirement: T.nilable(String),
+            new_requirement: T.nilable(String),
+            new_hash_version: T.nilable(String),
+            index_urls: T.nilable(T::Array[T.nilable(String)])
+          ).void
+        end
         def initialize(content:, dependency_name:, old_requirement:,
                        new_requirement:, new_hash_version: nil, index_urls: nil)
-          @content = content
-          @dependency_name = normalise(dependency_name)
-          @old_requirement = old_requirement
-          @new_requirement = new_requirement
-          @new_hash_version = new_hash_version
-          @index_urls = index_urls
+          @content = T.let(content, String)
+          @dependency_name = T.let(normalise(dependency_name), String)
+          @old_requirement = T.let(old_requirement, T.nilable(String))
+          @new_requirement = T.let(new_requirement, T.nilable(String))
+          @new_hash_version = T.let(new_hash_version, T.nilable(String))
+          @index_urls = T.let(index_urls, T.nilable(T::Array[T.nilable(String)]))
         end
 
+        sig { returns(String) }
        def updated_content
          updated_content =
            content.gsub(original_declaration_replacement_regex) do |mtch|
@@ -43,40 +58,52 @@ module Dependabot
 
        private
 
+        sig { returns(String) }
        attr_reader :content
+
+        sig { returns(String) }
        attr_reader :dependency_name
+
+        sig { returns(T.nilable(String)) }
        attr_reader :old_requirement
+
+        sig { returns(T.nilable(String)) }
        attr_reader :new_requirement
+
+        sig { returns(T.nilable(String)) }
        attr_reader :new_hash_version
 
+        sig { returns(T::Boolean) }
        def update_hashes?
          !new_hash_version.nil?
        end
 
+        sig { returns(T.nilable(String)) }
        def updated_requirement_string
          new_req_string = new_requirement
 
-          new_req_string = new_req_string.gsub(/,\s*/, ", ") if add_space_after_commas?
+          new_req_string = new_req_string&.gsub(/,\s*/, ", ") if add_space_after_commas?
 
          if add_space_after_operators?
            new_req_string =
              new_req_string
-              .gsub(/(#{RequirementParser::COMPARISON})\s*(?=\d)/o, '\1 ')
+              &.gsub(/(#{RequirementParser::COMPARISON})\s*(?=\d)/o, '\1 ')
          end
 
          new_req_string
        end
 
+        sig { returns(String) }
        def updated_dependency_declaration_string
          old_req = old_requirement
          updated_string =
            if old_req
              original_dependency_declaration_string(old_req)
-                .sub(RequirementParser::REQUIREMENTS, updated_requirement_string)
+                .sub(RequirementParser::REQUIREMENTS, updated_requirement_string || "")
            else
              original_dependency_declaration_string(old_req)
                .sub(RequirementParser::NAME_WITH_EXTRAS) do |nm|
-                  nm + updated_requirement_string
+                  nm + (updated_requirement_string || "")
                end
            end
 
@@ -88,59 +115,65 @@ module Dependabot
              name: dependency_name,
              version: new_hash_version,
              algorithm: hash_algorithm(old_req)
-            ).join(hash_separator(old_req))
+            ).join(hash_separator(old_req) || "")
          )
        end
 
+        sig { returns(T::Boolean) }
        def add_space_after_commas?
          original_dependency_declaration_string(old_requirement)
            .match(RequirementParser::REQUIREMENTS)
            .to_s.include?(", ")
        end
 
+        sig { returns(T::Boolean) }
        def add_space_after_operators?
          original_dependency_declaration_string(old_requirement)
            .match(RequirementParser::REQUIREMENTS)
            .to_s.match?(/#{RequirementParser::COMPARISON}\s+\d/o)
        end
 
+        sig { returns(Regexp) }
        def original_declaration_replacement_regex
          original_string =
            original_dependency_declaration_string(old_requirement)
          /(?<![\-\w\.\[])#{Regexp.escape(original_string)}(?![\-\w\.])/
        end
 
+        sig { params(requirement: T.nilable(String)).returns(T::Boolean) }
        def requirement_includes_hashes?(requirement)
          original_dependency_declaration_string(requirement)
            .match?(RequirementParser::HASHES)
        end
 
+        sig { params(requirement: T.nilable(String)).returns(T.nilable(String)) }
        def hash_algorithm(requirement)
          return unless requirement_includes_hashes?(requirement)
 
-          original_dependency_declaration_string(requirement)
-            .match(RequirementParser::HASHES)
-            .named_captures.fetch("algorithm")
+          matches = original_dependency_declaration_string(requirement).match(RequirementParser::HASHES)
+          return unless matches
+
+          matches.named_captures.fetch("algorithm")
        end
 
+        sig { params(requirement: T.nilable(String)).returns(T.nilable(String)) }
        def hash_separator(requirement)
          return unless requirement_includes_hashes?(requirement)
 
          hash_regex = RequirementParser::HASH
-          current_separator =
-            original_dependency_declaration_string(requirement)
-            .match(/#{hash_regex}((?<separator>\s*\\?\s*?)#{hash_regex})*/)
-            .named_captures.fetch("separator")
+          match_result = original_dependency_declaration_string(requirement)
+                         .match(/#{hash_regex}((?<separator>\s*\\?\s*?)#{hash_regex})*/)
+          current_separator = match_result&.named_captures&.fetch("separator", nil)
 
-          default_separator =
-            original_dependency_declaration_string(requirement)
-            .match(RequirementParser::HASH)
-            .pre_match.match(/(?<separator>\s*\\?\s*?)\z/)
-            .named_captures.fetch("separator")
+          default_match = original_dependency_declaration_string(requirement)
+                          .match(RequirementParser::HASH)
+          default_separator = default_match&.pre_match&.match(/(?<separator>\s*\\?\s*?)\z/)
+                              &.named_captures&.fetch("separator", nil)
 
          current_separator || default_separator
        end
 
+        sig { params(name: String, version: T.nilable(String), algorithm: T.nilable(String)).returns(T::Array[String]) }
        def package_hashes_for(name:, version:, algorithm:)
          index_urls = @index_urls || [nil]
 
@@ -168,6 +201,7 @@ module Dependabot
          raise Dependabot::DependencyFileNotResolvable, "Unable to find hashes for package #{name}"
        end
 
+        sig { params(old_req: T.nilable(String)).returns(String) }
        def original_dependency_declaration_string(old_req)
          matches = []
 
@@ -189,10 +223,12 @@ module Dependabot
          dec.to_s.strip
        end
 
+        sig { params(name: String).returns(String) }
        def normalise(name)
          NameNormaliser.normalise(name)
        end
 
+        sig { params(req1: T.nilable(String), req2: T.nilable(String)).returns(T::Boolean) }
        def requirements_match(req1, req2)
          req1&.split(",")&.map { |r| r.gsub(/\s/, "") }&.sort ==
            req2&.split(",")&.map { |r| r.gsub(/\s/, "") }&.sort
@@ -200,6 +236,7 @@ module Dependabot
 
        public
 
+        sig { params(error: Exception).void }
        def requirement_error_handler(error)
          Dependabot.logger.warn(error.message)
 
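Going by the new signature, driving the replacer now looks roughly like this; the requirements content, package name, and version specifiers below are made-up values, and the printed result is only the expected shape rather than verified output.

require "dependabot/uv/file_updater/requirement_replacer"

# Hypothetical values; the keyword arguments mirror the sig shown in the diff.
replacer = Dependabot::Uv::FileUpdater::RequirementReplacer.new(
  content: "requests==2.31.0\n",   # made-up requirements file content
  dependency_name: "requests",
  old_requirement: "==2.31.0",
  new_requirement: "==2.32.3"
)

puts replacer.updated_content      # expected to print the content with "requests==2.32.3"

The hunks that follow (typed: strong and the widened pip_compile_index_urls signature) start a separate file's diff, which appears to belong to the file updater.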
@@ -1,4 +1,4 @@
-# typed:
+# typed: strong
 # frozen_string_literal: true
 
 require "toml-rb"
@@ -78,7 +78,7 @@ module Dependabot
        ).updated_dependency_files
      end
 
-      sig { returns(T::Array[String]) }
+      sig { returns(T::Array[T.nilable(String)]) }
      def pip_compile_index_urls
        if credentials.any?(&:replaces_base?)
          credentials.select(&:replaces_base?).map { |cred| AuthedUrlBuilder.authed_url(credential: cred) }
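The widened element type, T::Array[T.nilable(String)], lines up with the replacer's fallback index_urls = @index_urls || [nil]: a nil entry appears to stand for the default registry. Below is a hypothetical sketch of that convention; the module, method, and default constant are invented, not gem code.

require "sorbet-runtime"

# Hypothetical helper illustrating why index-URL arrays are typed as
# T::Array[T.nilable(String)]: a nil entry is treated as "use the default index".
module ExampleIndexes
  extend T::Sig

  DEFAULT_INDEX = "https://pypi.org/simple/" # assumption: mirrors PYPI_BASE_URL

  sig { params(index_urls: T::Array[T.nilable(String)]).returns(T::Array[String]) }
  def self.resolve(index_urls)
    index_urls.map { |url| url || DEFAULT_INDEX }
  end
end

ExampleIndexes.resolve([nil, "https://private.example/simple/"])
# => ["https://pypi.org/simple/", "https://private.example/simple/"]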
data/lib/dependabot/uv/language.rb

@@ -11,6 +11,7 @@ module Dependabot
 
     class Language < Dependabot::Ecosystem::VersionManager
       extend T::Sig
+
       # This list must match the versions specified at the top of `uv/Dockerfile`
       # ARG PY_3_13=3.13.2
       # When updating this list, also update python/lib/dependabot/python/language.rb
data/lib/dependabot/uv/metadata_finder.rb

@@ -1,7 +1,8 @@
-# typed:
+# typed: strict
 # frozen_string_literal: true
 
 require "excon"
+require "sorbet-runtime"
 require "uri"
 
 require "dependabot/metadata_finders"
@@ -13,8 +14,26 @@ require "dependabot/uv/name_normaliser"
 module Dependabot
   module Uv
     class MetadataFinder < Dependabot::MetadataFinders::Base
+      extend T::Sig
+
       MAIN_PYPI_URL = "https://pypi.org/pypi"
 
+      sig do
+        params(
+          dependency: Dependabot::Dependency,
+          credentials: T::Array[Dependabot::Credential]
+        )
+          .void
+      end
+      def initialize(dependency:, credentials:)
+        super
+        @pypi_listing = T.let(nil, T.nilable(T::Hash[String, T.untyped]))
+        @source_from_description = T.let(nil, T.nilable(String))
+        @source_from_homepage = T.let(nil, T.nilable(String))
+        @homepage_response = T.let(nil, T.nilable(Excon::Response))
+      end
+
+      sig { returns(T.nilable(String)) }
      def homepage_url
        pypi_listing.dig("info", "home_page") ||
          pypi_listing.dig("info", "project_urls", "Homepage") ||
@@ -24,6 +43,7 @@ module Dependabot
 
      private
 
+      sig { override.returns(T.nilable(Dependabot::Source)) }
      def look_up_source
        potential_source_urls = [
          pypi_listing.dig("info", "project_urls", "Source"),
@@ -44,6 +64,7 @@ module Dependabot
      end
 
      # rubocop:disable Metrics/PerceivedComplexity
+      sig { returns(T.nilable(String)) }
      def source_from_description
        potential_source_urls = []
        desc = pypi_listing.dig("info", "description")
@@ -64,7 +85,7 @@ module Dependabot
 
        # Failing that, look for a source where the full dependency name is
        # mentioned when the link is followed
-        @source_from_description ||=
+        @source_from_description ||= T.let(
          potential_source_urls.find do |url|
            full_url = Source.from_url(url)&.url
            next unless full_url
@@ -73,16 +94,19 @@ module Dependabot
            next unless response.status == 200
 
            response.body.include?(normalised_dependency_name)
-          end
+          end, T.nilable(String)
+        )
      end
      # rubocop:enable Metrics/PerceivedComplexity
 
      # rubocop:disable Metrics/PerceivedComplexity
+      sig { returns(T.nilable(String)) }
      def source_from_homepage
-        return unless homepage_body
+        homepage_body_local = homepage_body
+        return unless homepage_body_local
 
        potential_source_urls = []
-        homepage_body.scan(Source::SOURCE_REGEX) do
+        homepage_body_local.scan(Source::SOURCE_REGEX) do
          potential_source_urls << Regexp.last_match.to_s
        end
 
@@ -93,7 +117,7 @@ module Dependabot
 
        return match_url if match_url
 
-        @source_from_homepage ||=
+        @source_from_homepage ||= T.let(
          potential_source_urls.find do |url|
            full_url = Source.from_url(url)&.url
            next unless full_url
@@ -102,10 +126,12 @@ module Dependabot
            next unless response.status == 200
 
            response.body.include?(normalised_dependency_name)
-          end
+          end, T.nilable(String)
+        )
      end
      # rubocop:enable Metrics/PerceivedComplexity
 
+      sig { returns(T.nilable(String)) }
      def homepage_body
        homepage_url = pypi_listing.dig("info", "home_page")
 
@@ -115,19 +141,21 @@ module Dependabot
          "pypi.python.org"
        ].include?(URI(homepage_url).host)
 
-        @homepage_response ||=
+        @homepage_response ||= T.let(
          begin
            Dependabot::RegistryClient.get(url: homepage_url)
          rescue Excon::Error::Timeout, Excon::Error::Socket,
                 Excon::Error::TooManyRedirects, ArgumentError
            nil
-          end
+          end, T.nilable(Excon::Response)
+        )
 
        return unless @homepage_response&.status == 200
 
-        @homepage_response.body
+        @homepage_response&.body
      end
 
+      sig { returns(T::Hash[String, T.untyped]) }
      def pypi_listing
        return @pypi_listing unless @pypi_listing.nil?
        return @pypi_listing = {} if dependency.version&.include?("+")
@@ -147,6 +175,7 @@ module Dependabot
        @pypi_listing = {} # No listing found
      end
 
+      sig { params(url: String).returns(Excon::Response) }
      def fetch_authed_url(url)
        if url.match(%r{(.*)://(.*?):(.*)@([^@]+)$}) &&
           Regexp.last_match&.captures&.[](1)&.include?("@")
@@ -164,6 +193,7 @@ module Dependabot
        end
      end
 
+      sig { returns(T::Array[String]) }
      def possible_listing_urls
        credential_urls =
          credentials
@@ -176,6 +206,7 @@ module Dependabot
      end
 
      # Strip [extras] from name (dependency_name[extra_dep,other_extra])
+      sig { returns(String) }
      def normalised_dependency_name
        NameNormaliser.normalise(dependency.name)
      end
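The MetadataFinder changes show the memoisation idiom that typed: strict forces: lazily cached instance variables are pre-declared with T.let(nil, T.nilable(...)) in the constructor, and the later ||= assignment is wrapped in T.let as well. A minimal, hypothetical sketch of the idiom follows; ExampleFinder and its lookup method are invented, not gem code.

# typed: strict
# frozen_string_literal: true

require "sorbet-runtime"

# Hypothetical illustration of the memoisation pattern used above.
class ExampleFinder
  extend T::Sig

  sig { void }
  def initialize
    # Declare the cache ivar up front so strict typing knows its type.
    @listing = T.let(nil, T.nilable(String))
  end

  sig { returns(T.nilable(String)) }
  def listing
    # The memoised assignment is itself wrapped in T.let.
    @listing ||= T.let(expensive_lookup, T.nilable(String))
  end

  private

  sig { returns(T.nilable(String)) }
  def expensive_lookup
    "https://pypi.org/pypi" # stand-in for a network call
  end
end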
data/lib/dependabot/uv/package/package_registry_finder.rb

@@ -1,6 +1,11 @@
-# typed:
+# typed: strict
 # frozen_string_literal: true
 
+require "toml-rb"
+require "sorbet-runtime"
+require "dependabot/dependency"
+require "dependabot/dependency_file"
+require "dependabot/credential"
 require "dependabot/uv/update_checker"
 require "dependabot/uv/authed_url_builder"
 require "dependabot/errors"
@@ -10,13 +15,23 @@ module Dependabot
     module Package
       class PackageRegistryFinder
         extend T::Sig
+
         PYPI_BASE_URL = "https://pypi.org/simple/"
         ENVIRONMENT_VARIABLE_REGEX = /\$\{.+\}/
 
+        UrlsHash = T.type_alias { { main: T.nilable(String), extra: T::Array[String] } }
+
+        sig do
+          params(
+            dependency_files: T::Array[Dependabot::DependencyFile],
+            credentials: T::Array[Dependabot::Credential],
+            dependency: Dependabot::Dependency
+          ).void
+        end
        def initialize(dependency_files:, credentials:, dependency:)
-          @dependency_files = dependency_files
-          @credentials = credentials
-          @dependency = dependency
+          @dependency_files = T.let(dependency_files, T::Array[Dependabot::DependencyFile])
+          @credentials = T.let(credentials, T::Array[Dependabot::Credential])
+          @dependency = T.let(dependency, Dependabot::Dependency)
        end
 
        sig { returns(T::Array[String]) }
@@ -42,9 +57,13 @@ module Dependabot
 
        private
 
+        sig { returns(T::Array[Dependabot::DependencyFile]) }
        attr_reader :dependency_files
+
+        sig { returns(T::Array[Dependabot::Credential]) }
        attr_reader :credentials
 
+        sig { returns(String) }
        def main_index_url
          url =
            config_variable_index_urls[:main] ||
@@ -57,92 +76,124 @@ module Dependabot
          clean_check_and_remove_environment_variables(url)
        end
 
+        sig { returns(UrlsHash) }
        def requirement_file_index_urls
-          urls = { main: nil, extra: [] }
+          urls = T.let({ main: nil, extra: [] }, UrlsHash)
 
          requirements_files.each do |file|
-
-
-
-
+            content = file.content
+            next unless content
+
+            if content.match?(/^--index-url\s+['"]?([^\s'"]+)['"]?/)
+              match_result = content.match(/^--index-url\s+['"]?([^\s'"]+)['"]?/)
+              urls[:main] = match_result&.captures&.first&.strip
            end
-            urls[:extra]
-
-
-
-
+            extra_urls = urls[:extra]
+            extra_urls +=
+              content
+              .scan(/^--extra-index-url\s+['"]?([^\s'"]+)['"]?/)
+              .flatten
+              .map(&:strip)
+            urls[:extra] = extra_urls
          end
 
          urls
        end
 
+        sig { returns(UrlsHash) }
        def pip_conf_index_urls
-          urls = { main: nil, extra: [] }
+          urls = T.let({ main: nil, extra: [] }, UrlsHash)
 
          return urls unless pip_conf
 
-
+          pip_conf_file = pip_conf
+          return urls unless pip_conf_file
+
+          content = pip_conf_file.content
+          return urls unless content
 
          if content.match?(/^index-url\s*=/x)
-
-
+            match_result = content.match(/^index-url\s*=\s*(.+)/)
+            urls[:main] = match_result&.captures&.first
          end
-          urls[:extra]
+          extra_urls = urls[:extra]
+          extra_urls += content.scan(/^extra-index-url\s*=(.+)/).flatten
+          urls[:extra] = extra_urls
 
          urls
        end
 
+        sig { returns(UrlsHash) }
        def pipfile_index_urls
-          urls = { main: nil, extra: [] }
+          urls = T.let({ main: nil, extra: [] }, UrlsHash)
+          begin
+            return urls unless pipfile
 
-
+            pipfile_file = pipfile
+            return urls unless pipfile_file
 
-
+            content = pipfile_file.content
+            return urls unless content
 
-
+            pipfile_object = TomlRB.parse(content)
 
-
-            urls[:extra] << source.fetch("url") if source["url"]
-          end
-          urls[:extra] = urls[:extra].uniq
+            urls[:main] = pipfile_object["source"]&.first&.fetch("url", nil)
 
-
-
-
+            pipfile_object["source"]&.each do |source|
+              urls[:extra] << source.fetch("url") if source["url"]
+            end
+            urls[:extra] = urls[:extra].uniq
+
+            urls
+          rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
+            urls
+          end
        end
 
+        # rubocop:disable Metrics/PerceivedComplexity
+        sig { returns(UrlsHash) }
        def pyproject_index_urls
-          urls = { main: nil, extra: [] }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+          urls = T.let({ main: nil, extra: [] }, UrlsHash)
+
+          begin
+            return urls unless pyproject
+
+            pyproject_file = pyproject
+            return urls unless pyproject_file
+
+            pyproject_content = pyproject_file.content
+            return urls unless pyproject_content
+
+            sources =
+              TomlRB.parse(pyproject_content).dig("tool", "poetry", "source") ||
+              []
+
+            sources.each do |source|
+              # If source is PyPI, skip it, and let it pick the default URI
+              next if source["name"].casecmp?("PyPI")
+
+              if @dependency.all_sources.include?(source["name"])
+                # If dependency has specified this source, use it
+                return { main: source["url"], extra: [] }
+              elsif source["default"]
+                urls[:main] = source["url"]
+              elsif source["priority"] != "explicit"
+                # if source is not explicit, add it to extra
+                urls[:extra] << source["url"]
+              end
            end
-
-            urls[:extra] = urls[:extra].uniq
+            urls[:extra] = urls[:extra].uniq
 
-
-
-
+            urls
+          rescue TomlRB::ParseError, TomlRB::ValueOverwriteError
+            urls
+          end
        end
+        # rubocop:enable Metrics/PerceivedComplexity
 
+        sig { returns(UrlsHash) }
        def config_variable_index_urls
-          urls = { main: nil, extra: [] }
+          urls = T.let({ main: nil, extra: [] }, UrlsHash)
 
          index_url_creds = credentials
                            .select { |cred| cred["type"] == "python_index" }
@@ -159,6 +210,7 @@ module Dependabot
          urls
        end
 
+        sig { params(url: String).returns(String) }
        def clean_check_and_remove_environment_variables(url)
          url = url.strip.sub(%r{/+$}, "") + "/"
 
@@ -188,17 +240,15 @@ module Dependabot
          raise PrivateSourceAuthenticationFailure, url
        end
 
+        sig { params(base_url: String).returns(String) }
        def authed_base_url(base_url)
          cred = credential_for(base_url)
          return base_url unless cred
 
-
-
-          return base_url unless builder
-
-          builder.gsub(%r{/*$}, "") + "/"
+          AuthedUrlBuilder.authed_url(credential: cred).gsub(%r{/*$}, "") + "/"
        end
 
+        sig { params(url: String).returns(T.nilable(Dependabot::Credential)) }
        def credential_for(url)
          credentials
            .select { |c| c["type"] == "python_index" }
@@ -208,22 +258,27 @@ module Dependabot
          end
        end
 
+        sig { returns(T.nilable(Dependabot::DependencyFile)) }
        def pip_conf
          dependency_files.find { |f| f.name == "pip.conf" }
        end
 
+        sig { returns(T.nilable(Dependabot::DependencyFile)) }
        def pipfile
          dependency_files.find { |f| f.name == "Pipfile" }
        end
 
+        sig { returns(T.nilable(Dependabot::DependencyFile)) }
        def pyproject
          dependency_files.find { |f| f.name == "pyproject.toml" }
        end
 
+        sig { returns(T::Array[Dependabot::DependencyFile]) }
        def requirements_files
          dependency_files.select { |f| f.name.match?(/requirements/x) }
        end
 
+        sig { returns(T::Array[Dependabot::DependencyFile]) }
        def pip_compile_files
          dependency_files.select { |f| f.name.end_with?(".in") }
        end