dependabot-uv 0.299.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. checksums.yaml +7 -0
  2. data/helpers/build +34 -0
  3. data/helpers/lib/__init__.py +0 -0
  4. data/helpers/lib/hasher.py +36 -0
  5. data/helpers/lib/parser.py +270 -0
  6. data/helpers/requirements.txt +13 -0
  7. data/helpers/run.py +22 -0
  8. data/lib/dependabot/uv/authed_url_builder.rb +31 -0
  9. data/lib/dependabot/uv/file_fetcher.rb +328 -0
  10. data/lib/dependabot/uv/file_parser/pipfile_files_parser.rb +192 -0
  11. data/lib/dependabot/uv/file_parser/pyproject_files_parser.rb +345 -0
  12. data/lib/dependabot/uv/file_parser/python_requirement_parser.rb +185 -0
  13. data/lib/dependabot/uv/file_parser/setup_file_parser.rb +193 -0
  14. data/lib/dependabot/uv/file_parser.rb +437 -0
  15. data/lib/dependabot/uv/file_updater/compile_file_updater.rb +576 -0
  16. data/lib/dependabot/uv/file_updater/pyproject_preparer.rb +124 -0
  17. data/lib/dependabot/uv/file_updater/requirement_file_updater.rb +73 -0
  18. data/lib/dependabot/uv/file_updater/requirement_replacer.rb +214 -0
  19. data/lib/dependabot/uv/file_updater.rb +105 -0
  20. data/lib/dependabot/uv/language.rb +76 -0
  21. data/lib/dependabot/uv/language_version_manager.rb +114 -0
  22. data/lib/dependabot/uv/metadata_finder.rb +186 -0
  23. data/lib/dependabot/uv/name_normaliser.rb +26 -0
  24. data/lib/dependabot/uv/native_helpers.rb +38 -0
  25. data/lib/dependabot/uv/package_manager.rb +54 -0
  26. data/lib/dependabot/uv/pip_compile_file_matcher.rb +38 -0
  27. data/lib/dependabot/uv/pipenv_runner.rb +108 -0
  28. data/lib/dependabot/uv/requirement.rb +163 -0
  29. data/lib/dependabot/uv/requirement_parser.rb +60 -0
  30. data/lib/dependabot/uv/update_checker/index_finder.rb +227 -0
  31. data/lib/dependabot/uv/update_checker/latest_version_finder.rb +297 -0
  32. data/lib/dependabot/uv/update_checker/pip_compile_version_resolver.rb +506 -0
  33. data/lib/dependabot/uv/update_checker/pip_version_resolver.rb +73 -0
  34. data/lib/dependabot/uv/update_checker/requirements_updater.rb +391 -0
  35. data/lib/dependabot/uv/update_checker.rb +317 -0
  36. data/lib/dependabot/uv/version.rb +321 -0
  37. data/lib/dependabot/uv.rb +35 -0
  38. metadata +306 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 37479ebd370ef17d7a56b2e53b0f9e94d93608fcbafdfb98a588d23f3f61c8f7
4
+ data.tar.gz: d3ff78147e58cf5fe353773c9bdf6c7f920c6ae73140de722827ce3f335ba30d
5
+ SHA512:
6
+ metadata.gz: 413470f2e052517d9a689131997e6425bf253e13cf558a1a217b4f7322ba4cfc28e9d78436a8249e3373ef9d7531fb8b6f5480b19401fcfc61be134c334986fd
7
+ data.tar.gz: a8edce98cdf17c759619019ab9406c4120c67129d884da7cc7111b2d247ba426ee865e53523b75c3ecd9c5fd93eaf9944071732f37ff59111e1955ead835d556
data/helpers/build ADDED
@@ -0,0 +1,34 @@
1
#!/usr/bin/env bash
# Install the Python native-helper scripts into DEPENDABOT_NATIVE_HELPERS_PATH
# and slim down the pyenv-managed Python installs afterwards.

set -e

if [[ -z "$DEPENDABOT_NATIVE_HELPERS_PATH" ]]; then
  echo "Unable to build, DEPENDABOT_NATIVE_HELPERS_PATH is not set"
  exit 1
fi

target_dir="$DEPENDABOT_NATIVE_HELPERS_PATH/python"
mkdir -p "$target_dir"

# Copy the helper sources from the directory this script lives in.
source_dir="$(dirname "${BASH_SOURCE[0]}")"
cp -r "$source_dir/lib" "$source_dir/run.py" "$source_dir/requirements.txt" "$target_dir"

cd "$target_dir"
# $1 selects the pyenv Python version the helpers are installed under.
PYENV_VERSION=$1 pyenv exec pip3 --disable-pip-version-check install --use-pep517 -r "requirements.txt"

# Remove the extra objects added during the previous install. Based on
# https://github.com/docker-library/python/blob/master/Dockerfile-linux.template
# And the image docker.io/library/python
pyenv_versions="${PYENV_ROOT:-/usr/local/.pyenv}/versions"
find "$pyenv_versions" -depth \
  \( \
    \( -type d -a \( -name test -o -name tests -o -name idle_test \) \) \
    -o \( -type f -a \( -name '*.pyc' -o -name '*.pyo' -o -name 'libpython*.a' \) \) \
  \) -exec rm -rf '{}' +

# Strip debug symbols from shared objects to shrink the install further.
find -L "$pyenv_versions" -type f \
  -name '*.so' \
  -exec strip --preserve-dates {} +
data/helpers/lib/hasher.py ADDED
@@ -0,0 +1,36 @@
1
+ import hashin
2
+ import json
3
+ import plette
4
+ import traceback
5
+ from poetry.factory import Factory
6
+
7
+
8
def get_dependency_hash(dependency_name, dependency_version, algorithm,
                        index_url=hashin.DEFAULT_INDEX_URL):
    """Fetch registry hashes for a pinned dependency via hashin.

    Returns a JSON string of the form {"result": [...]} on success, or
    {"error": ..., "error_class": ..., "trace": ...} when the package
    cannot be found on the index.
    """
    try:
        hashes = hashin.get_package_hashes(
            dependency_name,
            version=dependency_version,
            algorithm=algorithm,
            index_url=index_url
        )
        return json.dumps({"result": hashes["hashes"]})
    except hashin.PackageNotFoundError as e:
        # Fix: these keys previously had stray trailing colons
        # ("error_class:", "trace:"), so no consumer could read them;
        # now they match the "error" key style used elsewhere.
        return json.dumps({
            "error": repr(e),
            "error_class": e.__class__.__name__,
            "trace": ''.join(traceback.format_stack())
        })
24
+
25
+
26
def get_pipfile_hash(directory):
    """Return a JSON string with the plette hash of the Pipfile in *directory*."""
    pipfile_path = directory + '/Pipfile'
    with open(pipfile_path) as pipfile_source:
        parsed_pipfile = plette.Pipfile.load(pipfile_source)
    return json.dumps({"result": parsed_pipfile.get_hash().value})
31
+
32
+
33
def get_pyproject_hash(directory):
    """Return a JSON string with poetry's content hash for *directory*.

    NOTE(review): relies on the private `locker._get_content_hash()` API of
    the pinned poetry version.
    """
    poetry_project = Factory().create_poetry(directory)
    content_hash = poetry_project.locker._get_content_hash()
    return json.dumps({"result": content_hash})
@@ -0,0 +1,270 @@
1
+ import glob
2
+ import io
3
+ import json
4
+ import os.path
5
+ import re
6
+
7
+ import configparser
8
+ import setuptools
9
+ import pip._internal.req.req_file
10
+ from pip._internal.network.session import PipSession
11
+ from pip._internal.req.constructors import (
12
+ install_req_from_line,
13
+ install_req_from_parsed_requirement,
14
+ )
15
+
16
+ from packaging.requirements import InvalidRequirement, Requirement
17
+ # TODO: Replace 3p package `tomli` with 3.11's new stdlib `tomllib` once we
18
+ # drop support for Python 3.10.
19
+ import tomli
20
+
21
+ # Inspired by pips internal check:
22
+ # https://github.com/pypa/pip/blob/0bb3ac87f5bb149bd75cceac000844128b574385/src/pip/_internal/req/req_file.py#L35
23
+ COMMENT_RE = re.compile(r'(^|\s+)#.*$')
24
+
25
+
26
def parse_pep621_dependencies(pyproject_path):
    """Parse PEP 621 dependencies out of a pyproject.toml file.

    Collects [project].dependencies, every group of
    [project].optional-dependencies, and [build-system].requires. Returns a
    JSON string {"result": [...]}; on an invalid requirement string it
    prints {"error": ...} and exits non-zero.
    """
    with open(pyproject_path, "rb") as file:
        project_toml = tomli.load(file)

    def version_from_req(specifier_set):
        # Only a single ==/=== specifier pins an exact version.
        if (len(specifier_set) == 1 and
                next(iter(specifier_set)).operator in {"==", "==="}):
            return next(iter(specifier_set)).version

    def parse_toml_section_pep621_dependencies(pyproject_path, dependencies):
        requirement_packages = []

        for dependency in dependencies:
            try:
                req = Requirement(dependency)
            except InvalidRequirement as e:
                print(json.dumps({"error": repr(e)}))
                exit(1)
            else:
                requirement_packages.append({
                    "name": req.name,
                    "version": version_from_req(req.specifier),
                    # Fix: req.marker is None when absent, and str(None) is
                    # the truthy string "None" — `str(...) or None` could
                    # therefore never produce None. Guard explicitly.
                    "markers": str(req.marker) if req.marker else None,
                    "file": pyproject_path,
                    "requirement": str(req.specifier),
                    "extras": sorted(list(req.extras))
                })

        return requirement_packages

    dependencies = []

    if 'project' in project_toml:
        project_section = project_toml['project']

        if 'dependencies' in project_section:
            dependencies.extend(parse_toml_section_pep621_dependencies(
                pyproject_path,
                project_section['dependencies']
            ))

        if 'optional-dependencies' in project_section:
            optional_dependencies_toml = project_section[
                'optional-dependencies'
            ]
            for group in optional_dependencies_toml:
                dependencies.extend(parse_toml_section_pep621_dependencies(
                    pyproject_path,
                    optional_dependencies_toml[group]
                ))

    if 'build-system' in project_toml:
        build_system_section = project_toml['build-system']
        if 'requires' in build_system_section:
            dependencies.extend(parse_toml_section_pep621_dependencies(
                pyproject_path,
                build_system_section['requires']
            ))

    return json.dumps({"result": dependencies})
90
+
91
+
92
def parse_requirements(directory):
    """Parse every requirements (*.txt) and pip-compile (*.in) file under
    *directory*.

    Returns a JSON string {"result": [...]} with one entry per dependency.
    `file:` requirements and requirements coming from remote constraint
    files are skipped. On any parse failure an error JSON is printed and
    the process exits non-zero.
    """
    requirement_packages = []
    requirement_files = glob.glob(os.path.join(directory, '*.txt')) \
        + glob.glob(os.path.join(directory, '**', '*.txt'))

    pip_compile_files = glob.glob(os.path.join(directory, '*.in')) \
        + glob.glob(os.path.join(directory, '**', '*.in'))

    def version_from_install_req(install_req):
        # Only pinned requirements (single ==) have an exact version.
        if install_req.is_pinned:
            return next(iter(install_req.specifier)).version

    for reqs_file in requirement_files + pip_compile_files:
        try:
            requirements = pip._internal.req.req_file.parse_requirements(
                reqs_file,
                session=PipSession()
            )
            for parsed_req in requirements:
                install_req = install_req_from_parsed_requirement(parsed_req)
                if install_req.req is None:
                    continue

                # Ignore file: requirements
                if install_req.link is not None and install_req.link.is_file:
                    continue

                # comes_from is formatted like "-r /abs/path (line N)";
                # extract the absolute path of the originating file.
                pattern = r"-[cr] (.*) \(line \d+\)"
                abs_path = re.search(pattern, install_req.comes_from).group(1)

                # Ignore dependencies from remote constraint files
                if not os.path.isfile(abs_path):
                    continue

                rel_path = os.path.relpath(abs_path, directory)

                requirement_packages.append({
                    "name": install_req.req.name,
                    "version": version_from_install_req(install_req),
                    # Fix: markers is None when absent, and str(None) is the
                    # truthy string "None" — `str(...) or None` could never
                    # produce None. Guard explicitly.
                    "markers": str(install_req.markers)
                    if install_req.markers else None,
                    "file": rel_path,
                    "requirement": str(install_req.specifier) or None,
                    "extras": sorted(list(install_req.extras))
                })
        except Exception as e:
            print(json.dumps({"error": repr(e)}))
            exit(1)

    return json.dumps({"result": requirement_packages})
142
+
143
+
144
def parse_setup(directory):
    """Extract dependencies declared in setup.py and/or setup.cfg in
    *directory*.

    setup.py is exec'd with setuptools.setup monkey-patched so declared
    requirements are recorded rather than installed; print/log/open calls
    and __version__ imports are neutralised first. setup.cfg is read with
    configparser. Returns a JSON string {"result": [...]}; on setup.cfg
    errors an error JSON is printed and the process exits non-zero.
    """
    def version_from_install_req(install_req):
        # Only pinned requirements (single ==) have an exact version.
        if install_req.is_pinned:
            return next(iter(install_req.specifier)).version

    def parse_requirement(req, req_type, filename):
        install_req = install_req_from_line(req)
        if install_req.original_link:
            return

        setup_packages.append(
            {
                "name": install_req.req.name,
                "version": version_from_install_req(install_req),
                # Fix: markers is None when absent, and str(None) is the
                # truthy string "None" — `str(...) or None` could never
                # produce None. Guard explicitly.
                "markers": str(install_req.markers)
                if install_req.markers else None,
                "file": filename,
                "requirement": str(install_req.specifier) or None,
                "requirement_type": req_type,
                "extras": sorted(list(install_req.extras)),
            }
        )

    def parse_requirements(requires, req_type, filename):
        for req in requires:
            req = COMMENT_RE.sub('', req)
            req = req.strip()
            parse_requirement(req, req_type, filename)

    # Parse the setup.py and setup.cfg
    setup_py = "setup.py"
    setup_py_path = os.path.join(directory, setup_py)
    setup_cfg = "setup.cfg"
    setup_cfg_path = os.path.join(directory, setup_cfg)
    setup_packages = []

    if os.path.isfile(setup_py_path):

        def setup(*args, **kwargs):
            # Replacement for setuptools.setup: record the declared
            # requirements instead of building/installing anything.
            for arg in ["setup_requires", "install_requires", "tests_require"]:
                requires = kwargs.get(arg, [])
                parse_requirements(requires, arg, setup_py)
            extras_require_dict = kwargs.get("extras_require", {})
            for key, value in extras_require_dict.items():
                parse_requirements(
                    value, "extras_require:{}".format(key), setup_py
                )

        setuptools.setup = setup

        def noop(*args, **kwargs):
            pass

        def fake_parse(*args, **kwargs):
            return []

        # fake_open must be module-global so the exec'd, rewritten setup.py
        # source can resolve it by name.
        global fake_open

        def fake_open(*args, **kwargs):
            content = (
                "VERSION = ('0', '0', '1+dependabot')\n"
                "__version__ = '0.0.1+dependabot'\n"
                "__author__ = 'someone'\n"
                "__title__ = 'something'\n"
                "__description__ = 'something'\n"
                "__author_email__ = 'something'\n"
                "__license__ = 'something'\n"
                "__url__ = 'something'\n"
            )
            return io.StringIO(content)

        # Fix: close the file handle instead of leaking it
        # (was a bare open(...).read()).
        with open(setup_py_path, "r") as setup_py_file:
            content = setup_py_file.read()

        # Remove `print`, `open`, `log` and import statements
        content = re.sub(r"print\s*\(", "noop(", content)
        content = re.sub(r"log\s*(\.\w+)*\(", "noop(", content)
        content = re.sub(r"\b(\w+\.)*(open|file)\s*\(", "fake_open(", content)
        content = content.replace("parse_requirements(", "fake_parse(")
        version_re = re.compile(r"^.*import.*__version__.*$", re.MULTILINE)
        content = re.sub(version_re, "", content)

        # Set variables likely to be imported
        __version__ = "0.0.1+dependabot"
        __author__ = "someone"
        __title__ = "something"
        __description__ = "something"
        __author_email__ = "something"
        __license__ = "something"
        __url__ = "something"

        # Run as main (since setup.py is a script)
        __name__ = "__main__"

        # Exec the setup.py. Fix: this was the Python 2 leftover
        # `exec(content) in globals(), locals()`, whose trailing
        # `in globals(), locals()` parsed as a no-op containment test plus
        # tuple in Python 3 — the exec itself already runs in this scope.
        exec(content)

    if os.path.isfile(setup_cfg_path):
        try:
            config = configparser.ConfigParser()
            config.read(setup_cfg_path)

            for req_type in [
                "setup_requires",
                "install_requires",
                "tests_require",
            ]:
                requires = config.get(
                    'options',
                    req_type, fallback='').splitlines()
                requires = [req for req in requires if req.strip()]
                parse_requirements(requires, req_type, setup_cfg)

            if config.has_section('options.extras_require'):
                extras_require = config._sections['options.extras_require']
                for key, value in extras_require.items():
                    requires = value.splitlines()
                    requires = [req for req in requires if req.strip()]
                    parse_requirements(
                        requires,
                        f"extras_require:{key}",
                        setup_cfg
                    )

        except Exception as e:
            print(json.dumps({"error": repr(e)}))
            exit(1)

    return json.dumps({"result": setup_packages})
@@ -0,0 +1,13 @@
1
+ pip==24.0
2
+ pip-tools==7.4.1
3
+ flake8==7.1.0
4
+ hashin==1.0.3
5
+ pipenv==2024.0.2
6
+ plette==2.1.0
7
+ poetry==1.8.5
8
+ # TODO: Replace 3p package `tomli` with 3.11's new stdlib `tomllib` once we drop support for Python 3.10.
9
+ tomli==2.0.1
10
+ uv==0.6.2
11
+
12
+ # Some dependencies will only install if Cython is present
13
+ Cython==3.0.10
data/helpers/run.py ADDED
@@ -0,0 +1,22 @@
1
import sys
import json

from lib import parser, hasher

if __name__ == "__main__":
    # The helper is driven by a single JSON document on stdin:
    #   {"function": "<name>", "args": [...]}
    args = json.loads(sys.stdin.read())

    # TODO Python 3.10 added native switch statements, so switch this
    # dispatch table to that once we drop support for 3.9.
    handlers = {
        "parse_requirements": lambda a: parser.parse_requirements(a[0]),
        "parse_setup": lambda a: parser.parse_setup(a[0]),
        "parse_pep621_dependencies":
            lambda a: parser.parse_pep621_dependencies(a[0]),
        "get_dependency_hash": lambda a: hasher.get_dependency_hash(*a),
        "get_pipfile_hash": lambda a: hasher.get_pipfile_hash(*a),
        "get_pyproject_hash": lambda a: hasher.get_pyproject_hash(*a),
    }

    handler = handlers.get(args["function"])
    # Unknown function names produce no output, matching the original
    # if/elif chain falling through.
    if handler is not None:
        print(handler(args["args"]))
@@ -0,0 +1,31 @@
1
# typed: true
# frozen_string_literal: true

module Dependabot
  module Uv
    # Builds package-index URLs with basic-auth credentials embedded
    # directly after the scheme (https://user:pass@host/...).
    class AuthedUrlBuilder
      # credential: a hash with optional "index-url" and "token" keys.
      # Returns "" when there is no index URL, the bare URL when there is
      # no token, and otherwise the URL with escaped credentials injected.
      def self.authed_url(credential:)
        url = credential.fetch("index-url", nil)
        token = credential.fetch("token", nil)
        return "" unless url
        return url unless token

        auth = resolve_auth_details(token)

        if auth.include?(":")
          user, _separator, pass = auth.partition(":")
          auth = "#{CGI.escape(user)}:#{CGI.escape(pass)}"
        end

        url.sub("://", "://#{auth}@")
      end

      # Heuristic: prefer the token verbatim when it already looks like
      # "user:password"; otherwise try base64-decoding it; fall back to
      # using the raw token as-is.
      def self.resolve_auth_details(token)
        return token if token.ascii_only? && token.include?(":")

        decoded = Base64.decode64(token)
        return decoded if decoded.ascii_only? && decoded.include?(":")

        token
      end
      private_class_method :resolve_auth_details
    end
  end
end