dependabot-python 0.79.0
This diff shows the content of a publicly available package version as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/helpers/build +17 -0
- data/helpers/lib/__init__.py +0 -0
- data/helpers/lib/hasher.py +23 -0
- data/helpers/lib/parser.py +130 -0
- data/helpers/requirements.txt +9 -0
- data/helpers/run.py +18 -0
- data/lib/dependabot/python.rb +11 -0
- data/lib/dependabot/python/file_fetcher.rb +307 -0
- data/lib/dependabot/python/file_parser.rb +221 -0
- data/lib/dependabot/python/file_parser/pipfile_files_parser.rb +150 -0
- data/lib/dependabot/python/file_parser/poetry_files_parser.rb +139 -0
- data/lib/dependabot/python/file_parser/setup_file_parser.rb +158 -0
- data/lib/dependabot/python/file_updater.rb +149 -0
- data/lib/dependabot/python/file_updater/pip_compile_file_updater.rb +361 -0
- data/lib/dependabot/python/file_updater/pipfile_file_updater.rb +391 -0
- data/lib/dependabot/python/file_updater/pipfile_preparer.rb +123 -0
- data/lib/dependabot/python/file_updater/poetry_file_updater.rb +282 -0
- data/lib/dependabot/python/file_updater/pyproject_preparer.rb +103 -0
- data/lib/dependabot/python/file_updater/requirement_file_updater.rb +160 -0
- data/lib/dependabot/python/file_updater/requirement_replacer.rb +93 -0
- data/lib/dependabot/python/file_updater/setup_file_sanitizer.rb +89 -0
- data/lib/dependabot/python/metadata_finder.rb +122 -0
- data/lib/dependabot/python/native_helpers.rb +17 -0
- data/lib/dependabot/python/python_versions.rb +25 -0
- data/lib/dependabot/python/requirement.rb +129 -0
- data/lib/dependabot/python/requirement_parser.rb +38 -0
- data/lib/dependabot/python/update_checker.rb +229 -0
- data/lib/dependabot/python/update_checker/latest_version_finder.rb +250 -0
- data/lib/dependabot/python/update_checker/pip_compile_version_resolver.rb +379 -0
- data/lib/dependabot/python/update_checker/pipfile_version_resolver.rb +558 -0
- data/lib/dependabot/python/update_checker/poetry_version_resolver.rb +298 -0
- data/lib/dependabot/python/update_checker/requirements_updater.rb +365 -0
- data/lib/dependabot/python/version.rb +87 -0
- metadata +203 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: '0996037472fe577fcff80293f9fe7c5a1a3579f5bfc74b74aafdbdb05ab2940d'
+  data.tar.gz: 7e90b75ec5540d68b384052354fbf216b59fc6ec8839ec82352f743af0112490
+SHA512:
+  metadata.gz: 6c275bd0828455b6aeaa98fe936570d179201992229d0994de5f586843dccd73187278686736736023a8839ea9bf588754e8c4e087ec9341ede5a89f93440f37
+  data.tar.gz: 60e5e5b0f05fa66bf2d675de732bc16c4341f1f7188f33b3903323e59189be8ad4d6cbc0568529220f4ff32751916d62c4834e7869c1ea59e77df87decceface

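Aside: these digests cover the metadata.gz and data.tar.gz entries inside the published .gem archive. As a minimal sketch (not part of the gem), the SHA256 values above can be checked against a locally downloaded copy; the filename dependabot-python-0.79.0.gem is an assumption, and a .gem is a plain (uncompressed) tar archive.

import hashlib
import tarfile

# Expected digests copied from checksums.yaml above.
EXPECTED_SHA256 = {
    "metadata.gz": "0996037472fe577fcff80293f9fe7c5a1a3579f5bfc74b74aafdbdb05ab2940d",
    "data.tar.gz": "7e90b75ec5540d68b384052354fbf216b59fc6ec8839ec82352f743af0112490",
}

# Assumed local filename for the downloaded gem.
with tarfile.open("dependabot-python-0.79.0.gem") as gem:
    for name, expected in EXPECTED_SHA256.items():
        digest = hashlib.sha256(gem.extractfile(name).read()).hexdigest()
        print(name, "OK" if digest == expected else "MISMATCH " + digest)
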
data/helpers/build
ADDED
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+set -e
+
+install_dir=$1
+if [ -z "$install_dir" ]; then
+  echo "usage: $0 INSTALL_DIR"
+  exit 1
+fi
+
+if [ ! -d "$install_dir/bin" ]; then
+  mkdir -p "$install_dir/bin"
+fi
+
+helpers_dir="$(dirname "${BASH_SOURCE[0]}")"
+PYENV_VERSION=2.7.15 pyenv exec pip install -r "${helpers_dir}/requirements.txt"
+PYENV_VERSION=3.6.7 pyenv exec pip install -r "${helpers_dir}/requirements.txt"

data/helpers/lib/__init__.py
File without changes

data/helpers/lib/hasher.py
ADDED
@@ -0,0 +1,23 @@
+import hashin
+import json
+import pipfile
+from poetry.poetry import Poetry
+
+def get_dependency_hash(dependency_name, dependency_version, algorithm):
+    hashes = hashin.get_package_hashes(
+        dependency_name,
+        version=dependency_version,
+        algorithm=algorithm
+    )
+
+    return json.dumps({ "result": hashes["hashes"] })
+
+def get_pipfile_hash(directory):
+    p = pipfile.load(directory + '/Pipfile')
+
+    return json.dumps({ "result": p.hash })
+
+def get_pyproject_hash(directory):
+    p = Poetry.create(directory)
+
+    return json.dumps({ "result": p.locker._get_content_hash() })

data/helpers/lib/parser.py
ADDED
@@ -0,0 +1,130 @@
+from itertools import chain
+import glob
+import io
+import json
+import os.path
+import re
+
+import setuptools
+import pip._internal.req.req_file
+from pip._internal.download import PipSession
+from pip._internal.req.constructors import install_req_from_line
+
+def parse_requirements(directory):
+    # Parse the requirements.txt
+    requirement_packages = []
+
+    requirement_files = glob.glob(os.path.join(directory, '*.txt')) \
+        + glob.glob(os.path.join(directory, '**', '*.txt'))
+
+    pip_compile_files = glob.glob(os.path.join(directory, '*.in')) \
+        + glob.glob(os.path.join(directory, '**', '*.in'))
+
+    for reqs_file in requirement_files + pip_compile_files:
+        try:
+            requirements = pip._internal.req.req_file.parse_requirements(
+                reqs_file,
+                session=PipSession()
+            )
+            for install_req in requirements:
+                if install_req.original_link:
+                    continue
+                if install_req.is_pinned:
+                    version = next(iter(install_req.specifier)).version
+                else:
+                    version = None
+
+                pattern = r"-[cr] (.*) \(line \d+\)"
+                abs_path = re.search(pattern, install_req.comes_from).group(1)
+                rel_path = os.path.relpath(abs_path, directory)
+
+                requirement_packages.append({
+                    "name": install_req.req.name,
+                    "version": version,
+                    "markers": str(install_req.markers) or None,
+                    "file": rel_path,
+                    "requirement": str(install_req.specifier) or None
+                })
+        except Exception as e:
+            print(json.dumps({ "error": repr(e) }))
+            exit(1)
+
+    return json.dumps({ "result": requirement_packages })
+
+def parse_setup(directory):
+    # Parse the setup.py
+    setup_packages = []
+    if os.path.isfile(directory + '/setup.py'):
+        def parse_requirement(req, req_type):
+            install_req = install_req_from_line(req)
+            if install_req.original_link:
+                return
+            if install_req.is_pinned:
+                version = next(iter(install_req.specifier)).version
+            else:
+                version = None
+            setup_packages.append({
+                "name": install_req.req.name,
+                "version": version,
+                "markers": str(install_req.markers) or None,
+                "file": "setup.py",
+                "requirement": str(install_req.specifier) or None,
+                "requirement_type": req_type
+            })
+
+        def setup(*args, **kwargs):
+            for arg in ['setup_requires', 'install_requires', 'tests_require']:
+                if not kwargs.get(arg):
+                    continue
+                for req in kwargs.get(arg):
+                    parse_requirement(req, arg)
+            extras_require_dict = kwargs.get('extras_require', {})
+            for key in extras_require_dict:
+                for req in extras_require_dict[key]:
+                    parse_requirement(req, 'extras_require:{}'.format(key))
+        setuptools.setup = setup
+
+        def noop(*args, **kwargs):
+            pass
+
+        def fake_parse(*args, **kwargs):
+            return []
+
+        global fake_open
+        def fake_open(*args, **kwargs):
+            content = ("VERSION = ('0', '0', '1+dependabot')\n"
+                       "__version__ = '0.0.1+dependabot'\n"
+                       "__author__ = 'someone'\n"
+                       "__title__ = 'something'\n"
+                       "__description__ = 'something'\n"
+                       "__author_email__ = 'something'\n"
+                       "__license__ = 'something'\n"
+                       "__url__ = 'something'\n")
+            return io.StringIO(content)
+
+        content = open(directory + '/setup.py', 'r').read()
+
+        # Remove `print`, `open`, `log` and import statements
+        content = re.sub(r"print\s*\(", "noop(", content)
+        content = re.sub(r"log\s*(\.\w+)*\(", "noop(", content)
+        content = re.sub(r"\b(\w+\.)*(open|file)\s*\(", "fake_open(", content)
+        content = content.replace("parse_requirements(", "fake_parse(")
+        version_re = re.compile(r"^.*import.*__version__.*$", re.MULTILINE)
+        content = re.sub(version_re, "", content)
+
+        # Set variables likely to be imported
+        __version__ = '0.0.1+dependabot'
+        __author__ = 'someone'
+        __title__ = 'something'
+        __description__ = 'something'
+        __author_email__ = 'something'
+        __license__ = 'something'
+        __url__ = 'something'
+
+        # Run as main (since setup.py is a script)
+        __name__ = '__main__'
+
+        # Exec the setup.py
+        exec(content) in globals(), locals()
+
+    return json.dumps({ "result": setup_packages })

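Note: parse_setup above works by monkey-patching setuptools.setup and exec-ing the project's setup.py, so the keyword arguments passed to setup() can be captured without installing anything. Below is a stripped-down, illustrative sketch of that technique only; it omits the print/open/log sanitisation the real helper performs and assumes a setup.py in the current directory.

import setuptools

captured = {}

def recording_setup(*args, **kwargs):
    # Record the keyword arguments instead of building or installing anything.
    captured.update(kwargs)

setuptools.setup = recording_setup

# Assumes a setup.py in the current working directory.
source = open("setup.py").read()
exec(compile(source, "setup.py", "exec"), {"__name__": "__main__"})

print(captured.get("install_requires", []))
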
data/helpers/run.py
ADDED
@@ -0,0 +1,18 @@
+import sys
+import json
+
+from lib import parser, hasher
+
+if __name__ == "__main__":
+    args = json.loads(sys.stdin.read())
+
+    if args["function"] == "parse_requirements":
+        print(parser.parse_requirements(args["args"][0]))
+    if args["function"] == "parse_setup":
+        print(parser.parse_setup(args["args"][0]))
+    elif args["function"] == "get_dependency_hash":
+        print(hasher.get_dependency_hash(*args["args"]))
+    elif args["function"] == "get_pipfile_hash":
+        print(hasher.get_pipfile_hash(*args["args"]))
+    elif args["function"] == "get_pyproject_hash":
+        print(hasher.get_pyproject_hash(*args["args"]))

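Note: run.py above is driven entirely over stdin/stdout: the caller writes a JSON object with a "function" name and an "args" array, and reads back a JSON object keyed by "result" (or "error"). A minimal sketch of that invocation follows; it assumes the helper requirements are installed for the interpreter on PATH, that the working directory is data/helpers, and Python 3.7+ for subprocess capture_output. The payload path is illustrative.

import json
import subprocess

# Example payload; "function" and "args" mirror the dispatch in run.py above.
payload = {"function": "parse_requirements", "args": ["/path/to/project"]}

proc = subprocess.run(
    ["python", "run.py"],      # assumes cwd is data/helpers
    input=json.dumps(payload),
    capture_output=True,
    text=True,
    check=True,
)

print(json.loads(proc.stdout)["result"])  # list of parsed requirement dicts
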
data/lib/dependabot/python.rb
ADDED
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+# These all need to be required so the various classes can be registered in a
+# lookup table of package manager names to concrete classes.
+require "dependabot/python/file_fetcher"
+require "dependabot/python/file_parser"
+require "dependabot/python/update_checker"
+require "dependabot/python/file_updater"
+require "dependabot/python/metadata_finder"
+require "dependabot/python/requirement"
+require "dependabot/python/version"

data/lib/dependabot/python/file_fetcher.rb
ADDED
@@ -0,0 +1,307 @@
+# frozen_string_literal: true
+
+require "toml-rb"
+
+require "dependabot/file_fetchers"
+require "dependabot/file_fetchers/base"
+require "dependabot/python/file_parser"
+require "dependabot/errors"
+
+module Dependabot
+  module Python
+    class FileFetcher < Dependabot::FileFetchers::Base
+      CHILD_REQUIREMENT_REGEX = /^-r\s?(?<path>.*\.txt)/.freeze
+      CONSTRAINT_REGEX = /^-c\s?(?<path>\..*)/.freeze
+
+      def self.required_files_in?(filenames)
+        return true if filenames.any? { |name| name.end_with?(".txt", ".in") }
+
+        # If there is a directory of requirements return true
+        return true if filenames.include?("requirements")
+
+        # If this repo is using a Pipfile return true
+        return true if filenames.include?("Pipfile")
+
+        # If this repo is using Poetry return true
+        return true if filenames.include?("pyproject.toml")
+
+        filenames.include?("setup.py")
+      end
+
+      def self.required_files_message
+        "Repo must contain a requirements.txt, setup.py, pyproject.toml, "\
+          "or a Pipfile."
+      end
+
+      private
+
+      def fetch_files
+        fetched_files = []
+
+        fetched_files += pipenv_files
+        fetched_files += pyproject_files
+
+        fetched_files += requirements_in_files
+        fetched_files += requirement_files if requirements_txt_files.any?
+
+        fetched_files << setup_file if setup_file
+        fetched_files << setup_cfg if setup_cfg
+        fetched_files += path_setup_files
+        fetched_files << pip_conf if pip_conf
+        fetched_files << python_version if python_version
+
+        check_required_files_present
+        fetched_files.uniq
+      end
+
+      def pipenv_files
+        [pipfile, pipfile_lock].compact
+      end
+
+      def pyproject_files
+        [pyproject, pyproject_lock, poetry_lock].compact
+      end
+
+      def requirement_files
+        [
+          *requirements_txt_files,
+          *child_requirement_files,
+          *constraints_files
+        ]
+      end
+
+      def check_required_files_present
+        if requirements_txt_files.any? || setup_file || pipfile || pyproject
+          return
+        end
+
+        path = Pathname.new(File.join(directory, "requirements.txt")).
+               cleanpath.to_path
+        raise Dependabot::DependencyFileNotFound, path
+      end
+
+      def setup_file
+        @setup_file ||= fetch_file_if_present("setup.py")
+      end
+
+      def setup_cfg
+        @setup_cfg ||= fetch_file_if_present("setup.cfg")
+      end
+
+      def pip_conf
+        @pip_conf ||= fetch_file_if_present("pip.conf")&.
+                      tap { |f| f.support_file = true }
+      end
+
+      def python_version
+        @python_version ||= fetch_file_if_present(".python-version")&.
+                            tap { |f| f.support_file = true }
+      end
+
+      def pipfile
+        @pipfile ||= fetch_file_if_present("Pipfile")
+      end
+
+      def pipfile_lock
+        @pipfile_lock ||= fetch_file_if_present("Pipfile.lock")
+      end
+
+      def pyproject
+        @pyproject ||= fetch_file_if_present("pyproject.toml")
+      end
+
+      def pyproject_lock
+        @pyproject_lock ||= fetch_file_if_present("pyproject.lock")
+      end
+
+      def poetry_lock
+        @poetry_lock ||= fetch_file_if_present("poetry.lock")
+      end
+
+      def requirements_txt_files
+        req_txt_and_in_files.select { |f| f.name.end_with?(".txt") }
+      end
+
+      def requirements_in_files
+        req_txt_and_in_files.select { |f| f.name.end_with?(".in") }
+      end
+
+      def parsed_pipfile
+        raise "No Pipfile" unless pipfile
+
+        @parsed_pipfile ||= TomlRB.parse(pipfile.content)
+      rescue TomlRB::ParseError
+        raise Dependabot::DependencyFileNotParseable, pipfile.path
+      end
+
+      def req_txt_and_in_files
+        return @req_txt_and_in_files if @req_txt_and_in_files
+
+        @req_txt_and_in_files = []
+
+        repo_contents.
+          select { |f| f.type == "file" }.
+          select { |f| f.name.end_with?(".txt", ".in") }.
+          map { |f| fetch_file_from_host(f.name) }.
+          select { |f| requirements_file?(f) }.
+          each { |f| @req_txt_and_in_files << f }
+
+        repo_contents.
+          select { |f| f.type == "dir" }.
+          each { |f| @req_txt_and_in_files += req_files_for_dir(f) }
+
+        @req_txt_and_in_files
+      end
+
+      def req_files_for_dir(requirements_dir)
+        dir = directory.gsub(%r{(^/|/$)}, "")
+        relative_reqs_dir =
+          requirements_dir.path.gsub(%r{^/?#{Regexp.escape(dir)}/?}, "")
+
+        repo_contents(dir: relative_reqs_dir).
+          select { |f| f.type == "file" }.
+          select { |f| f.name.end_with?(".txt", ".in") }.
+          map { |f| fetch_file_from_host("#{relative_reqs_dir}/#{f.name}") }.
+          select { |f| requirements_file?(f) }
+      end
+
+      def child_requirement_files
+        @child_requirement_files ||=
+          begin
+            fetched_files = requirements_txt_files.dup
+            requirements_txt_files.flat_map do |requirement_file|
+              child_files = fetch_child_requirement_files(
+                file: requirement_file,
+                previously_fetched_files: fetched_files
+              )
+
+              fetched_files += child_files
+              child_files
+            end
+          end
+      end
+
+      def fetch_child_requirement_files(file:, previously_fetched_files:)
+        paths = file.content.scan(CHILD_REQUIREMENT_REGEX).flatten
+        current_dir = File.dirname(file.name)
+
+        paths.flat_map do |path|
+          path = File.join(current_dir, path) unless current_dir == "."
+          path = Pathname.new(path).cleanpath.to_path
+
+          next if previously_fetched_files.map(&:name).include?(path)
+          next if file.name == path
+
+          fetched_file = fetch_file_from_host(path)
+          grandchild_requirement_files = fetch_child_requirement_files(
+            file: fetched_file,
+            previously_fetched_files: previously_fetched_files + [file]
+          )
+          [fetched_file, *grandchild_requirement_files]
+        end.compact
+      end
+
+      def constraints_files
+        all_requirement_files = requirements_txt_files +
+                                child_requirement_files
+
+        constraints_paths = all_requirement_files.map do |req_file|
+          req_file.content.scan(CONSTRAINT_REGEX).flatten
+        end.flatten.uniq
+
+        constraints_paths.map { |path| fetch_file_from_host(path) }
+      end
+
+      def path_setup_files
+        path_setup_files = []
+        unfetchable_files = []
+
+        path_setup_file_paths.each do |path|
+          path = Pathname.new(File.join(path, "setup.py")).cleanpath.to_path
+          next if path == "setup.py" && setup_file
+
+          begin
+            path_setup_files << fetch_file_from_host(path).
+                                tap { |f| f.support_file = true }
+          rescue Dependabot::DependencyFileNotFound
+            unfetchable_files << path
+          end
+
+          begin
+            cfg_path = path.gsub(/\.py$/, ".cfg")
+            path_setup_files << fetch_file_from_host(cfg_path).
+                                tap { |f| f.support_file = true }
+          rescue Dependabot::DependencyFileNotFound
+            # Ignore lack of a setup.cfg
+            nil
+          end
+        end
+
+        if unfetchable_files.any?
+          raise Dependabot::PathDependenciesNotReachable, unfetchable_files
+        end
+
+        path_setup_files
+      end
+
+      def requirements_file?(file)
+        return true if file.name.match?(/requirements/x)
+
+        content = file.content.
+                  gsub(CONSTRAINT_REGEX, "").
+                  gsub(CHILD_REQUIREMENT_REGEX, "")
+
+        tmp_file = DependencyFile.new(name: file.name, content: content)
+        Dependabot::Python::FileParser.
+          new(dependency_files: [tmp_file], source: source).
+          parse.any?
+      rescue Dependabot::DependencyFileNotEvaluatable
+        false
+      end
+
+      def path_setup_file_paths
+        requirement_txt_path_setup_file_paths + pipfile_path_setup_file_paths
+      end
+
+      def requirement_txt_path_setup_file_paths
+        (requirements_txt_files + child_requirement_files).map do |req_file|
+          uneditable_reqs =
+            req_file.content.
+            scan(/^['"]?(?<path>\..*?)(?=\[|#|'|"|$)/).
+            flatten.
+            map(&:strip).
+            reject { |p| p.include?("://") }
+
+          editable_reqs =
+            req_file.content.
+            scan(/^(?:-e)\s+['"]?(?<path>.*?)(?=\[|#|'|"|$)/).
+            flatten.
+            map(&:strip).
+            reject { |p| p.include?("://") }
+
+          uneditable_reqs + editable_reqs
+        end.flatten.uniq
+      end
+
+      def pipfile_path_setup_file_paths
+        return [] unless pipfile
+
+        paths = []
+        %w(packages dev-packages).each do |dep_type|
+          next unless parsed_pipfile[dep_type]
+
+          parsed_pipfile[dep_type].each do |_, req|
+            next unless req.is_a?(Hash) && req["path"]
+
+            paths << req["path"]
+          end
+        end
+
+        paths
+      end
+    end
+  end
+end
+
+Dependabot::FileFetchers.
+  register("pip", Dependabot::Python::FileFetcher)