owasp-depscan 5.4.8__py3-none-any.whl → 6.0.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of owasp-depscan has been flagged for review. See the package registry's advisory page for more details.
- depscan/__init__.py +8 -0
- depscan/cli.py +719 -827
- depscan/cli_options.py +302 -0
- depscan/lib/audit.py +3 -1
- depscan/lib/bom.py +390 -288
- depscan/lib/config.py +86 -337
- depscan/lib/explainer.py +363 -98
- depscan/lib/license.py +11 -10
- depscan/lib/logger.py +65 -17
- depscan/lib/package_query/__init__.py +0 -0
- depscan/lib/package_query/cargo_pkg.py +124 -0
- depscan/lib/package_query/metadata.py +170 -0
- depscan/lib/package_query/npm_pkg.py +345 -0
- depscan/lib/package_query/pkg_query.py +195 -0
- depscan/lib/package_query/pypi_pkg.py +113 -0
- depscan/lib/tomlparse.py +116 -0
- depscan/lib/utils.py +34 -188
- owasp_depscan-6.0.0a2.dist-info/METADATA +390 -0
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/RECORD +28 -25
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/WHEEL +1 -1
- vendor/choosealicense.com/_licenses/cern-ohl-p-2.0.txt +1 -1
- vendor/choosealicense.com/_licenses/cern-ohl-s-2.0.txt +1 -1
- vendor/choosealicense.com/_licenses/cern-ohl-w-2.0.txt +2 -2
- vendor/choosealicense.com/_licenses/mit-0.txt +1 -1
- vendor/spdx/json/licenses.json +904 -677
- depscan/lib/analysis.py +0 -1550
- depscan/lib/csaf.py +0 -1860
- depscan/lib/normalize.py +0 -312
- depscan/lib/orasclient.py +0 -142
- depscan/lib/pkg_query.py +0 -532
- owasp_depscan-5.4.8.dist-info/METADATA +0 -580
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/entry_points.txt +0 -0
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info/licenses}/LICENSE +0 -0
- {owasp_depscan-5.4.8.dist-info → owasp_depscan-6.0.0a2.dist-info}/top_level.txt +0 -0
depscan/lib/logger.py
CHANGED
|
@@ -1,7 +1,8 @@
|
|
|
1
|
-
# This file is part of Scan.
|
|
2
|
-
|
|
3
1
|
import logging
|
|
4
2
|
import os
|
|
3
|
+
import random
|
|
4
|
+
import re
|
|
5
|
+
import sys
|
|
5
6
|
|
|
6
7
|
from rich.console import Console
|
|
7
8
|
from rich.highlighter import RegexHighlighter
|
|
@@ -10,7 +11,7 @@ from rich.theme import Theme
|
|
|
10
11
|
|
|
11
12
|
|
|
12
13
|
class CustomHighlighter(RegexHighlighter):
|
|
13
|
-
base_style = "
|
|
14
|
+
base_style = "depscan."
|
|
14
15
|
highlights = [
|
|
15
16
|
r"(?P<method>([\w-]+\.)+[\w-]+[^<>:(),]?)",
|
|
16
17
|
r"(?P<path>(\w+\/.*\.[\w:]+))",
|
|
@@ -21,25 +22,29 @@ class CustomHighlighter(RegexHighlighter):
|
|
|
21
22
|
|
|
22
23
|
custom_theme = Theme(
|
|
23
24
|
{
|
|
24
|
-
"
|
|
25
|
-
"
|
|
26
|
-
"
|
|
27
|
-
"
|
|
25
|
+
"depscan.path": "#7c8082",
|
|
26
|
+
"depscan.params": "#5a7c90",
|
|
27
|
+
"depscan.opers": "#7c8082",
|
|
28
|
+
"depscan.method": "#FF753D",
|
|
28
29
|
"info": "#5A7C90",
|
|
29
30
|
"warning": "#FF753D",
|
|
30
31
|
"danger": "bold red",
|
|
31
32
|
}
|
|
32
33
|
)
|
|
33
34
|
|
|
35
|
+
IS_CI = os.getenv("CI") or os.getenv("CONTINUOUS_INTEGRATION")
|
|
36
|
+
|
|
34
37
|
console = Console(
|
|
35
38
|
log_time=False,
|
|
36
39
|
log_path=False,
|
|
37
40
|
theme=custom_theme,
|
|
38
|
-
color_system="256",
|
|
39
|
-
|
|
40
|
-
highlight=
|
|
41
|
+
color_system=os.getenv("CONSOLE_COLOR_SCHEME", "256"),
|
|
42
|
+
width=140 if IS_CI else None,
|
|
43
|
+
highlight=not IS_CI,
|
|
44
|
+
tab_size=2,
|
|
41
45
|
highlighter=CustomHighlighter(),
|
|
42
|
-
record=
|
|
46
|
+
record=sys.platform == "win32",
|
|
47
|
+
emoji=os.getenv("DISABLE_CONSOLE_EMOJI", "") not in ("true", "1"),
|
|
43
48
|
)
|
|
44
49
|
|
|
45
50
|
logging.basicConfig(
|
|
@@ -58,14 +63,57 @@ logging.basicConfig(
|
|
|
58
63
|
LOG = logging.getLogger(__name__)
|
|
59
64
|
|
|
60
65
|
# Set logging level
|
|
61
|
-
if (
|
|
62
|
-
os.getenv("SCAN_DEBUG_MODE") == "debug"
|
|
63
|
-
or os.getenv("AT_DEBUG_MODE") == "debug"
|
|
64
|
-
):
|
|
66
|
+
if os.getenv("SCAN_DEBUG_MODE") == "debug":
|
|
65
67
|
LOG.setLevel(logging.DEBUG)
|
|
66
68
|
|
|
67
69
|
DEBUG = logging.DEBUG
|
|
68
|
-
|
|
69
70
|
for log_name, log_obj in logging.Logger.manager.loggerDict.items():
|
|
70
|
-
if log_name
|
|
71
|
+
if not log_name.startswith("depscan"):
|
|
71
72
|
log_obj.disabled = True
|
|
73
|
+
|
|
74
|
+
SPINNER = os.getenv(
|
|
75
|
+
"DEPSCAN_SPINNER",
|
|
76
|
+
random.choice(
|
|
77
|
+
[
|
|
78
|
+
"pong",
|
|
79
|
+
"arrow3",
|
|
80
|
+
"bouncingBall",
|
|
81
|
+
"dots2",
|
|
82
|
+
"material",
|
|
83
|
+
"shark",
|
|
84
|
+
"simpleDotsScrolling",
|
|
85
|
+
"toggle9",
|
|
86
|
+
]
|
|
87
|
+
),
|
|
88
|
+
)
|
|
89
|
+
|
|
90
|
+
# Support for thought logging
|
|
91
|
+
tlogger = None
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def thought_log(s):
|
|
95
|
+
if s and tlogger and tlogger.isEnabledFor(DEBUG):
|
|
96
|
+
s = re.sub(r"([.!?])?$", ".", s)
|
|
97
|
+
tlogger.debug(s)
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def thought_begin():
|
|
101
|
+
thought_log("<think>")
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def thought_end():
|
|
105
|
+
thought_log("</think>")
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
if os.getenv("DEPSCAN_THINK_MODE", "") in ("true", "1"):
|
|
109
|
+
tlogger = logging.getLogger("depscan_thoughts")
|
|
110
|
+
tlogger.setLevel(DEBUG)
|
|
111
|
+
file_handler = logging.FileHandler(
|
|
112
|
+
os.getenv(
|
|
113
|
+
"DEPSCAN_THOUGHT_LOG", os.path.join(os.getcwd(), "depscan-thoughts.log")
|
|
114
|
+
)
|
|
115
|
+
)
|
|
116
|
+
file_handler.setLevel(DEBUG)
|
|
117
|
+
formatter = logging.Formatter("%(message)s")
|
|
118
|
+
file_handler.setFormatter(formatter)
|
|
119
|
+
tlogger.addHandler(file_handler)
|
|
File without changes
|
|
@@ -0,0 +1,124 @@
|
|
|
1
|
+
from datetime import datetime, timezone
|
|
2
|
+
|
|
3
|
+
from depscan.lib import config
|
|
4
|
+
from depscan.lib.package_query.pkg_query import compute_time_risks, calculate_risk_score
|
|
5
|
+
from semver import Version
|
|
6
|
+
|
|
7
|
+
def set_binary_risks(risk_metrics, current_version, latest_version):
    """
    Record binary-inclusion risk on *risk_metrics* in place.

    The effective version is the current version when provided, otherwise the
    latest version. If it ships any binaries ("bin_names"), set
    "pkg_includes_binary_risk" to True and store the binary count in
    "pkg_includes_binary_value".

    :param risk_metrics: Mutable dict of risk flags/values to update
    :param current_version: Version metadata dict for the version in use
    :param latest_version: Version metadata dict for the latest version
    """
    version = current_version if current_version else latest_version
    # Guard both a missing version dict and a null "bin_names" entry.
    bin_names = (version.get("bin_names") or []) if version else []
    risk_metrics["pkg_includes_binary_risk"] = bool(bin_names)
    risk_metrics["pkg_includes_binary_value"] = len(bin_names)
|
|
16
|
+
|
|
17
|
+
|
|
18
|
+
def cargo_pkg_risk(pkg_metadata, is_private_pkg, scope, pkg):
    """
    Calculate various package risks based on the metadata from cargo.

    :param pkg_metadata: Package metadata dict from the crates.io API
        (expects "versions" and "crate" keys)
    :param is_private_pkg: Whether the package belongs to a private namespace
    :param scope: Package scope such as required/optional/excluded
    :param pkg: Package dict; "version" is used to resolve the version in use
    :return: Dict of risk flags/values including the overall "risk_score"
    """
    risk_metrics = {
        "pkg_deprecated_risk": False,
        "pkg_version_deprecated_risk": False,
        "pkg_version_missing_risk": False,
        "pkg_includes_binary_risk": False,
        "pkg_min_versions_risk": False,
        "created_now_quarantine_seconds_risk": False,
        "latest_now_max_seconds_risk": False,
        "mod_create_min_seconds_risk": False,
        "pkg_min_maintainers_risk": False,
        "pkg_node_version_risk": False,
        "pkg_private_on_public_registry_risk": False,
    }
    versions_list = pkg_metadata.get("versions", [])
    # Nothing to analyze without at least one published version; previously
    # versions_list[0] raised IndexError for an empty list.
    if not versions_list:
        risk_metrics["risk_score"] = calculate_risk_score(risk_metrics)
        return risk_metrics
    versions_dict = {
        crate_version.get("num"): crate_version
        for crate_version in versions_list
    }
    versions_nums = [
        crate_version.get("num") for crate_version in versions_list
    ]

    # crates.io lists versions newest-first, so a yanked head entry means the
    # latest release has been pulled.
    is_deprecated = versions_list[0].get("yanked")
    is_version_deprecated = False
    info = pkg_metadata.get("crate", {})
    # Metadata for the version in use, when it can be resolved.
    current_version = (
        versions_dict.get(pkg.get("version"), {})
        if pkg and pkg.get("version")
        else {}
    )
    if not is_deprecated and pkg and pkg.get("version"):
        # Check if the version exists in the registry
        if not current_version:
            risk_metrics["pkg_version_missing_risk"] = True
            risk_metrics["pkg_version_missing_value"] = 1
        elif current_version.get("yanked"):
            # BUG FIX: this branch was unreachable before because the
            # preceding branch replaced the version dict with its "num"
            # string, so pkg_version_deprecated_risk was never set.
            is_version_deprecated = True

    # crates.io may return null for the description.
    pkg_description = (info.get("description") or "").lower()
    if not is_deprecated and (
        "is deprecated" in pkg_description
        or "no longer maintained" in pkg_description
    ):
        is_deprecated = True
    latest_deprecated = False

    try:
        first_version_num = min(
            versions_nums,
            key=lambda x: Version.parse(x, optional_minor_and_patch=True),
        )
        latest_version_num = max(
            versions_nums,
            key=lambda x: Version.parse(x, optional_minor_and_patch=True),
        )
    except (ValueError, TypeError):
        # Non-semver version strings: fall back to registry order
        # (newest first).
        first_version_num = versions_nums[-1]
        latest_version_num = versions_nums[0]

    first_version = versions_dict[first_version_num]
    # BUG FIX: the version number is a key into versions_dict, not a list
    # index; versions_list[latest_version_num] raised TypeError.
    latest_version = versions_dict[latest_version_num]

    # Is the private package available publicly? Dependency confusion.
    if is_private_pkg and pkg_metadata:
        risk_metrics["pkg_private_on_public_registry_risk"] = True
        risk_metrics["pkg_private_on_public_registry_value"] = 1

    # If the package has fewer than minimum number of versions
    if len(versions_list) < config.pkg_min_versions:
        risk_metrics["pkg_min_versions_risk"] = True
        risk_metrics["pkg_min_versions_value"] = len(versions_list)
    # Check if the latest version is deprecated
    if latest_version and latest_version.get("yanked"):
        latest_deprecated = True

    # Created and modified time related checks
    if first_version and latest_version:
        created = first_version.get("created_at")
        modified = latest_version.get("updated_at")
        if created and modified:
            modified_dt = datetime.fromisoformat(modified)
            created_dt = datetime.fromisoformat(created)
            mod_create_diff = modified_dt - created_dt
            latest_now_diff = datetime.now(timezone.utc) - modified_dt
            created_now_diff = datetime.now(timezone.utc) - created_dt
            risk_metrics = compute_time_risks(
                risk_metrics, created_now_diff, mod_create_diff, latest_now_diff
            )

    # Is the package deprecated
    if is_deprecated or latest_deprecated:
        risk_metrics["pkg_deprecated_risk"] = True
        risk_metrics["pkg_deprecated_value"] = 1
    elif is_version_deprecated:
        risk_metrics["pkg_version_deprecated_risk"] = True
        risk_metrics["pkg_version_deprecated_value"] = 1
    # Add package scope related weight
    if scope:
        risk_metrics[f"pkg_{scope}_scope_risk"] = True
        risk_metrics[f"pkg_{scope}_scope_value"] = 1

    # BUG FIX: pkg can be None; reuse the resolved current_version instead of
    # calling pkg.get() unconditionally.
    set_binary_risks(risk_metrics, current_version, latest_version)

    risk_metrics["risk_score"] = calculate_risk_score(risk_metrics)
    return risk_metrics
|
|
@@ -0,0 +1,170 @@
|
|
|
1
|
+
from rich.progress import Progress
|
|
2
|
+
|
|
3
|
+
from depscan.lib import config
|
|
4
|
+
from depscan.lib.logger import console, LOG
|
|
5
|
+
from depscan.lib.package_query.npm_pkg import npm_pkg_risk
|
|
6
|
+
from depscan.lib.package_query.pkg_query import get_lookup_url, httpclient
|
|
7
|
+
from depscan.lib.package_query.pypi_pkg import pypi_pkg_risk
|
|
8
|
+
from depscan.lib.package_query.cargo_pkg import cargo_pkg_risk
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def metadata_from_registry(
|
|
12
|
+
registry_type, scoped_pkgs, pkg_list, private_ns=None
|
|
13
|
+
):
|
|
14
|
+
"""
|
|
15
|
+
Method to query registry for the package metadata
|
|
16
|
+
|
|
17
|
+
:param registry_type: The type of registry to query
|
|
18
|
+
:param scoped_pkgs: Dictionary of lists of packages per scope
|
|
19
|
+
:param pkg_list: List of package dictionaries
|
|
20
|
+
:param private_ns: Private namespace
|
|
21
|
+
:return: A dict of package metadata, risk metrics, and private package
|
|
22
|
+
flag for each package
|
|
23
|
+
"""
|
|
24
|
+
metadata_dict = {}
|
|
25
|
+
# Circuit breaker flag to break the risk audit in case of many api errors
|
|
26
|
+
circuit_breaker = False
|
|
27
|
+
# Track the api failures count
|
|
28
|
+
failure_count = 0
|
|
29
|
+
done_count = 0
|
|
30
|
+
with Progress(
|
|
31
|
+
console=console,
|
|
32
|
+
transient=True,
|
|
33
|
+
redirect_stderr=False,
|
|
34
|
+
redirect_stdout=False,
|
|
35
|
+
refresh_per_second=1,
|
|
36
|
+
disable=len(pkg_list) < 10
|
|
37
|
+
) as progress:
|
|
38
|
+
task = progress.add_task(
|
|
39
|
+
"[green] Auditing packages", total=len(pkg_list)
|
|
40
|
+
)
|
|
41
|
+
for pkg in pkg_list:
|
|
42
|
+
if circuit_breaker:
|
|
43
|
+
LOG.info(
|
|
44
|
+
"Risk audited has been interrupted due to frequent api "
|
|
45
|
+
"errors. Please try again later."
|
|
46
|
+
)
|
|
47
|
+
progress.stop()
|
|
48
|
+
return {}
|
|
49
|
+
scope = pkg.get("scope", "").lower()
|
|
50
|
+
key, lookup_url = get_lookup_url(registry_type, pkg)
|
|
51
|
+
if not key or not lookup_url or key.startswith("https://"):
|
|
52
|
+
progress.advance(task)
|
|
53
|
+
continue
|
|
54
|
+
progress.update(task, description=f"Checking {key}")
|
|
55
|
+
try:
|
|
56
|
+
r = httpclient.get(
|
|
57
|
+
url=lookup_url,
|
|
58
|
+
follow_redirects=True,
|
|
59
|
+
timeout=config.request_timeout_sec,
|
|
60
|
+
)
|
|
61
|
+
json_data = r.json()
|
|
62
|
+
# Npm returns this error if the package is not found
|
|
63
|
+
if (
|
|
64
|
+
json_data.get("code") == "MethodNotAllowedError"
|
|
65
|
+
or r.status_code > 400
|
|
66
|
+
):
|
|
67
|
+
continue
|
|
68
|
+
is_private_pkg = False
|
|
69
|
+
if private_ns:
|
|
70
|
+
namespace_prefixes = private_ns.split(",")
|
|
71
|
+
for ns in namespace_prefixes:
|
|
72
|
+
if key.lower().startswith(
|
|
73
|
+
ns.lower()
|
|
74
|
+
) or key.lower().startswith("@" + ns.lower()):
|
|
75
|
+
is_private_pkg = True
|
|
76
|
+
break
|
|
77
|
+
risk_metrics = {}
|
|
78
|
+
match registry_type:
|
|
79
|
+
case "npm":
|
|
80
|
+
risk_metrics = npm_pkg_risk(
|
|
81
|
+
json_data, is_private_pkg, scope, pkg
|
|
82
|
+
)
|
|
83
|
+
case "pypi":
|
|
84
|
+
project_type_pkg = f"python:{key}".lower()
|
|
85
|
+
required_pkgs = scoped_pkgs.get("required", [])
|
|
86
|
+
optional_pkgs = scoped_pkgs.get("optional", [])
|
|
87
|
+
excluded_pkgs = scoped_pkgs.get("excluded", [])
|
|
88
|
+
if (
|
|
89
|
+
pkg.get("purl") in required_pkgs
|
|
90
|
+
or project_type_pkg in required_pkgs
|
|
91
|
+
):
|
|
92
|
+
scope = "required"
|
|
93
|
+
elif (
|
|
94
|
+
pkg.get("purl") in optional_pkgs
|
|
95
|
+
or project_type_pkg in optional_pkgs
|
|
96
|
+
):
|
|
97
|
+
scope = "optional"
|
|
98
|
+
elif (
|
|
99
|
+
pkg.get("purl") in excluded_pkgs
|
|
100
|
+
or project_type_pkg in excluded_pkgs
|
|
101
|
+
):
|
|
102
|
+
scope = "excluded"
|
|
103
|
+
risk_metrics = pypi_pkg_risk(
|
|
104
|
+
json_data, is_private_pkg, scope, pkg
|
|
105
|
+
)
|
|
106
|
+
case "cargo":
|
|
107
|
+
risk_metrics = cargo_pkg_risk(
|
|
108
|
+
json_data, is_private_pkg, scope, pkg
|
|
109
|
+
)
|
|
110
|
+
case _:
|
|
111
|
+
pass
|
|
112
|
+
# TODO: remove unnecessary if elif statements
|
|
113
|
+
# if registry_type == "npm":
|
|
114
|
+
# pass
|
|
115
|
+
# elif registry_type == "pypi":
|
|
116
|
+
# pass
|
|
117
|
+
metadata_dict[key] = {
|
|
118
|
+
"scope": scope,
|
|
119
|
+
"purl": pkg.get("purl"),
|
|
120
|
+
"pkg_metadata": json_data,
|
|
121
|
+
"risk_metrics": risk_metrics,
|
|
122
|
+
"is_private_pkg": is_private_pkg,
|
|
123
|
+
}
|
|
124
|
+
except Exception as e:
|
|
125
|
+
LOG.debug(e)
|
|
126
|
+
failure_count += 1
|
|
127
|
+
progress.advance(task)
|
|
128
|
+
done_count += 1
|
|
129
|
+
if failure_count >= config.max_request_failures:
|
|
130
|
+
circuit_breaker = True
|
|
131
|
+
LOG.debug(
|
|
132
|
+
"Retrieved package metadata for %d/%d packages. Failures count %d",
|
|
133
|
+
done_count,
|
|
134
|
+
len(pkg_list),
|
|
135
|
+
failure_count,
|
|
136
|
+
)
|
|
137
|
+
return metadata_dict
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def cargo_metadata(scoped_pkgs, pkg_list, private_ns=None):
    """
    Method to query cargo for the package metadata

    Thin wrapper that delegates to metadata_from_registry with
    registry_type "cargo".

    :param scoped_pkgs: Dictionary of lists of packages per scope
    :param pkg_list: List of package dictionaries
    :param private_ns: Private namespace
    :return: A dict of package metadata, risk metrics, and private package
    flag for each package
    """
    return metadata_from_registry("cargo", scoped_pkgs, pkg_list, private_ns)
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def npm_metadata(scoped_pkgs, pkg_list, private_ns=None):
    """
    Method to query npm for the package metadata

    Thin wrapper that delegates to metadata_from_registry with
    registry_type "npm".

    :param scoped_pkgs: Dictionary of lists of packages per scope
    :param pkg_list: List of package dictionaries
    :param private_ns: Private namespace
    :return: A dict of package metadata, risk metrics, and private package
    flag for each package
    """
    return metadata_from_registry("npm", scoped_pkgs, pkg_list, private_ns)
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def pypi_metadata(scoped_pkgs, pkg_list, private_ns=None):
    """
    Method to query pypi for the package metadata

    Thin wrapper that delegates to metadata_from_registry with
    registry_type "pypi".

    :param scoped_pkgs: Dictionary of lists of packages per scope
    :param pkg_list: List of package dictionaries
    :param private_ns: Private namespace
    :return: A dict of package metadata, risk metrics, and private package
    flag for each package
    """
    return metadata_from_registry("pypi", scoped_pkgs, pkg_list, private_ns)
|