reqsync 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- reqsync/__init__.py +15 -0
- reqsync/_logging.py +40 -0
- reqsync/_types.py +102 -0
- reqsync/cli.py +120 -0
- reqsync/config.py +121 -0
- reqsync/core.py +214 -0
- reqsync/env.py +125 -0
- reqsync/io.py +57 -0
- reqsync/parse.py +120 -0
- reqsync/policy.py +80 -0
- reqsync/report.py +57 -0
- reqsync-0.1.0.dist-info/METADATA +298 -0
- reqsync-0.1.0.dist-info/RECORD +16 -0
- reqsync-0.1.0.dist-info/WHEEL +4 -0
- reqsync-0.1.0.dist-info/entry_points.txt +2 -0
- reqsync-0.1.0.dist-info/licenses/LICENSE +21 -0
reqsync/__init__.py
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# src/reqsync/__init__.py
# Package entry point: re-exports the public `sync` API and resolves the
# installed package version at import time.

from .core import sync

# Public API of the package is exactly the `sync` orchestrator.
__all__ = ["sync"]

try:
    from importlib.metadata import PackageNotFoundError, version
except Exception:  # py<3.8 backport if installed
    from importlib_metadata import PackageNotFoundError, version  # type: ignore

try:
    # Ask the installed distribution metadata for our own version.
    __version__ = version("reqsync")
except PackageNotFoundError:
    # Running from a source checkout that was never pip-installed.
    __version__ = "0.0.0+dev"
|
reqsync/_logging.py
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# src/reqsync/_logging.py
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import logging
|
|
6
|
+
import sys
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def setup_logging(verbosity: int, quiet: bool, log_file: Path | None) -> None:
    """
    Configure root logger with console and optional file sink.

    Verbosity: 0 WARNING, 1 INFO, 2+ DEBUG. Quiet forces WARNING.
    The console handler honors the requested level; the file handler (when
    ``log_file`` is given) always captures DEBUG for post-mortem debugging.

    Args:
        verbosity: count of ``-v`` flags (0, 1, 2+).
        quiet: when True, overrides verbosity and forces WARNING.
        log_file: optional path for a full-detail log file; parent
            directories are created as needed.
    """
    if quiet:
        level = logging.WARNING
    elif verbosity >= 2:
        level = logging.DEBUG
    elif verbosity == 1:
        level = logging.INFO
    else:
        level = logging.WARNING

    logger = logging.getLogger()
    logger.handlers.clear()
    # BUG FIX: the root logger filters records *before* any handler sees them.
    # Previously the root level was set to `level`, so the file handler's
    # DEBUG setting was ineffective (DEBUG/INFO records were dropped at the
    # root). When a file sink is requested, the root must be DEBUG and the
    # console handler alone enforces the user-facing level.
    logger.setLevel(logging.DEBUG if log_file else level)

    ch = logging.StreamHandler(sys.stderr)
    ch.setLevel(level)
    ch.setFormatter(logging.Formatter("%(message)s"))
    logger.addHandler(ch)

    if log_file:
        log_file.parent.mkdir(parents=True, exist_ok=True)
        fh = logging.FileHandler(log_file, encoding="utf-8")
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(logging.Formatter("%(asctime)s | %(levelname)s | %(message)s"))
        logger.addHandler(fh)
|
reqsync/_types.py
ADDED
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
# src/reqsync/_types.py
# Shared data model for reqsync: option bag, change records, process exit
# codes, and the TypedDicts used by the JSON report.

from __future__ import annotations

from collections.abc import Sequence
from dataclasses import dataclass, field
from pathlib import Path
from typing import Literal, TypedDict

# How a requirement line is rewritten relative to the installed version.
Policy = Literal["lower-bound", "floor-only", "floor-and-cap"]


class ExitCode:
    """Process exit codes used by the CLI (plain ints, not an Enum, so they
    can be passed straight to typer.Exit)."""

    OK = 0
    GENERIC_ERROR = 1
    MISSING_FILE = 2
    HASHES_PRESENT = 3
    PIP_FAILED = 4
    PARSE_ERROR = 5
    CONSTRAINT_CONFLICT = 6
    SYSTEM_PYTHON_BLOCKED = 7
    DIRTY_REPO_BLOCKED = 8
    # NOTE: 9 is unused; keep the gap to preserve published code values.
    WRITE_FAILED_ROLLED_BACK = 10
    CHANGES_WOULD_BE_MADE = 11


@dataclass
class Options:
    """Fully-resolved run configuration (defaults merged with config file
    and CLI flags; see config.merge_options)."""

    # Root requirements file to process.
    path: Path
    follow_includes: bool = True
    update_constraints: bool = False
    policy: Policy = "lower-bound"
    allow_prerelease: bool = False
    keep_local: bool = False
    # When True, skip the `pip install -U` step and rewrite from the
    # currently installed versions only.
    no_upgrade: bool = False
    pip_timeout_sec: int = 900
    pip_args: str = ""
    # Package-name glob filters; empty means "no filter".
    only: Sequence[str] = ()
    exclude: Sequence[str] = ()
    check: bool = False
    dry_run: bool = False
    show_diff: bool = False
    json_report: Path | None = None
    backup_suffix: str = ".bak"
    timestamped_backups: bool = True
    log_file: Path | None = None
    verbosity: int = 0
    quiet: bool = False
    # Permit running outside a virtualenv.
    system_ok: bool = False
    allow_hashes: bool = False
    allow_dirty: bool = True
    last_wins: bool = False


@dataclass
class Change:
    """One rewritten requirement line (old/new text kept verbatim,
    including EOL)."""

    package: str
    installed_version: str
    old_line: str
    new_line: str
    file: Path


@dataclass
class FileChange:
    """Per-file rewrite outcome; `original_text == new_text` means
    no change."""

    file: Path
    changes: list[Change] = field(default_factory=list)
    original_text: str = ""
    new_text: str = ""


@dataclass
class Result:
    """Aggregate outcome returned by core.sync()."""

    changed: bool
    files: list[FileChange]
    diff: str | None = None
    backup_paths: list[Path] = field(default_factory=list)


class JsonChange(TypedDict):
    """JSON-serializable mirror of Change (paths stringified)."""

    file: str
    package: str
    installed_version: str
    old_line: str
    new_line: str


class JsonReport(TypedDict):
    """Top-level shape of the --json-report output."""

    files: list[str]
    changes: list[JsonChange]


__all__ = [
    "Change",
    "ExitCode",
    "FileChange",
    "JsonChange",
    "JsonReport",
    "Options",
    "Policy",
    "Result",
]
|
reqsync/cli.py
ADDED
|
@@ -0,0 +1,120 @@
|
|
|
1
|
+
# src/reqsync/cli.py
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
from enum import Enum
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, Optional
|
|
7
|
+
|
|
8
|
+
import typer
|
|
9
|
+
from click.core import ParameterSource
|
|
10
|
+
|
|
11
|
+
from ._logging import setup_logging
|
|
12
|
+
from ._types import ExitCode, Options
|
|
13
|
+
from .config import load_project_config, merge_options
|
|
14
|
+
from .core import sync
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class PolicyEnum(str, Enum):
    """CLI-facing mirror of the Policy Literal; str-based so the value is
    usable directly where a policy string is expected."""

    LOWER_BOUND = "lower-bound"
    FLOOR_ONLY = "floor-only"
    FLOOR_AND_CAP = "floor-and-cap"


app = typer.Typer(
    add_completion=False,
    no_args_is_help=True,
    help="Synchronize requirements.txt to >= installed versions safely.",
)


# FIX: Revert to @app.callback() to create a single-command app.
# This aligns with the test runner's expectation and eliminates the "unexpected argument" error.
@app.callback(invoke_without_command=True)
def main_cli(
    ctx: typer.Context,
    path: Path = typer.Option(
        Path("requirements.txt"),
        "--path",
        help="Path to requirements.txt",
        dir_okay=False,
    ),
    follow_includes: bool = typer.Option(True, help="Follow -r includes recursively"),
    update_constraints: bool = typer.Option(False, help="Allow updating constraint files"),
    policy: PolicyEnum = typer.Option(
        PolicyEnum.LOWER_BOUND,
        help="Policy: lower-bound | floor-only | floor-and-cap",
    ),
    allow_prerelease: bool = typer.Option(False, help="Adopt pre/dev versions"),
    keep_local: bool = typer.Option(False, help="Keep local version suffixes (+local)"),
    no_upgrade: bool = typer.Option(False, help="Do not run pip upgrade; just rewrite to current env"),
    pip_timeout_sec: int = typer.Option(900, help="Timeout for pip upgrade in seconds"),
    pip_args: str = typer.Option("", help="Allowlisted pip args to pass through"),
    only: Optional[str] = typer.Option(None, help="Comma-separated package globs to include"),
    exclude: Optional[str] = typer.Option(None, help="Comma-separated package globs to exclude"),
    check: bool = typer.Option(False, help="Exit nonzero if changes would be made"),
    dry_run: bool = typer.Option(False, help="Preview changes without writing"),
    show_diff: bool = typer.Option(False, help="Show unified diff"),
    json_report: Optional[Path] = typer.Option(None, help="Write JSON report to this path"),
    backup_suffix: str = typer.Option(".bak", help="Backup suffix"),
    timestamped_backups: bool = typer.Option(True, help="Use timestamped backups"),
    log_file: Optional[Path] = typer.Option(None, help="Optional log file path"),
    verbosity: int = typer.Option(0, "--verbose", "-v", count=True, help="Increase logging verbosity"),
    quiet: bool = typer.Option(False, "--quiet", "-q", help="Reduce logging"),
    system_ok: bool = typer.Option(False, help="Allow running outside a venv"),
    allow_hashes: bool = typer.Option(False, help="Skip hashed stanzas instead of refusing"),
    allow_dirty: bool = typer.Option(True, help="Allow running with dirty git repo"),
    last_wins: bool = typer.Option(False, help="If duplicates across includes, last definition wins"),
    use_config: bool = typer.Option(True, help="Load reqsync.toml or [tool.reqsync] from pyproject"),
) -> None:
    """
    Upgrade env and rewrite requirements to >= installed versions while preserving formatting.

    Option precedence: built-in defaults, then project config (when
    --use-config), then explicitly supplied CLI flags. Errors raised by
    core.sync are mapped onto ExitCode values by matching their messages.
    """
    setup_logging(verbosity=verbosity, quiet=quiet, log_file=log_file)
    # Start from the hard-coded default; the real path arrives either via
    # config or via cli_overrides below (an explicit --path is included in
    # ctx.params with a non-DEFAULT source).
    opts = Options(path=Path("requirements.txt"))
    if use_config:
        config_from_file = load_project_config(Path(".").resolve())
        opts = merge_options(opts, config_from_file)
    # Keep only parameters the user actually set on the command line, so
    # untouched CLI defaults never clobber config-file values.
    cli_overrides: dict[str, Any] = {
        k: v for k, v in ctx.params.items() if ctx.get_parameter_source(k) is not ParameterSource.DEFAULT
    }
    opts = merge_options(opts, cli_overrides)
    try:
        result = sync(opts)
    except FileNotFoundError:
        typer.secho(f"Requirements file not found: {opts.path}", fg=typer.colors.RED, err=True)
        raise typer.Exit(ExitCode.MISSING_FILE) from None
    except RuntimeError as err:
        # NOTE(review): exit-code mapping relies on substrings of messages
        # raised in core.sync — keep the two sides in sync when editing.
        msg = str(err)
        if "hash pins" in msg:
            code = ExitCode.HASHES_PRESENT
        elif "Refusing to run outside a virtualenv" in msg:
            code = ExitCode.SYSTEM_PYTHON_BLOCKED
        elif "pip install -U failed" in msg:
            code = ExitCode.PIP_FAILED
        elif "Write failed and backups restored" in msg:
            code = ExitCode.WRITE_FAILED_ROLLED_BACK
        else:
            code = ExitCode.GENERIC_ERROR
        typer.secho(msg, fg=typer.colors.RED, err=True)
        raise typer.Exit(code) from err

    if opts.json_report:
        # Imported lazily so the report module is only a dependency of
        # runs that request a JSON report.
        from .report import to_json_report, write_json_report

        write_json_report(to_json_report(result.files), str(opts.json_report))

    if result.changed:
        if opts.show_diff or opts.dry_run:
            typer.echo(result.diff or "")
        if opts.check:
            raise typer.Exit(ExitCode.CHANGES_WOULD_BE_MADE) from None

    raise typer.Exit(ExitCode.OK) from None
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def main() -> None:
    """Console-script entry point (see dist-info entry_points): run the Typer app."""
    app()


if __name__ == "__main__":
    main()
|
reqsync/config.py
ADDED
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
# src/reqsync/config.py
|
|
2
|
+
from __future__ import annotations
|
|
3
|
+
|
|
4
|
+
import importlib
|
|
5
|
+
import json
|
|
6
|
+
import logging
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Any
|
|
9
|
+
|
|
10
|
+
from ._types import Options
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Dynamically import tomllib (3.11+) or fall back to tomli; avoid static imports so mypy won't
|
|
14
|
+
# require stubs for tomllib on older interpreters.
|
|
15
|
+
def _import_toml_like() -> Any:
|
|
16
|
+
mod = None
|
|
17
|
+
try:
|
|
18
|
+
mod = importlib.import_module("tomllib") # Python 3.11+
|
|
19
|
+
except Exception:
|
|
20
|
+
try:
|
|
21
|
+
mod = importlib.import_module("tomli") # Backport
|
|
22
|
+
except Exception:
|
|
23
|
+
mod = None
|
|
24
|
+
return mod
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
toml: Any = _import_toml_like()
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
def _load_toml(path: Path) -> dict[str, Any]:
|
|
31
|
+
if not toml:
|
|
32
|
+
return {}
|
|
33
|
+
try:
|
|
34
|
+
with open(path, "rb") as f:
|
|
35
|
+
data = toml.load(f)
|
|
36
|
+
return data if isinstance(data, dict) else {}
|
|
37
|
+
except Exception:
|
|
38
|
+
return {}
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
def load_project_config(start_dir: Path) -> dict[str, Any]:
    """Gather reqsync settings from the project directory.

    Sources are merged in order, later ones overriding earlier keys:
    ``reqsync.toml``, then ``[tool.reqsync]`` in ``pyproject.toml``, then a
    ``reqsync.json`` fallback. Unreadable sources are skipped (the JSON case
    logs a warning); the result may be empty.
    """
    merged: dict[str, Any] = {}

    # Dedicated reqsync.toml at the project root.
    toml_cfg = start_dir / "reqsync.toml"
    if toml_cfg.exists():
        merged.update(_load_toml(toml_cfg))

    # [tool.reqsync] table inside pyproject.toml.
    pyproject = start_dir / "pyproject.toml"
    if pyproject.exists():
        section = (_load_toml(pyproject).get("tool") or {}).get("reqsync") or {}
        if isinstance(section, dict):
            merged.update(section)

    # Last resort: a plain JSON config file.
    json_cfg = start_dir / "reqsync.json"
    if json_cfg.exists():
        try:
            merged.update(json.loads(json_cfg.read_text(encoding="utf-8")))
        except Exception as e:
            logging.warning("Failed to parse reqsync.json: %s", e)

    return merged
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def _to_path(v: Any) -> Path | None:
|
|
70
|
+
if v in (None, "", "."):
|
|
71
|
+
return None
|
|
72
|
+
try:
|
|
73
|
+
return Path(str(v))
|
|
74
|
+
except Exception:
|
|
75
|
+
return None
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _to_tuple(v: Any) -> tuple[str, ...]:
|
|
79
|
+
if v is None:
|
|
80
|
+
return ()
|
|
81
|
+
if isinstance(v, (list, tuple)):
|
|
82
|
+
return tuple(str(x).strip() for x in v if str(x).strip())
|
|
83
|
+
if isinstance(v, str):
|
|
84
|
+
return tuple(p for p in (s.strip() for s in v.split(",")) if p)
|
|
85
|
+
return ()
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def merge_options(base: Options, overrides: dict[str, Any]) -> Options:
    """Return a new Options with *overrides* applied on top of *base*.

    Callers merge in increasing precedence: defaults -> config file -> CLI
    flags (the CLI layer only includes parameters the user explicitly set),
    so a key present in *overrides* always wins over *base*.
    """
    # BUG FIX: the previous guard kept base.path whenever it differed from
    # the default "requirements.txt", which meant an explicit CLI --path
    # could never override a path set by a config file (config is merged
    # before the CLI). Since overrides are merged in increasing precedence,
    # a path present in overrides must simply win; the config merge cannot
    # clobber a CLI path because it happens first.
    override_path = _to_path(overrides.get("path"))
    effective_path = override_path if override_path else base.path

    return Options(
        path=effective_path,
        follow_includes=overrides.get("follow_includes", base.follow_includes),
        update_constraints=overrides.get("update_constraints", base.update_constraints),
        policy=overrides.get("policy", base.policy),
        allow_prerelease=overrides.get("allow_prerelease", base.allow_prerelease),
        keep_local=overrides.get("keep_local", base.keep_local),
        no_upgrade=overrides.get("no_upgrade", base.no_upgrade),
        pip_timeout_sec=int(overrides.get("pip_timeout_sec", base.pip_timeout_sec)),
        pip_args=str(overrides.get("pip_args", base.pip_args)),
        # NOTE: empty-tuple overrides fall back to base — an override cannot
        # *clear* a previously-configured filter list.
        only=_to_tuple(overrides.get("only")) or base.only,
        exclude=_to_tuple(overrides.get("exclude")) or base.exclude,
        check=overrides.get("check", base.check),
        dry_run=overrides.get("dry_run", base.dry_run),
        show_diff=overrides.get("show_diff", base.show_diff),
        json_report=_to_path(overrides.get("json_report")) or base.json_report,
        backup_suffix=str(overrides.get("backup_suffix", base.backup_suffix)),
        timestamped_backups=overrides.get("timestamped_backups", base.timestamped_backups),
        log_file=_to_path(overrides.get("log_file")) or base.log_file,
        verbosity=int(overrides.get("verbosity", base.verbosity)),
        quiet=overrides.get("quiet", base.quiet),
        system_ok=overrides.get("system_ok", base.system_ok),
        allow_hashes=overrides.get("allow_hashes", base.allow_hashes),
        allow_dirty=overrides.get("allow_dirty", base.allow_dirty),
        last_wins=overrides.get("last_wins", base.last_wins),
    )
|
reqsync/core.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
1
|
+
# src/reqsync/core.py
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import fnmatch
|
|
6
|
+
import logging
|
|
7
|
+
import shutil
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
|
|
10
|
+
from packaging.utils import canonicalize_name
|
|
11
|
+
|
|
12
|
+
from . import env as env_mod
|
|
13
|
+
from . import report as report_mod
|
|
14
|
+
from ._types import Change, FileChange, Options, Result
|
|
15
|
+
from .io import backup_file, read_text_preserve, write_text_preserve
|
|
16
|
+
from .parse import find_constraints, find_includes, guard_hashes, parse_line
|
|
17
|
+
from .policy import CapStrategy, apply_policy
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def _match(name: str, patterns: list[str]) -> bool:
|
|
21
|
+
return any(fnmatch.fnmatch(name, p) for p in patterns)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def _resolve_includes(root: Path, follow: bool) -> list[Path]:
    """Return *root* plus every file reachable through `-r` include lines.

    Files are appended in discovery order (depth-first, since the worklist
    is popped from the end); resolved paths are tracked to break include
    cycles. Missing include targets are silently skipped.
    """
    files: list[Path] = [root]
    if not follow or not root.exists():
        return files
    seen = {root.resolve()}
    queue = [root]
    while queue:
        cur = queue.pop()  # LIFO pop -> depth-first traversal
        # read_text_preserve returns (text, <unused>, <unused>) — presumably
        # newline-style and BOM flag as in sync(); only the text is used here.
        text, _, _ = read_text_preserve(cur)
        for rel in find_includes(text.splitlines()):
            # Include paths are relative to the file that declares them.
            inc = (cur.parent / rel).resolve()
            if inc.exists() and inc not in seen:
                seen.add(inc)
                files.append(inc)
                queue.append(inc)
    return files
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def _should_skip_pkg(pkg_name: str, only: list[str], exclude: list[str]) -> bool:
    """Decide whether a package is filtered out by --only / --exclude globs.

    An empty *only* list includes everything; *exclude* wins over *only*.
    """
    included = _match(pkg_name, only) if only else True
    excluded = _match(pkg_name, exclude) if exclude else False
    return (not included) or excluded
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def _rewrite_text(
    path: Path,
    text: str,
    installed: dict[str, str],
    options: Options,
    cap: CapStrategy | None,
) -> tuple[str, list[Change]]:
    """Rewrite one requirements file's text against the installed versions.

    Lines are processed individually with EOLs preserved (splitlines with
    keepends). Non-package lines, unparseable requirements, filtered
    packages, packages missing from *installed*, and lines the policy
    declines to change are all passed through untouched. Returns the new
    text plus a Change record per modified line.
    """
    lines = text.splitlines(keepends=True)
    out_lines: list[str] = []
    changes: list[Change] = []
    for line in lines:
        parsed = parse_line(line)
        kind = parsed.kind

        # Comments, blank lines, -r/-c directives, options, etc.
        if kind != "package":
            out_lines.append(line)
            continue

        req = parsed.requirement
        if not req:
            # Classified as a package line but no Requirement was produced —
            # keep it verbatim rather than guess.
            out_lines.append(line)
            continue

        # Canonical (PEP 503) name is the key into `installed` and what the
        # --only/--exclude globs are matched against.
        base = canonicalize_name(req.name)
        if _should_skip_pkg(base, list(options.only), list(options.exclude)):
            out_lines.append(line)
            continue

        if base not in installed:
            logging.warning("Not installed after upgrade: %s (kept)", req.name)
            out_lines.append(line)
            continue

        # apply_policy returns the rewritten requirement text, or None to
        # signal "leave this line alone".
        new_content = apply_policy(
            req=req,
            installed_version=installed[base],
            policy=options.policy,
            allow_prerelease=options.allow_prerelease,
            keep_local=options.keep_local,
            cap_strategy=cap,
        )

        if new_content is None:
            out_lines.append(line)
            continue

        # Reattach the original trailing comment and line ending so
        # formatting is preserved byte-for-byte where possible.
        # NOTE(review): assumes parsed.comment carries its own leading
        # whitespace — confirm against parse.parse_line.
        new_line = new_content + parsed.comment + parsed.eol
        if new_line != line:
            changes.append(
                Change(
                    package=req.name,
                    installed_version=installed[base],
                    old_line=line,
                    new_line=new_line,
                    file=path,
                )
            )
            out_lines.append(new_line)
        else:
            out_lines.append(line)

    return "".join(out_lines), changes
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def sync(options: Options) -> Result:
    """
    Orchestrate upgrade and rewrite.

    Steps: venv guard -> hashed-stanza guard -> optional `pip install -U`
    -> snapshot installed versions -> rewrite the root file and followed
    includes -> (unless --check/--dry-run) back up and write each changed
    file, restoring all backups if any write fails.

    Raises RuntimeError with clear messages for fatal conditions (the CLI
    maps those messages onto exit codes) and FileNotFoundError when the
    root requirements file is missing.
    """
    logging.info("Starting reqsync")
    # Calls the module-level alias (bottom of file) so tests can monkeypatch it.
    ensure_venv_or_exit(options.system_ok)

    root = options.path
    logging.info("Reading %s", root)
    if not root.exists():
        raise FileNotFoundError(str(root))

    logging.info("Reading and parsing requirements files...")
    root_text, _, _ = read_text_preserve(root)
    try:
        guard_hashes(root_text.splitlines(), allow_hashes=options.allow_hashes)
    except ValueError as e:
        # Re-raise as RuntimeError; the CLI matches the message text.
        raise RuntimeError(str(e)) from e

    if not options.no_upgrade:
        logging.info("Upgrading environment via pip (may take a while)...")
        # Module-level alias again — keeps monkeypatching possible.
        code, _ = run_pip_upgrade(str(root), timeout_sec=options.pip_timeout_sec, extra_args=options.pip_args)
        if code != 0:
            raise RuntimeError("pip install -U failed. See logs.")
        logging.info("Environment upgrade complete.")

    logging.info("Inspecting installed packages...")
    installed = get_installed_versions()

    files = _resolve_includes(root, follow=options.follow_includes)
    logging.info("Resolved %d file(s) (follow_includes=%s)", len(files), options.follow_includes)
    file_results: list[FileChange] = []
    aggregate_changes: list[Change] = []
    cap = CapStrategy(default="next-major")
    # Per-file BOM flag captured at read time so it can be preserved on write.
    file_properties: dict[Path, tuple[bool]] = {}

    for f in files:
        text, _, bom = read_text_preserve(f)
        file_properties[f] = (bom,)
        constraints = find_constraints(text.splitlines())
        # Non-root constraint files are left untouched unless explicitly
        # opted in via --update-constraints.
        if constraints and not options.update_constraints and f != root:
            file_results.append(FileChange(file=f, original_text=text, new_text=text, changes=[]))
            continue

        new_text, changes = _rewrite_text(f, text, installed, options, cap)
        if changes:
            aggregate_changes.extend(changes)
        file_results.append(FileChange(file=f, original_text=text, new_text=new_text, changes=changes))

    changed = any(fr.original_text != fr.new_text for fr in file_results)

    if options.check:
        diff = report_mod.make_diff(file_results) if changed and (options.show_diff or options.dry_run) else None
        logging.info("Check complete. Changes detected: %s", changed)
        # BUG FIX: previously returned changed=True unconditionally, which
        # made --check signal "changes would be made" (and the CLI exit
        # nonzero) even when every file was already up to date.
        return Result(changed=changed, files=file_results, diff=diff)

    if options.dry_run:
        diff = report_mod.make_diff(file_results) if changed and options.show_diff else None
        logging.info("Dry run complete. Changes detected: %s", changed)
        return Result(changed=changed, files=file_results, diff=diff)

    backups: list[Path] = []
    if changed:
        logging.info("Writing %d change(s) to disk...", len(aggregate_changes))
        for fr in file_results:
            if fr.original_text == fr.new_text:
                continue
            backup = backup_file(fr.file, options.backup_suffix, options.timestamped_backups)
            backups.append(backup)
            try:
                bom = file_properties.get(fr.file, (False,))[0]
                write_text_preserve(fr.file, fr.new_text, bom)
            except Exception as e:
                # Best-effort rollback: restore every file backed up so far;
                # failures during restore are deliberately ignored.
                for b in backups:
                    target = Path(str(b).rsplit(options.backup_suffix, 1)[0])
                    try:
                        shutil.copy2(b, target)
                    except Exception:
                        pass
                raise RuntimeError(f"Write failed and backups restored: {e}") from e

    diff = report_mod.make_diff(file_results) if changed and options.show_diff else None
    logging.info("Reqsync process finished.")
    return Result(changed=changed, files=file_results, diff=diff, backup_paths=backups)
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
# --- Back-compat test shims ---------------------------------------------------
# Tests (and possibly external callers) patch these names on the core module.
# Keep them as aliases to the env module so monkeypatching still works.
# NOTE: sync() deliberately calls these module-level names (not env_mod.*)
# so that a patched alias takes effect at call time.
get_installed_versions = env_mod.get_installed_versions
ensure_venv_or_exit = env_mod.ensure_venv_or_exit
run_pip_upgrade = env_mod.run_pip_upgrade
is_venv_active = env_mod.is_venv_active