fops 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- fops-0.1.0/PKG-INFO +25 -0
- fops-0.1.0/README.md +14 -0
- fops-0.1.0/pyproject.toml +29 -0
- fops-0.1.0/src/fops/__init__.py +12 -0
- fops-0.1.0/src/fops/__main__.py +4 -0
- fops-0.1.0/src/fops/_cli_entry.py +14 -0
- fops-0.1.0/src/fops/cli.py +63 -0
- fops-0.1.0/src/fops/commands/__init__.py +0 -0
- fops-0.1.0/src/fops/commands/create_archive.py +52 -0
- fops-0.1.0/src/fops/commands/delete_branches.py +22 -0
- fops-0.1.0/src/fops/commands/delete_cache.py +38 -0
- fops-0.1.0/src/fops/commands/rename_extensions.py +65 -0
- fops-0.1.0/src/fops/core.py +478 -0
- fops-0.1.0/src/fops/utils.py +36 -0
fops-0.1.0/PKG-INFO
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
Metadata-Version: 2.3
|
|
2
|
+
Name: fops
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary:
|
|
5
|
+
Author: oyghen
|
|
6
|
+
Author-email: oyghen <oyghen@duck.com>
|
|
7
|
+
Requires-Dist: typer>=0.21.0 ; extra == 'cli'
|
|
8
|
+
Requires-Python: >=3.11
|
|
9
|
+
Provides-Extra: cli
|
|
10
|
+
Description-Content-Type: text/markdown
|
|
11
|
+
|
|
12
|
+
<div align="center">
|
|
13
|
+
|
|
14
|
+
# fops
|
|
15
|
+
|
|
16
|
+
[](https://github.com/oyghen/fops)
|
|
17
|
+
[](https://pypi.org/project/fops)
|
|
18
|
+
[](https://github.com/oyghen/fops/blob/main/LICENSE)
|
|
19
|
+
[](https://github.com/oyghen/fops/actions/workflows/ci.yml)
|
|
20
|
+
|
|
21
|
+
</div>
|
|
22
|
+
|
|
23
|
+
```shell
|
|
24
|
+
uv tool install 'fops[cli]'
|
|
25
|
+
```
|
fops-0.1.0/README.md
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
<div align="center">
|
|
2
|
+
|
|
3
|
+
# fops
|
|
4
|
+
|
|
5
|
+
[](https://github.com/oyghen/fops)
|
|
6
|
+
[](https://pypi.org/project/fops)
|
|
7
|
+
[](https://github.com/oyghen/fops/blob/main/LICENSE)
|
|
8
|
+
[](https://github.com/oyghen/fops/actions/workflows/ci.yml)
|
|
9
|
+
|
|
10
|
+
</div>
|
|
11
|
+
|
|
12
|
+
```shell
|
|
13
|
+
uv tool install 'fops[cli]'
|
|
14
|
+
```
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
[project]
|
|
2
|
+
name = "fops"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = ""
|
|
5
|
+
readme = "README.md"
|
|
6
|
+
authors = [
|
|
7
|
+
{ name = "oyghen", email = "oyghen@duck.com" }
|
|
8
|
+
]
|
|
9
|
+
requires-python = ">=3.11"
|
|
10
|
+
dependencies = []
|
|
11
|
+
|
|
12
|
+
[project.scripts]
|
|
13
|
+
fops = "fops._cli_entry:main"
|
|
14
|
+
|
|
15
|
+
[project.optional-dependencies]
|
|
16
|
+
cli = [
|
|
17
|
+
"typer>=0.21.0",
|
|
18
|
+
]
|
|
19
|
+
|
|
20
|
+
[build-system]
|
|
21
|
+
requires = ["uv_build>=0.9.18,<0.10.0"]
|
|
22
|
+
build-backend = "uv_build"
|
|
23
|
+
|
|
24
|
+
[dependency-groups]
|
|
25
|
+
dev = [
|
|
26
|
+
"pre-commit>=4.5.1",
|
|
27
|
+
"pytest>=9.0.2",
|
|
28
|
+
"ruff>=0.14.10",
|
|
29
|
+
]
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
__all__ = ("__version__", "core")

import logging
from importlib import metadata

from fops import core

# Resolve the version from the installed distribution metadata so it is
# declared in exactly one place (pyproject.toml).
__version__ = metadata.version(__name__)

# Prevent "No handlers could be found" warnings when the library is imported.
# Applications are responsible for configuring handlers/formatters/levels.
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
import sys
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
def main() -> None:
    """Console-script entry point.

    Imports `fops.cli` lazily so a missing optional dependency (typer)
    yields a short installation hint and exit code 2 instead of a traceback.
    """
    try:
        from fops import cli
    except ImportError as exc:  # ModuleNotFoundError is a subclass of ImportError
        print(
            "CLI dependencies missing. Use: uv tool install 'fops[cli]'",
            file=sys.stderr,
        )
        raise SystemExit(2) from exc

    cli.main()
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
__all__ = ("app", "main")

import logging
import sys

import typer

import fops

# Single Typer application instance; command modules register onto it.
app = typer.Typer(add_completion=False)

# command imports need to be after app creation: each command module uses
# `@app.command()` at import time, so `app` must already exist.
from fops.commands import (  # noqa: E402,F401
    create_archive,
    delete_branches,
    delete_cache,
    rename_extensions,
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def configure_logging(level: int) -> None:
    """(Re)initialize the root logger with a single stderr handler.

    Previously installed handlers are dropped first so repeated calls
    never duplicate log output.
    """
    root_logger = logging.getLogger()
    root_logger.handlers.clear()

    stderr_handler = logging.StreamHandler(sys.stderr)
    stderr_handler.setFormatter(logging.Formatter("%(levelname)s: %(message)s"))

    root_logger.addHandler(stderr_handler)
    root_logger.setLevel(level)
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
@app.callback(invoke_without_command=True)
def cli(
    version: bool = typer.Option(
        False, "--version", "-V", help="Show app version and exit."
    ),
    verbose: bool = typer.Option(
        False, "--verbose", "-v", help="Enable debug logging."
    ),
    quiet: bool = typer.Option(False, "--quiet", "-q", help="Suppress info logging."),
) -> None:
    """Top-level callback: report the version or configure log verbosity."""
    if version:
        typer.echo(f"{fops.__name__} {fops.__version__}")
        raise typer.Exit()

    if verbose and quiet:
        raise typer.BadParameter("Cannot use --verbose and --quiet together")

    # --verbose wins over default; --quiet raises the threshold instead.
    level = logging.DEBUG if verbose else logging.WARNING if quiet else logging.INFO
    configure_logging(level)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def main() -> None:
    """Canonical entry point for CLI execution.

    Called by `fops._cli_entry.main` after the optional CLI dependencies
    are confirmed importable; dispatches to the Typer application.
    """
    app()
|
|
File without changes
|
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
import typer
|
|
5
|
+
|
|
6
|
+
import fops
|
|
7
|
+
from fops.cli import app
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
DIRECTORY_ARG = typer.Argument(help="Directory to process.")

ARCHIVE_NAME_OPT = typer.Option(None, help="Archive name.")
PATTERN_OPT = typer.Option(None, help="File pattern to include.")
ARCHIVE_FORMAT_OPT = typer.Option("zip", help="Archive format.")


@app.command()
def create_archive(
    directory_path: Path = DIRECTORY_ARG,
    archive_name: str | None = ARCHIVE_NAME_OPT,
    pattern: list[str] | None = PATTERN_OPT,
    archive_format: str = ARCHIVE_FORMAT_OPT,
) -> None:
    """Archive files.

    Example:
        $ fops create-archive . --pattern '*.txt' --pattern '*.md'
    """
    target = Path(directory_path).resolve()

    # Validate the directory up front; exit code 2 signals bad usage.
    if not target.exists():
        typer.secho(f"Directory not found: {target}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=2)
    if not target.is_dir():
        typer.secho(f"Not a directory: {target}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=2)

    try:
        archive_path = fops.core.create_archive(
            target,
            archive_name,
            pattern,
            archive_format,
        )
        typer.secho(f"Done - {archive_path}", fg=typer.colors.GREEN)
    except Exception as exc:
        failure = "Failed to create archive"
        logger.exception(failure)
        typer.secho(f"{failure} (see log for details).", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=1) from exc
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
|
|
3
|
+
import typer
|
|
4
|
+
|
|
5
|
+
import fops
|
|
6
|
+
from fops.cli import app
|
|
7
|
+
|
|
8
|
+
logger = logging.getLogger(__name__)
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
@app.command()
def delete_branches() -> None:
    """Delete local git branches and remote-tracking refs except protected ones."""
    try:
        fops.core.delete_local_branches()
        fops.core.delete_remote_branch_refs()
        typer.secho("Done.", fg=typer.colors.GREEN)
    except Exception as exc:
        failure = "Failed to delete branches"
        logger.exception(failure)
        typer.secho(f"{failure} (see log for details).", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=1) from exc
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
import typer
|
|
5
|
+
|
|
6
|
+
import fops
|
|
7
|
+
from fops.cli import app
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
DIRECTORY_ARG = typer.Argument(help="Directory to process.")


@app.command()
def delete_cache(directory_path: Path = DIRECTORY_ARG) -> None:
    """Delete cache directories and files.

    Example:
        $ fops delete-cache .
    """
    target = Path(directory_path).resolve()

    # Validate the directory up front; exit code 2 signals bad usage.
    if not target.exists():
        typer.secho(f"Directory not found: {target}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=2)
    if not target.is_dir():
        typer.secho(f"Not a directory: {target}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=2)

    try:
        fops.core.delete_cache(directory_path=target)
        typer.secho("Done.", fg=typer.colors.GREEN)
    except Exception as exc:
        failure = "Failed to delete cache"
        logger.exception(failure)
        typer.secho(f"{failure} (see log for details).", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=1) from exc
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import logging
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
import typer
|
|
5
|
+
|
|
6
|
+
import fops
|
|
7
|
+
from fops.cli import app
|
|
8
|
+
|
|
9
|
+
logger = logging.getLogger(__name__)
|
|
10
|
+
|
|
11
|
+
DIRECTORY_ARG = typer.Argument(help="Directory to process.")
OLD_EXT_ARG = typer.Argument(help="File extension to match (e.g. 'txt' or '.txt').")
NEW_EXT_ARG = typer.Argument(help="New file extension to apply (e.g. 'md' or '.md').")

CREATE_COPY_OPT = typer.Option(False, help="Copy files instead of renaming them.")
RECURSIVE_OPT = typer.Option(False, help="Process files recursively in subdirectories.")
OVERWRITE_OPT = typer.Option(
    False, help="Overwrite existing target files if they already exist."
)
DRY_RUN_OPT = typer.Option(
    False, help="Show what would be changed without modifying any files."
)


@app.command()
def rename_extensions(
    directory_path: Path = DIRECTORY_ARG,
    old_ext: str = OLD_EXT_ARG,
    new_ext: str = NEW_EXT_ARG,
    create_copy: bool = CREATE_COPY_OPT,
    recursive: bool = RECURSIVE_OPT,
    overwrite: bool = OVERWRITE_OPT,
    dry_run: bool = DRY_RUN_OPT,
) -> None:
    """Rename (or copy) files in a directory by changing their extensions.

    Example:
        $ fops rename-extensions --create-copy --recursive . .txt .md --dry-run
    """
    target = Path(directory_path).resolve()

    # Validate the directory up front; exit code 2 signals bad usage.
    if not target.exists():
        typer.secho(f"Directory not found: {target}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=2)
    if not target.is_dir():
        typer.secho(f"Not a directory: {target}", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=2)

    try:
        fops.core.rename_extensions(
            target,
            old_ext,
            new_ext,
            create_copy=create_copy,
            recursive=recursive,
            overwrite=overwrite,
            dry_run=dry_run,
        )
        typer.secho("Done.", fg=typer.colors.GREEN)
    except Exception as exc:
        failure = "Failed to rename extensions"
        logger.exception(failure)
        typer.secho(f"{failure} (see log for details).", fg=typer.colors.RED, err=True)
        raise typer.Exit(code=1) from exc
|
|
@@ -0,0 +1,478 @@
|
|
|
1
|
+
__all__ = (
|
|
2
|
+
"PathLikeStr",
|
|
3
|
+
"PROTECTED_BRANCHES",
|
|
4
|
+
"CACHE_DIRECTORIES",
|
|
5
|
+
"CACHE_FILE_EXTENSIONS",
|
|
6
|
+
"delete_cache",
|
|
7
|
+
"confirm",
|
|
8
|
+
"create_archive",
|
|
9
|
+
"iter_lines",
|
|
10
|
+
"terminal_width",
|
|
11
|
+
"delete_local_branches",
|
|
12
|
+
"delete_remote_branch_refs",
|
|
13
|
+
"get_current_branch_name",
|
|
14
|
+
"get_local_branch_names",
|
|
15
|
+
"get_remote_branch_names",
|
|
16
|
+
"get_last_commit_hash",
|
|
17
|
+
"run_command",
|
|
18
|
+
"get_installed_package_count",
|
|
19
|
+
"rename_extensions",
|
|
20
|
+
"safe_copy",
|
|
21
|
+
)
|
|
22
|
+
|
|
23
|
+
import contextlib
|
|
24
|
+
import logging
|
|
25
|
+
import os
|
|
26
|
+
import shlex
|
|
27
|
+
import shutil
|
|
28
|
+
import subprocess
|
|
29
|
+
import sys
|
|
30
|
+
import tempfile
|
|
31
|
+
from collections.abc import Iterator, Sequence
|
|
32
|
+
from importlib import metadata
|
|
33
|
+
from pathlib import Path
|
|
34
|
+
from shutil import copy2, get_archive_formats, get_terminal_size, make_archive
|
|
35
|
+
from typing import Final, TypeAlias
|
|
36
|
+
|
|
37
|
+
from fops import utils
|
|
38
|
+
|
|
39
|
+
PathLikeStr: TypeAlias = str | Path | os.PathLike[str]
|
|
40
|
+
|
|
41
|
+
logger = logging.getLogger(__name__)
|
|
42
|
+
|
|
43
|
+
PROTECTED_BRANCHES: Final[frozenset[str]] = frozenset({"main", "master", "develop"})
|
|
44
|
+
|
|
45
|
+
CACHE_DIRECTORIES: Final[tuple[str, ...]] = (
|
|
46
|
+
"__pycache__",
|
|
47
|
+
".pytest_cache",
|
|
48
|
+
".ipynb_checkpoints",
|
|
49
|
+
".ruff_cache",
|
|
50
|
+
"spark-warehouse",
|
|
51
|
+
)
|
|
52
|
+
|
|
53
|
+
CACHE_FILE_EXTENSIONS: Final[tuple[str, ...]] = (
|
|
54
|
+
"*.py[co]",
|
|
55
|
+
".coverage",
|
|
56
|
+
".coverage.*",
|
|
57
|
+
)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def delete_cache(
|
|
61
|
+
directory_path: PathLikeStr,
|
|
62
|
+
cache_directories: Sequence[str] | None = None,
|
|
63
|
+
cache_file_extensions: Sequence[str] | None = None,
|
|
64
|
+
) -> None:
|
|
65
|
+
"""Delete cache directories and files in the specified directory."""
|
|
66
|
+
root = Path(directory_path).resolve()
|
|
67
|
+
|
|
68
|
+
if cache_directories is None:
|
|
69
|
+
cache_directories = CACHE_DIRECTORIES
|
|
70
|
+
|
|
71
|
+
if cache_file_extensions is None:
|
|
72
|
+
cache_file_extensions = CACHE_FILE_EXTENSIONS
|
|
73
|
+
|
|
74
|
+
for directory in cache_directories:
|
|
75
|
+
for path in root.rglob(directory):
|
|
76
|
+
if "venv" in str(path):
|
|
77
|
+
continue
|
|
78
|
+
shutil.rmtree(path.absolute(), ignore_errors=False)
|
|
79
|
+
logger.info("deleted - %s", path)
|
|
80
|
+
logger.info("done with deleting cache directories")
|
|
81
|
+
|
|
82
|
+
for file_extension in cache_file_extensions:
|
|
83
|
+
for path in root.rglob(file_extension):
|
|
84
|
+
if "venv" in str(path):
|
|
85
|
+
continue
|
|
86
|
+
path.unlink()
|
|
87
|
+
logger.info("deleted - %s", path)
|
|
88
|
+
logger.info("done with deleting cache files")
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def confirm(prompt: str, default: str | None = None) -> bool:
|
|
92
|
+
"""Return True if the user confirms ('yes'); repeats until valid input."""
|
|
93
|
+
if default not in (None, "yes", "no"):
|
|
94
|
+
raise ValueError(f"invalid {default=!r}; expected None, 'yes', or 'no'")
|
|
95
|
+
|
|
96
|
+
true_tokens = frozenset(("y", "yes", "t", "true", "on", "1"))
|
|
97
|
+
false_tokens = frozenset(("n", "no", "f", "false", "off", "0"))
|
|
98
|
+
prompt_map = {None: "[y/n]", "yes": "[Y/n]", "no": "[y/N]"}
|
|
99
|
+
suffix = prompt_map[default]
|
|
100
|
+
|
|
101
|
+
while True:
|
|
102
|
+
reply = input(f"{prompt} {suffix} ").strip().lower()
|
|
103
|
+
|
|
104
|
+
if not reply:
|
|
105
|
+
if default is not None:
|
|
106
|
+
return default == "yes"
|
|
107
|
+
print("Please respond with 'yes' or 'no'.")
|
|
108
|
+
continue
|
|
109
|
+
|
|
110
|
+
if reply in true_tokens:
|
|
111
|
+
return True
|
|
112
|
+
if reply in false_tokens:
|
|
113
|
+
return False
|
|
114
|
+
|
|
115
|
+
print("Please respond with 'yes' or 'no'.")
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def create_archive(
    directory_path: PathLikeStr,
    archive_name: str | None = None,
    patterns: Sequence[str] | None = None,
    archive_format: str = "zip",
) -> Path:
    """Return the path of the created archive file.

    Matched entries are first staged into a temporary directory
    (preserving relative layout; symlinks are recreated, not followed)
    and the archive is then built from that staging copy. The archive
    file itself is written relative to the current working directory.

    Args:
        directory_path: Directory whose contents are archived.
        archive_name: Base name without extension; must not contain
            directory components. Defaults to ``<utc-timestamp>_<dirname>``.
        patterns: ``rglob`` patterns selecting entries; defaults to everything.
        archive_format: One of ``shutil.get_archive_formats()`` (case-insensitive).

    Raises:
        ValueError: If the directory is invalid, the format is unsupported,
            or ``archive_name`` contains directory components.
    """
    dir_path = Path(directory_path).resolve()
    if not dir_path.exists() or not dir_path.is_dir():
        raise ValueError(f"{directory_path!r} does not exist or is not a directory")

    patterns = list(patterns) if patterns else ["**/*"]
    archive_format = archive_format.lower()
    supported = {fmt for fmt, _ in get_archive_formats()}
    if archive_format not in supported:
        raise ValueError(
            f"invalid choice {archive_format!r}; expected a value from {supported!r}"
        )

    if archive_name is None:
        base_name = f"{utils.utctimestamp()}_{dir_path.stem}"
    else:
        if Path(archive_name).name != archive_name:
            raise ValueError("archive_name must not contain directory components")
        base_name = archive_name

    # collect matches deterministically and deduplicate
    matched: set[Path] = set()
    for pattern in patterns:
        matched.update(dir_path.rglob(pattern))

    # sort by relative path for deterministic archive contents/order
    paths = sorted((p for p in matched), key=lambda p: str(p.relative_to(dir_path)))

    with tempfile.TemporaryDirectory() as tmpdir:
        tmpdir_path = Path(tmpdir)
        for src_path in paths:
            logger.debug("processing - %s", src_path)
            try:
                rel = src_path.relative_to(dir_path)
            except Exception:
                # skip anything not under target (shouldn't happen with rglob)
                continue

            dst_path = tmpdir_path / rel
            if src_path.is_dir():
                # keep empty directories in the archive
                dst_path.mkdir(parents=True, exist_ok=True)
                continue

            dst_path.parent.mkdir(parents=True, exist_ok=True)

            if src_path.is_symlink():
                # recreate the link itself rather than copying its target
                target_link = os.readlink(src_path)
                if dst_path.exists() or dst_path.is_symlink():
                    dst_path.unlink()
                os.symlink(target_link, dst_path)

            elif src_path.is_file():
                copy2(src_path, dst_path)

            else:
                # neither dir, symlink, nor regular file (e.g. FIFO) — skip
                continue

        # make_archive resolves base_name against the current working directory
        archive_path = make_archive(
            str(Path(base_name)),
            archive_format,
            root_dir=str(tmpdir_path),
        )

    return Path(archive_path)
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def iter_lines(
|
|
191
|
+
filepath: PathLikeStr,
|
|
192
|
+
encoding: str | None = None,
|
|
193
|
+
errors: str | None = None,
|
|
194
|
+
newline: str | None = None,
|
|
195
|
+
) -> Iterator[str]:
|
|
196
|
+
"""Return an iterator over text lines from filepath."""
|
|
197
|
+
path = os.fspath(filepath)
|
|
198
|
+
with open(path, encoding=encoding, errors=errors, newline=newline) as fh:
|
|
199
|
+
yield from fh
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def terminal_width(default: int = 79) -> int:
    """Return the current terminal width, or *default* when it cannot be read."""
    try:
        columns = get_terminal_size().columns
    except OSError:
        return default
    return columns
|
|
208
|
+
|
|
209
|
+
|
|
210
|
+
def delete_local_branches() -> None:
    """Delete local git branches except protected ones.

    The currently checked-out branch is always kept; failures on
    individual branches are logged and do not abort the loop.
    """
    logger.debug("running '%s'", utils.get_caller_name())
    keep = PROTECTED_BRANCHES | {get_current_branch_name()}

    doomed = [name for name in get_local_branch_names() if name not in keep]
    if not doomed:
        logger.info("no local branches to delete")
        return

    logger.debug("deleting %d local branch(es): %s", len(doomed), doomed)
    for name in doomed:
        try:
            run_command(f"git branch -D {name}", label=utils.get_caller_name())
            logger.info("deleted local branch '%s'", name)
        except subprocess.CalledProcessError as exc:
            logger.exception(
                "failed deleting local branch %s; exit=%s; stderr=%s",
                name,
                getattr(exc, "returncode", None),
                getattr(exc, "stderr", None),
            )
|
|
235
|
+
|
|
236
|
+
|
|
237
|
+
def delete_remote_branch_refs() -> None:
    """Delete remote-tracking git branch refs except protected ones.

    Protection is decided on the branch part after the remote prefix
    (e.g. 'origin/main' -> 'main'); failures are logged, not raised.
    """
    logger.debug("running '%s'", utils.get_caller_name())
    keep = PROTECTED_BRANCHES | {get_current_branch_name()}

    doomed = [
        ref for ref in get_remote_branch_names() if ref.split("/", 1)[-1] not in keep
    ]
    if not doomed:
        logger.info("no remote-tracking refs to delete")
        return

    logger.debug("deleting %d remote ref(s): %s", len(doomed), doomed)
    for ref in doomed:
        try:
            run_command(f"git branch -r -d {ref}", label=utils.get_caller_name())
            logger.info("deleted remote ref '%s'", ref)
        except subprocess.CalledProcessError as exc:
            logger.exception(
                "failed deleting remote ref %s; exit=%s; stderr=%s",
                ref,
                getattr(exc, "returncode", None),
                getattr(exc, "stderr", None),
            )
|
|
262
|
+
|
|
263
|
+
|
|
264
|
+
def get_current_branch_name() -> str:
    """Return the name of the branch HEAD currently points at."""
    cmd = "git rev-parse --abbrev-ref HEAD"
    return run_command(cmd, label=utils.get_caller_name())
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def get_local_branch_names() -> list[str]:
    """Return list of local branch names.

    Strips the '*' marker that `git branch` puts on the current branch.
    """
    output = run_command("git branch", label=utils.get_caller_name())
    return [line.lstrip("*").strip() for line in output.splitlines()]
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def get_remote_branch_names() -> list[str]:
    """Return list of remote-tracking branch refs.

    Symbolic refs like 'origin/HEAD -> origin/main' are excluded.
    """
    output = run_command("git branch --remotes", label=utils.get_caller_name())
    return [
        stripped
        for stripped in (line.strip() for line in output.splitlines())
        if "->" not in stripped
    ]
|
|
288
|
+
|
|
289
|
+
|
|
290
|
+
def get_last_commit_hash(max_length: int | None = None) -> str:
|
|
291
|
+
"""Return the full or truncated commit hash of the current branch."""
|
|
292
|
+
if max_length is not None:
|
|
293
|
+
if not isinstance(max_length, int):
|
|
294
|
+
raise TypeError(
|
|
295
|
+
f"unsupported type {type(max_length).__name__!r}; expected int or None"
|
|
296
|
+
)
|
|
297
|
+
if max_length < 1:
|
|
298
|
+
raise ValueError(f"invalid value {max_length!r}; expected >= 1")
|
|
299
|
+
|
|
300
|
+
commit = run_command("git rev-parse HEAD", label=utils.get_caller_name())
|
|
301
|
+
if not commit:
|
|
302
|
+
raise RuntimeError("git returned an empty commit hash")
|
|
303
|
+
|
|
304
|
+
return commit if max_length is None else commit[:max_length]
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def run_command(command: str | Sequence[str], label: str) -> str:
    """Return stdout as string of the executed command.

    String commands are tokenized with shlex; a non-zero exit raises
    subprocess.CalledProcessError (check=True).
    """
    if isinstance(command, str):
        argv = shlex.split(command)
    else:
        argv = list(command)
    completed = subprocess.run(argv, capture_output=True, text=True, check=True)
    logger.debug("'%s' ran '%s'", label, " ".join(argv))
    return completed.stdout.strip()
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
def get_installed_package_count() -> int:
    """Return the number of installed packages for the current Python environment."""
    try:
        found = sum(1 for _ in metadata.distributions())
    except Exception:
        found = 0

    if found >= 10:
        return int(found)

    # Suspiciously small result — double-check via this interpreter's pip,
    # which yields a reliable package list even when metadata fails.
    try:
        proc = subprocess.run(
            [sys.executable, "-m", "pip", "list", "--format=freeze"],
            check=True,
            capture_output=True,
            text=True,
        )
    except (subprocess.SubprocessError, OSError):
        # if pip fails, return what metadata provided (possibly 0)
        return int(found)

    # ignore blank lines and count non-empty entries
    return sum(1 for ln in proc.stdout.splitlines() if ln.strip())
|
|
339
|
+
|
|
340
|
+
|
|
341
|
+
def rename_extensions(
    directory_path: PathLikeStr,
    old_ext: str | None,
    new_ext: str,
    *,
    create_copy: bool = False,
    recursive: bool = False,
    overwrite: bool = False,
    dry_run: bool = False,
) -> None:
    """Rename (or copy) files in a directory by changing their extensions.

    Matching is case-insensitive. Extensions may be given with or without
    the leading dot; multi-dot extensions (e.g. '.tar.gz') are matched by
    name suffix instead of ``Path.suffix``.

    Args:
        directory_path: Directory to process.
        old_ext: Extension to match; ``None`` matches every file, ``''``
            matches files without an extension.
        new_ext: Replacement extension; ``''`` removes the suffix.
        create_copy: Copy instead of renaming (keyword-only).
        recursive: Also process subdirectories (keyword-only).
        overwrite: Allow replacing existing targets (keyword-only).
        dry_run: Only log intended operations (keyword-only).

    Raises:
        ValueError: If the directory is invalid or ``new_ext`` is None.
        FileExistsError: If a target exists and ``overwrite`` is False.
    """
    logger.debug(
        "running '%s' with %s",
        utils.get_caller_name(),
        {
            "directory_path": directory_path,
            "old_ext": old_ext,
            "new_ext": new_ext,
            "create_copy": create_copy,
            "recursive": recursive,
            "overwrite": overwrite,
            "dry_run": dry_run,
        },
    )

    dir_path = Path(directory_path).resolve()
    if not dir_path.exists() or not dir_path.is_dir():
        raise ValueError(f"{directory_path!r} does not exist or is not a directory")

    def _normalize(ext: str | None) -> str | None:
        # Accept both 'txt' and '.txt'; preserve None ("match all") and '' ("strip").
        if ext is None:
            return None
        if ext == "":
            return ""
        return ext if ext.startswith(".") else f".{ext}"

    src_ext = _normalize(old_ext)
    dst_ext = _normalize(new_ext)
    if dst_ext is None:
        raise ValueError("new_ext must be provided")

    # iterable of Path objects
    file_paths = dir_path.rglob("*") if recursive else dir_path.iterdir()

    for file_path in file_paths:
        logger.debug("processing: %s", file_path)

        if not file_path.is_file():
            logger.debug("skipping - not a file: %s", file_path)
            continue

        name = file_path.name
        lower_name = name.lower()

        # decide if file matches src_ext
        if src_ext is None:
            matches = True
        else:
            src_lower = src_ext.lower()
            # treat multi-dot extensions (e.g. '.tar.gz') via endswith
            if src_lower.count(".") > 1:
                matches = lower_name.endswith(src_lower)
            else:
                matches = file_path.suffix.lower() == src_lower

        if not matches:
            logger.debug("skipping - not a match: %s", file_path)
            continue

        # compute new path
        if (
            src_ext
            and src_ext.lower().count(".") > 1
            and lower_name.endswith(src_ext.lower())
        ):
            # replace trailing multi-dot ext
            new_name = name[: -len(src_ext)] + dst_ext
            new_path = file_path.with_name(new_name)
        else:
            # pathlib.with_suffix accepts '' to remove suffix
            new_path = file_path.with_suffix(dst_ext)

        # no-op
        if new_path == file_path:
            logger.debug("skipping - new_path is current file_path")
            continue

        if new_path.exists() and not overwrite:
            raise FileExistsError(f"file already exists: {new_path}")

        if dry_run:
            op = "copy" if create_copy else "rename"
            logger.info("[dry-run] %s %s -> %s", op, file_path, new_path)
            continue

        if create_copy:
            safe_copy(file_path, new_path, overwrite=overwrite)
            logger.info("copied %s -> %s", file_path, new_path)
        else:
            # use replace when allowing overwrite (atomic where supported)
            if overwrite and new_path.exists():
                file_path.replace(new_path)
            else:
                file_path.rename(new_path)
            logger.info("renamed %s -> %s", file_path, new_path)
|
|
446
|
+
|
|
447
|
+
|
|
448
|
+
def safe_copy(
|
|
449
|
+
old_file: PathLikeStr,
|
|
450
|
+
new_file: PathLikeStr,
|
|
451
|
+
*,
|
|
452
|
+
overwrite: bool = False,
|
|
453
|
+
) -> None:
|
|
454
|
+
"""Safely copy a file with metadata and atomically replace the target if desired."""
|
|
455
|
+
src = Path(old_file)
|
|
456
|
+
dst = Path(new_file)
|
|
457
|
+
|
|
458
|
+
if not src.exists() or not src.is_file():
|
|
459
|
+
raise FileNotFoundError(f"source does not exist or is not a file: {src}")
|
|
460
|
+
|
|
461
|
+
dst.parent.mkdir(parents=True, exist_ok=True)
|
|
462
|
+
|
|
463
|
+
if dst.exists() and not overwrite:
|
|
464
|
+
raise FileExistsError(f"target already exists: {dst}")
|
|
465
|
+
|
|
466
|
+
tmp_path: Path | None = None
|
|
467
|
+
try:
|
|
468
|
+
# create a named temporary file in the destination directory for atomic replace
|
|
469
|
+
with tempfile.NamedTemporaryFile(delete=False, dir=dst.parent) as tmp:
|
|
470
|
+
tmp_path = Path(tmp.name)
|
|
471
|
+
copy2(src, tmp_path) # copy2 preserves metadata (mtime, permissions, flags)
|
|
472
|
+
os.replace(str(tmp_path), str(dst)) # atomic rename (replace) to final dst
|
|
473
|
+
except Exception:
|
|
474
|
+
# best-effort cleanup of temp file
|
|
475
|
+
with contextlib.suppress(Exception):
|
|
476
|
+
if tmp_path is not None and tmp_path.exists():
|
|
477
|
+
tmp_path.unlink()
|
|
478
|
+
raise
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
import datetime as dt
|
|
2
|
+
import inspect
|
|
3
|
+
from collections.abc import Callable, Iterable
|
|
4
|
+
from typing import Any, TypeVar
|
|
5
|
+
|
|
6
|
+
T = TypeVar("T")
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
def get_caller_name(depth: int = 1) -> str:
    """Return the name of the calling function; depth=1 is the immediate caller."""
    if depth < 1:
        raise ValueError(f"invalid {depth=!r}; expected >= 1")

    frame = inspect.currentframe()
    try:
        cursor = frame
        for _ in range(depth):
            if cursor is None:
                break
            cursor = cursor.f_back
        if cursor is None:
            raise RuntimeError("expected to be executed within a function")
        return cursor.f_code.co_name
    finally:
        # break the frame reference cycle promptly
        del frame
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
def pipe(value: T, functions: Iterable[Callable[[Any], Any]]) -> Any:
    """Thread *value* through *functions* left to right and return the result."""
    accumulator: Any = value
    for step in functions:
        accumulator = step(accumulator)
    return accumulator
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
def utctimestamp() -> str:
|
|
35
|
+
"""Return UTC timestamp string."""
|
|
36
|
+
return dt.datetime.now(dt.UTC).strftime("%Y%m%d%H%M%S")
|