path-sync 0.3.4__tar.gz → 0.4.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {path_sync-0.3.4 → path_sync-0.4.0}/PKG-INFO +7 -2
- {path_sync-0.3.4 → path_sync-0.4.0}/README.md +5 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/__init__.py +5 -3
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/__main__.py +1 -1
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/cmd_copy.py +3 -3
- path_sync-0.4.0/path_sync/_internal/cmd_dep_update.py +250 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/git_ops.py +33 -2
- path_sync-0.4.0/path_sync/_internal/models_dep.py +70 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/validation.py +5 -2
- path_sync-0.4.0/path_sync/dep_update.py +18 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/sections.py +17 -6
- {path_sync-0.3.4 → path_sync-0.4.0}/pyproject.toml +10 -3
- {path_sync-0.3.4 → path_sync-0.4.0}/.gitignore +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/LICENSE +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/__init__.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/cmd_boot.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/cmd_validate.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/file_utils.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/header.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/models.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/typer_app.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/yaml_utils.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/config.py +0 -0
- {path_sync-0.3.4 → path_sync-0.4.0}/path_sync/copy.py +0 -0
{path_sync-0.3.4 → path_sync-0.4.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: path-sync
-Version: 0.3.4
+Version: 0.4.0
 Summary: Sync files from a source repo to multiple destination repos
 Author-email: EspenAlbert <espen.albert1@gmail.com>
 License-Expression: MIT
@@ -13,11 +13,16 @@ Requires-Dist: gitpython>=3.1.0
 Requires-Dist: pydantic>=2.0
 Requires-Dist: pyyaml>=6.0
 Requires-Dist: typer>=0.16.0
-Requires-Dist: zero-3rdparty>=0.
+Requires-Dist: zero-3rdparty>=0.102.0
 Description-Content-Type: text/markdown

 # path-sync

+[](https://pypi.org/project/path-sync/)
+[](https://github.com/EspenAlbert/path-sync)
+[](https://codecov.io/gh/EspenAlbert/path-sync)
+[](https://espenalbert.github.io/path-sync/)
+
 Sync files from a source repo to multiple destination repos.

 ## Overview
{path_sync-0.3.4 → path_sync-0.4.0}/README.md

@@ -1,5 +1,10 @@
 # path-sync

+[](https://pypi.org/project/path-sync/)
+[](https://github.com/EspenAlbert/path-sync)
+[](https://codecov.io/gh/EspenAlbert/path-sync)
+[](https://espenalbert.github.io/path-sync/)
+
 Sync files from a source repo to multiple destination repos.

 ## Overview
{path_sync-0.3.4 → path_sync-0.4.0}/path_sync/__init__.py

@@ -1,10 +1,12 @@
 # Generated by pkg-ext
 # flake8: noqa
-from path_sync import copy
 from path_sync import config
+from path_sync import copy
+from path_sync import dep_update

-VERSION = "0.3.4"
+VERSION = "0.4.0"
 __all__ = [
-    "copy",
     "config",
+    "copy",
+    "dep_update",
 ]
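The top-level package now re-exports the new `dep_update` module alongside `config` and `copy`. A minimal sketch, assuming the 0.4.0 wheel is installed, that inspects the surface shown in the `__init__.py` hunk above:

```python
# Minimal sketch: inspect the public surface added in 0.4.0 (per the __init__.py hunk above).
import path_sync

print(path_sync.VERSION)   # 0.4.0
print(path_sync.__all__)   # ['config', 'copy', 'dep_update']
print(path_sync.dep_update.DepConfig)  # pydantic model for dependency-update configs
```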
{path_sync-0.3.4 → path_sync-0.4.0}/path_sync/__main__.py

@@ -1,6 +1,6 @@
 import logging

-from path_sync._internal import cmd_boot, cmd_copy, cmd_validate  # noqa: F401
+from path_sync._internal import cmd_boot, cmd_copy, cmd_dep_update, cmd_validate  # noqa: F401
 from path_sync._internal.models import LOG_FORMAT
 from path_sync._internal.typer_app import app

{path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/cmd_copy.py

@@ -237,10 +237,10 @@ def _sync_destination(

     # --no-checkout means "I'm already on the right branch"
     # Prompt decline means "skip git operations for this run"
-    if opts.no_checkout:
-        skip_git_ops = False
-    elif opts.dry_run:
+    if opts.dry_run:
         skip_git_ops = True
+    elif opts.no_checkout:
+        skip_git_ops = False
     elif _prompt(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
         git_ops.prepare_copy_branch(
             repo=dest_repo,
path_sync-0.4.0/path_sync/_internal/cmd_dep_update.py (new file)

@@ -0,0 +1,250 @@
+from __future__ import annotations
+
+import logging
+import subprocess
+from dataclasses import dataclass, field
+from enum import StrEnum
+from pathlib import Path
+
+import typer
+from git import Repo
+
+from path_sync._internal import git_ops
+from path_sync._internal.models import Destination, find_repo_root
+from path_sync._internal.models_dep import (
+    DepConfig,
+    OnFailStrategy,
+    UpdateEntry,
+    VerifyConfig,
+    resolve_dep_config_path,
+)
+from path_sync._internal.typer_app import app
+from path_sync._internal.yaml_utils import load_yaml_model
+
+logger = logging.getLogger(__name__)
+
+MAX_STDERR_LINES = 20
+
+
+class Status(StrEnum):
+    PASSED = "passed"
+    SKIPPED = "skipped"
+    WARN = "warn"
+    NO_CHANGES = "no_changes"
+    FAILED = "failed"
+
+
+@dataclass
+class StepFailure:
+    step: str
+    returncode: int
+    stderr: str
+    on_fail: OnFailStrategy
+
+    @classmethod
+    def from_error(cls, step: str, e: subprocess.CalledProcessError, on_fail: OnFailStrategy) -> StepFailure:
+        stderr = _truncate_stderr(e.stderr or "", MAX_STDERR_LINES)
+        return cls(step=step, returncode=e.returncode, stderr=stderr, on_fail=on_fail)
+
+
+@dataclass
+class RepoResult:
+    dest: Destination
+    repo_path: Path
+    status: Status
+    failures: list[StepFailure] = field(default_factory=list)
+
+
+def _truncate_stderr(text: str, max_lines: int) -> str:
+    lines = text.strip().splitlines()
+    if len(lines) <= max_lines:
+        return text.strip()
+    skipped = len(lines) - max_lines
+    return f"... ({skipped} lines skipped)\n" + "\n".join(lines[-max_lines:])
+
+
+@app.command()
+def dep_update(
+    name: str = typer.Option(..., "-n", "--name", help="Config name"),
+    dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
+    work_dir: str = typer.Option("", "--work-dir", help="Directory for cloning repos"),
+    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without creating PRs"),
+    skip_verify: bool = typer.Option(False, "--skip-verify", help="Skip verification steps"),
+    src_root_opt: str = typer.Option("", "--src-root", help="Source repo root"),
+) -> None:
+    """Run dependency updates across repositories."""
+    src_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
+    config_path = resolve_dep_config_path(src_root, name)
+    if not config_path.exists():
+        logger.error(f"Config not found: {config_path}")
+        raise typer.Exit(1)
+
+    config = load_yaml_model(config_path, DepConfig)
+    destinations = config.load_destinations(src_root)
+
+    if dest_filter:
+        filter_names = [n.strip() for n in dest_filter.split(",")]
+        destinations = [d for d in destinations if d.name in filter_names]
+
+    results = _update_and_validate(config, destinations, src_root, work_dir, skip_verify)
+    _create_prs(config, results, dry_run)
+
+    if any(r.status == Status.SKIPPED for r in results):
+        raise typer.Exit(1)
+
+
+def _update_and_validate(
+    config: DepConfig,
+    destinations: list[Destination],
+    src_root: Path,
+    work_dir: str,
+    skip_verify: bool,
+) -> list[RepoResult]:
+    results: list[RepoResult] = []
+
+    for dest in destinations:
+        result = _process_single_repo(config, dest, src_root, work_dir, skip_verify)
+
+        if result.status == Status.FAILED:
+            logger.error(f"{dest.name}: Verification failed, stopping")
+            raise typer.Exit(1)
+
+        if result.status != Status.NO_CHANGES:
+            results.append(result)
+
+    return results
+
+
+def _process_single_repo(
+    config: DepConfig,
+    dest: Destination,
+    src_root: Path,
+    work_dir: str,
+    skip_verify: bool,
+) -> RepoResult:
+    logger.info(f"Processing {dest.name}...")
+    repo_path = _resolve_repo_path(dest, src_root, work_dir)
+    repo = _ensure_repo(dest, repo_path)
+    git_ops.prepare_copy_branch(repo, dest.default_branch, config.pr.branch, from_default=True)
+
+    if failure := _run_updates(config.updates, repo_path):
+        logger.warning(f"{dest.name}: Update failed with exit code {failure.returncode}")
+        return RepoResult(dest, repo_path, Status.SKIPPED, failures=[failure])
+
+    if not git_ops.has_changes(repo):
+        logger.info(f"{dest.name}: No changes, skipping")
+        return RepoResult(dest, repo_path, Status.NO_CHANGES)
+
+    git_ops.commit_changes(repo, config.pr.title)
+
+    if skip_verify:
+        return RepoResult(dest, repo_path, Status.PASSED)
+
+    return _verify_repo(repo, repo_path, config.verify, dest)
+
+
+def _run_updates(updates: list[UpdateEntry], repo_path: Path) -> StepFailure | None:
+    """Returns StepFailure on failure, None on success."""
+    try:
+        for update in updates:
+            _run_command(update.command, repo_path / update.workdir)
+        return None
+    except subprocess.CalledProcessError as e:
+        return StepFailure.from_error(e.cmd, e, OnFailStrategy.SKIP)
+
+
+def _verify_repo(repo: Repo, repo_path: Path, verify: VerifyConfig, dest: Destination) -> RepoResult:
+    status, failures = _run_verify_steps(repo, repo_path, verify)
+    return RepoResult(dest, repo_path, status, failures)
+
+
+def _create_prs(config: DepConfig, results: list[RepoResult], dry_run: bool) -> None:
+    for result in results:
+        if result.status == Status.SKIPPED:
+            continue
+
+        if dry_run:
+            logger.info(f"[DRY RUN] Would create PR for {result.dest.name}")
+            continue
+
+        repo = git_ops.get_repo(result.repo_path)
+        git_ops.push_branch(repo, config.pr.branch, force=True)
+
+        body = _build_pr_body(result.failures)
+        git_ops.create_or_update_pr(
+            result.repo_path,
+            config.pr.branch,
+            config.pr.title,
+            body,
+            config.pr.labels or None,
+            auto_merge=config.pr.auto_merge,
+        )
+        logger.info(f"{result.dest.name}: PR created/updated")
+
+
+def _resolve_repo_path(dest: Destination, src_root: Path, work_dir: str) -> Path:
+    if dest.dest_path_relative:
+        return (src_root / dest.dest_path_relative).resolve()
+    if not work_dir:
+        raise typer.BadParameter(f"No dest_path_relative for {dest.name}, --work-dir required")
+    return Path(work_dir) / dest.name
+
+
+def _ensure_repo(dest: Destination, repo_path: Path):
+    if repo_path.exists() and git_ops.is_git_repo(repo_path):
+        return git_ops.get_repo(repo_path)
+    if not dest.repo_url:
+        raise ValueError(f"Dest {dest.name} not found at {repo_path} and no repo_url configured")
+    return git_ops.clone_repo(dest.repo_url, repo_path)
+
+
+def _run_command(cmd: str, cwd: Path) -> None:
+    logger.info(f"Running: {cmd}")
+    result = subprocess.run(cmd, shell=True, cwd=cwd, capture_output=True, text=True)
+    if result.returncode != 0:
+        stderr = result.stderr.strip()
+        logger.error(f"Command failed '{cmd}' in {cwd}: {stderr}")
+        raise subprocess.CalledProcessError(result.returncode, cmd, output=result.stdout, stderr=stderr)
+
+
+def _run_verify_steps(
+    repo: Repo,
+    repo_path: Path,
+    verify: VerifyConfig,
+) -> tuple[Status, list[StepFailure]]:
+    failures: list[StepFailure] = []
+
+    for step in verify.steps:
+        on_fail = step.on_fail or verify.on_fail
+
+        try:
+            _run_command(step.run, repo_path)
+        except subprocess.CalledProcessError as e:
+            failure = StepFailure.from_error(step.run, e, on_fail)
+            match on_fail:
+                case OnFailStrategy.FAIL:
+                    return (Status.FAILED, [failure])
+                case OnFailStrategy.SKIP:
+                    return (Status.SKIPPED, [failure])
+                case OnFailStrategy.WARN:
+                    failures.append(failure)
+                    continue
+
+        if step.commit:
+            git_ops.stage_and_commit(repo, step.commit.add_paths, step.commit.message)
+
+    return (Status.WARN if failures else Status.PASSED, failures)
+
+
+def _build_pr_body(failures: list[StepFailure]) -> str:
+    body = "Automated dependency update."
+    if not failures:
+        return body
+
+    body += "\n\n---\n## Verification Issues\n"
+    for f in failures:
+        body += f"\n### `{f.step}` (strategy: {f.on_fail})\n"
+        body += f"**Exit code:** {f.returncode}\n"
+        if f.stderr:
+            body += f"\n```\n{f.stderr}\n```\n"
+    return body
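To make the reporting format concrete, the sketch below shows the PR body that `_build_pr_body` produces when a verification step failed under the `warn` strategy. It imports the private `path_sync._internal` modules introduced above, so treat it as illustrative rather than a supported API; the step command and stderr text are made up.

```python
# Illustrative sketch: PR body for a verify step that failed with on_fail=warn.
# Imports target private modules from this release; not a stable API. Values are hypothetical.
from path_sync._internal.cmd_dep_update import StepFailure, _build_pr_body
from path_sync._internal.models_dep import OnFailStrategy

failure = StepFailure(
    step="uv run pytest",          # hypothetical verify command
    returncode=1,
    stderr="E   assert 1 == 2",
    on_fail=OnFailStrategy.WARN,
)
print(_build_pr_body([failure]))
# Automated dependency update.
#
# ---
# ## Verification Issues
#
# ### `uv run pytest` (strategy: warn)
# **Exit code:** 1
#
# ```
# E   assert 1 == 2
# ```
```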
{path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/git_ops.py

@@ -107,6 +107,22 @@ def commit_changes(repo: Repo, message: str) -> None:
     logger.info(f"Committed: {message}")


+def stage_and_commit(repo: Repo, add_paths: list[str], message: str) -> bool:
+    """Stage specified paths and commit if there are changes. Returns True if a commit was made."""
+    include = [p for p in add_paths if not p.startswith("!")]
+    exclude = [p[1:] for p in add_paths if p.startswith("!")]
+    for path in include:
+        repo.git.add(path)
+    for path in exclude:
+        repo.git.reset("HEAD", "--", path)
+    if not repo.is_dirty(index=True):
+        return False
+    _ensure_git_user(repo)
+    repo.git.commit("-m", message)
+    logger.info(f"Committed: {message}")
+    return True
+
+
 def _ensure_git_user(repo: Repo) -> None:
     """Configure git user if not already set."""
     try:
@@ -140,6 +156,7 @@ def create_or_update_pr(
     labels: list[str] | None = None,
     reviewers: list[str] | None = None,
     assignees: list[str] | None = None,
+    auto_merge: bool = False,
 ) -> str:
     cmd = ["gh", "pr", "create", "--head", branch, "--title", title]
     cmd.extend(["--body", body or ""])
@@ -155,10 +172,24 @@ def create_or_update_pr(
         if "already exists" in result.stderr:
             logger.info("PR already exists, updating body")
             update_pr_body(repo_path, branch, body)
+            if auto_merge:
+                _enable_auto_merge(repo_path, branch)
             return ""
         raise RuntimeError(f"Failed to create PR: {result.stderr}")
-
-
+    pr_url = result.stdout.strip()
+    logger.info(f"Created PR: {pr_url}")
+    if auto_merge:
+        _enable_auto_merge(repo_path, pr_url)
+    return pr_url
+
+
+def _enable_auto_merge(repo_path: Path, pr_ref: str) -> None:
+    cmd = ["gh", "pr", "merge", "--auto", "--squash", pr_ref]
+    result = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
+    if result.returncode != 0:
+        logger.warning(f"Auto-merge failed (branch protection may not be configured): {result.stderr}")
+    else:
+        logger.info(f"Enabled auto-merge for {pr_ref}")


 def file_has_git_changes(repo: Repo, file_path: Path, base_ref: str = "HEAD") -> bool:
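In `stage_and_commit`, a `!` prefix in `add_paths` acts as an exclude: plain entries are staged, `!`-prefixed entries are unstaged again before the commit. A minimal sketch against a throwaway repo, assuming path-sync 0.4.0 and git are installed; it calls the private `git_ops` module and the file names are illustrative.

```python
# Sketch: stage everything except one path, using the "!" exclude syntax from stage_and_commit.
# Uses the private git_ops module shown above; file names are illustrative.
from pathlib import Path
from tempfile import TemporaryDirectory

from git import Repo

from path_sync._internal import git_ops

with TemporaryDirectory() as tmp:
    root = Path(tmp)
    repo = Repo.init(root)
    with repo.config_writer() as cw:
        cw.set_value("user", "name", "Example")
        cw.set_value("user", "email", "example@example.com")
    repo.git.commit("--allow-empty", "-m", "initial")  # so "git reset HEAD -- <path>" has a HEAD

    (root / "uv.lock").write_text("locked\n")
    (root / "scratch.txt").write_text("ignore me\n")

    committed = git_ops.stage_and_commit(repo, add_paths=[".", "!scratch.txt"], message="chore: bump deps")
    print(committed)              # True -- uv.lock was staged and committed
    print(repo.untracked_files)   # ['scratch.txt'] -- excluded path left uncommitted
```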
path_sync-0.4.0/path_sync/_internal/models_dep.py (new file)

@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+from enum import StrEnum
+from pathlib import Path
+from typing import ClassVar
+
+from pydantic import BaseModel, Field
+
+from path_sync._internal.models import Destination, SrcConfig, resolve_config_path
+from path_sync._internal.yaml_utils import load_yaml_model
+
+
+class OnFailStrategy(StrEnum):
+    SKIP = "skip"
+    FAIL = "fail"
+    WARN = "warn"
+
+
+class CommitConfig(BaseModel):
+    message: str
+    add_paths: list[str] = Field(default_factory=lambda: ["."])
+
+
+class VerifyStep(BaseModel):
+    run: str
+    commit: CommitConfig | None = None
+    on_fail: OnFailStrategy | None = None
+
+
+class VerifyConfig(BaseModel):
+    on_fail: OnFailStrategy = OnFailStrategy.SKIP
+    steps: list[VerifyStep] = Field(default_factory=list)
+
+
+class UpdateEntry(BaseModel):
+    workdir: str = "."
+    command: str
+
+
+class PRConfig(BaseModel):
+    branch: str
+    title: str
+    labels: list[str] = Field(default_factory=list)
+    auto_merge: bool = False
+
+
+class DepConfig(BaseModel):
+    CONFIG_EXT: ClassVar[str] = ".dep.yaml"
+
+    name: str
+    from_config: str
+    include_destinations: list[str] = Field(default_factory=list)
+    exclude_destinations: list[str] = Field(default_factory=list)
+    updates: list[UpdateEntry]
+    verify: VerifyConfig = Field(default_factory=VerifyConfig)
+    pr: PRConfig
+
+    def load_destinations(self, repo_root: Path) -> list[Destination]:
+        src_config_path = resolve_config_path(repo_root, self.from_config)
+        src_config = load_yaml_model(src_config_path, SrcConfig)
+        destinations = src_config.destinations
+        if self.include_destinations:
+            destinations = [d for d in destinations if d.name in self.include_destinations]
+        if self.exclude_destinations:
+            destinations = [d for d in destinations if d.name not in self.exclude_destinations]
+        return destinations
+
+
+def resolve_dep_config_path(repo_root: Path, name: str) -> Path:
+    return repo_root / ".github" / f"{name}{DepConfig.CONFIG_EXT}"
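`DepConfig` is the shape a dependency-update config must parse into, and `resolve_dep_config_path` locates it at `.github/<name>.dep.yaml` in the source repo. A minimal sketch that validates an equivalent dict through the public re-exports in `path_sync.dep_update`; the concrete names, commands, and branch are made up.

```python
# Sketch: the shape of a dependency-update config, validated with the pydantic models above.
# All concrete values (config name, commands, branch) are illustrative.
from path_sync.dep_update import DepConfig

config = DepConfig.model_validate(
    {
        "name": "weekly-deps",              # hypothetical config name
        "from_config": "python-template",   # reuses destinations from an existing copy config
        "exclude_destinations": ["legacy-repo"],
        "updates": [{"workdir": ".", "command": "uv lock --upgrade"}],
        "verify": {
            "on_fail": "skip",
            "steps": [
                {"run": "uv run pytest", "on_fail": "warn"},
                {"run": "uv run ruff format .", "commit": {"message": "chore: format", "add_paths": ["."]}},
            ],
        },
        "pr": {"branch": "dep-update/weekly-deps", "title": "chore: weekly dependency update", "auto_merge": True},
    }
)
print(config.pr.auto_merge)                      # True
print(config.verify.steps[0].on_fail == "warn")  # True -- StrEnum compares to its value
# The CLI would resolve this config from .github/weekly-deps.dep.yaml (see resolve_dep_config_path).
```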
{path_sync-0.3.4 → path_sync-0.4.0}/path_sync/_internal/validation.py

@@ -31,6 +31,7 @@ def validate_no_unauthorized_changes(
     """Find files with unauthorized changes in DO_NOT_EDIT sections.

     Returns 'path:section_id' for section changes or 'path' for full-file.
+    Missing sections (user opted out) are warned but not treated as errors.
     """
     repo = git_ops.get_repo(repo_root)
     base_ref = f"origin/{default_branch}"
@@ -55,8 +56,10 @@ def validate_no_unauthorized_changes(

         if baseline_has_sections:
             file_skip = skip.get(rel_path, set())
-
-
+            changes = sections.changed_sections(baseline_content, current_content, path, file_skip)
+            for sid in changes.missing:
+                logger.warning(f"Section '{sid}' removed from {rel_path}, consider updating src.yaml")
+            unauthorized.extend(f"{rel_path}:{sid}" for sid in changes.modified)
         elif current_has_sections:
             unauthorized.append(rel_path)
         else:
path_sync-0.4.0/path_sync/dep_update.py (new file)

@@ -0,0 +1,18 @@
+# Generated by pkg-ext
+from path_sync._internal.cmd_dep_update import dep_update as _dep_update
+from path_sync._internal.models_dep import CommitConfig as _CommitConfig
+from path_sync._internal.models_dep import DepConfig as _DepConfig
+from path_sync._internal.models_dep import OnFailStrategy as _OnFailStrategy
+from path_sync._internal.models_dep import PRConfig as _PRConfig
+from path_sync._internal.models_dep import UpdateEntry as _UpdateEntry
+from path_sync._internal.models_dep import VerifyConfig as _VerifyConfig
+from path_sync._internal.models_dep import VerifyStep as _VerifyStep
+
+dep_update = _dep_update
+CommitConfig = _CommitConfig
+DepConfig = _DepConfig
+OnFailStrategy = _OnFailStrategy
+PRConfig = _PRConfig
+UpdateEntry = _UpdateEntry
+VerifyConfig = _VerifyConfig
+VerifyStep = _VerifyStep
{path_sync-0.3.4 → path_sync-0.4.0}/path_sync/sections.py

@@ -4,10 +4,11 @@ from pathlib import Path

 from zero_3rdparty.sections import (
     Section,
+    SectionChanges,
     get_comment_config,
 )
 from zero_3rdparty.sections import (
-
+    changed_sections as _changed_sections,
 )
 from zero_3rdparty.sections import (
     extract_sections as _extract_sections,
@@ -27,7 +28,8 @@ from zero_3rdparty.sections import (

 __all__ = [
     "Section",
-    "
+    "SectionChanges",
+    "changed_sections",
     "extract_sections",
     "has_sections",
     "parse_sections",
@@ -59,14 +61,23 @@ def replace_sections(
     src_sections: dict[str, str],
     path: Path,
     skip_sections: list[str] | None = None,
+    *,
+    keep_deleted_sections: bool = False,
 ) -> str:
-    return _replace_sections(
+    return _replace_sections(
+        dest_content,
+        src_sections,
+        TOOL_NAME,
+        get_comment_config(path),
+        skip_sections,
+        keep_deleted_sections=keep_deleted_sections,
+    )


-def
+def changed_sections(
     baseline_content: str,
     current_content: str,
     path: Path,
     skip: set[str] | None = None,
-) ->
-    return
+) -> SectionChanges:
+    return _changed_sections(baseline_content, current_content, TOOL_NAME, get_comment_config(path), skip, str(path))
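The new `changed_sections` wrapper compares two versions of a file's managed sections and returns a `SectionChanges` object; the validation.py hunk above reads its `modified` and `missing` attributes. A rough sketch, assuming the `=== DO_NOT_EDIT: path-sync <id> === / === OK_EDIT: path-sync <id> ===` marker style seen in the pyproject.toml hunks below; the actual parsing lives in zero-3rdparty, so the expected output is indicative only.

```python
# Rough sketch of the new changed_sections wrapper; marker format mirrors the
# "=== DO_NOT_EDIT / OK_EDIT ===" style visible in the pyproject.toml hunks below.
# Exact parsing is delegated to zero-3rdparty, so the printed results are indicative.
from pathlib import Path

from path_sync.sections import changed_sections

baseline = """\
# === DO_NOT_EDIT: path-sync release ===
release = ["pkg-ext"]
# === OK_EDIT: path-sync release ===
"""
current = """\
# === DO_NOT_EDIT: path-sync release ===
release = ["pkg-ext>=0.1"]
# === OK_EDIT: path-sync release ===
"""
changes = changed_sections(baseline, current, Path("pyproject.toml"))
print(changes.modified)  # expected to contain "release" (content inside the section changed)
print(changes.missing)   # expected to be empty (no managed section was deleted)
```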
{path_sync-0.3.4 → path_sync-0.4.0}/pyproject.toml

@@ -1,9 +1,8 @@
 # path-sync copy -n python-template

-# === OK_EDIT: path-sync header ===
 [project]
 name = "path-sync"
-version = "0.3.4"
+version = "0.4.0"
 description = "Sync files from a source repo to multiple destination repos"
 requires-python = ">=3.13"
 license = "MIT"
@@ -29,7 +28,7 @@ dependencies = [
     "pyyaml>=6.0",
     "typer>=0.16.0",
     "gitpython>=3.1.0",
-    "zero-3rdparty>=0.
+    "zero-3rdparty>=0.102.0",
 ]

 [project.scripts]
@@ -53,6 +52,7 @@ exclude = [

 [tool.pkg-ext]
 tag_prefix = "v"
+max_bump_type = "minor"

 # === DO_NOT_EDIT: path-sync ruff ===
 [tool.ruff]
@@ -75,6 +75,7 @@ extend-select = ["Q", "RUF100", "C90", "UP", "I", "T"]
 [tool.pytest.ini_options]
 addopts = "-s -vv --log-cli-level=INFO"
 python_files = ["*_test.py", "test_*.py"]
+asyncio_mode = "auto"
 # === OK_EDIT: path-sync pytest ===

 # === DO_NOT_EDIT: path-sync coverage ===
@@ -88,7 +89,9 @@ exclude_lines = ["no cov", "if __name__ == .__main__.:"]
 dev = [
     "pyright>=1.1",
     "pytest>=9.0",
+    "pytest-asyncio>=0.24",
     "pytest-cov>=7.0",
+    "pytest-regressions>=2.6",
     "ruff>=0.14",
 ]
 # === OK_EDIT: path-sync dev ===
@@ -98,3 +101,7 @@ docs = [
     "mkdocs-material>=9.5",
 ]
 # === OK_EDIT: path-sync docs ===
+
+# === DO_NOT_EDIT: path-sync release ===
+release = ["pkg-ext"]
+# === OK_EDIT: path-sync release ===