path-sync 0.3.0-py3-none-any.whl → 0.3.1-py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their public registries.
- path_sync/__init__.py +1 -1
- path_sync/_internal/__init__.py +1 -0
- path_sync/_internal/cmd_boot.py +85 -0
- path_sync/_internal/cmd_copy.py +546 -0
- path_sync/_internal/cmd_validate.py +45 -0
- path_sync/_internal/file_utils.py +7 -0
- path_sync/_internal/git_ops.py +182 -0
- path_sync/_internal/header.py +83 -0
- path_sync/_internal/models.py +133 -0
- path_sync/_internal/typer_app.py +8 -0
- path_sync/_internal/validation.py +66 -0
- path_sync/_internal/yaml_utils.py +19 -0
- {path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/METADATA +1 -1
- path_sync-0.3.1.dist-info/RECORD +21 -0
- path_sync-0.3.0.dist-info/RECORD +0 -10
- {path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/WHEEL +0 -0
- {path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/entry_points.txt +0 -0
- {path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/licenses/LICENSE +0 -0
path_sync/__init__.py
CHANGED

path_sync/_internal/__init__.py
ADDED
@@ -0,0 +1 @@
+"""Internal implementation - not part of public API."""
path_sync/_internal/cmd_boot.py
ADDED
@@ -0,0 +1,85 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+from typing import Annotated
+
+import typer
+
+from path_sync._internal import git_ops
+from path_sync._internal.file_utils import ensure_parents_write_text
+from path_sync._internal.models import (
+    Destination,
+    PathMapping,
+    SrcConfig,
+    find_repo_root,
+    resolve_config_path,
+)
+from path_sync._internal.typer_app import app
+from path_sync._internal.yaml_utils import dump_yaml_model, load_yaml_model
+
+logger = logging.getLogger(__name__)
+
+
+@app.command()
+def boot(
+    name: str = typer.Option(..., "-n", "--name", help="Config name"),
+    dest_paths: Annotated[list[str], typer.Option("-d", "--dest", help="Destination relative paths")] = [],
+    sync_paths: Annotated[list[str], typer.Option("-p", "--path", help="Paths to sync (glob patterns)")] = [],
+    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
+    regen: bool = typer.Option(False, "--regen", help="Regenerate config"),
+) -> None:
+    """Initialize or update SRC repo config."""
+    repo_root = find_repo_root(Path.cwd())
+    config_path = resolve_config_path(repo_root, name)
+
+    if config_path.exists() and not regen:
+        load_yaml_model(config_path, SrcConfig)
+        logger.info(f"Using existing config: {config_path}")
+    else:
+        src_repo = git_ops.get_repo(repo_root)
+        src_repo_url = git_ops.get_remote_url(src_repo, "origin")
+
+        destinations = _build_destinations(repo_root, dest_paths)
+        path_mappings = [PathMapping(src_path=p) for p in sync_paths]
+
+        config = SrcConfig(
+            name=name,
+            src_repo_url=src_repo_url,
+            paths=path_mappings,
+            destinations=destinations,
+        )
+        config_content = dump_yaml_model(config)
+        _write_file(config_path, config_content, dry_run, "config")
+
+    if dry_run:
+        logger.info("Dry run complete - no files written")
+    else:
+        logger.info("Boot complete")
+
+
+def _build_destinations(repo_root: Path, dest_paths: list[str]) -> list[Destination]:
+    destinations = []
+    for rel_path in dest_paths:
+        dest_dir = (repo_root / rel_path).resolve()
+        dest_name = dest_dir.name
+
+        dest = Destination(name=dest_name, dest_path_relative=rel_path)
+
+        if git_ops.is_git_repo(dest_dir):
+            dest_repo = git_ops.get_repo(dest_dir)
+            dest.repo_url = git_ops.get_remote_url(dest_repo, "origin")
+            dest.default_branch = git_ops.get_default_branch(dest_repo)
+            logger.info(f"Found git repo at {dest_dir}: {dest.repo_url}")
+
+        destinations.append(dest)
+
+    return destinations
+
+
+def _write_file(path: Path, content: str, dry_run: bool, desc: str) -> None:
+    if dry_run:
+        logger.info(f"[DRY RUN] Would write {desc}: {path}")
+        return
+    ensure_parents_write_text(path, content)
+    logger.info(f"Wrote {desc}: {path}")
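The new boot command registers on the shared typer app, so it can be exercised without installing the console script. A minimal sketch, assuming the working directory is a git checkout with an origin remote; the config name and paths are hypothetical:

from typer.testing import CliRunner

from path_sync._internal import cmd_boot  # noqa: F401 - importing registers the command
from path_sync._internal.typer_app import app

runner = CliRunner()
# Preview the generated .github/shared.src.yaml without writing anything.
result = runner.invoke(app, ["boot", "-n", "shared", "-d", "../service-a", "-p", "justfile", "--dry-run"])
assert result.exit_code == 0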
path_sync/_internal/cmd_copy.py
ADDED
@@ -0,0 +1,546 @@
+from __future__ import annotations
+
+import glob
+import logging
+import tempfile
+from contextlib import contextmanager
+from dataclasses import dataclass, field
+from pathlib import Path
+
+import typer
+
+from path_sync import sections
+from path_sync._internal import git_ops, header
+from path_sync._internal.file_utils import ensure_parents_write_text
+from path_sync._internal.models import (
+    LOG_FORMAT,
+    Destination,
+    PathMapping,
+    SrcConfig,
+    SyncMode,
+    find_repo_root,
+    resolve_config_path,
+)
+from path_sync._internal.typer_app import app
+from path_sync._internal.yaml_utils import load_yaml_model
+
+logger = logging.getLogger(__name__)
+
+EXIT_NO_CHANGES = 0
+EXIT_CHANGES = 1
+EXIT_ERROR = 2
+
+
+def _prompt(message: str, no_prompt: bool) -> bool:
+    if no_prompt:
+        return True
+    try:
+        response = input(f"{message} [y/n]: ").strip().lower()
+        return response == "y"
+    except (EOFError, KeyboardInterrupt):
+        return False
+
+
+@dataclass
+class SyncResult:
+    content_changes: int = 0
+    orphans_deleted: int = 0
+    synced_paths: set[Path] = field(default_factory=set)
+
+    @property
+    def total(self) -> int:
+        return self.content_changes + self.orphans_deleted
+
+
+@contextmanager
+def capture_sync_log(dest_name: str):
+    with tempfile.TemporaryDirectory(prefix="path-sync-") as tmpdir:
+        log_path = Path(tmpdir) / f"{dest_name}.log"
+        file_handler = logging.FileHandler(log_path, mode="w")
+        file_handler.setLevel(logging.INFO)
+        file_handler.setFormatter(logging.Formatter(LOG_FORMAT))
+        root_logger = logging.getLogger("path_sync")
+        root_logger.addHandler(file_handler)
+        try:
+            yield log_path
+        finally:
+            file_handler.close()
+            root_logger.removeHandler(file_handler)
+
+
+@dataclass
+class CopyOptions:
+    dry_run: bool = False
+    force_overwrite: bool = False
+    no_checkout: bool = False
+    checkout_from_default: bool = False
+    local: bool = False
+    no_prompt: bool = False
+    no_pr: bool = False
+    skip_orphan_cleanup: bool = False
+    pr_title: str = ""
+    pr_labels: str = ""
+    pr_reviewers: str = ""
+    pr_assignees: str = ""
+
+
+@app.command()
+def copy(
+    name: str = typer.Option(..., "-n", "--name", help="Config name"),
+    dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
+    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
+    force_overwrite: bool = typer.Option(
+        False,
+        "--force-overwrite",
+        help="Overwrite files even if header removed (opted out)",
+    ),
+    detailed_exit_code: bool = typer.Option(
+        False,
+        "--detailed-exit-code",
+        help="Exit 0=no changes, 1=changes, 2=error",
+    ),
+    no_checkout: bool = typer.Option(
+        False,
+        "--no-checkout",
+        help="Skip branch switching before sync",
+    ),
+    checkout_from_default: bool = typer.Option(
+        False,
+        "--checkout-from-default",
+        help="Reset to origin/default before sync (for CI)",
+    ),
+    local: bool = typer.Option(
+        False,
+        "--local",
+        help="No git operations after sync (no commit/push/PR)",
+    ),
+    no_prompt: bool = typer.Option(
+        False,
+        "-y",
+        "--no-prompt",
+        help="Skip confirmations (for CI)",
+    ),
+    no_pr: bool = typer.Option(
+        False,
+        "--no-pr",
+        help="Push but skip PR creation",
+    ),
+    pr_title: str = typer.Option(
+        "",
+        "--pr-title",
+        help="Override PR title (supports {name}, {dest_name})",
+    ),
+    pr_labels: str = typer.Option(
+        "",
+        "--pr-labels",
+        help="Comma-separated PR labels",
+    ),
+    pr_reviewers: str = typer.Option(
+        "",
+        "--pr-reviewers",
+        help="Comma-separated PR reviewers",
+    ),
+    pr_assignees: str = typer.Option(
+        "",
+        "--pr-assignees",
+        help="Comma-separated PR assignees",
+    ),
+    skip_orphan_cleanup: bool = typer.Option(
+        False,
+        "--skip-orphan-cleanup",
+        help="Skip deletion of orphaned synced files",
+    ),
+) -> None:
+    """Copy files from SRC to DEST repositories."""
+    src_root = find_repo_root(Path.cwd())
+    config_path = resolve_config_path(src_root, name)
+
+    if not config_path.exists():
+        logger.error(f"Config not found: {config_path}")
+        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)
+
+    config = load_yaml_model(config_path, SrcConfig)
+    src_repo = git_ops.get_repo(src_root)
+    current_sha = git_ops.get_current_sha(src_repo)
+    src_repo_url = git_ops.get_remote_url(src_repo, config.git_remote)
+
+    opts = CopyOptions(
+        dry_run=dry_run,
+        force_overwrite=force_overwrite,
+        no_checkout=no_checkout,
+        checkout_from_default=checkout_from_default,
+        local=local,
+        no_prompt=no_prompt,
+        no_pr=no_pr,
+        skip_orphan_cleanup=skip_orphan_cleanup,
+        pr_title=pr_title or config.pr_defaults.title,
+        pr_labels=pr_labels or ",".join(config.pr_defaults.labels),
+        pr_reviewers=pr_reviewers or ",".join(config.pr_defaults.reviewers),
+        pr_assignees=pr_assignees or ",".join(config.pr_defaults.assignees),
+    )
+
+    destinations = config.destinations
+    if dest_filter:
+        filter_names = [n.strip() for n in dest_filter.split(",")]
+        destinations = [d for d in destinations if d.name in filter_names]
+
+    total_changes = 0
+    for dest in destinations:
+        try:
+            with capture_sync_log(dest.name) as log_path:
+                changes = _sync_destination(config, dest, src_root, current_sha, src_repo_url, opts, log_path)
+                total_changes += changes
+        except Exception as e:
+            logger.error(f"Failed to sync {dest.name}: {e}")
+            if detailed_exit_code:
+                raise typer.Exit(EXIT_ERROR)
+            raise
+
+    if detailed_exit_code:
+        raise typer.Exit(EXIT_CHANGES if total_changes > 0 else EXIT_NO_CHANGES)
+
+
+def _sync_destination(
+    config: SrcConfig,
+    dest: Destination,
+    src_root: Path,
+    current_sha: str,
+    src_repo_url: str,
+    opts: CopyOptions,
+    log_path: Path,
+) -> int:
+    dest_root = (src_root / dest.dest_path_relative).resolve()
+
+    if opts.dry_run and not dest_root.exists():
+        raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")
+
+    dest_repo = _ensure_dest_repo(dest, dest_root, opts.dry_run)
+    copy_branch = dest.resolved_copy_branch(config.name)
+
+    # --no-checkout means "I'm already on the right branch"
+    # Prompt decline means "skip git operations for this run"
+    if opts.no_checkout:
+        skip_git_ops = False
+    elif opts.dry_run:
+        skip_git_ops = True
+    elif _prompt(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
+        git_ops.prepare_copy_branch(
+            repo=dest_repo,
+            default_branch=dest.default_branch,
+            copy_branch=copy_branch,
+            from_default=opts.checkout_from_default,
+        )
+        skip_git_ops = False
+    else:
+        skip_git_ops = True
+
+    result = _sync_paths(config, dest, src_root, dest_root, opts)
+    _print_sync_summary(dest, result)
+
+    if result.total == 0:
+        logger.info(f"{dest.name}: No changes")
+        return 0
+
+    if skip_git_ops:
+        return result.total
+
+    _commit_and_pr(config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path)
+    return result.total
+
+
+def _print_sync_summary(dest: Destination, result: SyncResult) -> None:
+    typer.echo(f"\nSyncing to {dest.name}...", err=True)
+    if result.content_changes > 0:
+        typer.echo(f" [{result.content_changes} files synced]", err=True)
+    if result.orphans_deleted > 0:
+        typer.echo(f" [-] {result.orphans_deleted} orphans deleted", err=True)
+    if result.total > 0:
+        typer.echo(f"\n{result.total} changes ready.", err=True)
+
+
+def _ensure_dest_repo(dest: Destination, dest_root: Path, dry_run: bool):
+    if not dest_root.exists():
+        if dry_run:
+            raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")
+        if not dest.repo_url:
+            raise ValueError(f"Dest {dest.name} not found and no repo_url configured")
+        git_ops.clone_repo(dest.repo_url, dest_root)
+    return git_ops.get_repo(dest_root)
+
+
+def _sync_paths(
+    config: SrcConfig,
+    dest: Destination,
+    src_root: Path,
+    dest_root: Path,
+    opts: CopyOptions,
+) -> SyncResult:
+    result = SyncResult()
+    for mapping in config.paths:
+        changes, paths = _sync_path(
+            mapping,
+            src_root,
+            dest_root,
+            dest,
+            config.name,
+            opts.dry_run,
+            opts.force_overwrite,
+        )
+        result.content_changes += changes
+        result.synced_paths.update(paths)
+
+    if not opts.skip_orphan_cleanup:
+        result.orphans_deleted = _cleanup_orphans(dest_root, config.name, result.synced_paths, opts.dry_run)
+    return result
+
+
+def _sync_path(
+    mapping: PathMapping,
+    src_root: Path,
+    dest_root: Path,
+    dest: Destination,
+    config_name: str,
+    dry_run: bool,
+    force_overwrite: bool,
+) -> tuple[int, set[Path]]:
+    src_pattern = src_root / mapping.src_path
+    changes = 0
+    synced: set[Path] = set()
+    sync_mode = mapping.sync_mode
+
+    if "*" in mapping.src_path:
+        glob_prefix = mapping.src_path.split("*")[0].rstrip("/")
+        dest_base = mapping.dest_path or glob_prefix
+        matches = glob.glob(str(src_pattern), recursive=True)
+        if not matches:
+            logger.warning(f"Glob matched no files: {mapping.src_path}")
+        for src_file in matches:
+            src_path = Path(src_file)
+            if src_path.is_file():
+                rel = src_path.relative_to(src_root / glob_prefix)
+                dest_path = dest_root / dest_base / rel
+                dest_key = str(Path(dest_base) / rel)
+                changes += _copy_file(
+                    src_path,
+                    dest_path,
+                    dest,
+                    dest_key,
+                    config_name,
+                    sync_mode,
+                    dry_run,
+                    force_overwrite,
+                )
+                synced.add(dest_path)
+    elif src_pattern.is_dir():
+        dest_base = mapping.resolved_dest_path()
+        for src_file in src_pattern.rglob("*"):
+            if src_file.is_file():
+                rel = src_file.relative_to(src_pattern)
+                dest_path = dest_root / dest_base / rel
+                dest_key = str(Path(dest_base) / rel)
+                changes += _copy_file(
+                    src_file,
+                    dest_path,
+                    dest,
+                    dest_key,
+                    config_name,
+                    sync_mode,
+                    dry_run,
+                    force_overwrite,
+                )
+                synced.add(dest_path)
+    elif src_pattern.is_file():
+        dest_base = mapping.resolved_dest_path()
+        dest_path = dest_root / dest_base
+        changes += _copy_file(
+            src_pattern,
+            dest_path,
+            dest,
+            dest_base,
+            config_name,
+            sync_mode,
+            dry_run,
+            force_overwrite,
+        )
+        synced.add(dest_path)
+    else:
+        logger.warning(f"Source not found: {mapping.src_path}")
+
+    return changes, synced
+
+
+def _copy_file(
+    src: Path,
+    dest_path: Path,
+    dest: Destination,
+    dest_key: str,
+    config_name: str,
+    sync_mode: SyncMode,
+    dry_run: bool,
+    force_overwrite: bool = False,
+) -> int:
+    src_content = header.remove_header(src.read_text())
+
+    match sync_mode:
+        case SyncMode.SCAFFOLD:
+            return _handle_scaffold(src_content, dest_path, dry_run)
+        case SyncMode.REPLACE:
+            return _handle_replace(src_content, dest_path, dry_run)
+        case SyncMode.SYNC:
+            skip_list = dest.skip_sections.get(dest_key, [])
+            return _handle_sync(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)
+
+
+def _handle_scaffold(content: str, dest_path: Path, dry_run: bool) -> int:
+    if dest_path.exists():
+        return 0
+    return _write_file(dest_path, content, dry_run)
+
+
+def _handle_replace(content: str, dest_path: Path, dry_run: bool) -> int:
+    if dest_path.exists() and dest_path.read_text() == content:
+        return 0
+    return _write_file(dest_path, content, dry_run)
+
+
+def _handle_sync(
+    src_content: str,
+    dest_path: Path,
+    skip_list: list[str],
+    config_name: str,
+    dry_run: bool,
+    force_overwrite: bool,
+) -> int:
+    if sections.has_sections(src_content, dest_path):
+        return _handle_sync_sections(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)
+
+    if dest_path.exists():
+        existing = dest_path.read_text()
+        has_hdr = header.has_header(existing)
+        if not has_hdr and not force_overwrite:
+            logger.info(f"Skipping {dest_path} (header removed - opted out)")
+            return 0
+        if header.remove_header(existing) == src_content and has_hdr:
+            return 0
+
+    new_content = header.add_header(src_content, dest_path, config_name)
+    return _write_file(dest_path, new_content, dry_run)
+
+
+def _write_file(dest_path: Path, content: str, dry_run: bool) -> int:
+    if dry_run:
+        logger.info(f"[DRY RUN] Would write: {dest_path}")
+        return 1
+    ensure_parents_write_text(dest_path, content)
+    logger.info(f"Wrote: {dest_path}")
+    return 1
+
+
+def _handle_sync_sections(
+    src_content: str,
+    dest_path: Path,
+    skip_list: list[str],
+    config_name: str,
+    dry_run: bool,
+    force_overwrite: bool,
+) -> int:
+    src_sections = sections.extract_sections(src_content, dest_path)
+
+    if dest_path.exists():
+        existing = dest_path.read_text()
+        if not header.has_header(existing) and not force_overwrite:
+            logger.info(f"Skipping {dest_path} (header removed - opted out)")
+            return 0
+        dest_body = header.remove_header(existing)
+        new_body = sections.replace_sections(dest_body, src_sections, dest_path, skip_list)
+    else:
+        new_body = src_content
+
+    new_content = header.add_header(new_body, dest_path, config_name)
+
+    if dest_path.exists() and dest_path.read_text() == new_content:
+        return 0
+
+    return _write_file(dest_path, new_content, dry_run)
+
+
+def _cleanup_orphans(
+    dest_root: Path,
+    config_name: str,
+    synced_paths: set[Path],
+    dry_run: bool,
+) -> int:
+    deleted = 0
+    for path in _find_files_with_config(dest_root, config_name):
+        if path not in synced_paths:
+            if dry_run:
+                logger.info(f"[DRY RUN] Would delete orphan: {path}")
+            else:
+                path.unlink()
+                logger.info(f"Deleted orphan: {path}")
+            deleted += 1
+    return deleted
+
+
+def _find_files_with_config(dest_root: Path, config_name: str) -> list[Path]:
+    result = []
+    for path in dest_root.rglob("*"):
+        if ".git" in path.parts:
+            continue
+        if header.file_get_config_name(path) == config_name:
+            result.append(path)
+    return result
+
+
+def _commit_and_pr(
+    config: SrcConfig,
+    repo,
+    dest_root: Path,
+    dest: Destination,
+    sha: str,
+    src_repo_url: str,
+    opts: CopyOptions,
+    log_path: Path,
+) -> None:
+    if opts.local:
+        logger.info("Local mode: skipping commit/push/PR")
+        return
+
+    copy_branch = dest.resolved_copy_branch(config.name)
+
+    if not _prompt(f"Commit changes to {dest.name}?", opts.no_prompt):
+        return
+
+    commit_msg = f"chore: sync {config.name} from {sha[:8]}"
+    git_ops.commit_changes(repo, commit_msg)
+    typer.echo(f" Committed: {commit_msg}", err=True)
+
+    if not _prompt(f"Push {dest.name} to origin?", opts.no_prompt):
+        return
+
+    git_ops.push_branch(repo, copy_branch, force=True)
+    typer.echo(f" Pushed: {copy_branch} (force)", err=True)
+
+    if opts.no_pr or not _prompt(f"Create PR for {dest.name}?", opts.no_prompt):
+        return
+
+    sync_log = log_path.read_text() if log_path.exists() else ""
+    pr_body = config.pr_defaults.format_body(
+        src_repo_url=src_repo_url,
+        src_sha=sha,
+        sync_log=sync_log,
+        dest_name=dest.name,
+    )
+
+    title = opts.pr_title.format(name=config.name, dest_name=dest.name)
+    pr_url = git_ops.create_or_update_pr(
+        dest_root,
+        copy_branch,
+        title,
+        pr_body,
+        opts.pr_labels.split(",") if opts.pr_labels else None,
+        opts.pr_reviewers.split(",") if opts.pr_reviewers else None,
+        opts.pr_assignees.split(",") if opts.pr_assignees else None,
+    )
+    if pr_url:
+        typer.echo(f" Created PR: {pr_url}", err=True)
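The copy command's --detailed-exit-code flag turns the exit status into a three-way signal (EXIT_NO_CHANGES=0, EXIT_CHANGES=1, EXIT_ERROR=2), which lets a CI job tell "nothing to do" from "a sync is pending". A hedged sketch of consuming that contract; the config name is hypothetical:

from typer.testing import CliRunner

from path_sync._internal import cmd_copy  # noqa: F401 - importing registers the command
from path_sync._internal.typer_app import app

runner = CliRunner()
result = runner.invoke(
    app,
    ["copy", "-n", "shared", "--dry-run", "--no-prompt", "--detailed-exit-code"],
)
if result.exit_code == 1:
    print("sync would change files")          # EXIT_CHANGES
elif result.exit_code == 2:
    print("sync failed")                      # EXIT_ERROR
else:
    print("destinations already up to date")  # EXIT_NO_CHANGES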
path_sync/_internal/cmd_validate.py
ADDED
@@ -0,0 +1,45 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+import typer
+
+from path_sync._internal import git_ops
+from path_sync._internal.models import find_repo_root
+from path_sync._internal.typer_app import app
+from path_sync._internal.validation import parse_skip_sections, validate_no_unauthorized_changes
+
+logger = logging.getLogger(__name__)
+
+
+@app.command("validate-no-changes")
+def validate_no_changes(
+    branch: str = typer.Option("main", "-b", "--branch", help="Default branch to compare against"),
+    skip_sections_opt: str = typer.Option(
+        "",
+        "--skip-sections",
+        help="Comma-separated path:section_id pairs to skip (e.g., 'justfile:coverage,pyproject.toml:default')",
+    ),
+) -> None:
+    """Validate no unauthorized changes to synced files."""
+    repo_root = find_repo_root(Path.cwd())
+    repo = git_ops.get_repo(repo_root)
+
+    current_branch = repo.active_branch.name
+    if current_branch.startswith("sync/"):
+        logger.info(f"On sync branch {current_branch}, validation skipped")
+        return
+    if current_branch == branch:
+        logger.info(f"On default branch {branch}, validation skipped")
+        return
+
+    skip_sections = parse_skip_sections(skip_sections_opt) if skip_sections_opt else None
+    unauthorized = validate_no_unauthorized_changes(repo_root, branch, skip_sections)
+
+    if unauthorized:
+        files_list = "\n ".join(unauthorized)
+        logger.error(f"Unauthorized changes in {len(unauthorized)} files:\n {files_list}")
+        raise typer.Exit(1)
+
+    logger.info("Validation passed: no unauthorized changes")
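validate-no-changes is the destination-side guard: it exits non-zero when a synced file was edited outside its allowed sections, and silently passes on sync/* branches and the default branch. A minimal invocation sketch; the branch name and section id are hypothetical:

from typer.testing import CliRunner

from path_sync._internal import cmd_validate  # noqa: F401 - importing registers the command
from path_sync._internal.typer_app import app

runner = CliRunner()
result = runner.invoke(
    app,
    ["validate-no-changes", "-b", "main", "--skip-sections", "justfile:coverage"],
)
# Exit code 1 means at least one synced file changed outside its allowed sections.
print(result.exit_code)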
path_sync/_internal/git_ops.py
ADDED
@@ -0,0 +1,182 @@
+from __future__ import annotations
+
+import logging
+import os
+import subprocess
+from contextlib import suppress
+from pathlib import Path
+
+from git import GitCommandError, InvalidGitRepositoryError, NoSuchPathError, Repo
+
+logger = logging.getLogger(__name__)
+
+
+def _auth_url(url: str) -> str:
+    """Inject GH_TOKEN into HTTPS URL for authentication."""
+    token = os.environ.get("GH_TOKEN", "")
+    if not token:
+        return url
+    if url.startswith("https://github.com/"):
+        return url.replace("https://", f"https://x-access-token:{token}@")
+    return url
+
+
+def get_repo(path: Path) -> Repo:
+    try:
+        return Repo(path)
+    except InvalidGitRepositoryError as e:
+        raise ValueError(f"Not a git repository: {path}") from e
+
+
+def is_git_repo(path: Path) -> bool:
+    with suppress(InvalidGitRepositoryError, NoSuchPathError):
+        Repo(path)
+        return True
+    return False
+
+
+def get_default_branch(repo: Repo) -> str:
+    with suppress(GitCommandError):
+        return repo.git.symbolic_ref("refs/remotes/origin/HEAD", short=True).replace("origin/", "")
+    return "main"
+
+
+def clone_repo(url: str, dest: Path) -> Repo:
+    logger.info(f"Cloning {url} to {dest}")
+    dest.parent.mkdir(parents=True, exist_ok=True)
+    return Repo.clone_from(_auth_url(url), str(dest))
+
+
+def checkout_branch(repo: Repo, branch: str) -> None:
+    current = repo.active_branch.name
+    if current == branch:
+        return
+    logger.info(f"Checking out {branch}")
+    try:
+        repo.git.checkout(branch)
+    except GitCommandError:
+        repo.git.checkout("-b", branch)
+
+
+def prepare_copy_branch(repo: Repo, default_branch: str, copy_branch: str, from_default: bool = False) -> None:
+    """Prepare copy_branch for syncing.
+
+    Args:
+        from_default: If True, fetch origin and reset to origin/default_branch
+            before creating copy_branch. Use in CI for clean state.
+            If False, just switch to or create copy_branch from current HEAD.
+    """
+    if from_default:
+        logger.info("Fetching origin")
+        repo.git.fetch("origin")
+
+        logger.info(f"Checking out {default_branch}")
+        repo.git.checkout(default_branch)
+        repo.git.reset("--hard", f"origin/{default_branch}")
+
+        with suppress(GitCommandError):
+            repo.git.branch("-D", copy_branch)
+            logger.info(f"Deleted existing local branch: {copy_branch}")
+
+        logger.info(f"Creating fresh branch: {copy_branch}")
+        repo.git.checkout("-b", copy_branch)
+    else:
+        checkout_branch(repo, copy_branch)
+
+
+def get_current_sha(repo: Repo) -> str:
+    return repo.head.commit.hexsha
+
+
+def get_remote_url(repo: Repo, remote_name: str = "origin") -> str:
+    try:
+        return repo.remote(remote_name).url
+    except ValueError:
+        return ""
+
+
+def has_changes(repo: Repo) -> bool:
+    return repo.is_dirty() or len(repo.untracked_files) > 0
+
+
+def commit_changes(repo: Repo, message: str) -> None:
+    repo.git.add("-A")
+    if repo.is_dirty():
+        _ensure_git_user(repo)
+        repo.git.commit("-m", message)
+        logger.info(f"Committed: {message}")
+
+
+def _ensure_git_user(repo: Repo) -> None:
+    """Configure git user if not already set."""
+    try:
+        repo.config_reader().get_value("user", "name")
+    except Exception:
+        repo.config_writer().set_value("user", "name", "path-sync[bot]").release()
+        repo.config_writer().set_value("user", "email", "path-sync[bot]@users.noreply.github.com").release()
+
+
+def push_branch(repo: Repo, branch: str, force: bool = True) -> None:
+    logger.info(f"Pushing {branch}" + (" (force)" if force else ""))
+    args = ["--force", "-u", "origin", branch] if force else ["-u", "origin", branch]
+    repo.git.push(*args)
+
+
+def update_pr_body(repo_path: Path, branch: str, body: str) -> bool:
+    cmd = ["gh", "pr", "edit", branch, "--body", body]
+    result = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
+    if result.returncode != 0:
+        logger.warning(f"Failed to update PR body: {result.stderr}")
+        return False
+    logger.info("Updated PR body")
+    return True
+
+
+def create_or_update_pr(
+    repo_path: Path,
+    branch: str,
+    title: str,
+    body: str = "",
+    labels: list[str] | None = None,
+    reviewers: list[str] | None = None,
+    assignees: list[str] | None = None,
+) -> str:
+    cmd = ["gh", "pr", "create", "--head", branch, "--title", title]
+    cmd.extend(["--body", body or ""])
+    if labels:
+        cmd.extend(["--label", ",".join(labels)])
+    if reviewers:
+        cmd.extend(["--reviewer", ",".join(reviewers)])
+    if assignees:
+        cmd.extend(["--assignee", ",".join(assignees)])
+
+    result = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
+    if result.returncode != 0:
+        if "already exists" in result.stderr:
+            logger.info("PR already exists, updating body")
+            update_pr_body(repo_path, branch, body)
+            return ""
+        raise RuntimeError(f"Failed to create PR: {result.stderr}")
+    logger.info(f"Created PR: {result.stdout.strip()}")
+    return result.stdout.strip()
+
+
+def file_has_git_changes(repo: Repo, file_path: Path, base_ref: str = "HEAD") -> bool:
+    rel_path = str(file_path.relative_to(repo.working_dir))
+    diff = repo.git.diff("--name-only", base_ref, "--", rel_path)
+    return bool(diff.strip())
+
+
+def get_changed_files(repo: Repo, base_ref: str = "HEAD") -> list[Path]:
+    diff = repo.git.diff("--name-only", base_ref)
+    if not diff.strip():
+        return []
+    repo_root = Path(repo.working_dir)
+    return [repo_root / p for p in diff.strip().split("\n")]
+
+
+def get_file_content_at_ref(repo: Repo, file_path: Path, ref: str) -> str | None:
+    rel_path = str(file_path.relative_to(repo.working_dir))
+    with suppress(GitCommandError):
+        return repo.git.show(f"{ref}:{rel_path}")
+    return None
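Note that _auth_url only rewrites HTTPS GitHub URLs; SSH remotes and other hosts pass through untouched. A small sketch with a placeholder token:

import os

from path_sync._internal.git_ops import _auth_url

os.environ["GH_TOKEN"] = "ghp_example"  # placeholder, not a real token
print(_auth_url("https://github.com/org/repo.git"))
# https://x-access-token:ghp_example@github.com/org/repo.git
print(_auth_url("git@github.com:org/repo.git"))
# git@github.com:org/repo.git (unchanged)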
path_sync/_internal/header.py
ADDED
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+import re
+from pathlib import Path
+
+from zero_3rdparty.sections import CommentConfig, get_comment_config
+
+from path_sync._internal.models import HEADER_TEMPLATE, HeaderConfig
+
+HEADER_PATTERN = re.compile(r"path-sync copy -n (?P<config_name>[\w-]+)")
+
+
+def _resolve_comment_config(path: Path, config: HeaderConfig | None) -> CommentConfig:
+    if config:
+        ext = path.suffix
+        prefix = config.comment_prefixes.get(ext)
+        suffix = config.comment_suffixes.get(ext, "")
+        if prefix:
+            return CommentConfig(prefix, suffix)
+    return get_comment_config(path)
+
+
+def get_comment_prefix(path: Path, config: HeaderConfig | None = None) -> str:
+    return _resolve_comment_config(path, config).prefix
+
+
+def get_comment_suffix(path: Path, config: HeaderConfig | None = None) -> str:
+    return _resolve_comment_config(path, config).suffix
+
+
+def get_header_line(path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
+    cc = _resolve_comment_config(path, config)
+    header_text = HEADER_TEMPLATE.format(config_name=config_name)
+    return f"{cc.prefix} {header_text}{cc.suffix}"
+
+
+def has_header(content: str) -> bool:
+    first_line = content.split("\n", 1)[0] if content else ""
+    return bool(HEADER_PATTERN.search(first_line))
+
+
+def get_config_name(content: str) -> str | None:
+    first_line = content.split("\n", 1)[0] if content else ""
+    if match := HEADER_PATTERN.search(first_line):
+        return match.group("config_name")
+    return None
+
+
+def add_header(content: str, path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
+    header = get_header_line(path, config_name, config)
+    return f"{header}\n{content}"
+
+
+def remove_header(content: str) -> str:
+    if not has_header(content):
+        return content
+    lines = content.split("\n", 1)
+    return lines[1] if len(lines) > 1 else ""
+
+
+def has_known_comment_prefix(path: Path) -> bool:
+    try:
+        get_comment_config(path)
+        return True
+    except ValueError:
+        return False
+
+
+def file_get_config_name(path: Path) -> str | None:
+    if not path.exists() or not has_known_comment_prefix(path):
+        return None
+    try:
+        with path.open() as f:
+            first_line = f.readline()
+    except (UnicodeDecodeError, OSError):
+        return None
+    return get_config_name(first_line)
+
+
+def file_has_header(path: Path, config: HeaderConfig | None = None) -> bool:
+    if config and path.suffix not in config.comment_prefixes:
+        return False
+    return file_get_config_name(path) is not None
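The ownership marker is a single comment line matched by HEADER_PATTERN, so add_header and remove_header round-trip cleanly. A sketch, assuming zero_3rdparty.sections resolves .py files to the "#" comment prefix:

from pathlib import Path

from path_sync._internal import header

body = "print('hello')\n"
marked = header.add_header(body, Path("tool.py"), "shared")
# First line should read: # path-sync copy -n shared
assert header.has_header(marked)
assert header.get_config_name(marked) == "shared"
assert header.remove_header(marked) == body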
path_sync/_internal/models.py
ADDED
@@ -0,0 +1,133 @@
+from __future__ import annotations
+
+import glob as glob_mod
+from enum import StrEnum
+from pathlib import Path
+from typing import ClassVar
+
+from pydantic import BaseModel, Field
+
+LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s"
+
+
+class SyncMode(StrEnum):
+    SYNC = "sync"
+    REPLACE = "replace"
+    SCAFFOLD = "scaffold"
+
+
+class PathMapping(BaseModel):
+    src_path: str
+    dest_path: str = ""
+    sync_mode: SyncMode = SyncMode.SYNC
+
+    def resolved_dest_path(self) -> str:
+        return self.dest_path or self.src_path
+
+    def expand_dest_paths(self, repo_root: Path) -> list[Path]:
+        dest_path = self.resolved_dest_path()
+        pattern = repo_root / dest_path
+
+        if "*" in dest_path:
+            return [Path(p) for p in glob_mod.glob(str(pattern), recursive=True)]
+        if pattern.is_dir():
+            return [p for p in pattern.rglob("*") if p.is_file()]
+        if pattern.exists():
+            return [pattern]
+        return []
+
+
+HEADER_TEMPLATE = "path-sync copy -n {config_name}"
+
+
+class HeaderConfig(BaseModel):
+    comment_prefixes: dict[str, str] = Field(default_factory=dict)
+    comment_suffixes: dict[str, str] = Field(default_factory=dict)
+
+
+DEFAULT_BODY_TEMPLATE = """\
+Synced from [{src_repo_name}]({src_repo_url}) @ `{src_sha_short}`
+
+<details>
+<summary>Sync Log</summary>
+
+```
+{sync_log}
+```
+
+</details>
+"""
+
+
+class PRDefaults(BaseModel):
+    title: str = "chore: sync {name} files"
+    body_template: str = DEFAULT_BODY_TEMPLATE
+    body_suffix: str = ""
+    labels: list[str] = Field(default_factory=list)
+    reviewers: list[str] = Field(default_factory=list)
+    assignees: list[str] = Field(default_factory=list)
+
+    def format_body(
+        self,
+        src_repo_url: str,
+        src_sha: str,
+        sync_log: str,
+        dest_name: str,
+    ) -> str:
+        src_repo_name = src_repo_url.rstrip("/").rsplit("/", 1)[-1].removesuffix(".git")
+        body = self.body_template.format(
+            src_repo_url=src_repo_url,
+            src_repo_name=src_repo_name,
+            src_sha=src_sha,
+            src_sha_short=src_sha[:8],
+            sync_log=sync_log,
+            dest_name=dest_name,
+        )
+        if self.body_suffix:
+            body = f"{body}\n---\n{self.body_suffix}"
+        return body
+
+
+class Destination(BaseModel):
+    name: str
+    repo_url: str = ""
+    dest_path_relative: str
+    copy_branch: str = ""
+    default_branch: str = "main"
+    skip_sections: dict[str, list[str]] = Field(default_factory=dict)
+
+    def resolved_copy_branch(self, config_name: str) -> str:
+        """Returns branch name, defaulting to sync/{config_name} if not set."""
+        return self.copy_branch or f"sync/{config_name}"
+
+
+class SrcConfig(BaseModel):
+    CONFIG_EXT: ClassVar[str] = ".src.yaml"
+
+    name: str
+    git_remote: str = "origin"
+    src_repo_url: str = ""
+    schedule: str = "0 6 * * *"
+    header_config: HeaderConfig = Field(default_factory=HeaderConfig)
+    pr_defaults: PRDefaults = Field(default_factory=PRDefaults)
+    paths: list[PathMapping] = Field(default_factory=list)
+    destinations: list[Destination] = Field(default_factory=list)
+
+    def find_destination(self, name: str) -> Destination:
+        for dest in self.destinations:
+            if dest.name == name:
+                return dest
+        raise ValueError(f"Destination not found: {name}")
+
+
+def resolve_config_path(repo_root: Path, name: str) -> Path:
+    return repo_root / ".github" / f"{name}{SrcConfig.CONFIG_EXT}"
+
+
+def find_repo_root(start_path: Path) -> Path:
+    current = start_path.resolve()
+    while current != current.parent:
+        if (current / ".git").exists():
+            return current
+        current = current.parent
+    raise ValueError(f"No git repository found from {start_path}")
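These models double as the on-disk schema for .github/<name>.src.yaml. A minimal construction sketch with hypothetical names, showing the sync/<config name> branch fallback:

from path_sync._internal.models import Destination, PathMapping, SrcConfig, SyncMode

config = SrcConfig(
    name="shared",
    src_repo_url="https://github.com/org/src-repo",
    paths=[PathMapping(src_path="justfile", sync_mode=SyncMode.SYNC)],
    destinations=[Destination(name="service-a", dest_path_relative="../service-a")],
)
# copy_branch is unset, so the branch name falls back to sync/<config name>.
dest = config.find_destination("service-a")
assert dest.resolved_copy_branch(config.name) == "sync/shared"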
path_sync/_internal/validation.py
ADDED
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+from path_sync import sections
+from path_sync._internal import git_ops, header
+
+logger = logging.getLogger(__name__)
+
+
+def parse_skip_sections(value: str) -> dict[str, set[str]]:
+    """Parse 'path:section_id,path:section_id' into {path: {section_ids}}."""
+    result: dict[str, set[str]] = {}
+    for item in value.split(","):
+        item = item.strip()
+        if not item:
+            continue
+        if ":" not in item:
+            raise ValueError(f"Invalid format '{item}', expected 'path:section_id'")
+        path, section_id = item.rsplit(":", 1)
+        result.setdefault(path, set()).add(section_id)
+    return result
+
+
+def validate_no_unauthorized_changes(
+    repo_root: Path,
+    default_branch: str = "main",
+    skip_sections: dict[str, set[str]] | None = None,
+) -> list[str]:
+    """Find files with unauthorized changes in DO_NOT_EDIT sections.
+
+    Returns 'path:section_id' for section changes or 'path' for full-file.
+    """
+    repo = git_ops.get_repo(repo_root)
+    base_ref = f"origin/{default_branch}"
+    skip = skip_sections or {}
+    unauthorized: list[str] = []
+
+    for path in git_ops.get_changed_files(repo, base_ref):
+        if not path.exists():
+            continue
+        if not header.file_has_header(path):
+            continue
+
+        rel_path = str(path.relative_to(repo_root))
+        current_content = path.read_text()
+        baseline_content = git_ops.get_file_content_at_ref(repo, path, base_ref)
+
+        if baseline_content is None:
+            continue
+
+        baseline_has_sections = sections.has_sections(baseline_content, path)
+        current_has_sections = sections.has_sections(current_content, path)
+
+        if baseline_has_sections:
+            file_skip = skip.get(rel_path, set())
+            changed_ids = sections.compare_sections(baseline_content, current_content, path, file_skip)
+            unauthorized.extend(f"{rel_path}:{sid}" for sid in changed_ids)
+        elif current_has_sections:
+            unauthorized.append(rel_path)
+        else:
+            if baseline_content != current_content:
+                unauthorized.append(rel_path)
+
+    return sorted(unauthorized)
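parse_skip_sections turns the CLI flag value into the {path: {section_ids}} shape that validate_no_unauthorized_changes expects; results come back as "path" for whole-file changes and "path:section_id" for section-level ones. A sketch, assuming origin/main exists in the repository at the current working directory:

from pathlib import Path

from path_sync._internal.validation import parse_skip_sections, validate_no_unauthorized_changes

skip = parse_skip_sections("justfile:coverage,pyproject.toml:default")
assert skip == {"justfile": {"coverage"}, "pyproject.toml": {"default"}}

for entry in validate_no_unauthorized_changes(Path.cwd(), "main", skip):
    print(entry)  # e.g. "justfile:lint" or "Makefile" (hypothetical output)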
path_sync/_internal/yaml_utils.py
ADDED
@@ -0,0 +1,19 @@
+from __future__ import annotations
+
+from pathlib import Path
+from typing import TypeVar
+
+import yaml
+from pydantic import BaseModel
+
+T = TypeVar("T", bound=BaseModel)
+
+
+def load_yaml_model(path: Path, model_type: type[T]) -> T:
+    data = yaml.safe_load(path.read_text())
+    return model_type.model_validate(data)
+
+
+def dump_yaml_model(model: BaseModel) -> str:
+    data = model.model_dump(mode="json", exclude_none=True)
+    return yaml.safe_dump(data, default_flow_style=False, sort_keys=False)
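load_yaml_model and dump_yaml_model are near-inverses for these pydantic models; dumping with mode="json" means StrEnum fields like sync_mode serialize as plain strings. A tiny round-trip sketch using a temporary file:

from pathlib import Path
from tempfile import TemporaryDirectory

from path_sync._internal.models import SrcConfig
from path_sync._internal.yaml_utils import dump_yaml_model, load_yaml_model

with TemporaryDirectory() as tmp:
    path = Path(tmp) / "shared.src.yaml"
    path.write_text(dump_yaml_model(SrcConfig(name="shared")))
    loaded = load_yaml_model(path, SrcConfig)
    assert loaded.name == "shared"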
path_sync-0.3.1.dist-info/RECORD
ADDED
@@ -0,0 +1,21 @@
+path_sync/__init__.py,sha256=S6tn88icmaqW7h_DeyDXb_Koaq9rIeUUuIyQVf31VF4,153
+path_sync/__main__.py,sha256=HDj3qgijDcK8k976qsAvKMvUq9fZEJTK-dZldUc5-no,326
+path_sync/config.py,sha256=XYuEK_bjSpAk_nZN0oxpEA-S3t9F6Qn0yznYLoQHIw8,568
+path_sync/copy.py,sha256=BpflW4086XJFSHHK4taYPgXtF07xHB8qrgm8TYqdM4E,120
+path_sync/sections.py,sha256=dMEux8wT6nu9fcPgqgoWCc7QFNOxGVx5RVwcHhEK_lw,1894
+path_sync/_internal/__init__.py,sha256=iPkMhrpiyXBijo2Hp-y_2zEYxAXnHLnStKM0X0HHd4U,56
+path_sync/_internal/cmd_boot.py,sha256=44IUnemVzYO_qcZsHnCG1oeQHoGIavLy-9BNKIV8Tlo,2918
+path_sync/_internal/cmd_copy.py,sha256=Cl81112qBKRWU-99a8aaVHjl3RD4GCBYlHRNj39wems,16896
+path_sync/_internal/cmd_validate.py,sha256=L84W5iCUxCbM04muiOvDOTdvN3HA0Ubxy8Fk_XMQf1E,1612
+path_sync/_internal/file_utils.py,sha256=5C33qzKFQdwChi5YwUWBujj126t0P6dbGSU_5hWExpE,194
+path_sync/_internal/git_ops.py,sha256=rpG_r7VNH1KlBgqM9mz7xop0mpdy76Vs3rzCoxE1dIQ,5895
+path_sync/_internal/header.py,sha256=evgY2q_gfDdEytEt_jyJ7M_KdGzCpfdKBUnoh3v-0Go,2593
+path_sync/_internal/models.py,sha256=D4qGXh4NnT01xf2odeLYULtgr245TkdjaqW26BPnxP4,3767
+path_sync/_internal/typer_app.py,sha256=lEGMRXql3Se3VbmwAohvpUaL2cbY-RwhPUq8kL7bPbc,177
+path_sync/_internal/validation.py,sha256=qhEha-pJiM5zkZlr4sj2I4ZqvqcWMEfL4IZu_LGatLI,2226
+path_sync/_internal/yaml_utils.py,sha256=yj6Bl54EltjLEcVKaiA5Ahb9byT6OUMh0xIEzTsrvnQ,498
+path_sync-0.3.1.dist-info/METADATA,sha256=dkhwUJMoG5BDWpZkZpY1rBm1qm4hBzo_rQwkqS1um_8,8231
+path_sync-0.3.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+path_sync-0.3.1.dist-info/entry_points.txt,sha256=jTsL0c-9gP-4_Jt3EPgihtpLcwQR0AFAf1AUpD50AlI,54
+path_sync-0.3.1.dist-info/licenses/LICENSE,sha256=OphKV48tcMv6ep-7j-8T6nycykPT0g8ZlMJ9zbGvdPs,1066
+path_sync-0.3.1.dist-info/RECORD,,
path_sync-0.3.0.dist-info/RECORD
DELETED
@@ -1,10 +0,0 @@
-path_sync/__init__.py,sha256=GxwE45z99FV9EG57V7d0ld557zw2SzYk_83nEbBGtRk,153
-path_sync/__main__.py,sha256=HDj3qgijDcK8k976qsAvKMvUq9fZEJTK-dZldUc5-no,326
-path_sync/config.py,sha256=XYuEK_bjSpAk_nZN0oxpEA-S3t9F6Qn0yznYLoQHIw8,568
-path_sync/copy.py,sha256=BpflW4086XJFSHHK4taYPgXtF07xHB8qrgm8TYqdM4E,120
-path_sync/sections.py,sha256=dMEux8wT6nu9fcPgqgoWCc7QFNOxGVx5RVwcHhEK_lw,1894
-path_sync-0.3.0.dist-info/METADATA,sha256=8oFXF5R7ULoTo2CFdDRcsXjpRW6G0yZ-aGFjDHh1iJk,8231
-path_sync-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
-path_sync-0.3.0.dist-info/entry_points.txt,sha256=jTsL0c-9gP-4_Jt3EPgihtpLcwQR0AFAf1AUpD50AlI,54
-path_sync-0.3.0.dist-info/licenses/LICENSE,sha256=OphKV48tcMv6ep-7j-8T6nycykPT0g8ZlMJ9zbGvdPs,1066
-path_sync-0.3.0.dist-info/RECORD,,
{path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/WHEEL
File without changes

{path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/entry_points.txt
File without changes

{path_sync-0.3.0.dist-info → path_sync-0.3.1.dist-info}/licenses/LICENSE
File without changes