path-sync 0.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
path_sync/__init__.py ADDED
@@ -0,0 +1,12 @@
1
+ # Generated by pkg-ext
2
+ # flake8: noqa
3
+ from path_sync import config
4
+ from path_sync import copy
5
+ from path_sync import dep_update
6
+
7
+ VERSION = "0.4.0"
8
+ __all__ = [
9
+ "config",
10
+ "copy",
11
+ "dep_update",
12
+ ]
path_sync/__main__.py ADDED
@@ -0,0 +1,14 @@
1
+ import logging
2
+
3
+ from path_sync._internal import cmd_boot, cmd_copy, cmd_dep_update, cmd_validate # noqa: F401
4
+ from path_sync._internal.models import LOG_FORMAT
5
+ from path_sync._internal.typer_app import app
6
+
7
+
8
def main() -> None:
    """Run the path-sync CLI with INFO-level logging on the root logger."""
    logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)
    app()


if __name__ == "__main__":
    main()
@@ -0,0 +1 @@
1
+ """Internal implementation - not part of public API."""
@@ -0,0 +1,90 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+ from typing import Annotated
6
+
7
+ import typer
8
+
9
+ from path_sync._internal import git_ops
10
+ from path_sync._internal.file_utils import ensure_parents_write_text
11
+ from path_sync._internal.models import (
12
+ Destination,
13
+ PathMapping,
14
+ SrcConfig,
15
+ find_repo_root,
16
+ resolve_config_path,
17
+ )
18
+ from path_sync._internal.typer_app import app
19
+ from path_sync._internal.yaml_utils import dump_yaml_model, load_yaml_model
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
@app.command()
def boot(
    name: str = typer.Option(..., "-n", "--name", help="Config name"),
    # B006 fix: the original used mutable `= []` defaults. Typer hands `None`
    # to the function when the option is not given; normalize below.
    dest_paths: Annotated[list[str] | None, typer.Option("-d", "--dest", help="Destination relative paths")] = None,
    sync_paths: Annotated[list[str] | None, typer.Option("-p", "--path", help="Paths to sync (glob patterns)")] = None,
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
    regen: bool = typer.Option(False, "--regen", help="Regenerate config"),
    src_root_opt: str = typer.Option(
        "",
        "--src-root",
        help="Source repo root (default: find git root from cwd)",
    ),
) -> None:
    """Initialize or update SRC repo config.

    Loads (and thereby validates) an existing config unless --regen is given;
    otherwise builds a fresh SrcConfig from the repo's origin remote and the
    supplied destination/path options and writes it out.
    """
    dest_paths = dest_paths or []
    sync_paths = sync_paths or []

    repo_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    config_path = resolve_config_path(repo_root, name)

    if config_path.exists() and not regen:
        # Load purely for validation; the file is left untouched.
        load_yaml_model(config_path, SrcConfig)
        logger.info(f"Using existing config: {config_path}")
    else:
        src_repo = git_ops.get_repo(repo_root)
        src_repo_url = git_ops.get_remote_url(src_repo, "origin")

        destinations = _build_destinations(repo_root, dest_paths)
        path_mappings = [PathMapping(src_path=p) for p in sync_paths]

        config = SrcConfig(
            name=name,
            src_repo_url=src_repo_url,
            paths=path_mappings,
            destinations=destinations,
        )
        config_content = dump_yaml_model(config)
        _write_file(config_path, config_content, dry_run, "config")

    if dry_run:
        logger.info("Dry run complete - no files written")
    else:
        logger.info("Boot complete")
64
+
65
+
66
def _build_destinations(repo_root: Path, dest_paths: list[str]) -> list[Destination]:
    """Build a Destination entry per relative path, probing each for git metadata."""
    results: list[Destination] = []
    for relative in dest_paths:
        target_dir = (repo_root / relative).resolve()
        destination = Destination(name=target_dir.name, dest_path_relative=relative)

        # When the path is already a git checkout, record its remote and branch.
        if git_ops.is_git_repo(target_dir):
            repo = git_ops.get_repo(target_dir)
            destination.repo_url = git_ops.get_remote_url(repo, "origin")
            destination.default_branch = git_ops.get_default_branch(repo)
            logger.info(f"Found git repo at {target_dir}: {destination.repo_url}")

        results.append(destination)

    return results
83
+
84
+
85
def _write_file(path: Path, content: str, dry_run: bool, desc: str) -> None:
    """Write *content* to *path* (creating parents), or only log it in dry-run mode."""
    if dry_run:
        logger.info(f"[DRY RUN] Would write {desc}: {path}")
    else:
        ensure_parents_write_text(path, content)
        logger.info(f"Wrote {desc}: {path}")
@@ -0,0 +1,577 @@
1
+ from __future__ import annotations
2
+
3
+ import glob
4
+ import logging
5
+ import tempfile
6
+ from contextlib import contextmanager
7
+ from dataclasses import dataclass, field
8
+ from pathlib import Path
9
+
10
+ import typer
11
+ from pydantic import BaseModel
12
+
13
+ from path_sync import sections
14
+ from path_sync._internal import git_ops, header
15
+ from path_sync._internal.file_utils import ensure_parents_write_text
16
+ from path_sync._internal.models import (
17
+ LOG_FORMAT,
18
+ Destination,
19
+ PathMapping,
20
+ SrcConfig,
21
+ SyncMode,
22
+ find_repo_root,
23
+ resolve_config_path,
24
+ )
25
+ from path_sync._internal.typer_app import app
26
+ from path_sync._internal.yaml_utils import load_yaml_model
27
+
28
+ logger = logging.getLogger(__name__)
29
+
30
+ EXIT_NO_CHANGES = 0
31
+ EXIT_CHANGES = 1
32
+ EXIT_ERROR = 2
33
+
34
+
35
def _prompt(message: str, no_prompt: bool) -> bool:
    """Ask the user to confirm *message*; auto-approve when *no_prompt* is set.

    Returns False on any answer other than "y" and on aborted input
    (EOF or Ctrl-C).
    """
    if no_prompt:
        return True
    try:
        answer = input(f"{message} [y/n]: ")
    except (EOFError, KeyboardInterrupt):
        return False
    return answer.strip().lower() == "y"
43
+
44
+
45
@dataclass
class SyncResult:
    """Tally of the changes made while syncing a single destination."""

    content_changes: int = 0  # files written or updated
    orphans_deleted: int = 0  # previously-synced files removed
    synced_paths: set[Path] = field(default_factory=set)  # every dest path produced this run

    @property
    def total(self) -> int:
        """Overall change count: writes plus deletions."""
        return self.orphans_deleted + self.content_changes
54
+
55
+
56
@contextmanager
def capture_sync_log(dest_name: str):
    """Yield a temporary log file capturing all ``path_sync`` INFO logging.

    A FileHandler is attached to the ``path_sync`` logger for the duration of
    the ``with`` block; the file lives in a TemporaryDirectory, so callers
    must read it before the context exits.
    """
    with tempfile.TemporaryDirectory(prefix="path-sync-") as tmpdir:
        log_file = Path(tmpdir) / f"{dest_name}.log"
        handler = logging.FileHandler(log_file, mode="w")
        handler.setLevel(logging.INFO)
        handler.setFormatter(logging.Formatter(LOG_FORMAT))
        pkg_logger = logging.getLogger("path_sync")
        pkg_logger.addHandler(handler)
        try:
            yield log_file
        finally:
            # Close before detaching so the file is fully flushed.
            handler.close()
            pkg_logger.removeHandler(handler)
70
+
71
+
72
class CopyOptions(BaseModel):
    """Flattened `copy` command flags threaded through the sync pipeline."""

    dry_run: bool = False  # preview only: log intended writes, never touch files or git
    force_overwrite: bool = False  # overwrite even when dest removed the managed header (opted out)
    no_checkout: bool = False  # assume dest is already on the right branch; skip branch switch
    checkout_from_default: bool = False  # reset to origin/default before sync (for CI)
    local: bool = False  # stop after writing files: no commit/push/PR
    no_prompt: bool = False  # answer "yes" to every confirmation (for CI)
    no_pr: bool = False  # push the branch but skip PR creation
    skip_orphan_cleanup: bool = False  # keep previously-synced files that vanished upstream
    pr_title: str = ""  # PR title template; supports {name} and {dest_name}
    pr_labels: str = ""  # comma-separated PR labels
    pr_reviewers: str = ""  # comma-separated PR reviewers
    pr_assignees: str = ""  # comma-separated PR assignees
85
+
86
+
87
@app.command()
def copy(
    name: str = typer.Option("", "-n", "--name", help="Config name (used with src-root to find config)"),
    config_path_opt: str = typer.Option(
        "",
        "-c",
        "--config-path",
        help="Full path to config file (alternative to --name)",
    ),
    src_root_opt: str = typer.Option(
        "",
        "--src-root",
        help="Source repo root (default: find git root from cwd)",
    ),
    dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
    force_overwrite: bool = typer.Option(
        False,
        "--force-overwrite",
        help="Overwrite files even if header removed (opted out)",
    ),
    detailed_exit_code: bool = typer.Option(
        False,
        "--detailed-exit-code",
        help="Exit 0=no changes, 1=changes, 2=error",
    ),
    no_checkout: bool = typer.Option(
        False,
        "--no-checkout",
        help="Skip branch switching before sync",
    ),
    checkout_from_default: bool = typer.Option(
        False,
        "--checkout-from-default",
        help="Reset to origin/default before sync (for CI)",
    ),
    local: bool = typer.Option(
        False,
        "--local",
        help="No git operations after sync (no commit/push/PR)",
    ),
    no_prompt: bool = typer.Option(
        False,
        "-y",
        "--no-prompt",
        help="Skip confirmations (for CI)",
    ),
    no_pr: bool = typer.Option(
        False,
        "--no-pr",
        help="Push but skip PR creation",
    ),
    pr_title: str = typer.Option(
        "",
        "--pr-title",
        help="Override PR title (supports {name}, {dest_name})",
    ),
    pr_labels: str = typer.Option(
        "",
        "--pr-labels",
        help="Comma-separated PR labels",
    ),
    pr_reviewers: str = typer.Option(
        "",
        "--pr-reviewers",
        help="Comma-separated PR reviewers",
    ),
    pr_assignees: str = typer.Option(
        "",
        "--pr-assignees",
        help="Comma-separated PR assignees",
    ),
    skip_orphan_cleanup: bool = typer.Option(
        False,
        "--skip-orphan-cleanup",
        help="Skip deletion of orphaned synced files",
    ),
) -> None:
    """Copy files from SRC to DEST repositories.

    Resolves the config, snapshots the source repo's HEAD, then syncs each
    (optionally filtered) destination, capturing a per-destination log for
    the PR body.  With --detailed-exit-code the exit status reports whether
    any changes were made (0 none, 1 changes, 2 error).
    """
    # --name and --config-path are mutually exclusive ways to locate the config.
    if name and config_path_opt:
        logger.error("Cannot use both --name and --config-path")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)
    if not name and not config_path_opt:
        logger.error("Either --name or --config-path is required")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)

    src_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    config_path = Path(config_path_opt) if config_path_opt else resolve_config_path(src_root, name)

    if not config_path.exists():
        logger.error(f"Config not found: {config_path}")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)

    config = load_yaml_model(config_path, SrcConfig)
    src_repo = git_ops.get_repo(src_root)
    # Recorded so commits/PR bodies can reference the exact source revision.
    current_sha = git_ops.get_current_sha(src_repo)
    src_repo_url = git_ops.get_remote_url(src_repo, config.git_remote)

    # CLI flags win; config pr_defaults fill in anything left blank.
    opts = CopyOptions(
        dry_run=dry_run,
        force_overwrite=force_overwrite,
        no_checkout=no_checkout,
        checkout_from_default=checkout_from_default,
        local=local,
        no_prompt=no_prompt,
        no_pr=no_pr,
        skip_orphan_cleanup=skip_orphan_cleanup,
        pr_title=pr_title or config.pr_defaults.title,
        pr_labels=pr_labels or ",".join(config.pr_defaults.labels),
        pr_reviewers=pr_reviewers or ",".join(config.pr_defaults.reviewers),
        pr_assignees=pr_assignees or ",".join(config.pr_defaults.assignees),
    )

    destinations = config.destinations
    if dest_filter:
        filter_names = [n.strip() for n in dest_filter.split(",")]
        destinations = [d for d in destinations if d.name in filter_names]

    total_changes = 0
    for dest in destinations:
        try:
            # Each destination's log is captured separately for its PR body.
            with capture_sync_log(dest.name) as log_path:
                changes = _sync_destination(config, dest, src_root, current_sha, src_repo_url, opts, log_path)
                total_changes += changes
        except Exception as e:
            logger.error(f"Failed to sync {dest.name}: {e}")
            if detailed_exit_code:
                raise typer.Exit(EXIT_ERROR)
            # Without --detailed-exit-code, surface the original traceback.
            raise

    if detailed_exit_code:
        raise typer.Exit(EXIT_CHANGES if total_changes > 0 else EXIT_NO_CHANGES)
219
+
220
+
221
def _sync_destination(
    config: SrcConfig,
    dest: Destination,
    src_root: Path,
    current_sha: str,
    src_repo_url: str,
    opts: CopyOptions,
    log_path: Path,
) -> int:
    """Sync one destination repo and return the number of changes made.

    Git operations (branch switch and later commit/push/PR) are skipped in
    dry-run mode or when the user declines the branch-switch prompt.
    """
    dest_root = (src_root / dest.dest_path_relative).resolve()

    # Dry-run must not clone; fail fast if the checkout is missing.
    if opts.dry_run and not dest_root.exists():
        raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")

    dest_repo = _ensure_dest_repo(dest, dest_root, opts.dry_run)
    copy_branch = dest.resolved_copy_branch(config.name)

    # --no-checkout means "I'm already on the right branch"
    # Prompt decline means "skip git operations for this run"
    if opts.dry_run:
        skip_git_ops = True
    elif opts.no_checkout:
        skip_git_ops = False
    elif _prompt(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
        git_ops.prepare_copy_branch(
            repo=dest_repo,
            default_branch=dest.default_branch,
            copy_branch=copy_branch,
            from_default=opts.checkout_from_default,
        )
        skip_git_ops = False
    else:
        skip_git_ops = True

    result = _sync_paths(config, dest, src_root, dest_root, opts)
    _print_sync_summary(dest, result)

    if result.total == 0:
        logger.info(f"{dest.name}: No changes")
        return 0

    if skip_git_ops:
        return result.total

    _commit_and_pr(config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path)
    return result.total
267
+
268
+
269
def _print_sync_summary(dest: Destination, result: SyncResult) -> None:
    """Report the sync outcome for one destination on stderr."""
    echo = typer.echo
    echo(f"\nSyncing to {dest.name}...", err=True)
    if result.content_changes > 0:
        echo(f" [{result.content_changes} files synced]", err=True)
    if result.orphans_deleted > 0:
        echo(f" [-] {result.orphans_deleted} orphans deleted", err=True)
    if result.total > 0:
        echo(f"\n{result.total} changes ready.", err=True)
277
+
278
+
279
def _ensure_dest_repo(dest: Destination, dest_root: Path, dry_run: bool):
    """Return a git repo handle for *dest_root*, cloning it first if missing.

    A missing checkout is an error in dry-run mode, since cloning would be a
    side effect; a missing checkout without a configured repo_url is always
    an error.
    """
    if dest_root.exists():
        return git_ops.get_repo(dest_root)
    if dry_run:
        raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")
    if not dest.repo_url:
        raise ValueError(f"Dest {dest.name} not found and no repo_url configured")
    git_ops.clone_repo(dest.repo_url, dest_root)
    return git_ops.get_repo(dest_root)
287
+
288
+
289
def _sync_paths(
    config: SrcConfig,
    dest: Destination,
    src_root: Path,
    dest_root: Path,
    opts: CopyOptions,
) -> SyncResult:
    """Sync every configured path mapping into *dest_root* and tally the outcome."""
    outcome = SyncResult()
    for mapping in config.paths:
        changed, touched = _sync_path(
            mapping,
            src_root,
            dest_root,
            dest,
            config.name,
            opts.dry_run,
            opts.force_overwrite,
        )
        outcome.content_changes += changed
        outcome.synced_paths.update(touched)

    # Orphans are files we synced previously that this run did not produce.
    if not opts.skip_orphan_cleanup:
        outcome.orphans_deleted = _cleanup_orphans(dest_root, config.name, outcome.synced_paths, opts.dry_run)
    return outcome
313
+
314
+
315
def _iter_sync_files(
    mapping: PathMapping,
    src_root: Path,
    dest_root: Path,
):
    """Yield ``(src_file, dest_key, dest_path)`` triples for one path mapping.

    Handles three source shapes: a glob pattern, a directory (recursed), or a
    single file.  ``dest_key`` is the destination-relative path string used by
    per-destination skip/skip-section lookups.
    """
    src_pattern = src_root / mapping.src_path

    if "*" in mapping.src_path:
        # Everything before the first wildcard anchors relative paths on both
        # the source and destination sides.
        glob_prefix = mapping.src_path.split("*")[0].rstrip("/")
        dest_base = mapping.dest_path or glob_prefix
        matches = glob.glob(str(src_pattern), recursive=True)
        if not matches:
            logger.warning(f"Glob matched no files: {mapping.src_path}")
        for src_file in matches:
            src_path = Path(src_file)
            if src_path.is_file() and not mapping.is_excluded(src_path):
                rel = src_path.relative_to(src_root / glob_prefix)
                dest_key = str(Path(dest_base) / rel)
                yield src_path, dest_key, dest_root / dest_base / rel
    elif src_pattern.is_dir():
        # Directory mapping: mirror the whole tree (minus exclusions).
        dest_base = mapping.resolved_dest_path()
        for src_file in src_pattern.rglob("*"):
            if src_file.is_file() and not mapping.is_excluded(src_file):
                rel = src_file.relative_to(src_pattern)
                dest_key = str(Path(dest_base) / rel)
                yield src_file, dest_key, dest_root / dest_base / rel
    elif src_pattern.is_file():
        # Single-file mapping: dest_base is the full destination path.
        dest_base = mapping.resolved_dest_path()
        yield src_pattern, dest_base, dest_root / dest_base
    else:
        logger.warning(f"Source not found: {mapping.src_path}")
346
+
347
+
348
def _sync_path(
    mapping: PathMapping,
    src_root: Path,
    dest_root: Path,
    dest: Destination,
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> tuple[int, set[Path]]:
    """Copy one path mapping; return (change count, dest paths visited).

    Visited paths include unchanged files so orphan cleanup can tell which
    destination files this run still owns.
    """
    change_count = 0
    visited: set[Path] = set()

    for src_path, dest_key, dest_path in _iter_sync_files(mapping, src_root, dest_root):
        # The per-destination skip list is keyed by the dest-relative path.
        if dest.is_skipped(dest_key):
            continue
        change_count += _copy_file(
            src_path,
            dest_path,
            dest,
            dest_key,
            config_name,
            mapping.sync_mode,
            dry_run,
            force_overwrite,
        )
        visited.add(dest_path)

    return change_count, visited
376
+
377
+
378
def _copy_file(
    src: Path,
    dest_path: Path,
    dest: Destination,
    dest_key: str,
    config_name: str,
    sync_mode: SyncMode,
    dry_run: bool,
    force_overwrite: bool = False,
) -> int:
    """Copy a single file according to *sync_mode*; return 1 when a write happens."""
    try:
        src_content = header.remove_header(src.read_text())
    except UnicodeDecodeError:
        # Not decodable as text: fall back to a byte-for-byte copy.
        return _copy_binary_file(src, dest_path, sync_mode, dry_run)

    if sync_mode == SyncMode.SCAFFOLD:
        return _handle_scaffold(src_content, dest_path, dry_run)
    if sync_mode == SyncMode.REPLACE:
        return _handle_replace(src_content, dest_path, dry_run)
    if sync_mode == SyncMode.SYNC:
        skip_list = dest.skip_sections.get(dest_key, [])
        return _handle_sync(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)
401
+
402
+
403
def _copy_binary_file(src: Path, dest_path: Path, sync_mode: SyncMode, dry_run: bool) -> int:
    """Copy a non-text file; return 0 when the destination is already up to date."""
    payload = src.read_bytes()
    if dest_path.exists():
        # SCAFFOLD never touches an existing file; the other modes only skip
        # an exact byte-for-byte match.
        if sync_mode == SyncMode.SCAFFOLD:
            return 0
        if sync_mode in (SyncMode.REPLACE, SyncMode.SYNC) and dest_path.read_bytes() == payload:
            return 0
    return _write_binary_file(dest_path, payload, dry_run)
413
+
414
+
415
def _write_binary_file(dest_path: Path, content: bytes, dry_run: bool) -> int:
    """Write raw bytes to *dest_path* (or just log in dry-run); always 1 change."""
    if dry_run:
        logger.info(f"[DRY RUN] Would write binary: {dest_path}")
    else:
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        dest_path.write_bytes(content)
        logger.info(f"Wrote binary: {dest_path}")
    return 1
423
+
424
+
425
def _handle_scaffold(content: str, dest_path: Path, dry_run: bool) -> int:
    """One-time scaffold: write *content* only when *dest_path* does not exist."""
    return 0 if dest_path.exists() else _write_file(dest_path, content, dry_run)
429
+
430
+
431
def _handle_replace(content: str, dest_path: Path, dry_run: bool) -> int:
    """Mirror *content* unconditionally; skip only when dest already matches."""
    already_current = dest_path.exists() and dest_path.read_text() == content
    return 0 if already_current else _write_file(dest_path, content, dry_run)
435
+
436
+
437
def _handle_sync(
    src_content: str,
    dest_path: Path,
    skip_list: list[str],
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> int:
    """Sync-mode copy: managed header plus optional section-aware merging.

    A destination file whose managed header was removed is treated as
    "opted out" and left untouched unless *force_overwrite* is set.
    """
    if sections.has_sections(src_content, dest_path):
        return _handle_sync_sections(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)

    if dest_path.exists():
        existing = dest_path.read_text()
        has_hdr = header.has_header(existing)
        if not has_hdr and not force_overwrite:
            logger.info(f"Skipping {dest_path} (header removed - opted out)")
            return 0
        # Nothing to do only when the body matches AND the header is still
        # present; a force-overwritten opted-out file falls through so the
        # header gets re-added below.
        if header.remove_header(existing) == src_content and has_hdr:
            return 0

    new_content = header.add_header(src_content, dest_path, config_name)
    return _write_file(dest_path, new_content, dry_run)
459
+
460
+
461
def _write_file(dest_path: Path, content: str, dry_run: bool) -> int:
    """Write *content* (or log the intent in dry-run mode); always 1 change."""
    if dry_run:
        logger.info(f"[DRY RUN] Would write: {dest_path}")
    else:
        ensure_parents_write_text(dest_path, content)
        logger.info(f"Wrote: {dest_path}")
    return 1
468
+
469
+
470
def _handle_sync_sections(
    src_content: str,
    dest_path: Path,
    skip_list: list[str],
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> int:
    """Section-aware sync: replace only the managed sections of an existing file.

    New files receive the source content verbatim; existing files keep local
    content outside managed sections and any sections named in *skip_list*.
    """
    src_sections = sections.extract_sections(src_content, dest_path)

    if dest_path.exists():
        existing = dest_path.read_text()
        # A removed managed header means the destination opted out of syncing.
        if not header.has_header(existing) and not force_overwrite:
            logger.info(f"Skipping {dest_path} (header removed - opted out)")
            return 0
        dest_body = header.remove_header(existing)
        new_body = sections.replace_sections(dest_body, src_sections, dest_path, skip_list)
    else:
        new_body = src_content

    new_content = header.add_header(new_body, dest_path, config_name)

    # Avoid a no-op write (and a spurious change count) when nothing differs.
    if dest_path.exists() and dest_path.read_text() == new_content:
        return 0

    return _write_file(dest_path, new_content, dry_run)
496
+
497
+
498
def _cleanup_orphans(
    dest_root: Path,
    config_name: str,
    synced_paths: set[Path],
    dry_run: bool,
) -> int:
    """Delete previously-synced files that this run did not produce; return count."""
    removed = 0
    orphans = (p for p in _find_files_with_config(dest_root, config_name) if p not in synced_paths)
    for orphan in orphans:
        if dry_run:
            logger.info(f"[DRY RUN] Would delete orphan: {orphan}")
        else:
            orphan.unlink()
            logger.info(f"Deleted orphan: {orphan}")
        removed += 1
    return removed
514
+
515
+
516
def _find_files_with_config(dest_root: Path, config_name: str) -> list[Path]:
    """List files under *dest_root* whose managed header names *config_name*."""
    return [
        path
        for path in dest_root.rglob("*")
        # Never look inside the .git directory.
        if ".git" not in path.parts and header.file_get_config_name(path) == config_name
    ]
524
+
525
+
526
def _commit_and_pr(
    config: SrcConfig,
    repo,
    dest_root: Path,
    dest: Destination,
    sha: str,
    src_repo_url: str,
    opts: CopyOptions,
    log_path: Path,
) -> None:
    """Commit, push, and open/update a PR for the synced destination.

    Each stage (commit, push, PR) is individually confirmable; declining a
    prompt stops there but keeps the earlier stages' effects.  --local skips
    everything; --no-pr stops after the push.
    """
    if opts.local:
        logger.info("Local mode: skipping commit/push/PR")
        return

    copy_branch = dest.resolved_copy_branch(config.name)

    if not _prompt(f"Commit changes to {dest.name}?", opts.no_prompt):
        return

    # Short SHA ties the commit back to the exact source revision.
    commit_msg = f"chore: sync {config.name} from {sha[:8]}"
    git_ops.commit_changes(repo, commit_msg)
    typer.echo(f" Committed: {commit_msg}", err=True)

    if not _prompt(f"Push {dest.name} to origin?", opts.no_prompt):
        return

    # Force-push: the copy branch is fully owned by the sync tool.
    git_ops.push_branch(repo, copy_branch, force=True)
    typer.echo(f" Pushed: {copy_branch} (force)", err=True)

    if opts.no_pr or not _prompt(f"Create PR for {dest.name}?", opts.no_prompt):
        return

    # The captured per-destination sync log is embedded in the PR body.
    sync_log = log_path.read_text() if log_path.exists() else ""
    pr_body = config.pr_defaults.format_body(
        src_repo_url=src_repo_url,
        src_sha=sha,
        sync_log=sync_log,
        dest_name=dest.name,
    )

    title = opts.pr_title.format(name=config.name, dest_name=dest.name)
    pr_url = git_ops.create_or_update_pr(
        dest_root,
        copy_branch,
        title,
        pr_body,
        opts.pr_labels.split(",") if opts.pr_labels else None,
        opts.pr_reviewers.split(",") if opts.pr_reviewers else None,
        opts.pr_assignees.split(",") if opts.pr_assignees else None,
    )
    if pr_url:
        typer.echo(f" Created PR: {pr_url}", err=True)