path-sync 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
path_sync/__init__.py ADDED
@@ -0,0 +1 @@
1
+ """Path sync package for syncing files across repositories."""
path_sync/__main__.py ADDED
@@ -0,0 +1,14 @@
1
+ import logging
2
+
3
+ from path_sync import cmd_boot, cmd_copy, cmd_validate # noqa: F401
4
+ from path_sync.models import LOG_FORMAT
5
+ from path_sync.typer_app import app
6
+
7
+
8
def main() -> None:
    """CLI entry point: configure logging, then dispatch to the Typer app."""
    # INFO-level logging with the package-wide format; commands log progress.
    logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
    app()


if __name__ == "__main__":
    main()
path_sync/cmd_boot.py ADDED
@@ -0,0 +1,105 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+ from typing import Annotated
6
+
7
+ import typer
8
+
9
+ from path_sync import git_ops, workflow_gen
10
+ from path_sync.file_utils import ensure_parents_write_text
11
+ from path_sync.models import (
12
+ Destination,
13
+ PathMapping,
14
+ SrcConfig,
15
+ find_repo_root,
16
+ resolve_config_path,
17
+ )
18
+ from path_sync.typer_app import app
19
+ from path_sync.yaml_utils import dump_yaml_model, load_yaml_model
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
@app.command()
def boot(
    name: str = typer.Option(..., "-n", "--name", help="Config name"),
    dest_paths: Annotated[
        list[str], typer.Option("-d", "--dest", help="Destination relative paths")
    ] = [],
    always_paths: Annotated[
        list[str], typer.Option("-p", "--path", help="Always paths (glob patterns)")
    ] = [],
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
    regen: bool = typer.Option(False, "--regen", help="Regenerate all files"),
) -> None:
    """Initialize or update SRC repo tooling.

    Reuses an existing config (unless --regen) or builds a new one from the
    repo's origin remote and the --dest/--path options, then writes whatever
    tooling the config enables (copy workflow, justfile recipe).
    """
    repo_root = find_repo_root(Path.cwd())
    config_path = resolve_config_path(repo_root, name, SrcConfig)

    if config_path.exists() and not regen:
        # Existing config wins; it is loaded, not rewritten.
        config = load_yaml_model(config_path, SrcConfig)
        logger.info(f"Using existing config: {config_path}")
    else:
        # Build a fresh config from the current repo state and CLI options.
        src_repo = git_ops.get_repo(repo_root)
        src_repo_url = git_ops.get_remote_url(src_repo, "origin")

        destinations = _build_destinations(repo_root, dest_paths)
        path_mappings = [PathMapping(src_path=p) for p in always_paths]

        config = SrcConfig(
            name=name,
            src_repo_url=src_repo_url,
            always_paths=path_mappings,
            destinations=destinations,
        )
        config_content = dump_yaml_model(config)
        _write_file(config_path, config_content, dry_run, "config")

    if config.src_tools_update.github_workflows:
        # Workflow is only (re)generated when missing or --regen was given.
        workflow_path = repo_root / workflow_gen.copy_workflow_path(name)
        if not workflow_path.exists() or regen:
            content = workflow_gen.generate_copy_workflow(config)
            _write_file(workflow_path, content, dry_run, "workflow")

    if config.src_tools_update.justfile:
        _update_justfile(repo_root, name, dry_run)

    if dry_run:
        logger.info("Dry run complete - no files written")
    else:
        logger.info("Boot complete")
72
+
73
+
74
def _build_destinations(repo_root: Path, dest_paths: list[str]) -> list[Destination]:
    """Create a Destination entry per relative path, probing git metadata."""
    results: list[Destination] = []
    for rel in dest_paths:
        target_dir = (repo_root / rel).resolve()
        entry = Destination(name=target_dir.name, dest_path_relative=rel)

        # Enrich with remote URL / default branch when the path is a git repo.
        if git_ops.is_git_repo(target_dir):
            repo = git_ops.get_repo(target_dir)
            entry.repo_url = git_ops.get_remote_url(repo, "origin")
            entry.default_branch = git_ops.get_default_branch(repo)
            logger.info(f"Found git repo at {target_dir}: {entry.repo_url}")

        results.append(entry)

    return results
91
+
92
+
93
def _write_file(path: Path, content: str, dry_run: bool, desc: str) -> None:
    """Write *content* to *path* (creating parents), or only log under dry-run."""
    if dry_run:
        logger.info(f"[DRY RUN] Would write {desc}: {path}")
    else:
        ensure_parents_write_text(path, content)
        logger.info(f"Wrote {desc}: {path}")
99
+
100
+
101
def _update_justfile(repo_root: Path, name: str, dry_run: bool) -> None:
    """Add or refresh the copy recipe for *name* in the repo's justfile."""
    workflow_gen.update_justfile(
        repo_root / "justfile",
        name,
        workflow_gen.JustfileRecipeKind.COPY,
        dry_run,
    )
path_sync/cmd_copy.py ADDED
@@ -0,0 +1,546 @@
1
+ from __future__ import annotations
2
+
3
+ import glob
4
+ import logging
5
+ import subprocess
6
+ import tempfile
7
+ from contextlib import contextmanager
8
+ from dataclasses import dataclass, field
9
+ from pathlib import Path
10
+
11
+ import typer
12
+
13
+ from path_sync import git_ops, header, sections, workflow_gen
14
+ from path_sync.file_utils import ensure_parents_write_text
15
+ from path_sync.models import (
16
+ LOG_FORMAT,
17
+ Destination,
18
+ PathMapping,
19
+ SrcConfig,
20
+ find_repo_root,
21
+ resolve_config_path,
22
+ )
23
+ from path_sync.typer_app import app
24
+ from path_sync.yaml_utils import load_yaml_model
25
+
26
+ logger = logging.getLogger(__name__)
27
+
28
+ EXIT_NO_CHANGES = 0
29
+ EXIT_CHANGES = 1
30
+ EXIT_ERROR = 2
31
+
32
+
33
@dataclass
class SyncResult:
    """Tally of changes made while syncing a single destination."""

    content_changes: int = 0  # files written from path mappings
    tools_changes: int = 0  # workflow/justfile tooling updates
    orphans_deleted: int = 0  # previously-synced files removed
    synced_paths: set[Path] = field(default_factory=set)  # dest files produced

    @property
    def total(self) -> int:
        """Overall change count across all categories."""
        return sum((self.content_changes, self.tools_changes, self.orphans_deleted))
43
+
44
+
45
@contextmanager
def capture_sync_log(dest_name: str):
    """Tee 'path_sync' log output to a temp file for one destination.

    Yields the log file's path; its contents are later embedded in the PR
    body. The handler is detached and the temp directory removed on exit.
    """
    with tempfile.TemporaryDirectory(prefix="path-sync-") as tmpdir:
        log_path = Path(tmpdir) / f"{dest_name}.log"
        file_handler = logging.FileHandler(log_path, mode="w")
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(logging.Formatter(LOG_FORMAT))
        # Attach at the package logger so all path_sync.* modules are captured.
        root_logger = logging.getLogger("path_sync")
        root_logger.addHandler(file_handler)
        try:
            yield log_path
        finally:
            # Always detach so handlers don't accumulate across destinations.
            file_handler.close()
            root_logger.removeHandler(file_handler)
59
+
60
+
61
@dataclass
class CopyOptions:
    """Resolved CLI flags controlling a single `copy` run."""

    dry_run: bool = False  # log intended writes, touch nothing
    force_overwrite: bool = False  # overwrite even if dest header was removed
    skip_checkout: bool = False  # don't switch dest repo to the copy branch
    checkout_from_default: bool = False  # reset branch from origin default (CI)
    force_push: bool = True  # force-push the copy branch
    force_tools_pr: bool = False  # open a PR even for tools-only changes
    no_commit: bool = False  # stop after writing files
    no_push: bool = False  # commit but don't push
    no_pr: bool = False  # push but don't open/update a PR
    pr_title: str = ""  # PR title template (falls back to config defaults)
    pr_labels: str = ""  # comma-separated PR labels
    pr_reviewers: str = ""  # comma-separated PR reviewers
    pr_assignees: str = ""  # comma-separated PR assignees
76
+
77
+
78
@app.command()
def copy(
    name: str = typer.Option(..., "-n", "--name", help="Config name"),
    dest_filter: str = typer.Option(
        "", "-d", "--dest", help="Filter destinations (comma-separated)"
    ),
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
    force_overwrite: bool = typer.Option(
        False,
        "--force-no-header-updates",
        help="Overwrite files even if header removed",
    ),
    detailed_exit_code: bool = typer.Option(False, "--detailed-exit-code"),
    skip_dest_checkout: bool = typer.Option(False, "--skip-dest-checkout"),
    checkout_from_default: bool = typer.Option(
        False,
        "--checkout-from-default",
        help="Reset to origin/default before sync (for CI)",
    ),
    force_push: bool = typer.Option(True, "--force-push/--no-force-push"),
    force_tools_pr: bool = typer.Option(
        False, "--force-tools-pr", help="Create PR even if only tools changed"
    ),
    no_commit: bool = typer.Option(False, "--no-commit"),
    no_push: bool = typer.Option(False, "--no-push"),
    no_pr: bool = typer.Option(False, "--no-pr"),
    pr_title: str = typer.Option("", "--pr-title"),
    pr_labels: str = typer.Option("", "--pr-labels"),
    pr_reviewers: str = typer.Option("", "--pr-reviewers"),
    pr_assignees: str = typer.Option("", "--pr-assignees"),
) -> None:
    """Copy files from SRC to DEST repositories.

    Loads the named config, syncs each (optionally filtered) destination,
    and — with --detailed-exit-code — exits 0 when nothing changed, 1 when
    changes were made, 2 on error.
    """
    src_root = find_repo_root(Path.cwd())
    config_path = resolve_config_path(src_root, name)

    if not config_path.exists():
        logger.error(f"Config not found: {config_path}")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)

    config = load_yaml_model(config_path, SrcConfig)
    src_repo = git_ops.get_repo(src_root)
    current_sha = git_ops.get_current_sha(src_repo)
    src_repo_url = git_ops.get_remote_url(src_repo, config.git_remote)

    # CLI flags win; empty PR fields fall back to the config's pr_defaults.
    opts = CopyOptions(
        dry_run=dry_run,
        force_overwrite=force_overwrite,
        skip_checkout=skip_dest_checkout,
        checkout_from_default=checkout_from_default,
        force_push=force_push,
        force_tools_pr=force_tools_pr,
        no_commit=no_commit,
        no_push=no_push,
        no_pr=no_pr,
        pr_title=pr_title or config.pr_defaults.title,
        pr_labels=pr_labels or ",".join(config.pr_defaults.labels),
        pr_reviewers=pr_reviewers or ",".join(config.pr_defaults.reviewers),
        pr_assignees=pr_assignees or ",".join(config.pr_defaults.assignees),
    )

    destinations = config.destinations
    if dest_filter:
        filter_names = [n.strip() for n in dest_filter.split(",")]
        destinations = [d for d in destinations if d.name in filter_names]

    total_changes = 0
    for dest in destinations:
        try:
            with capture_sync_log(dest.name) as log_path:
                changes = _sync_destination(
                    config, dest, src_root, current_sha, src_repo_url, opts, log_path
                )
                total_changes += changes
        except Exception as e:
            # logger.exception records the traceback; logger.error did not.
            logger.exception(f"Failed to sync {dest.name}: {e}")
            if detailed_exit_code:
                raise typer.Exit(EXIT_ERROR)
            raise

    if detailed_exit_code:
        raise typer.Exit(EXIT_CHANGES if total_changes > 0 else EXIT_NO_CHANGES)
159
+
160
+
161
def _sync_destination(
    config: SrcConfig,
    dest: Destination,
    src_root: Path,
    current_sha: str,
    src_repo_url: str,
    opts: CopyOptions,
    log_path: Path,
) -> int:
    """Sync one destination; return the number of changes it will commit.

    Returns 0 when nothing changed or when only tooling changed (unless
    --force-tools-pr). Under dry-run, reports the would-be count without
    committing anything.
    """
    dest_root = (src_root / dest.dest_path_relative).resolve()
    dest_repo = _ensure_dest_repo(dest, dest_root)

    # Switch the dest repo onto its copy branch before writing anything.
    if not opts.skip_checkout and not opts.dry_run:
        git_ops.prepare_copy_branch(
            repo=dest_repo,
            default_branch=dest.default_branch,
            copy_branch=dest.copy_branch,
            from_default=opts.checkout_from_default,
        )

    result = _sync_paths(config, dest, src_root, dest_root, opts)

    if result.total == 0:
        logger.info(f"{dest.name}: No changes")
        return 0
    logger.info(f"{dest.name}: Found {result.total} changes")
    # Tools-only changes don't warrant a PR unless explicitly forced.
    if (
        result.content_changes == 0
        and result.orphans_deleted == 0
        and not opts.force_tools_pr
    ):
        logger.info(f"{dest.name}: Tools-only changes, skipping PR")
        return 0

    if opts.dry_run:
        logger.info(f"{dest.name}: Would make {result.total} changes")
        return result.total

    return _commit_and_pr(
        config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path
    )
202
+
203
+
204
def _ensure_dest_repo(dest: Destination, dest_root: Path):
    """Return a repo handle for *dest_root*, cloning it first when absent."""
    if dest_root.exists():
        return git_ops.get_repo(dest_root)
    if not dest.repo_url:
        raise ValueError(f"Dest {dest.name} not found and no repo_url configured")
    git_ops.clone_repo(dest.repo_url, dest_root)
    return git_ops.get_repo(dest_root)
210
+
211
+
212
def _sync_paths(
    config: SrcConfig,
    dest: Destination,
    src_root: Path,
    dest_root: Path,
    opts: CopyOptions,
) -> SyncResult:
    """Run every configured path mapping, then tool updates and orphan sweep."""
    outcome = SyncResult()

    for path_mapping in config.paths:
        file_changes, written = _sync_path(
            path_mapping,
            src_root,
            dest_root,
            dest,
            config.name,
            opts.dry_run,
            opts.force_overwrite,
        )
        outcome.content_changes += file_changes
        outcome.synced_paths.update(written)

    outcome.tools_changes = _sync_tools_update(config, dest, dest_root, opts)
    # Files this config wrote on earlier runs but no longer produces get removed.
    outcome.orphans_deleted = _cleanup_orphans(
        dest_root, config.name, outcome.synced_paths, opts.dry_run
    )
    return outcome
238
+
239
+
240
def _sync_path(
    mapping: PathMapping,
    src_root: Path,
    dest_root: Path,
    dest: Destination,
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> tuple[int, set[Path]]:
    """Copy one mapping (glob pattern, directory, or single file) into dest.

    Returns (files changed, set of destination paths produced); the path set
    feeds orphan cleanup.
    """
    src_pattern = src_root / mapping.src_path
    changes = 0
    synced: set[Path] = set()

    if "*" in mapping.src_path:
        # Glob mapping: mirror matches under dest, relative to the glob's
        # static prefix (everything before the first '*').
        glob_prefix = mapping.src_path.split("*")[0].rstrip("/")
        dest_base = mapping.dest_path or glob_prefix
        matches = glob.glob(str(src_pattern), recursive=True)
        if not matches:
            logger.warning(f"Glob matched no files: {mapping.src_path}")
        for src_file in matches:
            src_path = Path(src_file)
            if src_path.is_file():
                rel = src_path.relative_to(src_root / glob_prefix)
                dest_path = dest_root / dest_base / rel
                # dest_key is the dest-relative path used for skip_sections lookup.
                dest_key = str(Path(dest_base) / rel)
                changes += _copy_with_header(
                    src_path,
                    dest_path,
                    dest,
                    dest_key,
                    config_name,
                    dry_run,
                    force_overwrite,
                )
                synced.add(dest_path)
    elif src_pattern.is_dir():
        # Directory mapping: copy the tree recursively.
        dest_base = mapping.resolved_dest_path()
        for src_file in src_pattern.rglob("*"):
            if src_file.is_file():
                rel = src_file.relative_to(src_pattern)
                dest_path = dest_root / dest_base / rel
                dest_key = str(Path(dest_base) / rel)
                changes += _copy_with_header(
                    src_file,
                    dest_path,
                    dest,
                    dest_key,
                    config_name,
                    dry_run,
                    force_overwrite,
                )
                synced.add(dest_path)
    elif src_pattern.is_file():
        # Single-file mapping.
        dest_base = mapping.resolved_dest_path()
        dest_path = dest_root / dest_base
        changes += _copy_with_header(
            src_pattern,
            dest_path,
            dest,
            dest_base,
            config_name,
            dry_run,
            force_overwrite,
        )
        synced.add(dest_path)
    else:
        logger.warning(f"Source not found: {mapping.src_path}")

    return changes, synced
309
+
310
+
311
def _copy_with_header(
    src: Path,
    dest_path: Path,
    dest: Destination,
    dest_key: str,
    config_name: str,
    dry_run: bool,
    force_overwrite: bool = False,
) -> int:
    """Copy *src* to *dest_path* with a sync header; return 1 if it changed.

    Files containing section markers are merged section-by-section; all
    others are replaced wholesale. A destination file whose header was
    removed by hand is treated as opted out and skipped unless
    *force_overwrite*.
    """
    src_content = src.read_text()
    # Per-file list of section names this destination keeps its own copy of.
    skip_list = dest.skip_sections.get(dest_key, [])

    if sections.has_sections(src_content):
        return _copy_with_sections(
            src_content, dest_path, skip_list, config_name, dry_run, force_overwrite
        )

    # No sections: full-file replacement
    if dest_path.exists():
        existing = dest_path.read_text()
        if not header.has_header(existing) and not force_overwrite:
            logger.info(f"Skipping {dest_path} (header removed - opted out)")
            return 0
        # Already identical once the header is stripped: nothing to do.
        if header.remove_header(existing) == src_content:
            return 0

    new_content = header.add_header(src_content, dest_path.suffix, config_name)
    if dry_run:
        logger.info(f"[DRY RUN] Would write: {dest_path}")
        return 1

    ensure_parents_write_text(dest_path, new_content)
    logger.info(f"Wrote: {dest_path}")
    return 1
345
+
346
+
347
def _copy_with_sections(
    src_content: str,
    dest_path: Path,
    skip_list: list[str],
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> int:
    """Merge marked sections from *src_content* into *dest_path*.

    Sections named in *skip_list* keep the destination's version. A dest
    file whose sync header was removed is treated as opted out and skipped
    unless *force_overwrite*. Returns 1 when the file changes (or would,
    under dry-run), 0 otherwise.
    """
    src_sections = sections.extract_sections(src_content)

    existing: str | None = None
    if dest_path.exists():
        existing = dest_path.read_text()
        if not header.has_header(existing) and not force_overwrite:
            logger.info(f"Skipping {dest_path} (header removed - opted out)")
            return 0
        dest_body = header.remove_header(existing)
        new_body = sections.replace_sections(dest_body, src_sections, skip_list)
    else:
        new_body = src_content

    new_content = header.add_header(new_body, dest_path.suffix, config_name)

    # Compare against the content read above instead of re-reading the file.
    if existing is not None and existing == new_content:
        return 0

    if dry_run:
        logger.info(f"[DRY RUN] Would write: {dest_path}")
        return 1

    ensure_parents_write_text(dest_path, new_content)
    logger.info(f"Wrote: {dest_path}")
    return 1
380
+
381
+
382
def _cleanup_orphans(
    dest_root: Path,
    config_name: str,
    synced_paths: set[Path],
    dry_run: bool,
) -> int:
    """Remove previously-synced files that this run no longer produced."""
    removed = 0
    for orphan in _find_files_with_config(dest_root, config_name):
        if orphan in synced_paths:
            continue
        if dry_run:
            logger.info(f"[DRY RUN] Would delete orphan: {orphan}")
        else:
            orphan.unlink()
            logger.info(f"Deleted orphan: {orphan}")
        removed += 1
    return removed
398
+
399
+
400
def _find_files_with_config(dest_root: Path, config_name: str) -> list[Path]:
    """Find files under *dest_root* whose sync header names *config_name*."""
    return [
        path
        for ext in header.COMMENT_PREFIXES
        for path in dest_root.rglob(f"*{ext}")
        # .git internals are never sync targets.
        if ".git" not in path.parts
        and header.file_get_config_name(path) == config_name
    ]
409
+
410
+
411
def _sync_tools_update(
    config: SrcConfig,
    dest: Destination,
    dest_root: Path,
    opts: CopyOptions,
) -> int:
    """Install/refresh per-destination tooling; return the change count.

    Wheel copies are intentionally not counted, so a wheel-only update does
    not by itself produce a commit/PR.
    """
    changes = 0

    if dest.tools_update.github_workflows:
        # The validate workflow is only created when missing, never rewritten.
        wf_path = dest_root / workflow_gen.validate_workflow_path(config.name)
        if not wf_path.exists():
            content = workflow_gen.generate_validate_workflow(
                name=config.name,
                copy_branch=dest.copy_branch,
                default_branch=dest.default_branch,
            )
            if opts.dry_run:
                logger.info(f"[DRY RUN] Would write workflow: {wf_path}")
            else:
                ensure_parents_write_text(wf_path, content)
                logger.info(f"Wrote workflow: {wf_path}")
            changes += 1

    if dest.tools_update.justfile:
        changes += _sync_justfile_recipe(config.name, dest_root, opts)

    if dest.tools_update.path_sync_wheel:
        _sync_wheel(dest_root, opts)

    return changes
441
+
442
+
443
def _sync_justfile_recipe(name: str, dest_root: Path, opts: CopyOptions) -> int:
    """Ensure the validate recipe exists in dest's justfile; 1 if modified."""
    was_updated = workflow_gen.update_justfile(
        dest_root / "justfile",
        name,
        workflow_gen.JustfileRecipeKind.VALIDATE,
        opts.dry_run,
    )
    return int(bool(was_updated))
449
+
450
+
451
def _sync_wheel(dest_root: Path, opts: CopyOptions) -> None:
    """Build the path-sync wheel and place it under dest's .github directory.

    Stale path_sync-*.whl files are removed first so exactly one wheel
    remains. Wheel updates are not counted toward the change total.
    """
    # The package root is two levels up from this module file.
    pkg_root = Path(__file__).parent.parent
    wheel = _build_wheel(pkg_root, opts.dry_run)
    if not wheel:
        return

    dest_dir = dest_root / ".github"
    dest_wheel = dest_dir / wheel.name

    # Drop previously-copied wheels whose name/version differs.
    for old_wheel in dest_dir.glob("path_sync-*.whl"):
        if old_wheel != dest_wheel:
            if opts.dry_run:
                logger.info(f"[DRY RUN] Would remove old wheel: {old_wheel}")
            else:
                old_wheel.unlink()
                logger.info(f"Removed old wheel: {old_wheel}")

    if opts.dry_run:
        logger.info(f"[DRY RUN] Would copy wheel: {dest_wheel}")
        return

    dest_dir.mkdir(parents=True, exist_ok=True)
    dest_wheel.write_bytes(wheel.read_bytes())
    logger.info(f"Copied wheel: {dest_wheel}")
475
+
476
+
477
def _build_wheel(pkg_root: Path, dry_run: bool) -> Path | None:
    """Build a fresh wheel with `uv build` and return its path.

    Under dry-run no build happens; the newest existing wheel (if any) is
    returned so the copy step can still be previewed.

    Raises:
        RuntimeError: if the build fails or doesn't yield exactly one wheel.
    """
    dist_dir = pkg_root / "dist"

    if dry_run:
        logger.info("[DRY RUN] Would build wheel")
        wheels = sorted(dist_dir.glob("path_sync-*.whl")) if dist_dir.exists() else []
        return wheels[-1] if wheels else None

    # Clear stale wheels so the post-build glob is unambiguous.
    if dist_dir.exists():
        for old in dist_dir.glob("path_sync-*.whl"):
            old.unlink()

    logger.info("Building wheel...")
    result = subprocess.run(
        ["uv", "build", "--wheel"],
        cwd=pkg_root,
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        logger.error(f"Failed to build wheel: {result.stderr}")
        raise RuntimeError("Wheel build failed")

    wheels = sorted(dist_dir.glob("path_sync-*.whl"))
    # An assert would be stripped under `python -O`; fail loudly instead.
    if len(wheels) != 1:
        raise RuntimeError(f"Expected 1 wheel, got {len(wheels)}")
    return wheels[0]
503
+
504
+
505
def _commit_and_pr(
    config: SrcConfig,
    repo,
    dest_root: Path,
    dest: Destination,
    sha: str,
    src_repo_url: str,
    opts: CopyOptions,
    log_path: Path,
) -> int:
    """Commit, push, and open/update the sync PR; always reports 1 change.

    Each of --no-commit / --no-push / --no-pr short-circuits the pipeline
    after the preceding step has run.
    """
    if opts.no_commit:
        return 1

    # The short SHA ties the dest commit back to the SRC revision it mirrors.
    git_ops.commit_changes(repo, f"chore: sync {config.name} from {sha[:8]}")

    if opts.no_push:
        return 1

    git_ops.push_branch(repo, dest.copy_branch, force=opts.force_push)

    if opts.no_pr:
        return 1

    # The captured per-destination sync log becomes part of the PR body.
    sync_log = log_path.read_text() if log_path.exists() else ""
    pr_body = config.pr_defaults.format_body(
        src_repo_url=src_repo_url,
        src_sha=sha,
        sync_log=sync_log,
        dest_name=dest.name,
    )

    title = opts.pr_title.format(name=config.name, dest_name=dest.name)
    git_ops.create_or_update_pr(
        dest_root,
        dest.copy_branch,
        title,
        pr_body,
        opts.pr_labels.split(",") if opts.pr_labels else None,
        opts.pr_reviewers.split(",") if opts.pr_reviewers else None,
        opts.pr_assignees.split(",") if opts.pr_assignees else None,
    )
    return 1