path-sync 0.3.0__py3-none-any.whl → 0.3.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
path_sync/__init__.py CHANGED
@@ -3,7 +3,7 @@
3
3
  from path_sync import copy
4
4
  from path_sync import config
5
5
 
6
- VERSION = "0.3.0"
6
+ VERSION = "0.3.2"
7
7
  __all__ = [
8
8
  "copy",
9
9
  "config",
@@ -0,0 +1 @@
1
+ """Internal implementation - not part of public API."""
@@ -0,0 +1,90 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+ from typing import Annotated
6
+
7
+ import typer
8
+
9
+ from path_sync._internal import git_ops
10
+ from path_sync._internal.file_utils import ensure_parents_write_text
11
+ from path_sync._internal.models import (
12
+ Destination,
13
+ PathMapping,
14
+ SrcConfig,
15
+ find_repo_root,
16
+ resolve_config_path,
17
+ )
18
+ from path_sync._internal.typer_app import app
19
+ from path_sync._internal.yaml_utils import dump_yaml_model, load_yaml_model
20
+
21
+ logger = logging.getLogger(__name__)
22
+
23
+
24
@app.command()
def boot(
    name: str = typer.Option(..., "-n", "--name", help="Config name"),
    dest_paths: Annotated[list[str], typer.Option("-d", "--dest", help="Destination relative paths")] = [],
    sync_paths: Annotated[list[str], typer.Option("-p", "--path", help="Paths to sync (glob patterns)")] = [],
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
    regen: bool = typer.Option(False, "--regen", help="Regenerate config"),
    src_root_opt: str = typer.Option(
        "",
        "--src-root",
        help="Source repo root (default: find git root from cwd)",
    ),
) -> None:
    """Initialize or update SRC repo config.

    If a config for *name* already exists it is only loaded (validated)
    unless --regen is given; otherwise a fresh SrcConfig is assembled from
    the repo's origin URL, the destination paths and the sync path
    patterns, then written as YAML (skipped under --dry-run).
    """
    repo_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    config_path = resolve_config_path(repo_root, name)

    if config_path.exists() and not regen:
        # Loaded purely so schema errors surface; the parsed model is discarded.
        load_yaml_model(config_path, SrcConfig)
        logger.info(f"Using existing config: {config_path}")
    else:
        src_repo = git_ops.get_repo(repo_root)
        src_repo_url = git_ops.get_remote_url(src_repo, "origin")

        destinations = _build_destinations(repo_root, dest_paths)
        path_mappings = [PathMapping(src_path=p) for p in sync_paths]

        config = SrcConfig(
            name=name,
            src_repo_url=src_repo_url,
            paths=path_mappings,
            destinations=destinations,
        )
        config_content = dump_yaml_model(config)
        _write_file(config_path, config_content, dry_run, "config")

    if dry_run:
        logger.info("Dry run complete - no files written")
    else:
        logger.info("Boot complete")
64
+
65
+
66
def _build_destinations(repo_root: Path, dest_paths: list[str]) -> list[Destination]:
    """Turn relative destination paths into Destination entries.

    When a destination directory is already a git checkout, its origin URL
    and default branch are captured on the entry as well.
    """
    entries: list[Destination] = []
    for relative in dest_paths:
        target_dir = (repo_root / relative).resolve()
        entry = Destination(name=target_dir.name, dest_path_relative=relative)

        if git_ops.is_git_repo(target_dir):
            repo = git_ops.get_repo(target_dir)
            entry.repo_url = git_ops.get_remote_url(repo, "origin")
            entry.default_branch = git_ops.get_default_branch(repo)
            logger.info(f"Found git repo at {target_dir}: {entry.repo_url}")

        entries.append(entry)

    return entries
83
+
84
+
85
def _write_file(path: Path, content: str, dry_run: bool, desc: str) -> None:
    """Write *content* to *path* creating parent dirs; dry-run only logs."""
    if not dry_run:
        ensure_parents_write_text(path, content)
        logger.info(f"Wrote {desc}: {path}")
        return
    logger.info(f"[DRY RUN] Would write {desc}: {path}")
@@ -0,0 +1,589 @@
1
+ from __future__ import annotations
2
+
3
+ import glob
4
+ import logging
5
+ import tempfile
6
+ from contextlib import contextmanager
7
+ from dataclasses import dataclass, field
8
+ from pathlib import Path
9
+
10
+ import typer
11
+
12
+ from path_sync import sections
13
+ from path_sync._internal import git_ops, header
14
+ from path_sync._internal.file_utils import ensure_parents_write_text
15
+ from path_sync._internal.models import (
16
+ LOG_FORMAT,
17
+ Destination,
18
+ PathMapping,
19
+ SrcConfig,
20
+ SyncMode,
21
+ find_repo_root,
22
+ resolve_config_path,
23
+ )
24
+ from path_sync._internal.typer_app import app
25
+ from path_sync._internal.yaml_utils import load_yaml_model
26
+
27
+ logger = logging.getLogger(__name__)
28
+
29
+ EXIT_NO_CHANGES = 0
30
+ EXIT_CHANGES = 1
31
+ EXIT_ERROR = 2
32
+
33
+
34
+ def _prompt(message: str, no_prompt: bool) -> bool:
35
+ if no_prompt:
36
+ return True
37
+ try:
38
+ response = input(f"{message} [y/n]: ").strip().lower()
39
+ return response == "y"
40
+ except (EOFError, KeyboardInterrupt):
41
+ return False
42
+
43
+
44
@dataclass
class SyncResult:
    """Tally of the changes made while syncing a single destination."""

    content_changes: int = 0  # files written or updated
    orphans_deleted: int = 0  # stale previously-synced files removed
    synced_paths: set[Path] = field(default_factory=set)  # every dest path produced

    @property
    def total(self) -> int:
        """Combined count of content changes and orphan deletions."""
        return self.orphans_deleted + self.content_changes
53
+
54
+
55
@contextmanager
def capture_sync_log(dest_name: str):
    """Tee "path_sync" log records for one destination into a temp file.

    Yields the path of the log file. The file lives inside a
    TemporaryDirectory that is removed when the context exits, so callers
    must read it *before* leaving the `with` block.
    """
    with tempfile.TemporaryDirectory(prefix="path-sync-") as tmpdir:
        log_path = Path(tmpdir) / f"{dest_name}.log"
        file_handler = logging.FileHandler(log_path, mode="w")
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(logging.Formatter(LOG_FORMAT))
        # Attach to the package-level logger so records from every
        # path_sync module are captured, not just this one.
        root_logger = logging.getLogger("path_sync")
        root_logger.addHandler(file_handler)
        try:
            yield log_path
        finally:
            # Close first so the file is flushed and released before the
            # TemporaryDirectory cleanup deletes it.
            file_handler.close()
            root_logger.removeHandler(file_handler)
69
+
70
+
71
@dataclass
class CopyOptions:
    """Resolved CLI flags controlling a single `copy` run."""

    dry_run: bool = False  # log actions without writing anything
    force_overwrite: bool = False  # overwrite even when dest removed the sync header
    no_checkout: bool = False  # assume the dest is already on the right branch
    checkout_from_default: bool = False  # CI mode: hard-reset to origin/default first
    local: bool = False  # stop after writing files (no commit/push/PR)
    no_prompt: bool = False  # auto-confirm every interactive prompt
    no_pr: bool = False  # push but skip PR creation
    skip_orphan_cleanup: bool = False  # keep stale previously-synced files
    pr_title: str = ""  # PR title template ({name}, {dest_name} placeholders)
    pr_labels: str = ""  # comma-separated PR labels
    pr_reviewers: str = ""  # comma-separated PR reviewers
    pr_assignees: str = ""  # comma-separated PR assignees
85
+
86
+
87
@app.command()
def copy(
    name: str = typer.Option("", "-n", "--name", help="Config name (used with src-root to find config)"),
    config_path_opt: str = typer.Option(
        "",
        "-c",
        "--config-path",
        help="Full path to config file (alternative to --name)",
    ),
    src_root_opt: str = typer.Option(
        "",
        "--src-root",
        help="Source repo root (default: find git root from cwd)",
    ),
    dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
    force_overwrite: bool = typer.Option(
        False,
        "--force-overwrite",
        help="Overwrite files even if header removed (opted out)",
    ),
    detailed_exit_code: bool = typer.Option(
        False,
        "--detailed-exit-code",
        help="Exit 0=no changes, 1=changes, 2=error",
    ),
    no_checkout: bool = typer.Option(
        False,
        "--no-checkout",
        help="Skip branch switching before sync",
    ),
    checkout_from_default: bool = typer.Option(
        False,
        "--checkout-from-default",
        help="Reset to origin/default before sync (for CI)",
    ),
    local: bool = typer.Option(
        False,
        "--local",
        help="No git operations after sync (no commit/push/PR)",
    ),
    no_prompt: bool = typer.Option(
        False,
        "-y",
        "--no-prompt",
        help="Skip confirmations (for CI)",
    ),
    no_pr: bool = typer.Option(
        False,
        "--no-pr",
        help="Push but skip PR creation",
    ),
    pr_title: str = typer.Option(
        "",
        "--pr-title",
        help="Override PR title (supports {name}, {dest_name})",
    ),
    pr_labels: str = typer.Option(
        "",
        "--pr-labels",
        help="Comma-separated PR labels",
    ),
    pr_reviewers: str = typer.Option(
        "",
        "--pr-reviewers",
        help="Comma-separated PR reviewers",
    ),
    pr_assignees: str = typer.Option(
        "",
        "--pr-assignees",
        help="Comma-separated PR assignees",
    ),
    skip_orphan_cleanup: bool = typer.Option(
        False,
        "--skip-orphan-cleanup",
        help="Skip deletion of orphaned synced files",
    ),
) -> None:
    """Copy files from SRC to DEST repositories.

    The SrcConfig is located either by --name (resolved under src-root) or
    by an explicit --config-path, then each configured destination is
    synced, optionally followed by commit/push/PR per destination.
    With --detailed-exit-code the process exits 0 for "no changes",
    1 for "changes made" and 2 on error; otherwise failures exit 1.
    """
    # --name and --config-path are two mutually exclusive ways to find the config.
    if name and config_path_opt:
        logger.error("Cannot use both --name and --config-path")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)
    if not name and not config_path_opt:
        logger.error("Either --name or --config-path is required")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)

    src_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    config_path = Path(config_path_opt) if config_path_opt else resolve_config_path(src_root, name)

    if not config_path.exists():
        logger.error(f"Config not found: {config_path}")
        raise typer.Exit(EXIT_ERROR if detailed_exit_code else 1)

    config = load_yaml_model(config_path, SrcConfig)
    src_repo = git_ops.get_repo(src_root)
    current_sha = git_ops.get_current_sha(src_repo)
    src_repo_url = git_ops.get_remote_url(src_repo, config.git_remote)

    # CLI flags win; config.pr_defaults fill in any PR field left blank.
    opts = CopyOptions(
        dry_run=dry_run,
        force_overwrite=force_overwrite,
        no_checkout=no_checkout,
        checkout_from_default=checkout_from_default,
        local=local,
        no_prompt=no_prompt,
        no_pr=no_pr,
        skip_orphan_cleanup=skip_orphan_cleanup,
        pr_title=pr_title or config.pr_defaults.title,
        pr_labels=pr_labels or ",".join(config.pr_defaults.labels),
        pr_reviewers=pr_reviewers or ",".join(config.pr_defaults.reviewers),
        pr_assignees=pr_assignees or ",".join(config.pr_defaults.assignees),
    )

    destinations = config.destinations
    if dest_filter:
        filter_names = [n.strip() for n in dest_filter.split(",")]
        destinations = [d for d in destinations if d.name in filter_names]

    total_changes = 0
    for dest in destinations:
        try:
            # Capture this destination's log so it can be embedded in the PR body.
            with capture_sync_log(dest.name) as log_path:
                changes = _sync_destination(config, dest, src_root, current_sha, src_repo_url, opts, log_path)
                total_changes += changes
        except Exception as e:
            # NOTE(review): a failure aborts the remaining destinations — confirm intended.
            logger.error(f"Failed to sync {dest.name}: {e}")
            if detailed_exit_code:
                raise typer.Exit(EXIT_ERROR)
            raise

    if detailed_exit_code:
        raise typer.Exit(EXIT_CHANGES if total_changes > 0 else EXIT_NO_CHANGES)
219
+
220
+
221
def _sync_destination(
    config: SrcConfig,
    dest: Destination,
    src_root: Path,
    current_sha: str,
    src_repo_url: str,
    opts: CopyOptions,
    log_path: Path,
) -> int:
    """Sync one destination repo and return the number of changes made.

    Handles branch preparation, the file sync itself, and (unless skipped
    by mode or a declined prompt) the commit/push/PR follow-up.

    Raises:
        ValueError: in dry-run mode when the destination checkout is missing.
    """
    # Destination checkouts are addressed relative to the source repo root.
    dest_root = (src_root / dest.dest_path_relative).resolve()

    if opts.dry_run and not dest_root.exists():
        raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")

    dest_repo = _ensure_dest_repo(dest, dest_root, opts.dry_run)
    copy_branch = dest.resolved_copy_branch(config.name)

    # --no-checkout means "I'm already on the right branch"
    # Prompt decline means "skip git operations for this run"
    if opts.no_checkout:
        skip_git_ops = False
    elif opts.dry_run:
        skip_git_ops = True
    elif _prompt(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
        git_ops.prepare_copy_branch(
            repo=dest_repo,
            default_branch=dest.default_branch,
            copy_branch=copy_branch,
            from_default=opts.checkout_from_default,
        )
        skip_git_ops = False
    else:
        skip_git_ops = True

    result = _sync_paths(config, dest, src_root, dest_root, opts)
    _print_sync_summary(dest, result)

    if result.total == 0:
        logger.info(f"{dest.name}: No changes")
        return 0

    if skip_git_ops:
        return result.total

    _commit_and_pr(config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path)
    return result.total
267
+
268
+
269
def _print_sync_summary(dest: Destination, result: SyncResult) -> None:
    """Emit a short human-readable summary of the sync run to stderr."""
    echo = typer.echo
    echo(f"\nSyncing to {dest.name}...", err=True)
    if result.content_changes > 0:
        echo(f" [{result.content_changes} files synced]", err=True)
    if result.orphans_deleted > 0:
        echo(f" [-] {result.orphans_deleted} orphans deleted", err=True)
    if result.total > 0:
        echo(f"\n{result.total} changes ready.", err=True)
277
+
278
+
279
def _ensure_dest_repo(dest: Destination, dest_root: Path, dry_run: bool):
    """Open the destination repo, cloning it first when missing.

    Raises:
        ValueError: when the checkout is absent and cannot be cloned
            (dry-run mode, or no repo_url configured for the destination).
    """
    if dest_root.exists():
        return git_ops.get_repo(dest_root)
    if dry_run:
        raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")
    if not dest.repo_url:
        raise ValueError(f"Dest {dest.name} not found and no repo_url configured")
    git_ops.clone_repo(dest.repo_url, dest_root)
    return git_ops.get_repo(dest_root)
287
+
288
+
289
def _sync_paths(
    config: SrcConfig,
    dest: Destination,
    src_root: Path,
    dest_root: Path,
    opts: CopyOptions,
) -> SyncResult:
    """Sync every configured path mapping into dest_root and collect totals."""
    outcome = SyncResult()
    for path_mapping in config.paths:
        n_changed, written = _sync_path(
            path_mapping,
            src_root,
            dest_root,
            dest,
            config.name,
            opts.dry_run,
            opts.force_overwrite,
        )
        outcome.content_changes += n_changed
        outcome.synced_paths.update(written)

    # Orphans = files we synced on a previous run but not on this one.
    if not opts.skip_orphan_cleanup:
        outcome.orphans_deleted = _cleanup_orphans(dest_root, config.name, outcome.synced_paths, opts.dry_run)
    return outcome
313
+
314
+
315
def _sync_path(
    mapping: PathMapping,
    src_root: Path,
    dest_root: Path,
    dest: Destination,
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> tuple[int, set[Path]]:
    """Sync one PathMapping; return (change count, set of dest paths produced).

    Three source shapes are handled: a glob pattern (src_path contains "*"),
    a directory (recursed with rglob), or a single file. Files excluded by
    the mapping are skipped; a missing source only logs a warning.
    """
    src_pattern = src_root / mapping.src_path
    changes = 0
    synced: set[Path] = set()
    sync_mode = mapping.sync_mode

    if "*" in mapping.src_path:
        # Matched files keep their path relative to the static part of the
        # pattern (everything before the first "*").
        glob_prefix = mapping.src_path.split("*")[0].rstrip("/")
        dest_base = mapping.dest_path or glob_prefix
        matches = glob.glob(str(src_pattern), recursive=True)
        if not matches:
            logger.warning(f"Glob matched no files: {mapping.src_path}")
        for src_file in matches:
            src_path = Path(src_file)
            if src_path.is_file() and not mapping.is_excluded(src_path):
                rel = src_path.relative_to(src_root / glob_prefix)
                dest_path = dest_root / dest_base / rel
                # dest_key identifies this file in per-destination skip_sections config.
                dest_key = str(Path(dest_base) / rel)
                changes += _copy_file(
                    src_path,
                    dest_path,
                    dest,
                    dest_key,
                    config_name,
                    sync_mode,
                    dry_run,
                    force_overwrite,
                )
                synced.add(dest_path)
    elif src_pattern.is_dir():
        # Whole-directory sync: mirror every non-excluded file under it.
        dest_base = mapping.resolved_dest_path()
        for src_file in src_pattern.rglob("*"):
            if src_file.is_file() and not mapping.is_excluded(src_file):
                rel = src_file.relative_to(src_pattern)
                dest_path = dest_root / dest_base / rel
                dest_key = str(Path(dest_base) / rel)
                changes += _copy_file(
                    src_file,
                    dest_path,
                    dest,
                    dest_key,
                    config_name,
                    sync_mode,
                    dry_run,
                    force_overwrite,
                )
                synced.add(dest_path)
    elif src_pattern.is_file():
        # Single-file sync: dest_base is the full relative destination path.
        dest_base = mapping.resolved_dest_path()
        dest_path = dest_root / dest_base
        changes += _copy_file(
            src_pattern,
            dest_path,
            dest,
            dest_base,
            config_name,
            sync_mode,
            dry_run,
            force_overwrite,
        )
        synced.add(dest_path)
    else:
        logger.warning(f"Source not found: {mapping.src_path}")

    return changes, synced
388
+
389
+
390
def _copy_file(
    src: Path,
    dest_path: Path,
    dest: Destination,
    dest_key: str,
    config_name: str,
    sync_mode: SyncMode,
    dry_run: bool,
    force_overwrite: bool = False,
) -> int:
    """Copy one source file according to its sync mode.

    Returns 1 when the destination was (or would be) written, 0 otherwise.
    Files that fail UTF-8 decoding are treated as binary.
    """
    try:
        body = header.remove_header(src.read_text())
    except UnicodeDecodeError:
        # Not valid text: compare/copy raw bytes instead.
        return _copy_binary_file(src, dest_path, sync_mode, dry_run)

    if sync_mode == SyncMode.SCAFFOLD:
        return _handle_scaffold(body, dest_path, dry_run)
    if sync_mode == SyncMode.REPLACE:
        return _handle_replace(body, dest_path, dry_run)
    if sync_mode == SyncMode.SYNC:
        skipped = dest.skip_sections.get(dest_key, [])
        return _handle_sync(body, dest_path, skipped, config_name, dry_run, force_overwrite)
413
+
414
+
415
def _copy_binary_file(src: Path, dest_path: Path, sync_mode: SyncMode, dry_run: bool) -> int:
    """Copy a non-text file; returns 1 when the destination was (or would be) written."""
    payload = src.read_bytes()
    exists = dest_path.exists()
    if sync_mode == SyncMode.SCAFFOLD:
        # Scaffold never overwrites an existing file.
        if exists:
            return 0
    elif sync_mode in (SyncMode.REPLACE, SyncMode.SYNC):
        # Skip the write when the bytes are already identical.
        if exists and dest_path.read_bytes() == payload:
            return 0
    return _write_binary_file(dest_path, payload, dry_run)
425
+
426
+
427
def _write_binary_file(dest_path: Path, content: bytes, dry_run: bool) -> int:
    """Write raw bytes to dest_path, creating parent dirs; always returns 1."""
    if dry_run:
        logger.info(f"[DRY RUN] Would write binary: {dest_path}")
    else:
        dest_path.parent.mkdir(parents=True, exist_ok=True)
        dest_path.write_bytes(content)
        logger.info(f"Wrote binary: {dest_path}")
    return 1
435
+
436
+
437
def _handle_scaffold(content: str, dest_path: Path, dry_run: bool) -> int:
    """One-time scaffold: write only when dest_path does not exist yet."""
    return 0 if dest_path.exists() else _write_file(dest_path, content, dry_run)
441
+
442
+
443
def _handle_replace(content: str, dest_path: Path, dry_run: bool) -> int:
    """Overwrite dest_path unconditionally unless it already matches content."""
    unchanged = dest_path.exists() and dest_path.read_text() == content
    return 0 if unchanged else _write_file(dest_path, content, dry_run)
447
+
448
+
449
def _handle_sync(
    src_content: str,
    dest_path: Path,
    skip_list: list[str],
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> int:
    """Sync src_content into dest_path, honoring the opt-out header protocol.

    Files containing section markers are merged section-by-section; plain
    files are replaced wholesale. A destination file whose sync header was
    deleted counts as opted out and is skipped unless force_overwrite.
    Returns 1 when the file was (or would be) written, 0 otherwise.
    """
    if sections.has_sections(src_content, dest_path):
        return _handle_sync_sections(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)

    if dest_path.exists():
        existing = dest_path.read_text()
        has_hdr = header.has_header(existing)
        if not has_hdr and not force_overwrite:
            # Missing header == destination opted out of syncing this file.
            logger.info(f"Skipping {dest_path} (header removed - opted out)")
            return 0
        if header.remove_header(existing) == src_content and has_hdr:
            # Header present and body already identical: nothing to do.
            return 0

    new_content = header.add_header(src_content, dest_path, config_name)
    return _write_file(dest_path, new_content, dry_run)
471
+
472
+
473
def _write_file(dest_path: Path, content: str, dry_run: bool) -> int:
    """Write content to dest_path creating parents; dry-run only logs. Returns 1."""
    if not dry_run:
        ensure_parents_write_text(dest_path, content)
        logger.info(f"Wrote: {dest_path}")
    else:
        logger.info(f"[DRY RUN] Would write: {dest_path}")
    return 1
480
+
481
+
482
def _handle_sync_sections(
    src_content: str,
    dest_path: Path,
    skip_list: list[str],
    config_name: str,
    dry_run: bool,
    force_overwrite: bool,
) -> int:
    """Merge managed sections from src_content into dest_path.

    Sections listed in skip_list are left untouched in the destination; a
    destination that removed the sync header has opted out and is skipped
    unless force_overwrite. Returns 1 when the file was (or would be)
    written, 0 otherwise.

    Fix: the existing file is read exactly once and the cached text is
    reused for the final no-op comparison, instead of re-reading from disk
    (redundant I/O plus a read/compare race window).
    """
    src_sections = sections.extract_sections(src_content, dest_path)

    existing: str | None = None
    if dest_path.exists():
        existing = dest_path.read_text()
        if not header.has_header(existing) and not force_overwrite:
            # Missing header == destination opted out of syncing this file.
            logger.info(f"Skipping {dest_path} (header removed - opted out)")
            return 0
        dest_body = header.remove_header(existing)
        new_body = sections.replace_sections(dest_body, src_sections, dest_path, skip_list)
    else:
        # No destination file yet: take the source content wholesale.
        new_body = src_content

    new_content = header.add_header(new_body, dest_path, config_name)

    if existing is not None and existing == new_content:
        return 0

    return _write_file(dest_path, new_content, dry_run)
508
+
509
+
510
def _cleanup_orphans(
    dest_root: Path,
    config_name: str,
    synced_paths: set[Path],
    dry_run: bool,
) -> int:
    """Delete previously-synced files that this run no longer produced."""
    removed = 0
    for candidate in _find_files_with_config(dest_root, config_name):
        if candidate in synced_paths:
            continue
        if dry_run:
            logger.info(f"[DRY RUN] Would delete orphan: {candidate}")
        else:
            candidate.unlink()
            logger.info(f"Deleted orphan: {candidate}")
        removed += 1
    return removed
526
+
527
+
528
def _find_files_with_config(dest_root: Path, config_name: str) -> list[Path]:
    """Find files under dest_root whose sync header names config_name.

    Skips anything inside .git. Fix: rglob("*") also yields directories,
    which were previously passed to header.file_get_config_name and could
    end up in orphan cleanup where Path.unlink() fails on a directory —
    a path.is_file() guard now filters them out.
    """
    return [
        path
        for path in dest_root.rglob("*")
        if ".git" not in path.parts
        and path.is_file()
        and header.file_get_config_name(path) == config_name
    ]
536
+
537
+
538
def _commit_and_pr(
    config: SrcConfig,
    repo,
    dest_root: Path,
    dest: Destination,
    sha: str,
    src_repo_url: str,
    opts: CopyOptions,
    log_path: Path,
) -> None:
    """Commit, force-push and open/update a PR for one synced destination.

    Each step (commit, push, PR) is individually confirmable; declining a
    prompt stops the remaining steps. --local skips everything.
    """
    if opts.local:
        logger.info("Local mode: skipping commit/push/PR")
        return

    copy_branch = dest.resolved_copy_branch(config.name)

    if not _prompt(f"Commit changes to {dest.name}?", opts.no_prompt):
        return

    commit_msg = f"chore: sync {config.name} from {sha[:8]}"
    git_ops.commit_changes(repo, commit_msg)
    typer.echo(f" Committed: {commit_msg}", err=True)

    if not _prompt(f"Push {dest.name} to origin?", opts.no_prompt):
        return

    # Force-push: the copy branch is owned by path-sync and may be rebuilt.
    git_ops.push_branch(repo, copy_branch, force=True)
    typer.echo(f" Pushed: {copy_branch} (force)", err=True)

    if opts.no_pr or not _prompt(f"Create PR for {dest.name}?", opts.no_prompt):
        return

    # The per-destination log captured during the sync goes into the PR body.
    sync_log = log_path.read_text() if log_path.exists() else ""
    pr_body = config.pr_defaults.format_body(
        src_repo_url=src_repo_url,
        src_sha=sha,
        sync_log=sync_log,
        dest_name=dest.name,
    )

    title = opts.pr_title.format(name=config.name, dest_name=dest.name)
    pr_url = git_ops.create_or_update_pr(
        dest_root,
        copy_branch,
        title,
        pr_body,
        opts.pr_labels.split(",") if opts.pr_labels else None,
        opts.pr_reviewers.split(",") if opts.pr_reviewers else None,
        opts.pr_assignees.split(",") if opts.pr_assignees else None,
    )
    if pr_url:
        typer.echo(f" Created PR: {pr_url}", err=True)
@@ -0,0 +1,50 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+
6
+ import typer
7
+
8
+ from path_sync._internal import git_ops
9
+ from path_sync._internal.models import find_repo_root
10
+ from path_sync._internal.typer_app import app
11
+ from path_sync._internal.validation import parse_skip_sections, validate_no_unauthorized_changes
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
@app.command("validate-no-changes")
def validate_no_changes(
    branch: str = typer.Option("main", "-b", "--branch", help="Default branch to compare against"),
    skip_sections_opt: str = typer.Option(
        "",
        "--skip-sections",
        help="Comma-separated path:section_id pairs to skip (e.g., 'justfile:coverage,pyproject.toml:default')",
    ),
    src_root_opt: str = typer.Option(
        "",
        "--src-root",
        help="Source repo root (default: find git root from cwd)",
    ),
) -> None:
    """Validate no unauthorized changes to synced files.

    Validation is skipped entirely on the default branch and on sync/*
    branches (where changes are expected). Otherwise the working tree is
    compared against *branch* and the command exits 1 listing any synced
    files that were modified outside their allowed sections.
    """
    repo_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
    repo = git_ops.get_repo(repo_root)

    current_branch = repo.active_branch.name
    if current_branch.startswith("sync/"):
        logger.info(f"On sync branch {current_branch}, validation skipped")
        return
    if current_branch == branch:
        logger.info(f"On default branch {branch}, validation skipped")
        return

    skip_sections = parse_skip_sections(skip_sections_opt) if skip_sections_opt else None
    unauthorized = validate_no_unauthorized_changes(repo_root, branch, skip_sections)

    if unauthorized:
        files_list = "\n ".join(unauthorized)
        logger.error(f"Unauthorized changes in {len(unauthorized)} files:\n {files_list}")
        raise typer.Exit(1)

    logger.info("Validation passed: no unauthorized changes")
@@ -0,0 +1,7 @@
1
+ from pathlib import Path
2
+
3
+
4
def ensure_parents_write_text(path: Path | str, text: str) -> None:
    """Write *text* to *path*, creating any missing parent directories first."""
    target = Path(path)
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(text)
@@ -0,0 +1,182 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ import os
5
+ import subprocess
6
+ from contextlib import suppress
7
+ from pathlib import Path
8
+
9
+ from git import GitCommandError, InvalidGitRepositoryError, NoSuchPathError, Repo
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
+ def _auth_url(url: str) -> str:
15
+ """Inject GH_TOKEN into HTTPS URL for authentication."""
16
+ token = os.environ.get("GH_TOKEN", "")
17
+ if not token:
18
+ return url
19
+ if url.startswith("https://github.com/"):
20
+ return url.replace("https://", f"https://x-access-token:{token}@")
21
+ return url
22
+
23
+
24
def get_repo(path: Path) -> Repo:
    """Open the git repository at *path*, raising ValueError when it is not one."""
    try:
        repo = Repo(path)
    except InvalidGitRepositoryError as exc:
        raise ValueError(f"Not a git repository: {path}") from exc
    return repo
29
+
30
+
31
def is_git_repo(path: Path) -> bool:
    """Report whether *path* points at a git repository (missing paths count as no)."""
    try:
        Repo(path)
    except (InvalidGitRepositoryError, NoSuchPathError):
        return False
    return True
36
+
37
+
38
def get_default_branch(repo: Repo) -> str:
    """Resolve origin's default branch name, falling back to "main" when unknown."""
    try:
        head_ref = repo.git.symbolic_ref("refs/remotes/origin/HEAD", short=True)
    except GitCommandError:
        # origin/HEAD not set (e.g. never fetched): assume the common default.
        return "main"
    return head_ref.replace("origin/", "")
42
+
43
+
44
def clone_repo(url: str, dest: Path) -> Repo:
    """Clone *url* into *dest* (creating parent dirs), using GH_TOKEN auth when set."""
    logger.info(f"Cloning {url} to {dest}")
    dest.parent.mkdir(parents=True, exist_ok=True)
    authenticated = _auth_url(url)
    return Repo.clone_from(authenticated, str(dest))
48
+
49
+
50
def checkout_branch(repo: Repo, branch: str) -> None:
    """Switch to *branch*, creating it from the current HEAD when it does not exist."""
    if repo.active_branch.name == branch:
        return  # already on the requested branch
    logger.info(f"Checking out {branch}")
    try:
        repo.git.checkout(branch)
    except GitCommandError:
        # Branch missing locally: create it at the current HEAD.
        repo.git.checkout("-b", branch)
59
+
60
+
61
def prepare_copy_branch(repo: Repo, default_branch: str, copy_branch: str, from_default: bool = False) -> None:
    """Prepare copy_branch for syncing.

    Args:
        repo: Destination repository to operate on.
        default_branch: Name of the repo's default branch (e.g. "main").
        copy_branch: Branch that sync changes will be committed to.
        from_default: If True, fetch origin and reset to origin/default_branch
            before creating copy_branch. Use in CI for clean state.
            If False, just switch to or create copy_branch from current HEAD.
    """
    if from_default:
        logger.info("Fetching origin")
        repo.git.fetch("origin")

        logger.info(f"Checking out {default_branch}")
        repo.git.checkout(default_branch)
        # Discard any local drift so the copy branch starts from origin's state.
        repo.git.reset("--hard", f"origin/{default_branch}")

        # Recreate the copy branch from scratch; "branch not found" is ignored.
        with suppress(GitCommandError):
            repo.git.branch("-D", copy_branch)
            logger.info(f"Deleted existing local branch: {copy_branch}")

        logger.info(f"Creating fresh branch: {copy_branch}")
        repo.git.checkout("-b", copy_branch)
    else:
        checkout_branch(repo, copy_branch)
85
+
86
+
87
def get_current_sha(repo: Repo) -> str:
    """Return the full hex SHA of the commit currently at HEAD."""
    head_commit = repo.head.commit
    return head_commit.hexsha
89
+
90
+
91
def get_remote_url(repo: Repo, remote_name: str = "origin") -> str:
    """Return the URL of the named remote, or "" when the remote does not exist."""
    try:
        remote = repo.remote(remote_name)
    except ValueError:
        # GitPython raises ValueError for unknown remote names.
        return ""
    return remote.url
96
+
97
+
98
def has_changes(repo: Repo) -> bool:
    """True when the working tree is dirty or untracked files exist."""
    return repo.is_dirty() or bool(repo.untracked_files)
100
+
101
+
102
def commit_changes(repo: Repo, message: str) -> None:
    """Stage everything and commit it, skipping the commit when nothing changed."""
    repo.git.add("-A")
    if not repo.is_dirty():
        return  # nothing staged; avoid an empty commit
    _ensure_git_user(repo)
    repo.git.commit("-m", message)
    logger.info(f"Committed: {message}")
108
+
109
+
110
+ def _ensure_git_user(repo: Repo) -> None:
111
+ """Configure git user if not already set."""
112
+ try:
113
+ repo.config_reader().get_value("user", "name")
114
+ except Exception:
115
+ repo.config_writer().set_value("user", "name", "path-sync[bot]").release()
116
+ repo.config_writer().set_value("user", "email", "path-sync[bot]@users.noreply.github.com").release()
117
+
118
+
119
def push_branch(repo: Repo, branch: str, force: bool = True) -> None:
    """Push *branch* to origin with upstream tracking; force-push by default."""
    logger.info(f"Pushing {branch}" + (" (force)" if force else ""))
    push_args = ["-u", "origin", branch]
    if force:
        push_args.insert(0, "--force")
    repo.git.push(*push_args)
123
+
124
+
125
def update_pr_body(repo_path: Path, branch: str, body: str) -> bool:
    """Rewrite the body of the open PR for *branch* via the gh CLI; True on success."""
    proc = subprocess.run(
        ["gh", "pr", "edit", branch, "--body", body],
        cwd=repo_path,
        capture_output=True,
        text=True,
    )
    if proc.returncode == 0:
        logger.info("Updated PR body")
        return True
    logger.warning(f"Failed to update PR body: {proc.stderr}")
    return False
133
+
134
+
135
def create_or_update_pr(
    repo_path: Path,
    branch: str,
    title: str,
    body: str = "",
    labels: list[str] | None = None,
    reviewers: list[str] | None = None,
    assignees: list[str] | None = None,
) -> str:
    """Create a PR for *branch* via the gh CLI, or refresh an existing one's body.

    Returns the new PR URL, or "" when a PR already existed (its body is
    updated instead). Raises RuntimeError for any other gh failure.
    """
    cmd = ["gh", "pr", "create", "--head", branch, "--title", title, "--body", body or ""]
    for flag, values in (("--label", labels), ("--reviewer", reviewers), ("--assignee", assignees)):
        if values:
            cmd.extend([flag, ",".join(values)])

    proc = subprocess.run(cmd, cwd=repo_path, capture_output=True, text=True)
    if proc.returncode == 0:
        url = proc.stdout.strip()
        logger.info(f"Created PR: {url}")
        return url
    if "already exists" in proc.stderr:
        logger.info("PR already exists, updating body")
        update_pr_body(repo_path, branch, body)
        return ""
    raise RuntimeError(f"Failed to create PR: {proc.stderr}")
162
+
163
+
164
def file_has_git_changes(repo: Repo, file_path: Path, base_ref: str = "HEAD") -> bool:
    """True when file_path differs from base_ref according to `git diff`."""
    rel = file_path.relative_to(repo.working_dir)
    output = repo.git.diff("--name-only", base_ref, "--", str(rel))
    return output.strip() != ""
168
+
169
+
170
def get_changed_files(repo: Repo, base_ref: str = "HEAD") -> list[Path]:
    """List absolute paths of the files that differ from base_ref."""
    names = repo.git.diff("--name-only", base_ref).strip()
    if not names:
        return []
    root = Path(repo.working_dir)
    return [root / line for line in names.split("\n")]
176
+
177
+
178
def get_file_content_at_ref(repo: Repo, file_path: Path, ref: str) -> str | None:
    """Return the file's contents at *ref*, or None when it is absent there."""
    rel_path = str(file_path.relative_to(repo.working_dir))
    try:
        return repo.git.show(f"{ref}:{rel_path}")
    except GitCommandError:
        # git show fails when the path does not exist at that ref.
        return None
@@ -0,0 +1,83 @@
1
+ from __future__ import annotations
2
+
3
+ import re
4
+ from pathlib import Path
5
+
6
+ from zero_3rdparty.sections import CommentConfig, get_comment_config
7
+
8
+ from path_sync._internal.models import HEADER_TEMPLATE, HeaderConfig
9
+
10
# Matches the sync header line and captures which config generated the file.
HEADER_PATTERN = re.compile(r"path-sync copy -n (?P<config_name>[\w-]+)")
11
+
12
+
13
def _resolve_comment_config(path: Path, config: HeaderConfig | None) -> CommentConfig:
    """Comment style for *path*: per-extension overrides first, library defaults last."""
    if config is not None:
        extension = path.suffix
        override_prefix = config.comment_prefixes.get(extension)
        if override_prefix:
            override_suffix = config.comment_suffixes.get(extension, "")
            return CommentConfig(override_prefix, override_suffix)
    return get_comment_config(path)
21
+
22
+
23
def get_comment_prefix(path: Path, config: HeaderConfig | None = None) -> str:
    """Comment prefix (e.g. ``#``) appropriate for *path*."""
    resolved = _resolve_comment_config(path, config)
    return resolved.prefix
25
+
26
+
27
def get_comment_suffix(path: Path, config: HeaderConfig | None = None) -> str:
    """Comment suffix (empty for most languages) appropriate for *path*."""
    resolved = _resolve_comment_config(path, config)
    return resolved.suffix
29
+
30
+
31
def get_header_line(path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
    """Full header comment line marking a file as managed by *config_name*."""
    comment = _resolve_comment_config(path, config)
    marker = HEADER_TEMPLATE.format(config_name=config_name)
    return f"{comment.prefix} {marker}{comment.suffix}"
35
+
36
+
37
def has_header(content: str) -> bool:
    """True when the first line of *content* carries a path-sync header."""
    if not content:
        return False
    first_line, _, _ = content.partition("\n")
    return HEADER_PATTERN.search(first_line) is not None
40
+
41
+
42
def get_config_name(content: str) -> str | None:
    """Config name from the header on *content*'s first line, or None when absent."""
    if not content:
        return None
    first_line, _, _ = content.partition("\n")
    match = HEADER_PATTERN.search(first_line)
    return match.group("config_name") if match else None
47
+
48
+
49
def add_header(content: str, path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
    """Prepend the path-sync header line for *config_name* to *content*."""
    return get_header_line(path, config_name, config) + "\n" + content
52
+
53
+
54
def remove_header(content: str) -> str:
    """Strip the leading path-sync header line; no-op when none is present."""
    if not has_header(content):
        return content
    # Drop everything up to and including the first newline.
    _, _, remainder = content.partition("\n")
    return remainder
59
+
60
+
61
def has_known_comment_prefix(path: Path) -> bool:
    """Whether a default comment style is known for *path*'s extension."""
    try:
        get_comment_config(path)
    except ValueError:
        # Unknown extension: the library cannot provide a comment style.
        return False
    return True
67
+
68
+
69
def file_get_config_name(path: Path) -> str | None:
    """Config name from the header of the file at *path*, or None.

    Missing files, unknown comment styles, undecodable content, and
    unreadable files all yield None.
    """
    if not path.exists():
        return None
    if not has_known_comment_prefix(path):
        return None
    try:
        with path.open() as handle:
            head = handle.readline()
    except (UnicodeDecodeError, OSError):
        # Binary or unreadable file: treat as headerless.
        return None
    return get_config_name(head)
78
+
79
+
80
def file_has_header(path: Path, config: HeaderConfig | None = None) -> bool:
    """True when *path* starts with a path-sync header.

    When *config* is given, files whose extension it does not map are
    treated as headerless without reading them.
    """
    if config is not None and path.suffix not in config.comment_prefixes:
        return False
    return file_get_config_name(path) is not None
@@ -0,0 +1,155 @@
1
+ from __future__ import annotations
2
+
3
+ import glob as glob_mod
4
+ from enum import StrEnum
5
+ from pathlib import Path
6
+ from typing import ClassVar
7
+
8
+ from pydantic import BaseModel, Field
9
+
10
# Format string used when configuring logging output.
LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s"

# Directory names skipped during sync by default (VCS metadata, caches, virtualenvs).
DEFAULT_EXCLUDE_DIRS: frozenset[str] = frozenset(
    {
        "__pycache__",
        ".git",
        ".venv",
        "venv",
        "node_modules",
        ".mypy_cache",
        ".pytest_cache",
        ".ruff_cache",
        ".tox",
    }
)
25
+
26
+
27
def _default_exclude_dirs() -> set[str]:
    """Fresh mutable copy of DEFAULT_EXCLUDE_DIRS for use as a pydantic default_factory."""
    return {*DEFAULT_EXCLUDE_DIRS}
29
+
30
+
31
class SyncMode(StrEnum):
    """Copy strategy for a PathMapping (exact semantics live in the copy command)."""

    SYNC = "sync"
    REPLACE = "replace"
    SCAFFOLD = "scaffold"
35
+
36
+
37
class PathMapping(BaseModel):
    """One source path (or glob) and where it lands in a destination repo."""

    src_path: str
    dest_path: str = ""
    sync_mode: SyncMode = SyncMode.SYNC
    exclude_dirs: set[str] = Field(default_factory=_default_exclude_dirs)

    def resolved_dest_path(self) -> str:
        """Destination path, falling back to the source path when unset."""
        if self.dest_path:
            return self.dest_path
        return self.src_path

    def is_excluded(self, path: Path) -> bool:
        """True when any component of *path* is one of the excluded directory names."""
        return any(part in self.exclude_dirs for part in path.parts)

    def expand_dest_paths(self, repo_root: Path) -> list[Path]:
        """Existing files under *repo_root* matched by the destination path.

        Glob patterns are expanded recursively, directories are walked for
        files, a plain existing file is returned as-is; otherwise [].
        """
        destination = self.resolved_dest_path()
        candidate = repo_root / destination

        if "*" in destination:
            matches = glob_mod.glob(str(candidate), recursive=True)
            return [Path(match) for match in matches]
        if candidate.is_dir():
            return [child for child in candidate.rglob("*") if child.is_file()]
        return [candidate] if candidate.exists() else []
60
+
61
+
62
# First-line marker written into synced files; config_name identifies the source config.
HEADER_TEMPLATE = "path-sync copy -n {config_name}"
63
+
64
+
65
class HeaderConfig(BaseModel):
    """Per-extension comment-syntax overrides, keyed by file suffix (e.g. ".py")."""

    # Maps extension -> comment prefix / suffix used when writing headers.
    comment_prefixes: dict[str, str] = Field(default_factory=dict)
    comment_suffixes: dict[str, str] = Field(default_factory=dict)
68
+
69
+
70
# Default PR body; placeholders are filled in by PRDefaults.format_body.
DEFAULT_BODY_TEMPLATE = """\
Synced from [{src_repo_name}]({src_repo_url}) @ `{src_sha_short}`

<details>
<summary>Sync Log</summary>

```
{sync_log}
```

</details>
"""
82
+
83
+
84
class PRDefaults(BaseModel):
    """Defaults applied to sync pull requests (title, body, metadata)."""

    title: str = "chore: sync {name} files"
    body_template: str = DEFAULT_BODY_TEMPLATE
    body_suffix: str = ""
    labels: list[str] = Field(default_factory=list)
    reviewers: list[str] = Field(default_factory=list)
    assignees: list[str] = Field(default_factory=list)

    def format_body(
        self,
        src_repo_url: str,
        src_sha: str,
        sync_log: str,
        dest_name: str,
    ) -> str:
        """Render the PR body template, appending ``body_suffix`` when set."""
        # Display name for the source repo: last URL segment, minus any ".git".
        repo_name = src_repo_url.rstrip("/").rsplit("/", 1)[-1].removesuffix(".git")
        rendered = self.body_template.format(
            src_repo_url=src_repo_url,
            src_repo_name=repo_name,
            src_sha=src_sha,
            src_sha_short=src_sha[:8],
            sync_log=sync_log,
            dest_name=dest_name,
        )
        if not self.body_suffix:
            return rendered
        return f"{rendered}\n---\n{self.body_suffix}"
111
+
112
+
113
class Destination(BaseModel):
    """One target repository that receives synced files."""

    name: str
    repo_url: str = ""
    dest_path_relative: str
    copy_branch: str = ""
    default_branch: str = "main"
    # Maps relative file path -> section ids allowed to diverge.
    skip_sections: dict[str, list[str]] = Field(default_factory=dict)

    def resolved_copy_branch(self, config_name: str) -> str:
        """Returns branch name, defaulting to sync/{config_name} if not set."""
        if self.copy_branch:
            return self.copy_branch
        return f"sync/{config_name}"
124
+
125
+
126
class SrcConfig(BaseModel):
    """Top-level sync configuration stored in the source repository."""

    # Extension used for config files under .github/.
    CONFIG_EXT: ClassVar[str] = ".src.yaml"

    name: str
    git_remote: str = "origin"
    src_repo_url: str = ""
    schedule: str = "0 6 * * *"
    header_config: HeaderConfig = Field(default_factory=HeaderConfig)
    pr_defaults: PRDefaults = Field(default_factory=PRDefaults)
    paths: list[PathMapping] = Field(default_factory=list)
    destinations: list[Destination] = Field(default_factory=list)

    def find_destination(self, name: str) -> Destination:
        """Destination with the given name; raises ValueError when absent."""
        found = next((dest for dest in self.destinations if dest.name == name), None)
        if found is None:
            raise ValueError(f"Destination not found: {name}")
        return found
143
+
144
+
145
def resolve_config_path(repo_root: Path, name: str) -> Path:
    """Path of the named sync config under the repo's .github directory."""
    filename = f"{name}{SrcConfig.CONFIG_EXT}"
    return repo_root.joinpath(".github", filename)
147
+
148
+
149
def find_repo_root(start_path: Path) -> Path:
    """Nearest ancestor of *start_path* (inclusive) containing a ``.git`` entry.

    The filesystem root itself is never checked; reaching it raises ValueError.
    """
    current = start_path.resolve()
    while True:
        if current == current.parent:
            # Walked all the way up without finding a repository.
            raise ValueError(f"No git repository found from {start_path}")
        if (current / ".git").exists():
            return current
        current = current.parent
@@ -0,0 +1,8 @@
1
import typer

# Shared Typer application; command modules register onto it via @app.command().
# Rich "pretty" tracebacks and local-variable dumps are disabled so failures
# print plain tracebacks.
app = typer.Typer(
    name="path-sync",
    help="Sync files across repositories",
    pretty_exceptions_enable=False,
    pretty_exceptions_show_locals=False,
)
@@ -0,0 +1,66 @@
1
+ from __future__ import annotations
2
+
3
+ import logging
4
+ from pathlib import Path
5
+
6
+ from path_sync import sections
7
+ from path_sync._internal import git_ops, header
8
+
9
+ logger = logging.getLogger(__name__)
10
+
11
+
12
+ def parse_skip_sections(value: str) -> dict[str, set[str]]:
13
+ """Parse 'path:section_id,path:section_id' into {path: {section_ids}}."""
14
+ result: dict[str, set[str]] = {}
15
+ for item in value.split(","):
16
+ item = item.strip()
17
+ if not item:
18
+ continue
19
+ if ":" not in item:
20
+ raise ValueError(f"Invalid format '{item}', expected 'path:section_id'")
21
+ path, section_id = item.rsplit(":", 1)
22
+ result.setdefault(path, set()).add(section_id)
23
+ return result
24
+
25
+
26
+ def validate_no_unauthorized_changes(
27
+ repo_root: Path,
28
+ default_branch: str = "main",
29
+ skip_sections: dict[str, set[str]] | None = None,
30
+ ) -> list[str]:
31
+ """Find files with unauthorized changes in DO_NOT_EDIT sections.
32
+
33
+ Returns 'path:section_id' for section changes or 'path' for full-file.
34
+ """
35
+ repo = git_ops.get_repo(repo_root)
36
+ base_ref = f"origin/{default_branch}"
37
+ skip = skip_sections or {}
38
+ unauthorized: list[str] = []
39
+
40
+ for path in git_ops.get_changed_files(repo, base_ref):
41
+ if not path.exists():
42
+ continue
43
+ if not header.file_has_header(path):
44
+ continue
45
+
46
+ rel_path = str(path.relative_to(repo_root))
47
+ current_content = path.read_text()
48
+ baseline_content = git_ops.get_file_content_at_ref(repo, path, base_ref)
49
+
50
+ if baseline_content is None:
51
+ continue
52
+
53
+ baseline_has_sections = sections.has_sections(baseline_content, path)
54
+ current_has_sections = sections.has_sections(current_content, path)
55
+
56
+ if baseline_has_sections:
57
+ file_skip = skip.get(rel_path, set())
58
+ changed_ids = sections.compare_sections(baseline_content, current_content, path, file_skip)
59
+ unauthorized.extend(f"{rel_path}:{sid}" for sid in changed_ids)
60
+ elif current_has_sections:
61
+ unauthorized.append(rel_path)
62
+ else:
63
+ if baseline_content != current_content:
64
+ unauthorized.append(rel_path)
65
+
66
+ return sorted(unauthorized)
@@ -0,0 +1,19 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+ from typing import TypeVar
5
+
6
+ import yaml
7
+ from pydantic import BaseModel
8
+
9
+ T = TypeVar("T", bound=BaseModel)
10
+
11
+
12
def load_yaml_model(path: Path, model_type: type[T]) -> T:
    """Read the YAML file at *path* and validate it into *model_type*."""
    raw = path.read_text()
    return model_type.model_validate(yaml.safe_load(raw))
15
+
16
+
17
+ def dump_yaml_model(model: BaseModel) -> str:
18
+ data = model.model_dump(mode="json", exclude_none=True)
19
+ return yaml.safe_dump(data, default_flow_style=False, sort_keys=False)
path_sync/sections.py CHANGED
@@ -43,7 +43,7 @@ def has_sections(content: str, path: Path) -> bool:
43
43
 
44
44
 
45
45
  def parse_sections(content: str, path: Path) -> list[Section]:
46
- return _parse_sections(content, TOOL_NAME, get_comment_config(path))
46
+ return _parse_sections(content, TOOL_NAME, get_comment_config(path), str(path))
47
47
 
48
48
 
49
49
  def wrap_in_default_section(content: str, path: Path) -> str:
@@ -51,7 +51,7 @@ def wrap_in_default_section(content: str, path: Path) -> str:
51
51
 
52
52
 
53
53
  def extract_sections(content: str, path: Path) -> dict[str, str]:
54
- return _extract_sections(content, TOOL_NAME, get_comment_config(path))
54
+ return _extract_sections(content, TOOL_NAME, get_comment_config(path), str(path))
55
55
 
56
56
 
57
57
  def replace_sections(
@@ -69,4 +69,4 @@ def compare_sections(
69
69
  path: Path,
70
70
  skip: set[str] | None = None,
71
71
  ) -> list[str]:
72
- return _compare_sections(baseline_content, current_content, TOOL_NAME, get_comment_config(path), skip)
72
+ return _compare_sections(baseline_content, current_content, TOOL_NAME, get_comment_config(path), skip, str(path))
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: path-sync
3
- Version: 0.3.0
3
+ Version: 0.3.2
4
4
  Summary: Sync files from a source repo to multiple destination repos
5
5
  Author-email: EspenAlbert <espen.albert1@gmail.com>
6
6
  License-Expression: MIT
@@ -13,7 +13,7 @@ Requires-Dist: gitpython>=3.1.0
13
13
  Requires-Dist: pydantic>=2.0
14
14
  Requires-Dist: pyyaml>=6.0
15
15
  Requires-Dist: typer>=0.16.0
16
- Requires-Dist: zero-3rdparty>=0.100.0
16
+ Requires-Dist: zero-3rdparty>=0.101.1
17
17
  Description-Content-Type: text/markdown
18
18
 
19
19
  # path-sync
@@ -0,0 +1,21 @@
1
+ path_sync/__init__.py,sha256=m7PJh6WLWUKwFydw1lNBUGCWCD--J4Vou2S-WkP_KFg,153
2
+ path_sync/__main__.py,sha256=HDj3qgijDcK8k976qsAvKMvUq9fZEJTK-dZldUc5-no,326
3
+ path_sync/config.py,sha256=XYuEK_bjSpAk_nZN0oxpEA-S3t9F6Qn0yznYLoQHIw8,568
4
+ path_sync/copy.py,sha256=BpflW4086XJFSHHK4taYPgXtF07xHB8qrgm8TYqdM4E,120
5
+ path_sync/sections.py,sha256=lUFGO3gqpGr7msztDudUrejmfghrfOPWiOMKBP6YotE,1927
6
+ path_sync/_internal/__init__.py,sha256=iPkMhrpiyXBijo2Hp-y_2zEYxAXnHLnStKM0X0HHd4U,56
7
+ path_sync/_internal/cmd_boot.py,sha256=cFomUyPOhNX9fi5_BYL2Sm7O1oq7vntD8b7clQ7mL1E,3104
8
+ path_sync/_internal/cmd_copy.py,sha256=-iqEoAGTA2oBeR8HXbJfzJ0hmnmtq27LBCeSgVbOo9k,18635
9
+ path_sync/_internal/cmd_validate.py,sha256=e6m-JZlXAGr0ZRqfLhhrlmjs4w79p2WWnZo4I05PGpo,1798
10
+ path_sync/_internal/file_utils.py,sha256=5C33qzKFQdwChi5YwUWBujj126t0P6dbGSU_5hWExpE,194
11
+ path_sync/_internal/git_ops.py,sha256=rpG_r7VNH1KlBgqM9mz7xop0mpdy76Vs3rzCoxE1dIQ,5895
12
+ path_sync/_internal/header.py,sha256=evgY2q_gfDdEytEt_jyJ7M_KdGzCpfdKBUnoh3v-0Go,2593
13
+ path_sync/_internal/models.py,sha256=YLN_fi1n0LaYrpmrllHeJbQH_ryPtER5GGPnVCnkr0o,4274
14
+ path_sync/_internal/typer_app.py,sha256=lEGMRXql3Se3VbmwAohvpUaL2cbY-RwhPUq8kL7bPbc,177
15
+ path_sync/_internal/validation.py,sha256=qhEha-pJiM5zkZlr4sj2I4ZqvqcWMEfL4IZu_LGatLI,2226
16
+ path_sync/_internal/yaml_utils.py,sha256=yj6Bl54EltjLEcVKaiA5Ahb9byT6OUMh0xIEzTsrvnQ,498
17
+ path_sync-0.3.2.dist-info/METADATA,sha256=rZJuJVeznXEleFmKcYFy0ziIfgic_doPOwhYlV7MGKc,8231
18
+ path_sync-0.3.2.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
19
+ path_sync-0.3.2.dist-info/entry_points.txt,sha256=jTsL0c-9gP-4_Jt3EPgihtpLcwQR0AFAf1AUpD50AlI,54
20
+ path_sync-0.3.2.dist-info/licenses/LICENSE,sha256=OphKV48tcMv6ep-7j-8T6nycykPT0g8ZlMJ9zbGvdPs,1066
21
+ path_sync-0.3.2.dist-info/RECORD,,
@@ -1,10 +0,0 @@
1
- path_sync/__init__.py,sha256=GxwE45z99FV9EG57V7d0ld557zw2SzYk_83nEbBGtRk,153
2
- path_sync/__main__.py,sha256=HDj3qgijDcK8k976qsAvKMvUq9fZEJTK-dZldUc5-no,326
3
- path_sync/config.py,sha256=XYuEK_bjSpAk_nZN0oxpEA-S3t9F6Qn0yznYLoQHIw8,568
4
- path_sync/copy.py,sha256=BpflW4086XJFSHHK4taYPgXtF07xHB8qrgm8TYqdM4E,120
5
- path_sync/sections.py,sha256=dMEux8wT6nu9fcPgqgoWCc7QFNOxGVx5RVwcHhEK_lw,1894
6
- path_sync-0.3.0.dist-info/METADATA,sha256=8oFXF5R7ULoTo2CFdDRcsXjpRW6G0yZ-aGFjDHh1iJk,8231
7
- path_sync-0.3.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
8
- path_sync-0.3.0.dist-info/entry_points.txt,sha256=jTsL0c-9gP-4_Jt3EPgihtpLcwQR0AFAf1AUpD50AlI,54
9
- path_sync-0.3.0.dist-info/licenses/LICENSE,sha256=OphKV48tcMv6ep-7j-8T6nycykPT0g8ZlMJ9zbGvdPs,1066
10
- path_sync-0.3.0.dist-info/RECORD,,