path-sync 0.2.1__py3-none-any.whl → 0.3.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
path_sync/__init__.py CHANGED
@@ -1 +1,10 @@
- """Path sync package for syncing files across repositories."""
+ # Generated by pkg-ext
+ # flake8: noqa
+ from path_sync import copy
+ from path_sync import config
+
+ VERSION = "0.3.1"
+ __all__ = [
+     "copy",
+     "config",
+ ]
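
With this release the public surface narrows to the re-exported `copy` and `config` modules; everything else moves under `path_sync._internal`, as the import changes in the files below show. A minimal consumer sketch, assuming nothing beyond what the generated `__init__` above exposes:

import path_sync

print(path_sync.VERSION)    # -> 0.3.1
print(path_sync.__all__)    # -> ['copy', 'config']
# Anything under path_sync._internal (models, git_ops, header, ...) is an
# implementation detail and may change between releases.
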
path_sync/__main__.py CHANGED
@@ -1,8 +1,8 @@
  import logging

- from path_sync import cmd_boot, cmd_copy, cmd_validate  # noqa: F401
- from path_sync.models import LOG_FORMAT
- from path_sync.typer_app import app
+ from path_sync._internal import cmd_boot, cmd_copy, cmd_validate  # noqa: F401
+ from path_sync._internal.models import LOG_FORMAT
+ from path_sync._internal.typer_app import app


  def main() -> None:
@@ -0,0 +1 @@
+ """Internal implementation - not part of public API."""
@@ -6,17 +6,17 @@ from typing import Annotated

  import typer

- from path_sync import git_ops
- from path_sync.file_utils import ensure_parents_write_text
- from path_sync.models import (
+ from path_sync._internal import git_ops
+ from path_sync._internal.file_utils import ensure_parents_write_text
+ from path_sync._internal.models import (
      Destination,
      PathMapping,
      SrcConfig,
      find_repo_root,
      resolve_config_path,
  )
- from path_sync.typer_app import app
- from path_sync.yaml_utils import dump_yaml_model, load_yaml_model
+ from path_sync._internal.typer_app import app
+ from path_sync._internal.yaml_utils import dump_yaml_model, load_yaml_model

  logger = logging.getLogger(__name__)

@@ -24,12 +24,8 @@ logger = logging.getLogger(__name__)
  @app.command()
  def boot(
      name: str = typer.Option(..., "-n", "--name", help="Config name"),
-     dest_paths: Annotated[
-         list[str], typer.Option("-d", "--dest", help="Destination relative paths")
-     ] = [],
-     sync_paths: Annotated[
-         list[str], typer.Option("-p", "--path", help="Paths to sync (glob patterns)")
-     ] = [],
+     dest_paths: Annotated[list[str], typer.Option("-d", "--dest", help="Destination relative paths")] = [],
+     sync_paths: Annotated[list[str], typer.Option("-p", "--path", help="Paths to sync (glob patterns)")] = [],
      dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
      regen: bool = typer.Option(False, "--regen", help="Regenerate config"),
  ) -> None:
@@ -9,18 +9,20 @@ from pathlib import Path

  import typer

- from path_sync import git_ops, header, sections
- from path_sync.file_utils import ensure_parents_write_text
- from path_sync.models import (
+ from path_sync import sections
+ from path_sync._internal import git_ops, header
+ from path_sync._internal.file_utils import ensure_parents_write_text
+ from path_sync._internal.models import (
      LOG_FORMAT,
      Destination,
      PathMapping,
      SrcConfig,
+     SyncMode,
      find_repo_root,
      resolve_config_path,
  )
- from path_sync.typer_app import app
- from path_sync.yaml_utils import load_yaml_model
+ from path_sync._internal.typer_app import app
+ from path_sync._internal.yaml_utils import load_yaml_model

  logger = logging.getLogger(__name__)

@@ -85,9 +87,7 @@ class CopyOptions:
  @app.command()
  def copy(
      name: str = typer.Option(..., "-n", "--name", help="Config name"),
-     dest_filter: str = typer.Option(
-         "", "-d", "--dest", help="Filter destinations (comma-separated)"
-     ),
+     dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
      dry_run: bool = typer.Option(False, "--dry-run", help="Preview without writing"),
      force_overwrite: bool = typer.Option(
          False,
@@ -188,9 +188,7 @@ def copy(
      for dest in destinations:
          try:
              with capture_sync_log(dest.name) as log_path:
-                 changes = _sync_destination(
-                     config, dest, src_root, current_sha, src_repo_url, opts, log_path
-                 )
+                 changes = _sync_destination(config, dest, src_root, current_sha, src_repo_url, opts, log_path)
                  total_changes += changes
          except Exception as e:
              logger.error(f"Failed to sync {dest.name}: {e}")
@@ -214,25 +212,27 @@ def _sync_destination(
      dest_root = (src_root / dest.dest_path_relative).resolve()

      if opts.dry_run and not dest_root.exists():
-         raise ValueError(
-             f"Destination repo not found: {dest_root}. "
-             "Clone it first or run without --dry-run."
-         )
+         raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")

      dest_repo = _ensure_dest_repo(dest, dest_root, opts.dry_run)
-
-     should_checkout = (
-         not opts.no_checkout
-         and not opts.dry_run
-         and _prompt(f"Switch to {dest.copy_branch}?", opts.no_prompt)
-     )
-     if should_checkout:
+     copy_branch = dest.resolved_copy_branch(config.name)
+
+     # --no-checkout means "I'm already on the right branch"
+     # Prompt decline means "skip git operations for this run"
+     if opts.no_checkout:
+         skip_git_ops = False
+     elif opts.dry_run:
+         skip_git_ops = True
+     elif _prompt(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
          git_ops.prepare_copy_branch(
              repo=dest_repo,
              default_branch=dest.default_branch,
-             copy_branch=dest.copy_branch,
+             copy_branch=copy_branch,
              from_default=opts.checkout_from_default,
          )
+         skip_git_ops = False
+     else:
+         skip_git_ops = True

      result = _sync_paths(config, dest, src_root, dest_root, opts)
      _print_sync_summary(dest, result)
@@ -241,12 +241,10 @@ def _sync_destination(
          logger.info(f"{dest.name}: No changes")
          return 0

-     if opts.dry_run:
+     if skip_git_ops:
          return result.total

-     _commit_and_pr(
-         config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path
-     )
+     _commit_and_pr(config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path)
      return result.total


@@ -263,10 +261,7 @@ def _print_sync_summary(dest: Destination, result: SyncResult) -> None:
  def _ensure_dest_repo(dest: Destination, dest_root: Path, dry_run: bool):
      if not dest_root.exists():
          if dry_run:
-             raise ValueError(
-                 f"Destination repo not found: {dest_root}. "
-                 "Clone it first or run without --dry-run."
-             )
+             raise ValueError(f"Destination repo not found: {dest_root}. Clone it first or run without --dry-run.")
          if not dest.repo_url:
              raise ValueError(f"Dest {dest.name} not found and no repo_url configured")
          git_ops.clone_repo(dest.repo_url, dest_root)
@@ -295,9 +290,7 @@ def _sync_paths(
          result.synced_paths.update(paths)

      if not opts.skip_orphan_cleanup:
-         result.orphans_deleted = _cleanup_orphans(
-             dest_root, config.name, result.synced_paths, opts.dry_run
-         )
+         result.orphans_deleted = _cleanup_orphans(dest_root, config.name, result.synced_paths, opts.dry_run)
      return result


@@ -313,6 +306,7 @@ def _sync_path(
      src_pattern = src_root / mapping.src_path
      changes = 0
      synced: set[Path] = set()
+     sync_mode = mapping.sync_mode

      if "*" in mapping.src_path:
          glob_prefix = mapping.src_path.split("*")[0].rstrip("/")
@@ -326,12 +320,13 @@ def _sync_path(
              rel = src_path.relative_to(src_root / glob_prefix)
              dest_path = dest_root / dest_base / rel
              dest_key = str(Path(dest_base) / rel)
-             changes += _copy_with_header(
+             changes += _copy_file(
                  src_path,
                  dest_path,
                  dest,
                  dest_key,
                  config_name,
+                 sync_mode,
                  dry_run,
                  force_overwrite,
              )
@@ -343,12 +338,13 @@ def _sync_path(
              rel = src_file.relative_to(src_pattern)
              dest_path = dest_root / dest_base / rel
              dest_key = str(Path(dest_base) / rel)
-             changes += _copy_with_header(
+             changes += _copy_file(
                  src_file,
                  dest_path,
                  dest,
                  dest_key,
                  config_name,
+                 sync_mode,
                  dry_run,
                  force_overwrite,
              )
@@ -356,12 +352,13 @@ def _sync_path(
      elif src_pattern.is_file():
          dest_base = mapping.resolved_dest_path()
          dest_path = dest_root / dest_base
-         changes += _copy_with_header(
+         changes += _copy_file(
              src_pattern,
              dest_path,
              dest,
              dest_base,
              config_name,
+             sync_mode,
              dry_run,
              force_overwrite,
          )
@@ -372,43 +369,74 @@ def _sync_path(
      return changes, synced


- def _copy_with_header(
+ def _copy_file(
      src: Path,
      dest_path: Path,
      dest: Destination,
      dest_key: str,
      config_name: str,
+     sync_mode: SyncMode,
      dry_run: bool,
      force_overwrite: bool = False,
  ) -> int:
-     src_content = src.read_text()
-     skip_list = dest.skip_sections.get(dest_key, [])
+     src_content = header.remove_header(src.read_text())

-     if sections.has_sections(src_content):
-         return _copy_with_sections(
-             src_content, dest_path, skip_list, config_name, dry_run, force_overwrite
-         )
+     match sync_mode:
+         case SyncMode.SCAFFOLD:
+             return _handle_scaffold(src_content, dest_path, dry_run)
+         case SyncMode.REPLACE:
+             return _handle_replace(src_content, dest_path, dry_run)
+         case SyncMode.SYNC:
+             skip_list = dest.skip_sections.get(dest_key, [])
+             return _handle_sync(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)
+
+
+ def _handle_scaffold(content: str, dest_path: Path, dry_run: bool) -> int:
+     if dest_path.exists():
+         return 0
+     return _write_file(dest_path, content, dry_run)
+
+
+ def _handle_replace(content: str, dest_path: Path, dry_run: bool) -> int:
+     if dest_path.exists() and dest_path.read_text() == content:
+         return 0
+     return _write_file(dest_path, content, dry_run)
+
+
+ def _handle_sync(
+     src_content: str,
+     dest_path: Path,
+     skip_list: list[str],
+     config_name: str,
+     dry_run: bool,
+     force_overwrite: bool,
+ ) -> int:
+     if sections.has_sections(src_content, dest_path):
+         return _handle_sync_sections(src_content, dest_path, skip_list, config_name, dry_run, force_overwrite)

      if dest_path.exists():
          existing = dest_path.read_text()
-         has_header = header.has_header(existing)
-         if not has_header and not force_overwrite:
+         has_hdr = header.has_header(existing)
+         if not has_hdr and not force_overwrite:
              logger.info(f"Skipping {dest_path} (header removed - opted out)")
              return 0
-         if header.remove_header(existing) == src_content and has_header:
+         if header.remove_header(existing) == src_content and has_hdr:
              return 0

-     new_content = header.add_header(src_content, dest_path.suffix, config_name)
+     new_content = header.add_header(src_content, dest_path, config_name)
+     return _write_file(dest_path, new_content, dry_run)
+
+
+ def _write_file(dest_path: Path, content: str, dry_run: bool) -> int:
      if dry_run:
          logger.info(f"[DRY RUN] Would write: {dest_path}")
          return 1
-
-     ensure_parents_write_text(dest_path, new_content)
+     ensure_parents_write_text(dest_path, content)
      logger.info(f"Wrote: {dest_path}")
      return 1


- def _copy_with_sections(
+ def _handle_sync_sections(
      src_content: str,
      dest_path: Path,
      skip_list: list[str],
@@ -416,7 +444,7 @@ def _copy_with_sections(
      dry_run: bool,
      force_overwrite: bool,
  ) -> int:
-     src_sections = sections.extract_sections(src_content)
+     src_sections = sections.extract_sections(src_content, dest_path)

      if dest_path.exists():
          existing = dest_path.read_text()
@@ -424,22 +452,16 @@
              logger.info(f"Skipping {dest_path} (header removed - opted out)")
              return 0
          dest_body = header.remove_header(existing)
-         new_body = sections.replace_sections(dest_body, src_sections, skip_list)
+         new_body = sections.replace_sections(dest_body, src_sections, dest_path, skip_list)
      else:
          new_body = src_content

-     new_content = header.add_header(new_body, dest_path.suffix, config_name)
+     new_content = header.add_header(new_body, dest_path, config_name)

      if dest_path.exists() and dest_path.read_text() == new_content:
          return 0

-     if dry_run:
-         logger.info(f"[DRY RUN] Would write: {dest_path}")
-         return 1
-
-     ensure_parents_write_text(dest_path, new_content)
-     logger.info(f"Wrote: {dest_path}")
-     return 1
+     return _write_file(dest_path, new_content, dry_run)


  def _cleanup_orphans(
@@ -462,12 +484,11 @@ def _cleanup_orphans(

  def _find_files_with_config(dest_root: Path, config_name: str) -> list[Path]:
      result = []
-     for ext in header.COMMENT_PREFIXES:
-         for path in dest_root.rglob(f"*{ext}"):
-             if ".git" in path.parts:
-                 continue
-             if header.file_get_config_name(path) == config_name:
-                 result.append(path)
+     for path in dest_root.rglob("*"):
+         if ".git" in path.parts:
+             continue
+         if header.file_get_config_name(path) == config_name:
+             result.append(path)
      return result


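The orphan scan above now walks every file under the destination instead of iterating a fixed extension table, relying on `header.file_get_config_name` to return None for anything it cannot attribute to a config. A rough sketch of that contract (the path is invented):

from pathlib import Path

from path_sync._internal import header

# Missing files, unknown extensions, and unreadable bytes all yield None,
# which keeps the rglob("*") walk in _find_files_with_config safe.
assert header.file_get_config_name(Path("dest/assets/logo.png")) is None
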
@@ -485,20 +506,22 @@ def _commit_and_pr(
          logger.info("Local mode: skipping commit/push/PR")
          return

-     if not _prompt("Commit changes?", opts.no_prompt):
+     copy_branch = dest.resolved_copy_branch(config.name)
+
+     if not _prompt(f"Commit changes to {dest.name}?", opts.no_prompt):
          return

      commit_msg = f"chore: sync {config.name} from {sha[:8]}"
      git_ops.commit_changes(repo, commit_msg)
      typer.echo(f" Committed: {commit_msg}", err=True)

-     if not _prompt("Push to origin?", opts.no_prompt):
+     if not _prompt(f"Push {dest.name} to origin?", opts.no_prompt):
          return

-     git_ops.push_branch(repo, dest.copy_branch, force=True)
-     typer.echo(f" Pushed: {dest.copy_branch} (force)", err=True)
+     git_ops.push_branch(repo, copy_branch, force=True)
+     typer.echo(f" Pushed: {copy_branch} (force)", err=True)

-     if opts.no_pr or not _prompt("Create PR?", opts.no_prompt):
+     if opts.no_pr or not _prompt(f"Create PR for {dest.name}?", opts.no_prompt):
          return

      sync_log = log_path.read_text() if log_path.exists() else ""
@@ -512,7 +535,7 @@ def _commit_and_pr(
      title = opts.pr_title.format(name=config.name, dest_name=dest.name)
      pr_url = git_ops.create_or_update_pr(
          dest_root,
-         dest.copy_branch,
+         copy_branch,
          title,
          pr_body,
          opts.pr_labels.split(",") if opts.pr_labels else None,
@@ -5,19 +5,17 @@ from pathlib import Path

  import typer

- from path_sync import git_ops
- from path_sync.models import find_repo_root
- from path_sync.typer_app import app
- from path_sync.validation import parse_skip_sections, validate_no_unauthorized_changes
+ from path_sync._internal import git_ops
+ from path_sync._internal.models import find_repo_root
+ from path_sync._internal.typer_app import app
+ from path_sync._internal.validation import parse_skip_sections, validate_no_unauthorized_changes

  logger = logging.getLogger(__name__)


  @app.command("validate-no-changes")
  def validate_no_changes(
-     branch: str = typer.Option(
-         "main", "-b", "--branch", help="Default branch to compare against"
-     ),
+     branch: str = typer.Option("main", "-b", "--branch", help="Default branch to compare against"),
      skip_sections_opt: str = typer.Option(
          "",
          "--skip-sections",
@@ -36,16 +34,12 @@ def validate_no_changes(
          logger.info(f"On default branch {branch}, validation skipped")
          return

-     skip_sections = (
-         parse_skip_sections(skip_sections_opt) if skip_sections_opt else None
-     )
+     skip_sections = parse_skip_sections(skip_sections_opt) if skip_sections_opt else None
      unauthorized = validate_no_unauthorized_changes(repo_root, branch, skip_sections)

      if unauthorized:
          files_list = "\n ".join(unauthorized)
-         logger.error(
-             f"Unauthorized changes in {len(unauthorized)} files:\n {files_list}"
-         )
+         logger.error(f"Unauthorized changes in {len(unauthorized)} files:\n {files_list}")
          raise typer.Exit(1)

      logger.info("Validation passed: no unauthorized changes")
@@ -37,9 +37,7 @@ def is_git_repo(path: Path) -> bool:

  def get_default_branch(repo: Repo) -> str:
      with suppress(GitCommandError):
-         return repo.git.symbolic_ref("refs/remotes/origin/HEAD", short=True).replace(
-             "origin/", ""
-         )
+         return repo.git.symbolic_ref("refs/remotes/origin/HEAD", short=True).replace("origin/", "")
      return "main"


@@ -60,9 +58,7 @@ def checkout_branch(repo: Repo, branch: str) -> None:
          repo.git.checkout("-b", branch)


- def prepare_copy_branch(
-     repo: Repo, default_branch: str, copy_branch: str, from_default: bool = False
- ) -> None:
+ def prepare_copy_branch(repo: Repo, default_branch: str, copy_branch: str, from_default: bool = False) -> None:
      """Prepare copy_branch for syncing.

      Args:
@@ -117,9 +113,7 @@ def _ensure_git_user(repo: Repo) -> None:
          repo.config_reader().get_value("user", "name")
      except Exception:
          repo.config_writer().set_value("user", "name", "path-sync[bot]").release()
-         repo.config_writer().set_value(
-             "user", "email", "path-sync[bot]@users.noreply.github.com"
-         ).release()
+         repo.config_writer().set_value("user", "email", "path-sync[bot]@users.noreply.github.com").release()


  def push_branch(repo: Repo, branch: str, force: bool = True) -> None:
@@ -0,0 +1,83 @@
+ from __future__ import annotations
+
+ import re
+ from pathlib import Path
+
+ from zero_3rdparty.sections import CommentConfig, get_comment_config
+
+ from path_sync._internal.models import HEADER_TEMPLATE, HeaderConfig
+
+ HEADER_PATTERN = re.compile(r"path-sync copy -n (?P<config_name>[\w-]+)")
+
+
+ def _resolve_comment_config(path: Path, config: HeaderConfig | None) -> CommentConfig:
+     if config:
+         ext = path.suffix
+         prefix = config.comment_prefixes.get(ext)
+         suffix = config.comment_suffixes.get(ext, "")
+         if prefix:
+             return CommentConfig(prefix, suffix)
+     return get_comment_config(path)
+
+
+ def get_comment_prefix(path: Path, config: HeaderConfig | None = None) -> str:
+     return _resolve_comment_config(path, config).prefix
+
+
+ def get_comment_suffix(path: Path, config: HeaderConfig | None = None) -> str:
+     return _resolve_comment_config(path, config).suffix
+
+
+ def get_header_line(path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
+     cc = _resolve_comment_config(path, config)
+     header_text = HEADER_TEMPLATE.format(config_name=config_name)
+     return f"{cc.prefix} {header_text}{cc.suffix}"
+
+
+ def has_header(content: str) -> bool:
+     first_line = content.split("\n", 1)[0] if content else ""
+     return bool(HEADER_PATTERN.search(first_line))
+
+
+ def get_config_name(content: str) -> str | None:
+     first_line = content.split("\n", 1)[0] if content else ""
+     if match := HEADER_PATTERN.search(first_line):
+         return match.group("config_name")
+     return None
+
+
+ def add_header(content: str, path: Path, config_name: str, config: HeaderConfig | None = None) -> str:
+     header = get_header_line(path, config_name, config)
+     return f"{header}\n{content}"
+
+
+ def remove_header(content: str) -> str:
+     if not has_header(content):
+         return content
+     lines = content.split("\n", 1)
+     return lines[1] if len(lines) > 1 else ""
+
+
+ def has_known_comment_prefix(path: Path) -> bool:
+     try:
+         get_comment_config(path)
+         return True
+     except ValueError:
+         return False
+
+
+ def file_get_config_name(path: Path) -> str | None:
+     if not path.exists() or not has_known_comment_prefix(path):
+         return None
+     try:
+         with path.open() as f:
+             first_line = f.readline()
+     except (UnicodeDecodeError, OSError):
+         return None
+     return get_config_name(first_line)
+
+
+ def file_has_header(path: Path, config: HeaderConfig | None = None) -> bool:
+     if config and path.suffix not in config.comment_prefixes:
+         return False
+     return file_get_config_name(path) is not None
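
These helpers (the new internal header module referenced by `from path_sync._internal import git_ops, header` above) tag each synced file with a first-line marker that encodes the config name. A minimal round-trip sketch, assuming zero_3rdparty's comment config maps `.py` to a `#` prefix with no suffix:

from pathlib import Path

from path_sync._internal import header  # internal module, not part of the public API

body = "print('hello')\n"
tagged = header.add_header(body, Path("scripts/example.py"), "ci-files")
# First line becomes: "# path-sync copy -n ci-files"
assert header.has_header(tagged)
assert header.get_config_name(tagged) == "ci-files"
assert header.remove_header(tagged) == body
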
@@ -1,6 +1,7 @@
  from __future__ import annotations

  import glob as glob_mod
+ from enum import StrEnum
  from pathlib import Path
  from typing import ClassVar

@@ -9,9 +10,16 @@ from pydantic import BaseModel, Field
  LOG_FORMAT = "%(asctime)s %(levelname)s %(message)s"


+ class SyncMode(StrEnum):
+     SYNC = "sync"
+     REPLACE = "replace"
+     SCAFFOLD = "scaffold"
+
+
  class PathMapping(BaseModel):
      src_path: str
      dest_path: str = ""
+     sync_mode: SyncMode = SyncMode.SYNC

      def resolved_dest_path(self) -> str:
          return self.dest_path or self.src_path
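
`SyncMode` drives the `_copy_file` dispatch shown earlier in cmd_copy: `sync` (the default) merges through the header and section machinery, `replace` rewrites the destination whenever its content differs, and `scaffold` writes the file only when it does not exist yet. A short sketch of declaring mappings in each mode (the paths are invented; plain strings also validate because `SyncMode` is a `StrEnum`):

from path_sync._internal.models import PathMapping, SyncMode

mappings = [
    PathMapping(src_path="ci/lint.yaml"),                              # SyncMode.SYNC by default
    PathMapping(src_path="scripts/*.sh", sync_mode=SyncMode.REPLACE),  # mirror the source exactly
    PathMapping(src_path="docs/README.md", dest_path="README.md", sync_mode="scaffold"),  # write once
]
assert mappings[0].sync_mode is SyncMode.SYNC
assert mappings[2].sync_mode is SyncMode.SCAFFOLD  # pydantic coerces the plain string
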
@@ -30,32 +38,11 @@ class PathMapping(BaseModel):


  HEADER_TEMPLATE = "path-sync copy -n {config_name}"
- DEFAULT_COMMENT_PREFIXES: dict[str, str] = {
-     ".py": "#",
-     ".sh": "#",
-     ".yaml": "#",
-     ".yml": "#",
-     ".go": "//",
-     ".js": "//",
-     ".ts": "//",
-     ".md": "<!--",
-     ".mdc": "<!--",
-     ".html": "<!--",
- }
- DEFAULT_COMMENT_SUFFIXES: dict[str, str] = {
-     ".md": " -->",
-     ".mdc": " -->",
-     ".html": " -->",
- }


  class HeaderConfig(BaseModel):
-     comment_prefixes: dict[str, str] = Field(
-         default_factory=DEFAULT_COMMENT_PREFIXES.copy
-     )
-     comment_suffixes: dict[str, str] = Field(
-         default_factory=DEFAULT_COMMENT_SUFFIXES.copy
-     )
+     comment_prefixes: dict[str, str] = Field(default_factory=dict)
+     comment_suffixes: dict[str, str] = Field(default_factory=dict)


  DEFAULT_BODY_TEMPLATE = """\
@@ -105,10 +92,14 @@ class Destination(BaseModel):
      name: str
      repo_url: str = ""
      dest_path_relative: str
-     copy_branch: str = "sync/path-sync"
+     copy_branch: str = ""
      default_branch: str = "main"
      skip_sections: dict[str, list[str]] = Field(default_factory=dict)

+     def resolved_copy_branch(self, config_name: str) -> str:
+         """Returns branch name, defaulting to sync/{config_name} if not set."""
+         return self.copy_branch or f"sync/{config_name}"
+

  class SrcConfig(BaseModel):
      CONFIG_EXT: ClassVar[str] = ".src.yaml"
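
With `copy_branch` now defaulting to empty, the branch each destination is synced on follows the config name unless overridden, which is what `_sync_destination` and `_commit_and_pr` rely on above. A small sketch (the destination names are invented):

from path_sync._internal.models import Destination

implicit = Destination(name="backend", dest_path_relative="../backend")
explicit = Destination(name="frontend", dest_path_relative="../frontend", copy_branch="chore/file-sync")

assert implicit.resolved_copy_branch("ci-files") == "sync/ci-files"    # falls back to sync/{config_name}
assert explicit.resolved_copy_branch("ci-files") == "chore/file-sync"  # explicit branch wins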