path-sync 0.4.1__py3-none-any.whl → 0.5.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
path_sync/__init__.py CHANGED
@@ -1,12 +1,14 @@
  # Generated by pkg-ext
  # flake8: noqa
- from path_sync import config
  from path_sync import copy
  from path_sync import dep_update
+ from path_sync import validate_no_changes
+ from path_sync import config

- VERSION = "0.4.1"
+ VERSION = "0.5.0"

  __all__ = [
-     "config",
      "copy",
      "dep_update",
+     "validate_no_changes",
+     "config",
  ]
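
The hunk above reorders the generated `__init__.py` and re-exports the new `validate_no_changes` module alongside `copy`, `dep_update`, and `config`. A minimal sketch of the resulting import surface, grounded only in the lines shown (illustrative, not part of the diff):

```python
# Illustrative only; exercises the 0.5.0 re-exports listed in __all__ above.
import path_sync

print(path_sync.VERSION)  # "0.5.0"
print(path_sync.__all__)  # ["copy", "dep_update", "validate_no_changes", "config"]
```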
@@ -2,8 +2,7 @@ from __future__ import annotations

  import glob
  import logging
- import tempfile
- from contextlib import contextmanager
+ from collections.abc import Callable
  from dataclasses import dataclass, field
  from pathlib import Path

@@ -11,10 +10,10 @@ import typer
  from pydantic import BaseModel

  from path_sync import sections
- from path_sync._internal import git_ops, header
+ from path_sync._internal import cmd_options, git_ops, header, prompt_utils
  from path_sync._internal.file_utils import ensure_parents_write_text
+ from path_sync._internal.log_capture import capture_log
  from path_sync._internal.models import (
-     LOG_FORMAT,
      Destination,
      PathMapping,
      SrcConfig,
@@ -32,16 +31,6 @@ EXIT_CHANGES = 1
  EXIT_ERROR = 2


- def _prompt(message: str, no_prompt: bool) -> bool:
-     if no_prompt:
-         return True
-     try:
-         response = input(f"{message} [y/n]: ").strip().lower()
-         return response == "y"
-     except (EOFError, KeyboardInterrupt):
-         return False
-
-
  @dataclass
  class SyncResult:
      content_changes: int = 0
@@ -53,22 +42,6 @@ class SyncResult:
          return self.content_changes + self.orphans_deleted


- @contextmanager
- def capture_sync_log(dest_name: str):
-     with tempfile.TemporaryDirectory(prefix="path-sync-") as tmpdir:
-         log_path = Path(tmpdir) / f"{dest_name}.log"
-         file_handler = logging.FileHandler(log_path, mode="w")
-         file_handler.setLevel(logging.INFO)
-         file_handler.setFormatter(logging.Formatter(LOG_FORMAT))
-         root_logger = logging.getLogger("path_sync")
-         root_logger.addHandler(file_handler)
-         try:
-             yield log_path
-         finally:
-             file_handler.close()
-             root_logger.removeHandler(file_handler)
-
-
  class CopyOptions(BaseModel):
      dry_run: bool = False
      force_overwrite: bool = False
@@ -79,9 +52,9 @@ class CopyOptions(BaseModel):
      no_pr: bool = False
      skip_orphan_cleanup: bool = False
      pr_title: str = ""
-     pr_labels: str = ""
-     pr_reviewers: str = ""
-     pr_assignees: str = ""
+     labels: list[str] | None = None
+     reviewers: list[str] | None = None
+     assignees: list[str] | None = None


  @app.command()
@@ -141,21 +114,9 @@ def copy(
        "--pr-title",
        help="Override PR title (supports {name}, {dest_name})",
    ),
-     pr_labels: str = typer.Option(
-         "",
-         "--pr-labels",
-         help="Comma-separated PR labels",
-     ),
-     pr_reviewers: str = typer.Option(
-         "",
-         "--pr-reviewers",
-         help="Comma-separated PR reviewers",
-     ),
-     pr_assignees: str = typer.Option(
-         "",
-         "--pr-assignees",
-         help="Comma-separated PR assignees",
-     ),
+     pr_labels: str = cmd_options.pr_labels_option(),
+     pr_reviewers: str = cmd_options.pr_reviewers_option(),
+     pr_assignees: str = cmd_options.pr_assignees_option(),
      skip_orphan_cleanup: bool = typer.Option(
          False,
          "--skip-orphan-cleanup",
@@ -192,9 +153,9 @@ def copy(
        no_pr=no_pr,
        skip_orphan_cleanup=skip_orphan_cleanup,
        pr_title=pr_title or config.pr_defaults.title,
-         pr_labels=pr_labels or ",".join(config.pr_defaults.labels),
-         pr_reviewers=pr_reviewers or ",".join(config.pr_defaults.reviewers),
-         pr_assignees=pr_assignees or ",".join(config.pr_defaults.assignees),
+         labels=cmd_options.split_csv(pr_labels) or config.pr_defaults.labels,
+         reviewers=cmd_options.split_csv(pr_reviewers) or config.pr_defaults.reviewers,
+         assignees=cmd_options.split_csv(pr_assignees) or config.pr_defaults.assignees,
    )

    destinations = config.destinations
@@ -205,8 +166,8 @@
    total_changes = 0
    for dest in destinations:
        try:
-             with capture_sync_log(dest.name) as log_path:
-                 changes = _sync_destination(config, dest, src_root, current_sha, src_repo_url, opts, log_path)
+             with capture_log(dest.name) as read_log:
+                 changes = _sync_destination(config, dest, src_root, current_sha, src_repo_url, opts, read_log)
                total_changes += changes
        except Exception as e:
            logger.error(f"Failed to sync {dest.name}: {e}")
@@ -225,7 +186,7 @@ def _sync_destination(
    current_sha: str,
    src_repo_url: str,
    opts: CopyOptions,
-     log_path: Path,
+     read_log: Callable[[], str],
) -> int:
    dest_root = (src_root / dest.dest_path_relative).resolve()

@@ -241,7 +202,7 @@
        skip_git_ops = True
    elif opts.no_checkout:
        skip_git_ops = False
-     elif _prompt(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
+     elif prompt_utils.prompt_confirm(f"Switch {dest.name} to {copy_branch}?", opts.no_prompt):
        git_ops.prepare_copy_branch(
            repo=dest_repo,
            default_branch=dest.default_branch,
@@ -262,7 +223,7 @@
    if skip_git_ops:
        return result.total

-     _commit_and_pr(config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, log_path)
+     _commit_and_pr(config, dest_repo, dest_root, dest, current_sha, src_repo_url, opts, read_log)
    return result.total


@@ -531,7 +492,7 @@ def _commit_and_pr(
    sha: str,
    src_repo_url: str,
    opts: CopyOptions,
-     log_path: Path,
+     read_log: Callable[[], str],
) -> None:
    if opts.local:
        logger.info("Local mode: skipping commit/push/PR")
@@ -539,23 +500,23 @@ def _commit_and_pr(

    copy_branch = dest.resolved_copy_branch(config.name)

-     if not _prompt(f"Commit changes to {dest.name}?", opts.no_prompt):
+     if not prompt_utils.prompt_confirm(f"Commit changes to {dest.name}?", opts.no_prompt):
        return

    commit_msg = f"chore: sync {config.name} from {sha[:8]}"
    git_ops.commit_changes(repo, commit_msg)
    typer.echo(f" Committed: {commit_msg}", err=True)

-     if not _prompt(f"Push {dest.name} to origin?", opts.no_prompt):
+     if not prompt_utils.prompt_confirm(f"Push {dest.name} to origin?", opts.no_prompt):
        return

    git_ops.push_branch(repo, copy_branch, force=True)
    typer.echo(f" Pushed: {copy_branch} (force)", err=True)

-     if opts.no_pr or not _prompt(f"Create PR for {dest.name}?", opts.no_prompt):
+     if opts.no_pr or not prompt_utils.prompt_confirm(f"Create PR for {dest.name}?", opts.no_prompt):
        return

-     sync_log = log_path.read_text() if log_path.exists() else ""
+     sync_log = read_log()
    pr_body = config.pr_defaults.format_body(
        src_repo_url=src_repo_url,
        src_sha=sha,
@@ -569,9 +530,9 @@ def _commit_and_pr(
        copy_branch,
        title,
        pr_body,
-         opts.pr_labels.split(",") if opts.pr_labels else None,
-         opts.pr_reviewers.split(",") if opts.pr_reviewers else None,
-         opts.pr_assignees.split(",") if opts.pr_assignees else None,
+         opts.labels,
+         opts.reviewers,
+         opts.assignees,
    )
    if pr_url:
        typer.echo(f" Created PR: {pr_url}", err=True)
@@ -1,6 +1,7 @@
  from __future__ import annotations

  import logging
+ import shutil
  import subprocess
  from dataclasses import dataclass, field
  from enum import StrEnum
@@ -9,7 +10,8 @@ from pathlib import Path
  import typer
  from git import Repo

- from path_sync._internal import git_ops
+ from path_sync._internal import cmd_options, git_ops, prompt_utils
+ from path_sync._internal.log_capture import capture_log
  from path_sync._internal.models import Destination, find_repo_root
  from path_sync._internal.models_dep import (
      DepConfig,
@@ -23,8 +25,6 @@ from path_sync._internal.yaml_utils import load_yaml_model

  logger = logging.getLogger(__name__)

- MAX_STDERR_LINES = 20
-

  class Status(StrEnum):
      PASSED = "passed"
@@ -38,14 +38,8 @@ class Status(StrEnum):
  class StepFailure:
      step: str
      returncode: int
-     stderr: str
      on_fail: OnFailStrategy

-     @classmethod
-     def from_error(cls, step: str, e: subprocess.CalledProcessError, on_fail: OnFailStrategy) -> StepFailure:
-         stderr = _truncate_stderr(e.stderr or "", MAX_STDERR_LINES)
-         return cls(step=step, returncode=e.returncode, stderr=stderr, on_fail=on_fail)
-

  @dataclass
  class RepoResult:
@@ -53,24 +47,27 @@ class RepoResult:
      repo_path: Path
      status: Status
      failures: list[StepFailure] = field(default_factory=list)
+     log_content: str = ""


- def _truncate_stderr(text: str, max_lines: int) -> str:
-     lines = text.strip().splitlines()
-     if len(lines) <= max_lines:
-         return text.strip()
-     skipped = len(lines) - max_lines
-     return f"... ({skipped} lines skipped)\n" + "\n".join(lines[-max_lines:])
+ @dataclass
+ class DepUpdateOptions:
+     dry_run: bool = False
+     skip_verify: bool = False
+     reviewers: list[str] | None = None
+     assignees: list[str] | None = None


  @app.command()
  def dep_update(
      name: str = typer.Option(..., "-n", "--name", help="Config name"),
      dest_filter: str = typer.Option("", "-d", "--dest", help="Filter destinations (comma-separated)"),
-     work_dir: str = typer.Option("", "--work-dir", help="Directory for cloning repos"),
+     work_dir: str = typer.Option("", "--work-dir", help="Clone repos here (overrides dest_path_relative)"),
      dry_run: bool = typer.Option(False, "--dry-run", help="Preview without creating PRs"),
      skip_verify: bool = typer.Option(False, "--skip-verify", help="Skip verification steps"),
      src_root_opt: str = typer.Option("", "--src-root", help="Source repo root"),
+     pr_reviewers: str = cmd_options.pr_reviewers_option(),
+     pr_assignees: str = cmd_options.pr_assignees_option(),
) -> None:
    """Run dependency updates across repositories."""
    src_root = Path(src_root_opt) if src_root_opt else find_repo_root(Path.cwd())
@@ -86,8 +83,15 @@ def dep_update(
        filter_names = [n.strip() for n in dest_filter.split(",")]
        destinations = [d for d in destinations if d.name in filter_names]

-     results = _update_and_validate(config, destinations, src_root, work_dir, skip_verify)
-     _create_prs(config, results, dry_run)
+     opts = DepUpdateOptions(
+         dry_run=dry_run,
+         skip_verify=skip_verify,
+         reviewers=cmd_options.split_csv(pr_reviewers) or config.pr.reviewers,
+         assignees=cmd_options.split_csv(pr_assignees) or config.pr.assignees,
+     )
+
+     results = _update_and_validate(config, destinations, src_root, work_dir, opts)
+     _create_prs(config, results, opts)

    if any(r.status == Status.SKIPPED for r in results):
        raise typer.Exit(1)
@@ -98,12 +102,12 @@ def _update_and_validate(
    destinations: list[Destination],
    src_root: Path,
    work_dir: str,
-     skip_verify: bool,
+     opts: DepUpdateOptions,
) -> list[RepoResult]:
    results: list[RepoResult] = []

    for dest in destinations:
-         result = _process_single_repo(config, dest, src_root, work_dir, skip_verify)
+         result = _process_single_repo(config, dest, src_root, work_dir, opts)

        if result.status == Status.FAILED:
            logger.error(f"{dest.name}: Verification failed, stopping")
@@ -120,79 +124,100 @@ def _process_single_repo(
    dest: Destination,
    src_root: Path,
    work_dir: str,
-     skip_verify: bool,
+     opts: DepUpdateOptions,
+ ) -> RepoResult:
+     with capture_log(dest.name) as read_log:
+         result = _process_single_repo_inner(config, dest, src_root, work_dir, opts)
+         result.log_content = read_log()
+         return result
+
+
+ def _process_single_repo_inner(
+     config: DepConfig,
+     dest: Destination,
+     src_root: Path,
+     work_dir: str,
+     opts: DepUpdateOptions,
) -> RepoResult:
    logger.info(f"Processing {dest.name}...")
    repo_path = _resolve_repo_path(dest, src_root, work_dir)
-     repo = _ensure_repo(dest, repo_path)
+     repo = _ensure_repo(dest, repo_path, dest.default_branch)
    git_ops.prepare_copy_branch(repo, dest.default_branch, config.pr.branch, from_default=True)

    if failure := _run_updates(config.updates, repo_path):
        logger.warning(f"{dest.name}: Update failed with exit code {failure.returncode}")
-         return RepoResult(dest, repo_path, Status.SKIPPED, failures=[failure])
+         return RepoResult(dest=dest, repo_path=repo_path, status=Status.SKIPPED, failures=[failure])

    if not git_ops.has_changes(repo):
        logger.info(f"{dest.name}: No changes, skipping")
-         return RepoResult(dest, repo_path, Status.NO_CHANGES)
+         return RepoResult(dest=dest, repo_path=repo_path, status=Status.NO_CHANGES)

    git_ops.commit_changes(repo, config.pr.title)

-     if skip_verify:
-         return RepoResult(dest, repo_path, Status.PASSED)
+     if opts.skip_verify:
+         return RepoResult(dest=dest, repo_path=repo_path, status=Status.PASSED)

    return _verify_repo(repo, repo_path, config.verify, dest)


  def _run_updates(updates: list[UpdateEntry], repo_path: Path) -> StepFailure | None:
-     """Returns StepFailure on failure, None on success."""
      try:
          for update in updates:
              _run_command(update.command, repo_path / update.workdir)
          return None
      except subprocess.CalledProcessError as e:
-         return StepFailure.from_error(e.cmd, e, OnFailStrategy.SKIP)
+         return StepFailure(step=e.cmd, returncode=e.returncode, on_fail=OnFailStrategy.SKIP)


  def _verify_repo(repo: Repo, repo_path: Path, verify: VerifyConfig, dest: Destination) -> RepoResult:
      status, failures = _run_verify_steps(repo, repo_path, verify)
-     return RepoResult(dest, repo_path, status, failures)
+     return RepoResult(dest=dest, repo_path=repo_path, status=status, failures=failures)


- def _create_prs(config: DepConfig, results: list[RepoResult], dry_run: bool) -> None:
+ def _create_prs(config: DepConfig, results: list[RepoResult], opts: DepUpdateOptions) -> None:
      for result in results:
          if result.status == Status.SKIPPED:
              continue

-         if dry_run:
+         if opts.dry_run:
              logger.info(f"[DRY RUN] Would create PR for {result.dest.name}")
              continue

          repo = git_ops.get_repo(result.repo_path)
          git_ops.push_branch(repo, config.pr.branch, force=True)

-         body = _build_pr_body(result.failures)
+         body = _build_pr_body(result.log_content, result.failures)
          git_ops.create_or_update_pr(
              result.repo_path,
              config.pr.branch,
              config.pr.title,
              body,
              config.pr.labels or None,
+             reviewers=opts.reviewers,
+             assignees=opts.assignees,
              auto_merge=config.pr.auto_merge,
          )
          logger.info(f"{result.dest.name}: PR created/updated")


  def _resolve_repo_path(dest: Destination, src_root: Path, work_dir: str) -> Path:
+     if work_dir:
+         return Path(work_dir) / dest.name
      if dest.dest_path_relative:
          return (src_root / dest.dest_path_relative).resolve()
-     if not work_dir:
-         raise typer.BadParameter(f"No dest_path_relative for {dest.name}, --work-dir required")
-     return Path(work_dir) / dest.name
-
-
- def _ensure_repo(dest: Destination, repo_path: Path):
-     if repo_path.exists() and git_ops.is_git_repo(repo_path):
-         return git_ops.get_repo(repo_path)
+     raise typer.BadParameter(f"No dest_path_relative for {dest.name}, --work-dir required")
+
+
+ def _ensure_repo(dest: Destination, repo_path: Path, default_branch: str) -> Repo:
+     if repo_path.exists():
+         if git_ops.is_git_repo(repo_path):
+             repo = git_ops.get_repo(repo_path)
+             git_ops.fetch_and_reset_to_default(repo, default_branch)
+             return repo
+         logger.warning(f"Invalid git repo at {repo_path}")
+         if not prompt_utils.prompt_confirm(f"Remove {repo_path} and re-clone?"):
+             raise typer.Abort()
+         shutil.rmtree(repo_path)
    if not dest.repo_url:
        raise ValueError(f"Dest {dest.name} not found at {repo_path} and no repo_url configured")
    return git_ops.clone_repo(dest.repo_url, repo_path)
@@ -201,17 +226,19 @@ def _ensure_repo(dest: Destination, repo_path: Path):
  def _run_command(cmd: str, cwd: Path) -> None:
      logger.info(f"Running: {cmd}")
      result = subprocess.run(cmd, shell=True, cwd=cwd, capture_output=True, text=True)
+     prefix = cmd.split()[0]
+     for line in result.stdout.strip().splitlines():
+         logger.info(f"[{prefix}] {line}")
+     for line in result.stderr.strip().splitlines():
+         if result.returncode != 0:
+             logger.error(f"[{prefix}] {line}")
+         else:
+             logger.info(f"[{prefix}] {line}")
      if result.returncode != 0:
-         stderr = result.stderr.strip()
-         logger.error(f"Command failed '{cmd}' in {cwd}: {stderr}")
-         raise subprocess.CalledProcessError(result.returncode, cmd, output=result.stdout, stderr=stderr)
+         raise subprocess.CalledProcessError(result.returncode, cmd, output=result.stdout, stderr=result.stderr)


- def _run_verify_steps(
-     repo: Repo,
-     repo_path: Path,
-     verify: VerifyConfig,
- ) -> tuple[Status, list[StepFailure]]:
+ def _run_verify_steps(repo: Repo, repo_path: Path, verify: VerifyConfig) -> tuple[Status, list[StepFailure]]:
      failures: list[StepFailure] = []

      for step in verify.steps:
@@ -220,7 +247,7 @@ def _run_verify_steps(
        try:
            _run_command(step.run, repo_path)
        except subprocess.CalledProcessError as e:
-             failure = StepFailure.from_error(step.run, e, on_fail)
+             failure = StepFailure(step=step.run, returncode=e.returncode, on_fail=on_fail)
            match on_fail:
                case OnFailStrategy.FAIL:
                    return (Status.FAILED, [failure])
@@ -236,15 +263,15 @@ def _run_verify_steps(
    return (Status.WARN if failures else Status.PASSED, failures)


- def _build_pr_body(failures: list[StepFailure]) -> str:
+ def _build_pr_body(log_content: str, failures: list[StepFailure]) -> str:
      body = "Automated dependency update."
-     if not failures:
-         return body
-
-     body += "\n\n---\n## Verification Issues\n"
-     for f in failures:
-         body += f"\n### `{f.step}` (strategy: {f.on_fail})\n"
-         body += f"**Exit code:** {f.returncode}\n"
-         if f.stderr:
-             body += f"\n```\n{f.stderr}\n```\n"
+
+     if log_content.strip():
+         body += "\n\n## Command Output\n\n```\n" + log_content.strip() + "\n```"
+
+     if failures:
+         body += "\n\n---\n## Verification Issues\n"
+         for f in failures:
+             body += f"\n- `{f.step}` failed (exit code {f.returncode}, strategy: {f.on_fail})"
+
      return body
@@ -0,0 +1,22 @@
+ """Shared CLI options for commands."""
+
+ from __future__ import annotations
+
+ import typer
+
+
+ def pr_reviewers_option() -> str:
+     return typer.Option("", "--pr-reviewers", help="Comma-separated PR reviewers")
+
+
+ def pr_assignees_option() -> str:
+     return typer.Option("", "--pr-assignees", help="Comma-separated PR assignees")
+
+
+ def pr_labels_option() -> str:
+     return typer.Option("", "--pr-labels", help="Comma-separated PR labels")
+
+
+ def split_csv(value: str) -> list[str] | None:
+     """Split comma-separated string, returns None if empty."""
+     return [v.strip() for v in value.split(",")] if value else None
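
This new-file hunk appears to be `path_sync/_internal/cmd_options.py` (it matches the `cmd_options.py` entry in the 0.5.0 RECORD, and both `cmd_copy` and `cmd_dep_update` import it by that name). A minimal sketch of how the shared option factories and `split_csv` compose in a Typer command; the `demo` command is hypothetical and not part of the package:

```python
# Illustrative only; assumes the cmd_options module shown in the hunk above.
import typer

from path_sync._internal import cmd_options

app = typer.Typer()


@app.command()
def demo(pr_labels: str = cmd_options.pr_labels_option()) -> None:
    # split_csv("a, b") -> ["a", "b"]; split_csv("") -> None,
    # so config defaults can take over via `or`, as cmd_copy does.
    labels = cmd_options.split_csv(pr_labels) or ["dependencies"]
    typer.echo(str(labels))
```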
@@ -41,6 +41,15 @@ def get_default_branch(repo: Repo) -> str:
    return "main"


+ def fetch_and_reset_to_default(repo: Repo, default_branch: str) -> None:
+     """Fetch latest from remote and reset to default branch."""
+     logger.info(f"Fetching origin and resetting to {default_branch}")
+     repo.remotes.origin.fetch()
+     if repo.head.is_detached or repo.active_branch.name != default_branch:
+         repo.git.checkout(default_branch)
+     repo.git.reset("--hard", f"origin/{default_branch}")
+
+
  def clone_repo(url: str, dest: Path) -> Repo:
      logger.info(f"Cloning {url} to {dest}")
      dest.parent.mkdir(parents=True, exist_ok=True)
@@ -0,0 +1,38 @@
+ from __future__ import annotations
+
+ import logging
+ import tempfile
+ from collections.abc import Callable, Generator
+ from contextlib import contextmanager
+ from pathlib import Path
+
+ LOG_FORMAT = "%(message)s"
+
+
+ @contextmanager
+ def capture_log(name: str) -> Generator[Callable[[], str]]:
+     """Capture path_sync logger output to a temp file.
+
+     Args:
+         name: Used for temp file naming (e.g., repo name for debugging).
+
+     Yields:
+         Callable that flushes the handler and returns log content.
+     """
+     with tempfile.TemporaryDirectory(prefix="path-sync-") as tmpdir:
+         log_path = Path(tmpdir) / f"{name}.log"
+         file_handler = logging.FileHandler(log_path, mode="w")
+         file_handler.setLevel(logging.INFO)
+         file_handler.setFormatter(logging.Formatter(LOG_FORMAT))
+         root_logger = logging.getLogger("path_sync")
+         root_logger.addHandler(file_handler)
+         try:
+
+             def read_log() -> str:
+                 file_handler.flush()
+                 return log_path.read_text() if log_path.exists() else ""
+
+             yield read_log
+         finally:
+             file_handler.close()
+             root_logger.removeHandler(file_handler)
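
This new file matches `path_sync/_internal/log_capture.py` in the 0.5.0 RECORD. A minimal usage sketch under that assumption, showing that the yielded callable returns whatever the `path_sync` logger wrote inside the block (the temp file is discarded when the context exits):

```python
# Illustrative only; uses capture_log exactly as defined in the hunk above.
import logging

from path_sync._internal.log_capture import capture_log

logger = logging.getLogger("path_sync.example")  # child of the captured "path_sync" logger
logger.setLevel(logging.INFO)  # records must pass the logger level to reach the file handler

with capture_log("demo-repo") as read_log:
    logger.info("synced 3 files")
    body = read_log()  # flushes the handler -> "synced 3 files\n"

print(body)
```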
@@ -85,14 +85,19 @@ Synced from [{src_repo_name}]({src_repo_url}) @ `{src_sha_short}`
  """


- class PRDefaults(BaseModel):
-     title: str = "chore: sync {name} files"
-     body_template: str = DEFAULT_BODY_TEMPLATE
-     body_suffix: str = ""
+ class PRFieldsBase(BaseModel):
+     """Common PR fields shared by copy and dep-update commands."""
+
      labels: list[str] = Field(default_factory=list)
      reviewers: list[str] = Field(default_factory=list)
      assignees: list[str] = Field(default_factory=list)

+
+ class PRDefaults(PRFieldsBase):
+     title: str = "chore: sync {name} files"
+     body_template: str = DEFAULT_BODY_TEMPLATE
+     body_suffix: str = ""
+
      def format_body(
          self,
          src_repo_url: str,
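
The hunk above moves the shared `labels`/`reviewers`/`assignees` fields into a `PRFieldsBase` model; the `models_dep.py` hunk that follows makes `PRConfig` inherit the same base. A small sketch of the resulting model shape, assuming these classes live in `path_sync._internal.models` as the later import suggests:

```python
# Illustrative only; mirrors the Pydantic models in the hunk above.
from path_sync._internal.models import PRDefaults

defaults = PRDefaults(labels=["sync"], reviewers=["octocat"])
# title/body_template keep their declared defaults; labels/reviewers/assignees
# now come from the shared PRFieldsBase.
assert defaults.title == "chore: sync {name} files"
assert defaults.assignees == []
```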
@@ -6,7 +6,7 @@ from typing import ClassVar

  from pydantic import BaseModel, Field

- from path_sync._internal.models import Destination, SrcConfig, resolve_config_path
+ from path_sync._internal.models import Destination, PRFieldsBase, SrcConfig, resolve_config_path
  from path_sync._internal.yaml_utils import load_yaml_model


@@ -37,10 +37,9 @@ class UpdateEntry(BaseModel):
      command: str


- class PRConfig(BaseModel):
+ class PRConfig(PRFieldsBase):
      branch: str
      title: str
-     labels: list[str] = Field(default_factory=list)
      auto_merge: bool = False


@@ -0,0 +1,25 @@
+ """Shared prompt utilities for interactive CLI commands."""
+
+ from __future__ import annotations
+
+ import sys
+
+
+ def prompt_confirm(message: str, no_prompt: bool = False) -> bool:
+     """Prompt user for confirmation.
+
+     Args:
+         message: The prompt message to display
+         no_prompt: If True, auto-confirms without prompting
+
+     Returns:
+         True if confirmed, False otherwise.
+         Auto-confirms in non-interactive mode (CI) or when no_prompt=True.
+     """
+     if no_prompt or not sys.stdin.isatty():
+         return True
+     try:
+         response = input(f"{message} [y/n]: ").strip().lower()
+         return response == "y"
+     except (EOFError, KeyboardInterrupt):
+         return False
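
Compared with the `_prompt` helper removed from the copy command, this shared `prompt_confirm` also auto-confirms when stdin is not a TTY (e.g. in CI). A minimal sketch, assuming the `path_sync/_internal/prompt_utils.py` module path from the RECORD:

```python
# Illustrative only; behavior inferred from prompt_confirm as shown above.
from path_sync._internal import prompt_utils

# Non-interactive (CI): returns True without asking.
# Interactive shell: prints "Push to origin? [y/n]: " and returns response == "y".
confirmed = prompt_utils.prompt_confirm("Push to origin?")
```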
path_sync/copy.py CHANGED
@@ -1,4 +1,6 @@
  # Generated by pkg-ext
  from path_sync._internal.cmd_copy import CopyOptions as _CopyOptions
+ from path_sync._internal.cmd_copy import copy as _copy

  CopyOptions = _CopyOptions
+ copy = _copy
@@ -0,0 +1,4 @@
+ # Generated by pkg-ext
+ from path_sync._internal.cmd_validate import validate_no_changes as _validate_no_changes
+
+ validate_no_changes = _validate_no_changes
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: path-sync
- Version: 0.4.1
+ Version: 0.5.0
  Summary: Sync files from a source repo to multiple destination repos
  Author-email: EspenAlbert <espen.albert1@gmail.com>
  License-Expression: MIT
@@ -314,6 +314,109 @@ Add as repository secret: `GH_PAT`
  | `GraphQL: Resource not accessible` | Use GH_PAT, not GITHUB_TOKEN |
  | `HTTP 422: Required status check` | Exclude `sync/*` from branch protection |

+ ## Dependency Updates
+
+ The `dep-update` command runs dependency updates across multiple repositories. It clones repos, runs update commands, verifies changes, and creates PRs.
+
+ ### Quick Start
+
+ ```bash
+ # Create config at .github/myconfig.dep.yaml (see example below)
+ # Then run:
+ path-sync dep-update -n myconfig
+
+ # Preview without creating PRs
+ path-sync dep-update -n myconfig --dry-run
+
+ # Filter specific destinations
+ path-sync dep-update -n myconfig -d repo1,repo2
+ ```
+
+ ### Dep Config Reference
+
+ **Config file**: `.github/{name}.dep.yaml`
+
+ ```yaml
+ name: uv-deps
+ from_config: python-template # references .github/python-template.src.yaml for destinations
+ exclude_destinations:
+   - path-sync # skip self
+
+ updates:
+   - command: uv lock --upgrade
+   - workdir: packages/sub # optional subdirectory
+     command: uv lock --upgrade
+
+ verify:
+   on_fail: skip # default strategy: skip, fail, warn
+   steps:
+     - run: uv sync
+     - run: just fmt
+       commit:
+         message: "chore: format after update"
+         add_paths: [".", "!uv.lock"] # ! prefix excludes
+       on_fail: warn
+     - run: just test
+
+ pr:
+   branch: deps/uv-lock-update
+   title: "chore(deps): update uv.lock"
+   labels: [dependencies]
+   reviewers: [] # optional
+   assignees: [] # optional
+   auto_merge: true
+ ```
+
+ | Field | Description |
+ |-------|-------------|
+ | `from_config` | Source config name for destination list |
+ | `include_destinations` | Only process these destinations |
+ | `exclude_destinations` | Skip these destinations |
+ | `updates` | Commands to run (in order) |
+ | `verify.on_fail` | Default failure strategy: `skip`, `fail`, `warn` |
+ | `verify.steps` | Verification commands with optional commit/on_fail |
+ | `pr.auto_merge` | Enable GitHub auto-merge after PR creation |
+
+ ### CLI Flags
+
+ | Flag | Description |
+ |------|-------------|
+ | `-n, --name` | Config name (required) |
+ | `-d, --dest` | Filter destinations (comma-separated) |
+ | `--work-dir` | Clone directory for repos without `dest_path_relative` |
+ | `--dry-run` | Preview without creating PRs |
+ | `--skip-verify` | Skip verification steps |
+ | `--pr-reviewers` | Override PR reviewers (comma-separated) |
+ | `--pr-assignees` | Override PR assignees (comma-separated) |
+
+ ### Failure Strategies
+
+ - **skip**: Skip PR for this repo, continue with others (default)
+ - **fail**: Stop all processing immediately
+ - **warn**: Create PR anyway with warning in body
+
+ Per-step `on_fail` overrides the verify-level default.
+
+ ### GitHub Actions
+
+ ```yaml
+ name: dep-update
+ on:
+   schedule:
+     - cron: "0 6 * * 1" # Weekly Monday
+   workflow_dispatch:
+
+ jobs:
+   update:
+     runs-on: ubuntu-latest
+     steps:
+       - uses: actions/checkout@v4
+       - uses: astral-sh/setup-uv@v5
+       - run: uvx path-sync dep-update -n myconfig
+         env:
+           GH_TOKEN: ${{ secrets.GH_PAT }}
+ ```
+

  ## Alternatives Considered

  | Tool | Why Not |
@@ -0,0 +1,28 @@
+ path_sync/__init__.py,sha256=AGzufMFGGBbtXb-7FQHBiiohnPnllborz80jE85r_yg,273
+ path_sync/__main__.py,sha256=zAjlCOVF1duQi_RSVWbncfmnjoqaik9YPl2jf5mfcR0,342
+ path_sync/config.py,sha256=XYuEK_bjSpAk_nZN0oxpEA-S3t9F6Qn0yznYLoQHIw8,568
+ path_sync/copy.py,sha256=fsTKQtSuphNw7fHGpK5E56JGLYcalslr5MHnY0bn-NU,188
+ path_sync/dep_update.py,sha256=UjjoUIIaZyGwTYmDo3OWQDdpY1zZ4dbdXGwy4xLDuRc,804
+ path_sync/sections.py,sha256=dB0RGUhRWcZj9c1364UULEEnYHMf1LkWUBZ8EayIyKM,2122
+ path_sync/validate_no_changes.py,sha256=QJtjSsFoqqMT9jeSgZXgq4gGUxDjIn5GUqhOixjhDlg,156
+ path_sync/_internal/__init__.py,sha256=iPkMhrpiyXBijo2Hp-y_2zEYxAXnHLnStKM0X0HHd4U,56
+ path_sync/_internal/cmd_boot.py,sha256=cFomUyPOhNX9fi5_BYL2Sm7O1oq7vntD8b7clQ7mL1E,3104
+ path_sync/_internal/cmd_copy.py,sha256=nKcq4rBIvi8haBY6NMdm8yH_jQgJgSq-T-ZfqpwBbe0,17235
+ path_sync/_internal/cmd_dep_update.py,sha256=CuHmI3GPM9SmfNPRF-jctA6Uap3Q3jTAREWiSPOQNHY,9533
+ path_sync/_internal/cmd_options.py,sha256=0GZKaBtZ8ae3yKdkAJrfWFHvu--BZvhLI4Gl8s_dJes,617
+ path_sync/_internal/cmd_validate.py,sha256=e6m-JZlXAGr0ZRqfLhhrlmjs4w79p2WWnZo4I05PGpo,1798
+ path_sync/_internal/file_utils.py,sha256=5C33qzKFQdwChi5YwUWBujj126t0P6dbGSU_5hWExpE,194
+ path_sync/_internal/git_ops.py,sha256=Xme_kuIVqCekj6YxWLWxvQu693wPcYHycAIiB_4l_Ow,7500
+ path_sync/_internal/header.py,sha256=evgY2q_gfDdEytEt_jyJ7M_KdGzCpfdKBUnoh3v-0Go,2593
+ path_sync/_internal/log_capture.py,sha256=IlCX4BBwWvA_FPIRn_8XyeOGE5sLp-kawlIk7vFQMK4,1187
+ path_sync/_internal/models.py,sha256=tkfnYkzn6V-vS0eYrVlFlHD70XFo1Mp_xSNgC5fdKG0,4681
+ path_sync/_internal/models_dep.py,sha256=B55-ItTrHEIQUD3LP-O-kOBG1uoeEuTHyqC73XC-LJI,1979
+ path_sync/_internal/prompt_utils.py,sha256=YChOxsoP11-r7D2k6G_kMrvzbvF0sk9sEPynrhkPkVc,703
+ path_sync/_internal/typer_app.py,sha256=lEGMRXql3Se3VbmwAohvpUaL2cbY-RwhPUq8kL7bPbc,177
+ path_sync/_internal/validation.py,sha256=23kwtmsiHiYlKbVU8mtwr8J0MqSlnvbuRRR5NQAsJ08,2446
+ path_sync/_internal/yaml_utils.py,sha256=yj6Bl54EltjLEcVKaiA5Ahb9byT6OUMh0xIEzTsrvnQ,498
+ path_sync-0.5.0.dist-info/METADATA,sha256=8s7pT8_ST24Ca43GGgbWinbK_2SgBB-kEju3KfO6bcM,13236
+ path_sync-0.5.0.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+ path_sync-0.5.0.dist-info/entry_points.txt,sha256=jTsL0c-9gP-4_Jt3EPgihtpLcwQR0AFAf1AUpD50AlI,54
+ path_sync-0.5.0.dist-info/licenses/LICENSE,sha256=MnHjsc6ccjI5Iiw2R3jLEAApIcrEpLdIcZxkilhSPxc,1069
+ path_sync-0.5.0.dist-info/RECORD,,
@@ -1,24 +0,0 @@
- path_sync/__init__.py,sha256=OHLo0rlS3tquOhUn2sHNgEfAYvtsJaQmT1MYmM94Nuk,204
- path_sync/__main__.py,sha256=zAjlCOVF1duQi_RSVWbncfmnjoqaik9YPl2jf5mfcR0,342
- path_sync/config.py,sha256=XYuEK_bjSpAk_nZN0oxpEA-S3t9F6Qn0yznYLoQHIw8,568
- path_sync/copy.py,sha256=BpflW4086XJFSHHK4taYPgXtF07xHB8qrgm8TYqdM4E,120
- path_sync/dep_update.py,sha256=UjjoUIIaZyGwTYmDo3OWQDdpY1zZ4dbdXGwy4xLDuRc,804
- path_sync/sections.py,sha256=dB0RGUhRWcZj9c1364UULEEnYHMf1LkWUBZ8EayIyKM,2122
- path_sync/_internal/__init__.py,sha256=iPkMhrpiyXBijo2Hp-y_2zEYxAXnHLnStKM0X0HHd4U,56
- path_sync/_internal/cmd_boot.py,sha256=cFomUyPOhNX9fi5_BYL2Sm7O1oq7vntD8b7clQ7mL1E,3104
- path_sync/_internal/cmd_copy.py,sha256=0iuhq3vlL1bah2EJEvkHZlXqdsOwdkKktt8V7lVjKIY,18241
- path_sync/_internal/cmd_dep_update.py,sha256=7mKs-ZdPoAfl3L2Q57345lPtOFtua5taF8dYAe0U1d8,8315
- path_sync/_internal/cmd_validate.py,sha256=e6m-JZlXAGr0ZRqfLhhrlmjs4w79p2WWnZo4I05PGpo,1798
- path_sync/_internal/file_utils.py,sha256=5C33qzKFQdwChi5YwUWBujj126t0P6dbGSU_5hWExpE,194
- path_sync/_internal/git_ops.py,sha256=OJ82-TUUqlFGSCcaeya9J-x8_10aG5iu55SZew_dzbk,7085
- path_sync/_internal/header.py,sha256=evgY2q_gfDdEytEt_jyJ7M_KdGzCpfdKBUnoh3v-0Go,2593
- path_sync/_internal/models.py,sha256=IA6lb_BFXntcZnn9bWJZYenlnDvk9ddNBA67l5qFrmA,4577
- path_sync/_internal/models_dep.py,sha256=KKgcOEUgWPa_speyp_XgwBQDp8L1hS24WN1C4SMMG-w,2014
- path_sync/_internal/typer_app.py,sha256=lEGMRXql3Se3VbmwAohvpUaL2cbY-RwhPUq8kL7bPbc,177
- path_sync/_internal/validation.py,sha256=23kwtmsiHiYlKbVU8mtwr8J0MqSlnvbuRRR5NQAsJ08,2446
- path_sync/_internal/yaml_utils.py,sha256=yj6Bl54EltjLEcVKaiA5Ahb9byT6OUMh0xIEzTsrvnQ,498
- path_sync-0.4.1.dist-info/METADATA,sha256=1nIEtOCDLLRoti_7s-OK0x-rHPWtNZqTJd350laIHTw,10430
- path_sync-0.4.1.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
- path_sync-0.4.1.dist-info/entry_points.txt,sha256=jTsL0c-9gP-4_Jt3EPgihtpLcwQR0AFAf1AUpD50AlI,54
- path_sync-0.4.1.dist-info/licenses/LICENSE,sha256=MnHjsc6ccjI5Iiw2R3jLEAApIcrEpLdIcZxkilhSPxc,1069
- path_sync-0.4.1.dist-info/RECORD,,