clsync 1.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
File without changes
claude_sync/apply.py ADDED
@@ -0,0 +1,147 @@
1
+ import difflib
2
+ import shutil
3
+ from datetime import datetime, timezone
4
+ from pathlib import Path
5
+
6
+ from rich.console import Console
7
+
8
+ from claude_sync.claude_paths import claude_memory_dir
9
+ from claude_sync.models import Config
10
+
11
# Shared Rich console used for user-facing warnings while applying artifacts.
console = Console()
12
+
13
+
14
def apply_consolidated(config: Config, skip_scopes: set[str] | None = None) -> list[str]:
    """
    Copy consolidated artifacts from sync repo to local machine.
    Returns list of scopes that were applied.

    Global:
    1. {repo}/consolidated/global/CLAUDE.md -> ~/.claude/CLAUDE.md
    2. {repo}/consolidated/global/settings.json -> ~/.claude/settings.json

    Per-project:
    1. {repo}/consolidated/projects/{name}/CLAUDE.md -> {project}/.claude/CLAUDE.md
    2. {repo}/consolidated/projects/{name}/memory/ -> ~/.claude/projects/{slug}/memory/
    """
    if skip_scopes is None:
        skip_scopes = set()
    repo = Path(config.repo.local_path)
    applied: list[str] = []

    # Global scope: both artifacts land in ~/.claude/ under their own filename.
    if "global" not in skip_scopes:
        consolidated_global = repo / "consolidated" / "global"
        global_applied = False
        for filename in ("CLAUDE.md", "settings.json"):
            src = consolidated_global / filename
            if src.exists():
                _copy_with_backup(src, Path.home() / ".claude" / filename, config)
                global_applied = True
        if global_applied:
            applied.append("global")

    # Per-project scopes: one scope string "projects/{name}" per directory.
    consolidated_projects = repo / "consolidated" / "projects"
    if consolidated_projects.exists():
        for project_dir in consolidated_projects.iterdir():
            if not project_dir.is_dir():
                continue
            name = project_dir.name
            scope = f"projects/{name}"
            if scope in skip_scopes:
                continue

            local_project = _find_local_project(name, config)
            if local_project is None:
                console.print(f"[yellow]Warning: no local project found matching '{name}', skipping scope {scope}[/yellow]")
                continue

            scope_applied = False

            consolidated_md = project_dir / "CLAUDE.md"
            if consolidated_md.exists():
                _copy_with_backup(consolidated_md, local_project / ".claude" / "CLAUDE.md", config)
                scope_applied = True

            consolidated_memory = project_dir / "memory"
            if consolidated_memory.exists():
                _sync_memory(consolidated_memory, local_project, name, repo, config)
                scope_applied = True

            if scope_applied:
                applied.append(scope)

    return applied


def _copy_with_backup(src: Path, dest: Path, config: Config) -> None:
    """Copy src over dest, backing up an existing dest first when configured."""
    if config.safety.backup_before_apply and dest.exists():
        _backup(dest)
    dest.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, dest)


def _sync_memory(consolidated_memory: Path, local_project: Path, name: str, repo: Path, config: Config) -> None:
    """Mirror the consolidated memory tree into the local project's memory dir.

    Copies every file from consolidated into the local memory directory, then
    removes local files that disappeared from consolidated — but only those
    present in this machine's own pushed snapshot, so files that were never
    synced are left untouched.
    """
    dest_memory = claude_memory_dir(local_project)
    dest_memory.mkdir(parents=True, exist_ok=True)
    consolidated_rels: set[Path] = set()
    for mem_file in consolidated_memory.rglob("*"):
        if mem_file.is_file():
            rel = mem_file.relative_to(consolidated_memory)
            consolidated_rels.add(rel)
            dest_file = dest_memory / rel
            dest_file.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(mem_file, dest_file)
    # Remove local memory files that were previously pushed but
    # are no longer in consolidated (deleted on another machine).
    # Only remove files that exist in our own snapshot — files that
    # were never pushed are left untouched.
    machine_mem = (
        repo / "machines" / config.client.machine_id
        / "projects" / name / "memory"
    )
    if machine_mem.exists():
        for local_file in list(dest_memory.rglob("*")):
            if local_file.is_file():
                rel = local_file.relative_to(dest_memory)
                if rel not in consolidated_rels and (machine_mem / rel).exists():
                    local_file.unlink()
117
+
118
+
119
+ def _backup(path: Path) -> None:
120
+ timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H%M%SZ")
121
+ backup_dir = path.parent / "backups" / timestamp
122
+ backup_dir.mkdir(parents=True, exist_ok=True)
123
+ shutil.copy2(path, backup_dir / path.name)
124
+
125
+
126
def _find_local_project(name: str, config: Config) -> Path | None:
    """Return the first configured project path whose basename equals *name*."""
    candidates = (Path(p) for p in config.projects.paths if Path(p).name == name)
    return next(candidates, None)
131
+
132
+
133
def check_large_diff(local_file: Path, consolidated_file: Path) -> bool:
    """
    Return True if more than 50% of lines differ between the two files.

    Similarity is measured with SequenceMatcher's matching blocks against the
    longer file's line count; two empty files count as identical.
    """
    lines_a = local_file.read_text(encoding="utf-8", errors="replace").splitlines()
    lines_b = consolidated_file.read_text(encoding="utf-8", errors="replace").splitlines()

    if not lines_a and not lines_b:
        return False

    same = sum(
        block.size
        for block in difflib.SequenceMatcher(None, lines_a, lines_b).get_matching_blocks()
    )
    longest = max(len(lines_a), len(lines_b))
    return (longest - same) / longest > 0.5
@@ -0,0 +1,250 @@
1
+ """
2
+ Generate CLAUDE.md files from existing Claude Code artifacts (memory files, session summaries).
3
+
4
+ This module calls the Anthropic API directly on the client machine. It requires the
5
+ 'bootstrap' optional dependency: pip install claude-sync[bootstrap]
6
+ """
7
+ import json
8
+ from pathlib import Path
9
+
10
+ from claude_sync.claude_paths import claude_memory_dir, claude_sessions_index
11
+
12
# Anthropic model and output-token budget used for CLAUDE.md generation.
MODEL = "claude-sonnet-4-20250514"
MAX_TOKENS = 8192

# Directory of prompt templates shipped inside the package (e.g. bootstrap.txt).
_PROMPTS_DIR = Path(__file__).parent / "prompts"
16
+
17
+
18
def _load_prompt_template() -> str:
    """Read and return the packaged bootstrap prompt template."""
    template_path = _PROMPTS_DIR / "bootstrap.txt"
    return template_path.read_text(encoding="utf-8")
20
+
21
+
22
def gather_project_context(project_path: Path) -> dict:
    """
    Collect all available context for a project.

    Returns dict with keys (all optional):
    - "memory_files": dict[str, str] — filename -> content
    - "session_summaries": list[str] — summaries from sessions-index.json
    - "existing_claude_md": str | None
    """
    context: dict = {}

    memory_dir = claude_memory_dir(project_path)
    if memory_dir.exists():
        collected: dict[str, str] = {}
        for entry in sorted(memory_dir.rglob("*")):
            if not entry.is_file():
                continue
            try:
                key = entry.relative_to(memory_dir).as_posix()
                collected[key] = entry.read_text(encoding="utf-8", errors="replace")
            except OSError:
                # Unreadable files are skipped rather than aborting the scan.
                pass
        if collected:
            context["memory_files"] = collected

    sessions_path = claude_sessions_index(project_path)
    if sessions_path.exists():
        try:
            data = json.loads(sessions_path.read_text(encoding="utf-8"))
        except (OSError, json.JSONDecodeError):
            data = None
        entries = data if isinstance(data, list) else []
        summaries: list[str] = []
        for entry in entries:
            # Only dict entries with a non-empty "summary" field are kept.
            if isinstance(entry, dict) and entry.get("summary"):
                summaries.append(entry["summary"])
        if summaries:
            context["session_summaries"] = summaries

    claude_md = project_path / ".claude" / "CLAUDE.md"
    if claude_md.exists():
        try:
            context["existing_claude_md"] = claude_md.read_text(encoding="utf-8", errors="replace")
        except OSError:
            pass

    return context
68
+
69
+
70
def gather_global_context(project_paths: list[Path]) -> dict:
    """
    Collect cross-project context for generating a global CLAUDE.md.

    Returns dict with keys:
    - "all_memory_files": dict[str, dict[str, str]] — project_name -> {filename -> content}
    - "all_session_summaries": dict[str, list[str]] — project_name -> [summaries]
    - "settings": str | None
    - "existing_claude_md": str | None
    """
    context: dict = {}
    memory_by_project: dict[str, dict[str, str]] = {}
    summaries_by_project: dict[str, list[str]] = {}

    # Reuse the per-project gatherer and key results by project basename.
    for raw_path in project_paths:
        project = Path(raw_path)
        per_project = gather_project_context(project)
        if "memory_files" in per_project:
            memory_by_project[project.name] = per_project["memory_files"]
        if "session_summaries" in per_project:
            summaries_by_project[project.name] = per_project["session_summaries"]

    if memory_by_project:
        context["all_memory_files"] = memory_by_project
    if summaries_by_project:
        context["all_session_summaries"] = summaries_by_project

    # Global machine-level files; read errors simply omit the key.
    for key, path in (
        ("settings", Path.home() / ".claude" / "settings.json"),
        ("existing_claude_md", Path.home() / ".claude" / "CLAUDE.md"),
    ):
        if path.exists():
            try:
                context[key] = path.read_text(encoding="utf-8", errors="replace")
            except OSError:
                pass

    return context
113
+
114
+
115
def build_bootstrap_prompt(context: dict, scope: str) -> str:
    """
    Build the prompt for generating a CLAUDE.md from gathered context.

    Renders each context key into a markdown section, then substitutes the
    joined sections and a scope-specific instruction into the template.

    Args:
        context: Output of gather_project_context() or gather_global_context().
        scope: "global" or "projects/{name}"; selects the instruction text.

    Returns:
        The completed prompt string.
    """
    template = _load_prompt_template()

    sections: list[str] = []

    # Per-project context
    if "memory_files" in context:
        sections.append("## MEMORY FILES\n")
        for filename, content in context["memory_files"].items():
            # Fix: heading previously hard-coded "(unknown)" and ignored the
            # bound `filename`, making every memory file indistinguishable.
            sections.append(f"### {filename}\n{content}\n")

    if "session_summaries" in context:
        sections.append("## SESSION SUMMARIES\n")
        for summary in context["session_summaries"]:
            sections.append(f"- {summary}")
        sections.append("")

    if "existing_claude_md" in context and context["existing_claude_md"]:
        sections.append("## EXISTING CLAUDE.md (baseline — preserve if still accurate)\n")
        sections.append(context["existing_claude_md"])

    # Global context
    if "all_memory_files" in context:
        for project_name, files in context["all_memory_files"].items():
            sections.append(f"## MEMORY FILES — {project_name}\n")
            for filename, content in files.items():
                # Same fix as above for the per-project headings.
                sections.append(f"### {filename}\n{content}\n")

    if "all_session_summaries" in context:
        for project_name, summaries in context["all_session_summaries"].items():
            sections.append(f"## SESSION SUMMARIES — {project_name}\n")
            for summary in summaries:
                sections.append(f"- {summary}")
            sections.append("")

    if "settings" in context and context["settings"]:
        sections.append("## CLAUDE CODE SETTINGS (settings.json)\n")
        sections.append(context["settings"])

    context_text = "\n".join(sections).strip()

    if scope == "global":
        scope_instruction = (
            "Focus on cross-project patterns: your user's general coding style, "
            "preferred tools, workflow habits, and environment setup."
        )
    else:
        scope_instruction = (
            "Focus on knowledge specific to this project: its architecture, "
            "conventions, tooling, and key decisions."
        )

    return (
        template
        .replace("{CONTEXT_SECTIONS}", context_text)
        .replace("{SCOPE_INSTRUCTION}", scope_instruction)
    )
173
+
174
+
175
+ def generate_claude_md(context: dict, scope: str, api_key: str) -> str | None:
176
+ """
177
+ Call the Anthropic API to generate a CLAUDE.md from context.
178
+
179
+ Returns the generated content, or None if context is empty / generation fails.
180
+ """
181
+ if not context:
182
+ return None
183
+
184
+ try:
185
+ import anthropic
186
+ except ImportError:
187
+ raise ImportError(
188
+ "The 'anthropic' package is required for bootstrap. "
189
+ "Install it with: pip install claude-sync[bootstrap]"
190
+ )
191
+
192
+ prompt = build_bootstrap_prompt(context, scope)
193
+
194
+ client = anthropic.Anthropic(api_key=api_key)
195
+ response = client.messages.create(
196
+ model=MODEL,
197
+ max_tokens=MAX_TOKENS,
198
+ messages=[{"role": "user", "content": prompt}],
199
+ )
200
+ return response.content[0].text
201
+
202
+
203
def bootstrap_project(project_path: Path, api_key: str) -> str | None:
    """
    Generate a CLAUDE.md for a single project.

    Gathers context from memory/ and sessions-index.json, generates content
    via the API, and writes it to {project_path}/.claude/CLAUDE.md.
    Returns the generated content, or None when no sources exist or
    generation fails.
    """
    resolved = Path(project_path).resolve()
    context = gather_project_context(resolved)
    if not context:
        return None

    generated = generate_claude_md(context, f"projects/{resolved.name}", api_key)
    if generated is None:
        return None

    target = resolved / ".claude" / "CLAUDE.md"
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(generated, encoding="utf-8")
    return generated
227
+
228
+
229
def bootstrap_global(project_paths: list[Path], api_key: str) -> str | None:
    """
    Generate a global ~/.claude/CLAUDE.md.

    Gathers cross-project context, generates content via the API, and writes
    it to ~/.claude/CLAUDE.md. Returns the generated content, or None when
    no sources exist or generation fails.
    """
    context = gather_global_context(project_paths)
    if not context:
        return None

    generated = generate_claude_md(context, "global", api_key)
    if generated is None:
        return None

    target = Path.home() / ".claude" / "CLAUDE.md"
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_text(generated, encoding="utf-8")
    return generated
@@ -0,0 +1,296 @@
1
+ """
2
+ CI provider abstraction for triggering and polling synthesis pipelines.
3
+
4
+ Supported providers:
5
+ gitlab — GitLab CI pipeline trigger + polling (default)
6
+ github — GitHub Actions workflow dispatch
7
+ local — Run pipeline/synthesize.py directly as a subprocess
8
+ """
9
+ import subprocess
10
+ import time
11
+ from datetime import datetime, timezone
12
+ from typing import Protocol, runtime_checkable
13
+
14
+ import httpx
15
+ from rich.console import Console
16
+
17
+ from claude_sync.models import Config
18
+
19
# Shared Rich console for status output while polling pipelines.
console = Console()

# Pipeline/run states and conclusions after which polling can stop.
GITLAB_TERMINAL_STATES = {"success", "failed", "canceled", "skipped"}
GITHUB_SUCCESS_CONCLUSIONS = {"success"}
# NOTE(review): GITHUB_TERMINAL_CONCLUSIONS is not referenced anywhere in this
# module's visible code — confirm it is used elsewhere before removing.
GITHUB_TERMINAL_CONCLUSIONS = {"success", "failure", "cancelled", "timed_out", "action_required", "skipped"}
24
+
25
+
26
@runtime_checkable
class CIProvider(Protocol):
    """Structural interface shared by the GitLab, GitHub, and local providers.

    runtime_checkable allows isinstance() checks against this Protocol, but
    such checks only verify method presence, not signatures.
    """

    def trigger(self) -> int | None:
        """Start the synthesis pipeline. Returns a run/pipeline ID, or None for local."""
        ...

    def wait_for_completion(self, run_id: int | None, timeout: int = 300, poll_interval: int = 10) -> bool:
        """Wait until completion. Returns True on success."""
        ...

    def get_failure_diagnosis(self, run_id: int | None) -> str | None:
        """Return a human-readable hint about a known failure, or None."""
        ...
39
+
40
+
41
+ # ---------------------------------------------------------------------------
42
+ # GitLab provider
43
+ # ---------------------------------------------------------------------------
44
+
45
class GitLabProvider:
    """Triggers GitLab CI pipelines via trigger token and polls them via the API."""

    def __init__(self, config: Config) -> None:
        self.config = config

    def _gitlab(self):
        """Return the [gitlab] config section, failing fast with a setup hint."""
        section = self.config.gitlab
        if not section:
            raise RuntimeError(
                "GitLab is not configured. Run 'claude-sync init --provider gitlab'."
            )
        return section

    def trigger(self) -> int:
        """Start a pipeline on main via the trigger-token API; return its ID."""
        cfg = self._gitlab()
        if not cfg.trigger_token:
            raise RuntimeError(
                "GitLab trigger token is not configured. "
                "Run 'claude-sync init' or set trigger_token in ~/.claude-sync/config.toml."
            )
        endpoint = f"{cfg.url}/api/v4/projects/{cfg.project_id}/trigger/pipeline"
        resp = httpx.post(endpoint, data={"token": cfg.trigger_token, "ref": "main"})
        if resp.status_code not in (200, 201):
            raise RuntimeError(
                f"Failed to trigger pipeline: HTTP {resp.status_code} — {resp.text}"
            )
        return resp.json()["id"]

    def wait_for_completion(self, run_id: int | None, timeout: int = 300, poll_interval: int = 10) -> bool:
        """Poll the pipeline until a terminal state; True iff it succeeded."""
        if run_id is None:
            return False
        cfg = self._gitlab()
        if not cfg.access_token:
            raise RuntimeError(
                "GitLab access token is not configured. "
                "Run 'claude-sync init' or set access_token in ~/.claude-sync/config.toml."
            )
        endpoint = f"{cfg.url}/api/v4/projects/{cfg.project_id}/pipelines/{run_id}"
        auth_headers = {"PRIVATE-TOKEN": cfg.access_token}
        deadline = time.monotonic() + timeout
        last_status = ""

        while time.monotonic() < deadline:
            resp = httpx.get(endpoint, headers=auth_headers)
            if resp.status_code != 200:
                raise RuntimeError(
                    f"Failed to get pipeline status: HTTP {resp.status_code} — {resp.text}"
                )
            status = resp.json()["status"]
            if status != last_status:
                # Only announce transitions to keep the console quiet while polling.
                console.print(f"Pipeline #{run_id}: [bold]{status}[/bold]")
                last_status = status
            if status in GITLAB_TERMINAL_STATES:
                return status == "success"
            time.sleep(poll_interval)

        raise TimeoutError(
            f"Pipeline #{run_id} did not reach a terminal state within {timeout}s. "
            f"Last status: {last_status}"
        )

    def get_failure_diagnosis(self, run_id: int | None) -> str | None:
        """Fetch the first failed job's log and match it against known failure patterns."""
        if run_id is None:
            return None
        cfg = self._gitlab()
        if not cfg.access_token:
            return None
        api_base = f"{cfg.url}/api/v4/projects/{cfg.project_id}"
        auth_headers = {"PRIVATE-TOKEN": cfg.access_token}
        try:
            jobs_resp = httpx.get(f"{api_base}/pipelines/{run_id}/jobs", headers=auth_headers)
            if jobs_resp.status_code != 200:
                return None
            failed_jobs = [job for job in jobs_resp.json() if job.get("status") == "failed"]
            if not failed_jobs:
                return None
            trace_resp = httpx.get(f"{api_base}/jobs/{failed_jobs[0]['id']}/trace", headers=auth_headers)
            if trace_resp.status_code != 200:
                return None
            log = trace_resp.text
        except Exception:
            # Diagnosis is best-effort; any network/parse error just yields no hint.
            return None

        return _diagnose_gitlab_log(log)
128
+
129
+
130
+ def _diagnose_gitlab_log(log: str) -> str | None:
131
+ """Return a human-readable hint if a known failure pattern is found."""
132
+ if "not allowed to push" in log or ("403" in log and "push" in log.lower()):
133
+ return (
134
+ "The CI job was denied push access.\n"
135
+ "Fix: in your sync repo go to Settings → CI/CD → Token Access\n"
136
+ " and enable 'Allow Git push requests to the repository'."
137
+ )
138
+ if "ANTHROPIC_API_KEY" in log or "AuthenticationError" in log or "invalid x-api-key" in log.lower():
139
+ return (
140
+ "The Anthropic API key is missing or invalid.\n"
141
+ "Fix: add ANTHROPIC_API_KEY as a masked CI/CD variable in your sync repo\n"
142
+ " under Settings → CI/CD → Variables."
143
+ )
144
+ return None
145
+
146
+
147
+ # ---------------------------------------------------------------------------
148
+ # GitHub Actions provider
149
+ # ---------------------------------------------------------------------------
150
+
151
class GitHubProvider:
    """Triggers GitHub Actions workflow runs via workflow_dispatch and polls them."""

    def __init__(self, config: Config) -> None:
        self.config = config
        # Wall-clock time of the most recent dispatch; used by _find_run_id to
        # match the run we just started (the dispatch API returns no run ID).
        self._dispatch_time: float = 0.0

    def _gh(self):
        """Return the [github] config section, failing fast with a setup hint."""
        if not self.config.github:
            raise RuntimeError(
                "GitHub is not configured. Run 'claude-sync init --provider github'."
            )
        return self.config.github

    def _headers(self) -> dict:
        """Standard GitHub REST headers: bearer token, JSON accept, pinned API version."""
        return {
            "Authorization": f"Bearer {self._gh().token}",
            "Accept": "application/vnd.github+json",
            "X-GitHub-Api-Version": "2022-11-28",
        }

    def trigger(self) -> int:
        """Dispatch the configured workflow on main and return the new run's ID.

        The dispatch endpoint returns 204 with no body, so the run ID must be
        discovered afterwards by _find_run_id().
        """
        gh = self._gh()
        if not gh.token:
            raise RuntimeError(
                "GitHub token is not configured. "
                "Run 'claude-sync init' or set token in ~/.claude-sync/config.toml."
            )
        url = f"https://api.github.com/repos/{gh.repo}/actions/workflows/{gh.workflow_id}/dispatches"
        # Record the time BEFORE posting so the created_at comparison below
        # cannot miss a run created between the post and the timestamp.
        self._dispatch_time = time.time()
        response = httpx.post(url, headers=self._headers(), json={"ref": "main"})
        if response.status_code != 204:
            raise RuntimeError(
                f"Failed to dispatch workflow: HTTP {response.status_code} — {response.text}"
            )
        return self._find_run_id()

    def _find_run_id(self, search_timeout: int = 15) -> int:
        """Poll recent workflow_dispatch runs for one created after our dispatch.

        NOTE(review): this matches ANY workflow_dispatch run created within 5s
        before the dispatch timestamp — it does not filter by workflow_id, so a
        concurrent dispatch of another workflow could be picked up; confirm
        this is acceptable for this repo's usage.
        """
        gh = self._gh()
        url = f"https://api.github.com/repos/{gh.repo}/actions/runs"
        deadline = time.monotonic() + search_timeout
        while time.monotonic() < deadline:
            # Sleep first: the run usually takes a moment to appear after dispatch.
            time.sleep(2)
            resp = httpx.get(
                url, headers=self._headers(),
                params={"event": "workflow_dispatch", "per_page": 10},
            )
            if resp.status_code != 200:
                continue
            for run in resp.json().get("workflow_runs", []):
                created_str = run.get("created_at", "")
                try:
                    # GitHub returns ISO-8601 with a trailing Z; normalize for fromisoformat.
                    created = datetime.fromisoformat(created_str.replace("Z", "+00:00")).timestamp()
                except ValueError:
                    continue
                # 5-second grace window tolerates clock skew between us and GitHub.
                if created >= self._dispatch_time - 5:
                    return run["id"]
        raise RuntimeError(
            "Could not find the dispatched GitHub Actions run. "
            "Check the Actions tab in your repository."
        )

    def wait_for_completion(self, run_id: int | None, timeout: int = 300, poll_interval: int = 10) -> bool:
        """Poll the run until it completes; True iff its conclusion is a success."""
        if run_id is None:
            return False
        gh = self._gh()
        url = f"https://api.github.com/repos/{gh.repo}/actions/runs/{run_id}"
        deadline = time.monotonic() + timeout
        last_display = ""

        while time.monotonic() < deadline:
            resp = httpx.get(url, headers=self._headers())
            if resp.status_code != 200:
                # Transient API errors are retried rather than raised.
                time.sleep(poll_interval)
                continue
            data = resp.json()
            status = data["status"]  # queued | in_progress | completed
            conclusion = data.get("conclusion")  # success | failure | cancelled | …
            # Show the conclusion once completed, otherwise the in-flight status.
            display = conclusion if status == "completed" and conclusion else status
            if display != last_display:
                console.print(f"Run #{run_id}: [bold]{display}[/bold]")
                last_display = display
            if status == "completed":
                return conclusion in GITHUB_SUCCESS_CONCLUSIONS
            time.sleep(poll_interval)

        raise TimeoutError(
            f"GitHub Actions run #{run_id} did not complete within {timeout}s. "
            f"Last status: {last_display}"
        )

    def get_failure_diagnosis(self, run_id: int | None) -> str | None:
        # GitHub Actions logs require following a signed S3 redirect; too complex for
        # automatic diagnosis. Direct the user to the web UI instead.
        if run_id is None:
            return None
        gh = self._gh()
        return (
            f"Check the Actions tab for details:\n"
            f"  https://github.com/{gh.repo}/actions/runs/{run_id}"
        )
250
+
251
+
252
+ # ---------------------------------------------------------------------------
253
+ # Local provider (runs pipeline/synthesize.py as a subprocess)
254
+ # ---------------------------------------------------------------------------
255
+
256
class LocalProvider:
    """Runs pipeline/synthesize.py directly as a subprocess instead of remote CI."""

    def __init__(self, config: Config) -> None:
        self.config = config
        # Outcome of the last synchronous run, reported by wait_for_completion().
        self._success: bool = False

    def trigger(self) -> None:
        """Run the synthesis script synchronously and remember whether it succeeded."""
        import os
        if not os.environ.get("ANTHROPIC_API_KEY"):
            raise RuntimeError(
                "ANTHROPIC_API_KEY is not set. "
                "Export it before running 'claude-sync trigger' with the local provider."
            )
        completed = subprocess.run(
            ["python", "pipeline/synthesize.py"],
            cwd=self.config.repo.local_path,
        )
        self._success = completed.returncode == 0
        return None

    def wait_for_completion(self, run_id: int | None, timeout: int = 300, poll_interval: int = 10) -> bool:
        # synthesis already ran synchronously in trigger()
        return self._success

    def get_failure_diagnosis(self, run_id: int | None) -> str | None:
        # No log-pattern diagnosis for local runs; output went straight to the terminal.
        return None
282
+
283
+
284
+ # ---------------------------------------------------------------------------
285
+ # Factory
286
+ # ---------------------------------------------------------------------------
287
+
288
def get_provider(config: Config) -> CIProvider:
    """Instantiate the CI provider class selected by config.provider.type."""
    registry = {
        "gitlab": GitLabProvider,
        "github": GitHubProvider,
        "local": LocalProvider,
    }
    provider_type = config.provider.type
    try:
        provider_cls = registry[provider_type]
    except KeyError:
        raise ValueError(f"Unknown provider type: {provider_type!r}") from None
    return provider_cls(config)