stravinsky 0.2.52__py3-none-any.whl → 0.4.18__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in the public registry. It is provided for informational purposes only.

This release of stravinsky has been flagged as potentially problematic.

Files changed (58)
  1. mcp_bridge/__init__.py +1 -1
  2. mcp_bridge/auth/token_store.py +113 -11
  3. mcp_bridge/cli/__init__.py +6 -0
  4. mcp_bridge/cli/install_hooks.py +1265 -0
  5. mcp_bridge/cli/session_report.py +585 -0
  6. mcp_bridge/config/MANIFEST_SCHEMA.md +305 -0
  7. mcp_bridge/config/README.md +276 -0
  8. mcp_bridge/config/hook_config.py +249 -0
  9. mcp_bridge/config/hooks_manifest.json +138 -0
  10. mcp_bridge/config/rate_limits.py +222 -0
  11. mcp_bridge/config/skills_manifest.json +128 -0
  12. mcp_bridge/hooks/HOOKS_SETTINGS.json +175 -0
  13. mcp_bridge/hooks/README.md +215 -0
  14. mcp_bridge/hooks/__init__.py +119 -60
  15. mcp_bridge/hooks/edit_recovery.py +42 -37
  16. mcp_bridge/hooks/git_noninteractive.py +89 -0
  17. mcp_bridge/hooks/keyword_detector.py +30 -0
  18. mcp_bridge/hooks/manager.py +8 -0
  19. mcp_bridge/hooks/notification_hook.py +103 -0
  20. mcp_bridge/hooks/parallel_execution.py +111 -0
  21. mcp_bridge/hooks/pre_compact.py +82 -183
  22. mcp_bridge/hooks/rules_injector.py +507 -0
  23. mcp_bridge/hooks/session_notifier.py +125 -0
  24. mcp_bridge/{native_hooks → hooks}/stravinsky_mode.py +51 -16
  25. mcp_bridge/hooks/subagent_stop.py +98 -0
  26. mcp_bridge/hooks/task_validator.py +73 -0
  27. mcp_bridge/hooks/tmux_manager.py +141 -0
  28. mcp_bridge/hooks/todo_continuation.py +90 -0
  29. mcp_bridge/hooks/todo_delegation.py +88 -0
  30. mcp_bridge/hooks/tool_messaging.py +267 -0
  31. mcp_bridge/hooks/truncator.py +21 -17
  32. mcp_bridge/notifications.py +151 -0
  33. mcp_bridge/prompts/multimodal.py +24 -3
  34. mcp_bridge/server.py +214 -49
  35. mcp_bridge/server_tools.py +445 -0
  36. mcp_bridge/tools/__init__.py +22 -18
  37. mcp_bridge/tools/agent_manager.py +220 -32
  38. mcp_bridge/tools/code_search.py +97 -11
  39. mcp_bridge/tools/lsp/__init__.py +7 -0
  40. mcp_bridge/tools/lsp/manager.py +448 -0
  41. mcp_bridge/tools/lsp/tools.py +637 -150
  42. mcp_bridge/tools/model_invoke.py +208 -106
  43. mcp_bridge/tools/query_classifier.py +323 -0
  44. mcp_bridge/tools/semantic_search.py +3042 -0
  45. mcp_bridge/tools/templates.py +32 -18
  46. mcp_bridge/update_manager.py +589 -0
  47. mcp_bridge/update_manager_pypi.py +299 -0
  48. stravinsky-0.4.18.dist-info/METADATA +468 -0
  49. stravinsky-0.4.18.dist-info/RECORD +88 -0
  50. stravinsky-0.4.18.dist-info/entry_points.txt +5 -0
  51. mcp_bridge/native_hooks/edit_recovery.py +0 -46
  52. mcp_bridge/native_hooks/todo_delegation.py +0 -54
  53. mcp_bridge/native_hooks/truncator.py +0 -23
  54. stravinsky-0.2.52.dist-info/METADATA +0 -204
  55. stravinsky-0.2.52.dist-info/RECORD +0 -63
  56. stravinsky-0.2.52.dist-info/entry_points.txt +0 -3
  57. mcp_bridge/{native_hooks → hooks}/context.py +0 -0
  58. {stravinsky-0.2.52.dist-info → stravinsky-0.4.18.dist-info}/WHEEL +0 -0
@@ -2,11 +2,16 @@
  Templates for stravinsky repository initialization.
  """

- CLAUDE_MD_TEMPLATE = """## stravinsky MCP (Parallel Agents)
+ CLAUDE_MD_TEMPLATE = """## stravinsky MCP (Multi-Model Orchestration)

- Use stravinsky MCP tools. **DEFAULT: spawn parallel agents for multi-step tasks.**
+ Stravinsky provides multi-model AI orchestration with parallel agent execution.

- ### Agent Tools
+ ### Architecture
+ - **Native Subagent**: Stravinsky orchestrator (.claude/agents/stravinsky.md) auto-delegates complex tasks
+ - **MCP Tools**: agent_spawn, invoke_gemini, invoke_openai, LSP tools, code search
+ - **Specialist Agents**: explore, dewey, frontend, delphi, multimodal, document_writer
+
+ ### Agent Tools (via MCP)
  - `agent_spawn(prompt, agent_type, description)` - Spawn background agent with full tool access
  - `agent_output(task_id, block)` - Get results (block=True to wait)
  - `agent_progress(task_id)` - Check real-time progress
@@ -14,15 +19,17 @@ Use stravinsky MCP tools. **DEFAULT: spawn parallel agents for multi-step tasks.
  - `agent_cancel(task_id)` - Stop a running agent

  ### Agent Types
- - `explore` - Codebase search, "where is X?" questions
- - `dewey` - Documentation research, implementation examples
- - `frontend` - UI/UX work, component design
- - `delphi` - Strategic advice, architecture review
-
- ### Parallel Execution (IRONSTAR)
+ - `explore` - Codebase search, structural analysis (Gemini 3 Flash)
+ - `dewey` - Documentation research, web search (Gemini 3 Flash + Web)
+ - `frontend` - UI/UX implementation (Gemini 3 Pro High)
+ - `delphi` - Strategic advice, architecture review (GPT-5.2 Medium)
+ - `multimodal` - Visual analysis, screenshots (Gemini 3 Flash Vision)
+ - `document_writer` - Technical documentation (Gemini 3 Flash)
+
+ ### Parallel Execution (MANDATORY)
  For ANY task with 2+ independent steps:
  1. **Immediately use agent_spawn** for each independent component
- 2. Fire all agents simultaneously, don't wait
+ 2. Fire all agents simultaneously in ONE response, don't wait
  3. Monitor with agent_progress, collect with agent_output

  ### Trigger Commands
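The parallel-execution guidance in the hunk above reduces to: spawn every independent sub-task up front, then monitor and collect. Here is a minimal sketch of that flow. The stub functions stand in for the stravinsky MCP tools; only the parameter names shown in the template (prompt, agent_type, description, task_id, block) are taken from the source, and the stub bodies are purely illustrative.

```python
# Illustrative sketch of the "fire all, then collect" pattern.
# These stubs are NOT the real stravinsky MCP tools; they only mirror the
# parameter names documented in the template above.
import uuid

_RESULTS: dict[str, str] = {}

def agent_spawn(prompt: str, agent_type: str, description: str) -> str:
    """Stub: pretend to start a background agent and return its task id."""
    task_id = str(uuid.uuid4())
    _RESULTS[task_id] = f"[{agent_type}] finished: {description}"
    return task_id

def agent_progress(task_id: str) -> str:
    """Stub: report progress for a spawned agent."""
    return "done" if task_id in _RESULTS else "unknown"

def agent_output(task_id: str, block: bool = True) -> str:
    """Stub: collect an agent's output (block=True would wait for completion)."""
    return _RESULTS[task_id]

# 1. Spawn every independent sub-task immediately, without waiting.
tasks = [
    agent_spawn("Find all hook entry points", "explore", "hook survey"),
    agent_spawn("Summarize LSP tool docs", "dewey", "doc research"),
]

# 2./3. Monitor, then collect results once all agents are in flight.
for task_id in tasks:
    print(agent_progress(task_id), agent_output(task_id, block=True))
```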
@@ -30,6 +37,12 @@ For ANY task with 2+ independent steps:
  - **ULTRATHINK**: Engage exhaustive deep reasoning, multi-dimensional analysis
  - **SEARCH**: Maximize search effort across codebase and external resources
  - **ANALYZE**: Deep analysis mode with delphi consultation for complex issues
+
+ ### Native Subagent Benefits
+ - ✅ Auto-delegation (no manual /stravinsky invocation)
+ - ✅ Context isolation (orchestrator runs as subagent)
+ - ✅ Full MCP tool access (agent_spawn, invoke_gemini/openai, LSP, etc.)
+ - ✅ Multi-model routing (Gemini for UI/research, GPT for strategy)
  """

  COMMAND_STRAVINSKY = """---
@@ -74,15 +87,16 @@ stravinsky:agent_spawn(prompt="Task 3...", agent_type="dewey", description="Task
  stravinsky:agent_output(task_id="[id]", block=true)
  ```

- ### DO NOT USE:
- - Built-in Read tool for file reading
- - Built-in Search/Glob for searching
- - Built-in Task tool for subagents
+ ### Recommended Tool Usage:
+ - For file operations within agents: Use standard Read/Edit tools
+ - For parallel agent spawning: Use stravinsky:agent_spawn (supports nesting, unlike native Task tool)
+ - For collecting results: Use stravinsky:agent_output
+ - For monitoring agents: Use stravinsky:agent_list

- ### ALWAYS USE:
- - stravinsky:agent_spawn for ALL exploration
- - stravinsky:agent_output for collecting results
- - stravinsky:agent_list to see running agents
+ ### Native Subagent Integration:
+ - Stravinsky orchestrator configured as native Claude Code subagent (.claude/agents/stravinsky.md)
+ - Native subagents CAN call Stravinsky MCP tools (agent_spawn, invoke_gemini, etc.)
+ - This enables auto-delegation without manual /stravinsky invocation

  ### Execution Modes:
  - `ironstar` / `irs` / `ultrawork` - Maximum parallel execution (10+ agents)
@@ -0,0 +1,589 @@
+ #!/usr/bin/env python3
+ """
+ Update Manager for Stravinsky Hooks and Skills
+
+ Safely merges hooks and skills during Stravinsky updates with:
+ - Version tracking via manifest files
+ - 3-way merge algorithm (base, user, new)
+ - User customization preservation
+ - Conflict detection and reporting
+ - Automatic backups before updates
+ - Rollback capability
+ - Dry-run mode for testing
+ - Comprehensive logging
+ """
+
+ import json
+ import logging
+ import shutil
+ import sys
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Dict, List, Optional, Tuple, Any
+ from dataclasses import dataclass, asdict
+
+
+ @dataclass
+ class MergeConflict:
+     """Represents a merge conflict for a file."""
+     file_path: str
+     base_version: Optional[str]
+     user_version: Optional[str]
+     new_version: Optional[str]
+     conflict_type: str
+
+
+ @dataclass
+ class UpdateManifest:
+     """Manifest tracking file versions and update status."""
+     version: str
+     timestamp: str
+     files: Dict[str, str]
+
+     def to_dict(self) -> Dict[str, Any]:
+         return asdict(self)
+
+     @staticmethod
+     def from_dict(data: Dict[str, Any]) -> 'UpdateManifest':
+         return UpdateManifest(
+             version=data.get('version', ''),
+             timestamp=data.get('timestamp', ''),
+             files=data.get('files', {})
+         )
+
+
+ class UpdateManager:
+     """Manages safe updates of hooks and skills with conflict detection and rollback."""
+
+     def __init__(self, dry_run: bool = False, verbose: bool = False):
+         """Initialize update manager."""
+         self.dry_run = dry_run
+         self.verbose = verbose
+         self.home = Path.home()
+         self.global_claude_dir = self.home / ".claude"
+         self.backup_dir = self.global_claude_dir / ".backups"
+         self.manifest_dir = self.global_claude_dir / ".manifests"
+
+         self.logger = self._setup_logging()
+
+         if not self.dry_run:
+             self.backup_dir.mkdir(parents=True, exist_ok=True)
+             self.manifest_dir.mkdir(parents=True, exist_ok=True)
+
+     def _setup_logging(self) -> logging.Logger:
+         """Setup logging with file and console output."""
+         logger = logging.getLogger("stravinsky.update_manager")
+         logger.setLevel(logging.DEBUG if self.verbose else logging.INFO)
+         logger.handlers.clear()
+
+         log_dir = self.global_claude_dir / ".logs"
+         if not self.dry_run:
+             log_dir.mkdir(parents=True, exist_ok=True)
+             fh = logging.FileHandler(log_dir / "update_manager.log")
+             fh.setLevel(logging.DEBUG)
+             formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
+             fh.setFormatter(formatter)
+             logger.addHandler(fh)
+
+         ch = logging.StreamHandler()
+         ch.setLevel(logging.WARNING if not self.verbose else logging.DEBUG)
+         formatter = logging.Formatter('%(levelname)s: %(message)s')
+         ch.setFormatter(formatter)
+         logger.addHandler(ch)
+
+         return logger
+
+     def _hash_file(self, path: Path) -> str:
+         """Generate hash of file content."""
+         import hashlib
+         try:
+             content = path.read_bytes()
+             return hashlib.sha256(content).hexdigest()[:16]
+         except Exception:
+             return "unknown"
+
+     def _load_manifest(self, manifest_type: str) -> Optional[UpdateManifest]:
+         """Load manifest file (base, user, new)."""
+         manifest_path = self.manifest_dir / f"{manifest_type}_manifest.json"
+
+         if not manifest_path.exists():
+             return None
+
+         try:
+             data = json.loads(manifest_path.read_text())
+             return UpdateManifest.from_dict(data)
+         except Exception as e:
+             self.logger.error(f"Failed to load manifest: {e}")
+             return None
+
+     def _save_manifest(self, manifest: UpdateManifest, manifest_type: str) -> bool:
+         """Save manifest file."""
+         if self.dry_run:
+             self.logger.debug(f"[DRY-RUN] Would save {manifest_type} manifest")
+             return True
+
+         manifest_path = self.manifest_dir / f"{manifest_type}_manifest.json"
+
+         try:
+             manifest_path.write_text(json.dumps(manifest.to_dict(), indent=2))
+             self.logger.info(f"Saved {manifest_type} manifest")
+             return True
+         except Exception as e:
+             self.logger.error(f"Failed to save manifest: {e}")
+             return False
+
+     def _create_backup(self, source_dir: Path, backup_name: str) -> Optional[Path]:
+         """Create timestamped backup of directory."""
+         if self.dry_run:
+             return None
+
+         if not source_dir.exists():
+             return None
+
+         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+         backup_path = self.backup_dir / f"{backup_name}_{timestamp}"
+
+         try:
+             shutil.copytree(source_dir, backup_path)
+             self.logger.info(f"Created backup: {backup_path}")
+             return backup_path
+         except Exception as e:
+             self.logger.error(f"Failed to create backup: {e}")
+             return None
+
+     def _read_file_safely(self, path: Path) -> Optional[str]:
+         """Read file with error handling."""
+         try:
+             if not path.exists():
+                 return None
+             return path.read_text(encoding='utf-8')
+         except Exception as e:
+             self.logger.error(f"Failed to read {path}: {e}")
+             return None
+
+     def _write_file_safely(self, path: Path, content: str) -> bool:
+         """Write file with error handling."""
+         if self.dry_run:
+             self.logger.debug(f"[DRY-RUN] Would write {len(content)} bytes to {path}")
+             return True
+
+         try:
+             path.parent.mkdir(parents=True, exist_ok=True)
+             path.write_text(content, encoding='utf-8')
+             path.chmod(0o755)
+             self.logger.debug(f"Wrote {len(content)} bytes to {path}")
+             return True
+         except Exception as e:
+             self.logger.error(f"Failed to write {path}: {e}")
+             return False
+
+     def _detect_conflicts(
+         self,
+         base: Optional[str],
+         user: Optional[str],
+         new: Optional[str],
+         file_path: str
+     ) -> Optional[MergeConflict]:
+         """Detect merge conflicts using 3-way merge logic."""
+         if new == base:
+             return None
+
+         if user == base or user is None:
+             return None
+
+         if user == new:
+             return None
+
+         conflict_type = "different_modifications"
+         if base is None and user is not None and new is not None:
+             conflict_type = "added_both_ways"
+         elif base is not None and user is None and new is not None:
+             conflict_type = "deleted_vs_new"
+
+         return MergeConflict(
+             file_path=file_path,
+             base_version=base[:50] if base else None,
+             user_version=user[:50] if user else None,
+             new_version=new[:50] if new else None,
+             conflict_type=conflict_type
+         )
+
+     def _merge_3way(
+         self,
+         base: Optional[str],
+         user: Optional[str],
+         new: Optional[str],
+         file_path: str
+     ) -> Tuple[str, bool]:
+         """Perform 3-way merge on file content."""
+         if base is None:
+             if user is None:
+                 return new or "", False
+             elif new is None:
+                 return user, False
+             elif user == new:
+                 return user, False
+             else:
+                 return self._format_conflict_markers(user, new), True
+
+         if user is None:
+             if new is None:
+                 return "", False
+             else:
+                 return self._format_conflict_markers(None, new), True
+
+         if new is None:
+             return self._format_conflict_markers(user, None), True
+
+         if base == new:
+             return user, False
+
+         if base == user:
+             return new, False
+
+         if user != new:
+             merged, has_conflict = self._line_based_merge(base, user, new)
+             return merged, has_conflict
+
+         return user, False
+
+     def _line_based_merge(self, base: str, user: str, new: str) -> Tuple[str, bool]:
+         """Perform line-based merge for text conflicts."""
+         base_lines = base.splitlines(keepends=True)
+         user_lines = user.splitlines(keepends=True)
+         new_lines = new.splitlines(keepends=True)
+
+         merged = []
+         has_conflict = False
+
+         if len(base_lines) == len(user_lines) == len(new_lines):
+             for i, (b, u, n) in enumerate(zip(base_lines, user_lines, new_lines)):
+                 if u == b == n:
+                     merged.append(u)
+                 elif u == b and n != b:
+                     merged.append(n)
+                 elif n == b and u != b:
+                     merged.append(u)
+                 elif u == n:
+                     merged.append(u)
+                 else:
+                     merged.append(f"<<<<<<< {u}======= {n}>>>>>>> ")
+                     has_conflict = True
+         else:
+             has_conflict = True
+             merged.append("<<<<<<< USER VERSION\n")
+             merged.extend(user_lines)
+             merged.append("=======\n")
+             merged.extend(new_lines)
+             merged.append(">>>>>>> NEW VERSION\n")
+
+         return "".join(merged), has_conflict
+
+     def _format_conflict_markers(self, user: Optional[str], new: Optional[str]) -> str:
+         """Format conflict markers for display."""
+         lines = ["<<<<<<< USER VERSION\n"]
+         if user:
+             lines.append(user)
+             if not user.endswith('\n'):
+                 lines.append('\n')
+         lines.append("=======\n")
+         if new:
+             lines.append(new)
+             if not new.endswith('\n'):
+                 lines.append('\n')
+         lines.append(">>>>>>> NEW VERSION\n")
+         return "".join(lines)
+
+     def _preserve_statusline(self, settings_file: Path) -> Optional[Dict[str, Any]]:
+         """Read and preserve statusline from settings.json."""
+         try:
+             if not settings_file.exists():
+                 return None
+             settings = json.loads(settings_file.read_text())
+             statusline = settings.get("statusLine")
+             if statusline:
+                 self.logger.debug(f"Preserved statusline: {statusline}")
+             return statusline
+         except Exception as e:
+             self.logger.error(f"Failed to read statusline: {e}")
+             return None
+
+     def _merge_settings_json(
+         self,
+         base: Optional[Dict[str, Any]],
+         user: Optional[Dict[str, Any]],
+         new: Optional[Dict[str, Any]]
+     ) -> Tuple[Dict[str, Any], List[MergeConflict]]:
+         """Merge settings.json with special handling for hooks and statusline."""
+         conflicts = []
+
+         if base is None:
+             base = {}
+         if user is None:
+             user = {}
+         if new is None:
+             new = {}
+
+         merged = {}
+
+         if "statusLine" in user:
+             merged["statusLine"] = user["statusLine"]
+             self.logger.debug("Preserved user statusLine")
+         elif "statusLine" in new:
+             merged["statusLine"] = new["statusLine"]
+
+         user_hooks = user.get("hooks", {})
+         new_hooks = new.get("hooks", {})
+         base_hooks = base.get("hooks", {})
+
+         merged_hooks = {}
+
+         for hook_type in set(list(user_hooks.keys()) + list(new_hooks.keys()) + list(base_hooks.keys())):
+             user_type_hooks = user_hooks.get(hook_type, [])
+             new_type_hooks = new_hooks.get(hook_type, [])
+             base_type_hooks = base_hooks.get(hook_type, [])
+
+             merged_type_hooks = user_type_hooks.copy()
+
+             for new_hook in new_type_hooks:
+                 if new_hook not in base_type_hooks and new_hook not in merged_type_hooks:
+                     merged_type_hooks.append(new_hook)
+                     self.logger.debug(f"Added new {hook_type} hook")
+
+             if merged_type_hooks:
+                 merged_hooks[hook_type] = merged_type_hooks
+
+         if merged_hooks:
+             merged["hooks"] = merged_hooks
+
+         for key in set(list(user.keys()) + list(new.keys()) + list(base.keys())):
+             if key in ("hooks", "statusLine"):
+                 continue
+
+             if key in user:
+                 merged[key] = user[key]
+             elif key in new:
+                 merged[key] = new[key]
+
+         return merged, conflicts
+
+     def update_hooks(
+         self,
+         new_hooks: Dict[str, str],
+         stravinsky_version: str
+     ) -> Tuple[bool, List[MergeConflict]]:
+         """Update hooks with 3-way merge and conflict detection."""
+         self.logger.info(f"Starting hooks update to version {stravinsky_version}")
+
+         hooks_dir = self.global_claude_dir / "hooks"
+         conflicts = []
+
+         backup_path = self._create_backup(hooks_dir, "hooks")
+
+         base_manifest = self._load_manifest("base")
+
+         updated_files = {}
+
+         for filename, new_content in new_hooks.items():
+             hook_path = hooks_dir / filename
+
+             base_content = None
+             user_content = self._read_file_safely(hook_path)
+
+             if base_manifest:
+                 base_file_hash = base_manifest.files.get(filename)
+                 if base_file_hash and backup_path:
+                     base_path = backup_path / filename
+                     base_content = self._read_file_safely(base_path)
+
+             conflict = self._detect_conflicts(base_content, user_content, new_content, filename)
+             if conflict:
+                 conflicts.append(conflict)
+                 self.logger.warning(f"Conflict detected in {filename}: {conflict.conflict_type}")
+
+             merged_content, has_conflict = self._merge_3way(
+                 base_content,
+                 user_content,
+                 new_content,
+                 filename
+             )
+
+             if self._write_file_safely(hook_path, merged_content):
+                 updated_files[filename] = self._hash_file(hook_path)
+                 if has_conflict:
+                     self.logger.warning(f"Updated {filename} with conflict markers")
+                 else:
+                     self.logger.info(f"Updated {filename}")
+             else:
+                 self.logger.error(f"Failed to write {filename}")
+                 return False, conflicts
+
+         new_manifest = UpdateManifest(
+             version=stravinsky_version,
+             timestamp=datetime.now().isoformat(),
+             files=updated_files
+         )
+
+         if not self._save_manifest(new_manifest, "base"):
+             return False, conflicts
+
+         self.logger.info(f"Hooks update completed ({len(updated_files)} files updated)")
+         return True, conflicts
+
+     def update_settings_json(self, new_settings: Dict[str, Any]) -> Tuple[bool, List[MergeConflict]]:
+         """Update settings.json with hook merging and statusline preservation."""
+         self.logger.info("Starting settings.json update")
+
+         settings_file = self.global_claude_dir / "settings.json"
+
+         self._create_backup(settings_file.parent, "settings")
+
+         user_settings = {}
+         if settings_file.exists():
+             try:
+                 user_settings = json.loads(settings_file.read_text())
+             except Exception as e:
+                 self.logger.error(f"Failed to parse settings.json: {e}")
+
+         base_settings = {}
+
+         merged_settings, conflicts = self._merge_settings_json(
+             base_settings or None,
+             user_settings or None,
+             new_settings or None
+         )
+
+         if self._write_file_safely(settings_file, json.dumps(merged_settings, indent=2)):
+             self.logger.info("Updated settings.json")
+             return True, conflicts
+         else:
+             self.logger.error("Failed to write settings.json")
+             return False, conflicts
+
+     def rollback(self, backup_timestamp: str) -> bool:
+         """Rollback to a previous backup."""
+         if self.dry_run:
+             self.logger.info(f"[DRY-RUN] Would rollback to {backup_timestamp}")
+             return True
+
+         self.logger.info(f"Rolling back to backup {backup_timestamp}")
+
+         backups = list(self.backup_dir.glob(f"*_{backup_timestamp}"))
+
+         if not backups:
+             self.logger.error(f"No backups found for timestamp {backup_timestamp}")
+             return False
+
+         success = True
+         for backup_path in backups:
+             try:
+                 if "hooks" in backup_path.name:
+                     restore_dir = self.global_claude_dir / "hooks"
+                 elif "settings" in backup_path.name:
+                     restore_dir = self.global_claude_dir
+                 else:
+                     continue
+
+                 if restore_dir.exists():
+                     shutil.rmtree(restore_dir)
+
+                 shutil.copytree(backup_path, restore_dir)
+                 self.logger.info(f"Restored from {backup_path}")
+             except Exception as e:
+                 self.logger.error(f"Failed to restore from backup: {e}")
+                 success = False
+
+         return success
+
+     def verify_integrity(self) -> Tuple[bool, List[str]]:
+         """Verify integrity of installed hooks and settings."""
+         issues = []
+         hooks_dir = self.global_claude_dir / "hooks"
+         settings_file = self.global_claude_dir / "settings.json"
+
+         if not hooks_dir.exists():
+             issues.append("Hooks directory doesn't exist")
+             return False, issues
+
+         if not settings_file.exists():
+             issues.append("settings.json doesn't exist")
+             return False, issues
+
+         try:
+             json.loads(settings_file.read_text())
+         except Exception as e:
+             issues.append(f"settings.json is invalid: {e}")
+             return False, issues
+
+         if not self._load_manifest("base"):
+             issues.append("Base manifest missing")
+
+         for hook_file in hooks_dir.glob("*.py"):
+             if not (hook_file.stat().st_mode & 0o111):
+                 issues.append(f"{hook_file.name} is not executable")
+
+         return len(issues) == 0, issues
+
+     def list_backups(self) -> List[Dict[str, Any]]:
+         """List all available backups."""
+         backups = []
+
+         if not self.backup_dir.exists():
+             return backups
+
+         for backup_path in sorted(self.backup_dir.iterdir(), reverse=True):
+             if backup_path.is_dir():
+                 stat = backup_path.stat()
+                 size_mb = sum(f.stat().st_size for f in backup_path.rglob('*') if f.is_file()) / (1024 * 1024)
+                 backups.append({
+                     "name": backup_path.name,
+                     "size_mb": size_mb,
+                     "created": datetime.fromtimestamp(stat.st_mtime).isoformat()
+                 })
+
+         return backups
+
+
+ def main():
+     """CLI entry point."""
+     import argparse
+
+     parser = argparse.ArgumentParser(description="Stravinsky Update Manager")
+     parser.add_argument("--dry-run", action="store_true", help="Don't make actual changes")
+     parser.add_argument("--verbose", action="store_true", help="Verbose logging")
+     parser.add_argument("--verify", action="store_true", help="Verify integrity")
+     parser.add_argument("--list-backups", action="store_true", help="List backups")
+     parser.add_argument("--rollback", type=str, help="Rollback to backup")
+
+     args = parser.parse_args()
+
+     manager = UpdateManager(dry_run=args.dry_run, verbose=args.verbose)
+
+     if args.verify:
+         is_valid, issues = manager.verify_integrity()
+         print(f"Integrity: {'✓ Valid' if is_valid else '✗ Invalid'}")
+         for issue in issues:
+             print(f"  - {issue}")
+         return 0 if is_valid else 1
+
+     if args.list_backups:
+         backups = manager.list_backups()
+         if not backups:
+             print("No backups found")
+         else:
+             print(f"Found {len(backups)} backups:")
+             for backup in backups:
+                 print(f"  {backup['name']} ({backup['size_mb']:.1f} MB)")
+         return 0
+
+     if args.rollback:
+         success = manager.rollback(args.rollback)
+         print(f"Rollback: {'✓ Success' if success else '✗ Failed'}")
+         return 0 if success else 1
+
+     print("Use --verify, --list-backups, or --rollback")
+     return 0
+
+
+ if __name__ == "__main__":
+     sys.exit(main())
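The new update_manager.py above can also be driven from Python rather than through the CLI in main(). Below is a minimal dry-run sketch, assuming the module is importable as mcp_bridge.update_manager (matching the file path in this diff); the hook filename and content are made up purely for illustration.

```python
# Minimal dry-run sketch of the UpdateManager API shown in this diff.
# Assumption: the module import path mirrors mcp_bridge/update_manager.py.
from mcp_bridge.update_manager import UpdateManager

manager = UpdateManager(dry_run=True, verbose=True)

# Propose a new version of one managed hook; with dry_run=True nothing is
# written, but conflicts against the user's current copy are still reported.
new_hooks = {
    "example_hook.py": "#!/usr/bin/env python3\nprint('updated hook')\n",  # hypothetical content
}
ok, conflicts = manager.update_hooks(new_hooks, stravinsky_version="0.4.18")
print(f"update ok={ok}, conflicts={len(conflicts)}")
for c in conflicts:
    print(f"  {c.file_path}: {c.conflict_type}")

# Sanity checks mirroring the --verify and --list-backups CLI flags.
is_valid, issues = manager.verify_integrity()
print(f"integrity ok={is_valid}; issues={issues}")
print(manager.list_backups())
```

The merge policy implemented above keeps the user's copy when only the user changed a file, takes the new copy when only upstream changed it, and otherwise falls back to line-based merging with conflict markers; dry-run mode exercises that logic without touching ~/.claude.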