gobby 0.2.7__py3-none-any.whl → 0.2.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/adapters/claude_code.py +96 -35
- gobby/adapters/gemini.py +140 -38
- gobby/agents/isolation.py +130 -0
- gobby/agents/registry.py +11 -0
- gobby/agents/session.py +1 -0
- gobby/agents/spawn_executor.py +43 -13
- gobby/agents/spawners/macos.py +26 -1
- gobby/cli/__init__.py +0 -2
- gobby/cli/memory.py +185 -0
- gobby/clones/git.py +177 -0
- gobby/config/skills.py +31 -0
- gobby/hooks/event_handlers.py +109 -10
- gobby/hooks/hook_manager.py +19 -1
- gobby/install/gemini/hooks/hook_dispatcher.py +74 -15
- gobby/mcp_proxy/instructions.py +2 -2
- gobby/mcp_proxy/registries.py +21 -4
- gobby/mcp_proxy/tools/agent_messaging.py +93 -44
- gobby/mcp_proxy/tools/agents.py +45 -9
- gobby/mcp_proxy/tools/artifacts.py +43 -9
- gobby/mcp_proxy/tools/sessions/_commits.py +31 -24
- gobby/mcp_proxy/tools/sessions/_crud.py +5 -5
- gobby/mcp_proxy/tools/sessions/_handoff.py +45 -41
- gobby/mcp_proxy/tools/sessions/_messages.py +35 -7
- gobby/mcp_proxy/tools/spawn_agent.py +44 -6
- gobby/mcp_proxy/tools/tasks/_context.py +18 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +13 -6
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +29 -14
- gobby/mcp_proxy/tools/tasks/_session.py +22 -7
- gobby/mcp_proxy/tools/workflows.py +84 -34
- gobby/mcp_proxy/tools/worktrees.py +32 -7
- gobby/memory/extractor.py +15 -1
- gobby/runner.py +13 -0
- gobby/servers/routes/mcp/hooks.py +50 -3
- gobby/servers/websocket.py +57 -1
- gobby/sessions/analyzer.py +2 -2
- gobby/sessions/manager.py +9 -0
- gobby/sessions/transcripts/gemini.py +100 -34
- gobby/storage/database.py +9 -2
- gobby/storage/memories.py +32 -21
- gobby/storage/migrations.py +23 -4
- gobby/storage/sessions.py +4 -2
- gobby/storage/skills.py +43 -3
- gobby/workflows/detection_helpers.py +38 -24
- gobby/workflows/enforcement/blocking.py +13 -1
- gobby/workflows/engine.py +93 -0
- gobby/workflows/evaluator.py +110 -0
- gobby/workflows/hooks.py +41 -0
- gobby/workflows/memory_actions.py +11 -0
- gobby/workflows/safe_evaluator.py +8 -0
- gobby/workflows/summary_actions.py +123 -50
- {gobby-0.2.7.dist-info → gobby-0.2.8.dist-info}/METADATA +1 -1
- {gobby-0.2.7.dist-info → gobby-0.2.8.dist-info}/RECORD +56 -80
- gobby/cli/tui.py +0 -34
- gobby/tui/__init__.py +0 -5
- gobby/tui/api_client.py +0 -278
- gobby/tui/app.py +0 -329
- gobby/tui/screens/__init__.py +0 -25
- gobby/tui/screens/agents.py +0 -333
- gobby/tui/screens/chat.py +0 -450
- gobby/tui/screens/dashboard.py +0 -377
- gobby/tui/screens/memory.py +0 -305
- gobby/tui/screens/metrics.py +0 -231
- gobby/tui/screens/orchestrator.py +0 -903
- gobby/tui/screens/sessions.py +0 -412
- gobby/tui/screens/tasks.py +0 -440
- gobby/tui/screens/workflows.py +0 -289
- gobby/tui/screens/worktrees.py +0 -174
- gobby/tui/widgets/__init__.py +0 -21
- gobby/tui/widgets/chat.py +0 -210
- gobby/tui/widgets/conductor.py +0 -104
- gobby/tui/widgets/menu.py +0 -132
- gobby/tui/widgets/message_panel.py +0 -160
- gobby/tui/widgets/review_gate.py +0 -224
- gobby/tui/widgets/task_tree.py +0 -99
- gobby/tui/widgets/token_budget.py +0 -166
- gobby/tui/ws_client.py +0 -258
- {gobby-0.2.7.dist-info → gobby-0.2.8.dist-info}/WHEEL +0 -0
- {gobby-0.2.7.dist-info → gobby-0.2.8.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.7.dist-info → gobby-0.2.8.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.7.dist-info → gobby-0.2.8.dist-info}/top_level.txt +0 -0
gobby/agents/spawn_executor.py
CHANGED
```diff
@@ -10,7 +10,11 @@ import logging
 from dataclasses import dataclass, field
 from typing import TYPE_CHECKING, Any, Literal, cast
 
-from gobby.agents.sandbox import
+from gobby.agents.sandbox import (
+    GeminiSandboxResolver,
+    SandboxConfig,
+    compute_sandbox_paths,
+)
 
 if TYPE_CHECKING:
     from gobby.agents.session import ChildSessionManager
@@ -63,7 +67,7 @@ class SpawnResult:
 
     success: bool
     run_id: str
-    child_session_id: str
+    child_session_id: str | None
    status: str
 
     # Optional result fields
@@ -219,16 +223,19 @@ async def _spawn_gemini_terminal(request: SpawnRequest) -> SpawnResult:
     """
     Spawn Gemini agent in terminal with preflight session capture.
 
-
-    1.
-    2.
-    3.
+    Uses preflight to capture Gemini's session_id before launching interactive mode:
+    1. Run `gemini --output-format stream-json` to capture Gemini's session_id
+    2. Pre-create Gobby session with parent_session_id linked and external_id set
+    3. Resume Gemini session with `-r {session_id}` flag
+
+    This approach ensures session linkage works without relying on env vars,
+    which don't pass through macOS's `open` command.
     """
     if request.session_manager is None:
         return SpawnResult(
             success=False,
             run_id=request.run_id,
-            child_session_id=
+            child_session_id=None,
             status="failed",
             error="session_manager is required for Gemini preflight",
         )
@@ -242,21 +249,31 @@ async def _spawn_gemini_terminal(request: SpawnRequest) -> SpawnResult:
             machine_id=request.machine_id or "unknown",
             workflow_name=request.workflow,
             git_branch=None,  # Will be detected by hook
+            prompt=request.prompt,
+            max_agent_depth=request.max_agent_depth,
         )
     except FileNotFoundError as e:
+        logger.error(
+            f"Gemini spawn failed - command not found: {e}",
+            extra={"project_id": request.project_id, "run_id": request.run_id},
+        )
         return SpawnResult(
             success=False,
             run_id=request.run_id,
-            child_session_id=
+            child_session_id=None,
             status="failed",
             error=str(e),
         )
     except Exception as e:
-        logger.error(
+        logger.error(
+            f"Gemini preflight capture failed: {e}",
+            extra={"project_id": request.project_id, "run_id": request.run_id},
+            exc_info=True,
+        )
         return SpawnResult(
             success=False,
             run_id=request.run_id,
-            child_session_id=
+            child_session_id=None,
             status="failed",
             error=f"Gemini preflight capture failed: {e}",
         )
@@ -265,20 +282,33 @@ async def _spawn_gemini_terminal(request: SpawnRequest) -> SpawnResult:
     gobby_session_id = spawn_context.session_id
     gemini_session_id = spawn_context.env_vars["GOBBY_GEMINI_EXTERNAL_ID"]
 
-    # Build command with
+    # Build command with resume (no env vars needed - session already linked)
     cmd = build_gemini_command_with_resume(
         gemini_external_id=gemini_session_id,
         prompt=request.prompt,
-        auto_approve=True,
+        auto_approve=True,
         gobby_session_id=gobby_session_id,
     )
 
+    # Resolve sandbox config if provided
+    sandbox_env: dict[str, str] = {}
+    if request.sandbox_config and request.sandbox_config.enabled:
+        resolver = GeminiSandboxResolver()
+        paths = compute_sandbox_paths(
+            config=request.sandbox_config,
+            workspace_path=request.cwd,
+        )
+        sandbox_args, sandbox_env = resolver.resolve(request.sandbox_config, paths)
+        # Append sandbox args to command (e.g., -s flag)
+        cmd.extend(sandbox_args)
+
     # Spawn in terminal
     terminal_spawner = TerminalSpawner()
     terminal_result = terminal_spawner.spawn(
         command=cmd,
         cwd=request.cwd,
         terminal=request.terminal,
+        env=sandbox_env if sandbox_env else None,
     )
 
     if not terminal_result.success:
@@ -293,7 +323,7 @@ async def _spawn_gemini_terminal(request: SpawnRequest) -> SpawnResult:
     return SpawnResult(
         success=True,
         run_id=f"gemini-{gemini_session_id[:8]}",
-        child_session_id=gobby_session_id,
+        child_session_id=gobby_session_id,  # Now properly set!
         status="pending",
         pid=terminal_result.pid,
         gemini_session_id=gemini_session_id,
```
gobby/agents/spawners/macos.py
CHANGED
```diff
@@ -31,6 +31,24 @@ class GhosttySpawner(TerminalSpawnerBase):
     def terminal_type(self) -> TerminalType:
         return TerminalType.GHOSTTY
 
+    def _is_ghostty_running(self) -> bool:
+        """Check if Ghostty is currently running on macOS."""
+        try:
+            result = subprocess.run(  # nosec B603, B607 - osascript is safe
+                [
+                    "/usr/bin/osascript",
+                    "-e",
+                    'tell application "System Events" to (name of processes) contains "Ghostty"',
+                ],
+                capture_output=True,
+                text=True,
+                timeout=5,
+            )
+            return result.stdout.strip().lower() == "true"
+        except Exception:
+            # If we can't determine, assume running to use safer -n behavior
+            return True
+
     def is_available(self) -> bool:
         config = get_tty_config().get_terminal_config("ghostty")
         if not config.enabled:
@@ -66,7 +84,14 @@ class GhosttySpawner(TerminalSpawnerBase):
             ghostty_args.extend(tty_config.options)
             ghostty_args.extend(["-e"] + command)
 
-
+            # Check if Ghostty is already running
+            # If running: use -n to open a new window
+            # If not running: omit -n to avoid double window on first launch
+            ghostty_running = self._is_ghostty_running()
+            if ghostty_running:
+                args = ["open", "-na", app_path, "--args"] + ghostty_args
+            else:
+                args = ["open", "-a", app_path, "--args"] + ghostty_args
         else:
             # On Linux/other platforms, use ghostty CLI directly
             cli_command = tty_config.command or "ghostty"
```
gobby/cli/__init__.py
CHANGED
```diff
@@ -24,7 +24,6 @@ from .projects import projects
 from .sessions import sessions
 from .skills import skills
 from .tasks import tasks
-from .tui import ui
 from .workflows import workflows
 from .worktrees import worktrees
 
@@ -70,4 +69,3 @@ cli.add_command(conductor)
 cli.add_command(hooks)
 cli.add_command(plugins)
 cli.add_command(webhooks)
-cli.add_command(ui)
```
gobby/cli/memory.py
CHANGED
```diff
@@ -282,6 +282,191 @@ def export_memories(
     click.echo(markdown)
 
 
+@memory.command("dedupe")
+@click.option("--dry-run", is_flag=True, help="Show duplicates without deleting")
+@click.pass_context
+def dedupe_memories(ctx: click.Context, dry_run: bool) -> None:
+    """Remove duplicate memories (same content, different IDs).
+
+    Identifies memories with identical content but different IDs (caused by
+    project_id variations) and removes duplicates, keeping the earliest one.
+
+    Examples:
+
+        gobby memory dedupe --dry-run  # Preview duplicates
+
+        gobby memory dedupe  # Remove duplicates
+    """
+    manager = get_memory_manager(ctx)
+
+    # Get all memories
+    memories = manager.list_memories(limit=10000)
+
+    if not memories:
+        click.echo("No memories found.")
+        return
+
+    # Group by normalized content
+    content_groups: dict[str, list[tuple[str, str, str | None]]] = {}
+    for m in memories:
+        normalized = m.content.strip()
+        if normalized not in content_groups:
+            content_groups[normalized] = []
+        content_groups[normalized].append((m.id, m.created_at, m.project_id))
+
+    # Find duplicates
+    duplicates_to_delete: list[str] = []
+    duplicate_count = 0
+
+    for content, entries in content_groups.items():
+        if len(entries) > 1:
+            duplicate_count += len(entries) - 1
+            # Sort by created_at to keep earliest
+            entries.sort(key=lambda x: x[1])
+            keeper = entries[0]
+            to_delete = entries[1:]
+
+            if dry_run:
+                click.echo(f"\nDuplicate content ({len(entries)} copies):")
+                click.echo(f"  Content: {content[:80]}{'...' if len(content) > 80 else ''}")
+                click.echo(f"  Keep: {keeper[0][:12]} (created: {keeper[1][:19]})")
+                for d in to_delete:
+                    click.echo(f"  Delete: {d[0][:12]} (created: {d[1][:19]}, project: {d[2]})")
+            else:
+                for d in to_delete:
+                    duplicates_to_delete.append(d[0])
+
+    if dry_run:
+        click.echo(f"\nFound {duplicate_count} duplicate memories.")
+        click.echo("Run without --dry-run to delete them.")
+    else:
+        # Delete duplicates
+        deleted = 0
+        for memory_id in duplicates_to_delete:
+            if manager.forget(memory_id):
+                deleted += 1
+
+        click.echo(f"Deleted {deleted} duplicate memories.")
+
+
+@memory.command("fix-null-project")
+@click.option("--dry-run", is_flag=True, help="Show affected memories without updating")
+@click.pass_context
+def fix_null_project(ctx: click.Context, dry_run: bool) -> None:
+    """Fix memories with NULL project_id from their source session.
+
+    Finds memories with source_type='session' and NULL project_id, then
+    looks up the source session to get the correct project_id.
+
+    Examples:
+
+        gobby memory fix-null-project --dry-run  # Preview changes
+
+        gobby memory fix-null-project  # Apply fixes
+    """
+    from gobby.storage.sessions import LocalSessionManager
+
+    db = LocalDatabase()
+    session_mgr = LocalSessionManager(db)
+
+    # Find memories with NULL project_id and session source
+    rows = db.fetchall(
+        """
+        SELECT id, content, source_session_id
+        FROM memories
+        WHERE project_id IS NULL AND source_type = 'session' AND source_session_id IS NOT NULL
+        """,
+        (),
+    )
+
+    if not rows:
+        click.echo("No memories with NULL project_id from sessions found.")
+        return
+
+    click.echo(f"Found {len(rows)} memories with NULL project_id from sessions.")
+
+    fixed = 0
+    for row in rows:
+        memory_id = row["id"]
+        session_id = row["source_session_id"]
+        content_preview = row["content"][:50] if row["content"] else ""
+
+        # Look up session to get project_id
+        session = session_mgr.get(session_id)
+        if session and session.project_id:
+            if dry_run:
+                click.echo(
+                    f"  Would fix {memory_id[:12]}: set project_id={session.project_id[:12]}"
+                )
+                click.echo(f"    Content: {content_preview}...")
+            else:
+                # Update the memory's project_id
+                with db.transaction() as conn:
+                    conn.execute(
+                        "UPDATE memories SET project_id = ? WHERE id = ?",
+                        (session.project_id, memory_id),
+                    )
+                fixed += 1
+        else:
+            if dry_run:
+                click.echo(
+                    f"  Cannot fix {memory_id[:12]}: session {session_id} not found or has no project_id"
+                )
+
+    if dry_run:
+        click.echo(f"\nWould fix {fixed} memories. Run without --dry-run to apply.")
+    else:
+        click.echo(f"Fixed {fixed} memories with project_id from their source sessions.")
+
+
+@memory.command("backup")
+@click.option(
+    "--output",
+    "-o",
+    "output_path",
+    type=click.Path(),
+    help="Output file path (default: .gobby/memories.jsonl)",
+)
+@click.pass_context
+def backup_memories(ctx: click.Context, output_path: str | None) -> None:
+    """Backup memories to JSONL file.
+
+    Exports all memories to a JSONL file for backup/disaster recovery.
+    This runs synchronously and can be used even when the daemon is not running.
+
+    Examples:
+
+        gobby memory backup  # Export to .gobby/memories.jsonl
+
+        gobby memory backup -o ~/backups/mem.jsonl  # Export to custom path
+    """
+    from pathlib import Path
+
+    from gobby.config.persistence import MemorySyncConfig
+    from gobby.sync.memories import MemoryBackupManager
+
+    manager = get_memory_manager(ctx)
+
+    # Create a backup manager with custom or default path
+    if output_path:
+        export_path = Path(output_path)
+    else:
+        export_path = Path(".gobby/memories.jsonl")
+
+    config = MemorySyncConfig(enabled=True, export_path=export_path)
+    backup_mgr = MemoryBackupManager(
+        db=manager.db,
+        memory_manager=manager,
+        config=config,
+    )
+
+    count = backup_mgr.backup_sync()
+    if count > 0:
+        click.echo(f"Backed up {count} memories to {export_path}")
+    else:
+        click.echo("No memories to backup.")
+
+
 def resolve_memory_id(manager: MemoryManager, memory_ref: str) -> str:
     """Resolve memory reference (UUID or prefix) to full ID."""
     # Try exact match first
```
gobby/clones/git.py
CHANGED
```diff
@@ -231,6 +231,89 @@ class CloneGitManager:
                 error=str(e),
             )
 
+    def full_clone(
+        self,
+        remote_url: str,
+        clone_path: str | Path,
+        branch: str = "main",
+    ) -> GitOperationResult:
+        """
+        Create a full (non-shallow) clone of a repository.
+
+        Args:
+            remote_url: URL of the remote repository (HTTPS or SSH)
+            clone_path: Path where clone will be created
+            branch: Branch to clone
+
+        Returns:
+            GitOperationResult with success status and message
+        """
+        clone_path = Path(clone_path)
+
+        # Check if path already exists
+        if clone_path.exists():
+            return GitOperationResult(
+                success=False,
+                message=f"Path already exists: {clone_path}",
+            )
+
+        # Ensure parent directory exists
+        clone_path.parent.mkdir(parents=True, exist_ok=True)
+
+        try:
+            # Build clone command without --depth (full clone)
+            cmd = [
+                "git",
+                "clone",
+                "-b",
+                branch,
+                remote_url,
+                str(clone_path),
+            ]
+
+            # Sanitize URL in command before logging to avoid exposing credentials
+            safe_cmd = cmd.copy()
+            safe_cmd[safe_cmd.index(remote_url)] = _sanitize_url(remote_url)
+            logger.debug(f"Running: {' '.join(safe_cmd)}")
+
+            result = subprocess.run(  # nosec B603 B607 - cmd built from hardcoded git arguments
+                cmd,
+                capture_output=True,
+                text=True,
+                timeout=600,  # 10 minutes for full clone
+            )
+
+            if result.returncode == 0:
+                return GitOperationResult(
+                    success=True,
+                    message=f"Successfully cloned to {clone_path}",
+                    output=result.stdout,
+                )
+            else:
+                return GitOperationResult(
+                    success=False,
+                    message=f"Clone failed: {result.stderr}",
+                    error=result.stderr,
+                )
+
+        except subprocess.TimeoutExpired:
+            # Clean up partial clone
+            if clone_path.exists():
+                shutil.rmtree(clone_path, ignore_errors=True)
+            return GitOperationResult(
+                success=False,
+                message="Git clone timed out",
+            )
+        except Exception as e:
+            # Clean up partial clone
+            if clone_path.exists():
+                shutil.rmtree(clone_path, ignore_errors=True)
+            return GitOperationResult(
+                success=False,
+                message=f"Error cloning repository: {e}",
+                error=str(e),
+            )
+
     def sync_clone(
         self,
         clone_path: str | Path,
@@ -422,6 +505,100 @@ class CloneGitManager:
             logger.error(f"Error getting clone status: {e}")
             return None
 
+    def create_clone(
+        self,
+        clone_path: str | Path,
+        branch_name: str,
+        base_branch: str = "main",
+        shallow: bool = True,
+    ) -> GitOperationResult:
+        """
+        Create a clone for isolated work.
+
+        This is the high-level API used by CloneIsolationHandler.
+        It gets the remote URL from the current repository and creates
+        either a shallow or full clone at the specified path.
+
+        Args:
+            clone_path: Path where clone will be created
+            branch_name: Branch to create/checkout in the clone
+            base_branch: Base branch to clone from (default: main)
+            shallow: Whether to create a shallow clone (default: True)
+
+        Returns:
+            GitOperationResult with success status and message
+        """
+        # Get remote URL from current repo
+        remote_url = self.get_remote_url()
+        if not remote_url:
+            return GitOperationResult(
+                success=False,
+                message="Could not get remote URL from repository",
+                error="no_remote_url",
+            )
+
+        # Create clone (shallow or full based on parameter)
+        if shallow:
+            result = self.shallow_clone(
+                remote_url=remote_url,
+                clone_path=clone_path,
+                branch=base_branch,
+                depth=1,
+            )
+        else:
+            result = self.full_clone(
+                remote_url=remote_url,
+                clone_path=clone_path,
+                branch=base_branch,
+            )
+
+        if not result.success:
+            return result
+
+        # If branch_name differs from base_branch, create and checkout the new branch
+        if branch_name != base_branch:
+            try:
+                # Create new branch from base
+                create_result = self._run_git(
+                    ["checkout", "-b", branch_name],
+                    cwd=clone_path,
+                    timeout=30,
+                )
+                if create_result.returncode != 0:
+                    # Clean up the clone on branch creation failure
+                    try:
+                        if Path(clone_path).exists():
+                            shutil.rmtree(clone_path)
+                    except Exception as cleanup_err:
+                        logger.warning(
+                            f"Failed to clean up clone after branch creation failure: {cleanup_err}"
+                        )
+                    return GitOperationResult(
+                        success=False,
+                        message=f"Failed to create branch {branch_name}: {create_result.stderr}",
+                        error=create_result.stderr,
+                    )
+            except Exception as e:
+                # Clean up the clone on exception
+                try:
+                    if Path(clone_path).exists():
+                        shutil.rmtree(clone_path)
+                except Exception as cleanup_err:
+                    logger.warning(
+                        f"Failed to clean up clone after branch creation error: {cleanup_err}"
+                    )
+                return GitOperationResult(
+                    success=False,
+                    message=f"Error creating branch: {e}",
+                    error=str(e),
+                )
+
+        return GitOperationResult(
+            success=True,
+            message=f"Successfully created clone at {clone_path} on branch {branch_name}",
+            output=result.output,
+        )
+
     def merge_branch(
         self,
         source_branch: str,
```
gobby/config/skills.py
CHANGED
```diff
@@ -11,6 +11,37 @@ from typing import Literal
 from pydantic import BaseModel, Field, field_validator
 
 
+class HubConfig(BaseModel):
+    """
+    Configuration for a skill hub or collection.
+    """
+
+    type: Literal["clawdhub", "skillhub", "github-collection"] = Field(
+        ...,
+        description="Type of the hub: 'clawdhub', 'skillhub', or 'github-collection'",
+    )
+
+    base_url: str | None = Field(
+        default=None,
+        description="Base URL for the hub",
+    )
+
+    repo: str | None = Field(
+        default=None,
+        description="GitHub repository (e.g. 'owner/repo')",
+    )
+
+    branch: str | None = Field(
+        default=None,
+        description="Git branch to use",
+    )
+
+    auth_key_name: str | None = Field(
+        default=None,
+        description="Environment variable name for auth key",
+    )
+
+
 class SkillsConfig(BaseModel):
     """
     Configuration for skill injection and discovery.
```