gobby 0.2.5__py3-none-any.whl → 0.2.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (148)
  1. gobby/adapters/claude_code.py +13 -4
  2. gobby/adapters/codex.py +43 -3
  3. gobby/agents/runner.py +8 -0
  4. gobby/cli/__init__.py +6 -0
  5. gobby/cli/clones.py +419 -0
  6. gobby/cli/conductor.py +266 -0
  7. gobby/cli/installers/antigravity.py +3 -9
  8. gobby/cli/installers/claude.py +9 -9
  9. gobby/cli/installers/codex.py +2 -8
  10. gobby/cli/installers/gemini.py +2 -8
  11. gobby/cli/installers/shared.py +71 -8
  12. gobby/cli/skills.py +858 -0
  13. gobby/cli/tasks/ai.py +0 -440
  14. gobby/cli/tasks/crud.py +44 -6
  15. gobby/cli/tasks/main.py +0 -4
  16. gobby/cli/tui.py +2 -2
  17. gobby/cli/utils.py +3 -3
  18. gobby/clones/__init__.py +13 -0
  19. gobby/clones/git.py +547 -0
  20. gobby/conductor/__init__.py +16 -0
  21. gobby/conductor/alerts.py +135 -0
  22. gobby/conductor/loop.py +164 -0
  23. gobby/conductor/monitors/__init__.py +11 -0
  24. gobby/conductor/monitors/agents.py +116 -0
  25. gobby/conductor/monitors/tasks.py +155 -0
  26. gobby/conductor/pricing.py +234 -0
  27. gobby/conductor/token_tracker.py +160 -0
  28. gobby/config/app.py +63 -1
  29. gobby/config/search.py +110 -0
  30. gobby/config/servers.py +1 -1
  31. gobby/config/skills.py +43 -0
  32. gobby/config/tasks.py +6 -14
  33. gobby/hooks/event_handlers.py +145 -2
  34. gobby/hooks/hook_manager.py +48 -2
  35. gobby/hooks/skill_manager.py +130 -0
  36. gobby/install/claude/hooks/hook_dispatcher.py +4 -4
  37. gobby/install/codex/hooks/hook_dispatcher.py +1 -1
  38. gobby/install/gemini/hooks/hook_dispatcher.py +87 -12
  39. gobby/llm/claude.py +22 -34
  40. gobby/llm/claude_executor.py +46 -256
  41. gobby/llm/codex_executor.py +59 -291
  42. gobby/llm/executor.py +21 -0
  43. gobby/llm/gemini.py +134 -110
  44. gobby/llm/litellm_executor.py +143 -6
  45. gobby/llm/resolver.py +95 -33
  46. gobby/mcp_proxy/instructions.py +54 -0
  47. gobby/mcp_proxy/models.py +15 -0
  48. gobby/mcp_proxy/registries.py +68 -5
  49. gobby/mcp_proxy/server.py +33 -3
  50. gobby/mcp_proxy/services/tool_proxy.py +81 -1
  51. gobby/mcp_proxy/stdio.py +2 -1
  52. gobby/mcp_proxy/tools/__init__.py +0 -2
  53. gobby/mcp_proxy/tools/agent_messaging.py +317 -0
  54. gobby/mcp_proxy/tools/clones.py +903 -0
  55. gobby/mcp_proxy/tools/memory.py +1 -24
  56. gobby/mcp_proxy/tools/metrics.py +65 -1
  57. gobby/mcp_proxy/tools/orchestration/__init__.py +3 -0
  58. gobby/mcp_proxy/tools/orchestration/cleanup.py +151 -0
  59. gobby/mcp_proxy/tools/orchestration/wait.py +467 -0
  60. gobby/mcp_proxy/tools/session_messages.py +1 -2
  61. gobby/mcp_proxy/tools/skills/__init__.py +631 -0
  62. gobby/mcp_proxy/tools/task_orchestration.py +7 -0
  63. gobby/mcp_proxy/tools/task_readiness.py +14 -0
  64. gobby/mcp_proxy/tools/task_sync.py +1 -1
  65. gobby/mcp_proxy/tools/tasks/_context.py +0 -20
  66. gobby/mcp_proxy/tools/tasks/_crud.py +91 -4
  67. gobby/mcp_proxy/tools/tasks/_expansion.py +348 -0
  68. gobby/mcp_proxy/tools/tasks/_factory.py +6 -16
  69. gobby/mcp_proxy/tools/tasks/_lifecycle.py +60 -29
  70. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +18 -29
  71. gobby/mcp_proxy/tools/workflows.py +1 -1
  72. gobby/mcp_proxy/tools/worktrees.py +5 -0
  73. gobby/memory/backends/__init__.py +6 -1
  74. gobby/memory/backends/mem0.py +6 -1
  75. gobby/memory/extractor.py +477 -0
  76. gobby/memory/manager.py +11 -2
  77. gobby/prompts/defaults/handoff/compact.md +63 -0
  78. gobby/prompts/defaults/handoff/session_end.md +57 -0
  79. gobby/prompts/defaults/memory/extract.md +61 -0
  80. gobby/runner.py +37 -16
  81. gobby/search/__init__.py +48 -6
  82. gobby/search/backends/__init__.py +159 -0
  83. gobby/search/backends/embedding.py +225 -0
  84. gobby/search/embeddings.py +238 -0
  85. gobby/search/models.py +148 -0
  86. gobby/search/unified.py +496 -0
  87. gobby/servers/http.py +23 -8
  88. gobby/servers/routes/admin.py +280 -0
  89. gobby/servers/routes/mcp/tools.py +241 -52
  90. gobby/servers/websocket.py +2 -2
  91. gobby/sessions/analyzer.py +2 -0
  92. gobby/sessions/transcripts/base.py +1 -0
  93. gobby/sessions/transcripts/claude.py +64 -5
  94. gobby/skills/__init__.py +91 -0
  95. gobby/skills/loader.py +685 -0
  96. gobby/skills/manager.py +384 -0
  97. gobby/skills/parser.py +258 -0
  98. gobby/skills/search.py +463 -0
  99. gobby/skills/sync.py +119 -0
  100. gobby/skills/updater.py +385 -0
  101. gobby/skills/validator.py +368 -0
  102. gobby/storage/clones.py +378 -0
  103. gobby/storage/database.py +1 -1
  104. gobby/storage/memories.py +43 -13
  105. gobby/storage/migrations.py +180 -6
  106. gobby/storage/sessions.py +73 -0
  107. gobby/storage/skills.py +749 -0
  108. gobby/storage/tasks/_crud.py +4 -4
  109. gobby/storage/tasks/_lifecycle.py +41 -6
  110. gobby/storage/tasks/_manager.py +14 -5
  111. gobby/storage/tasks/_models.py +8 -3
  112. gobby/sync/memories.py +39 -4
  113. gobby/sync/tasks.py +83 -6
  114. gobby/tasks/__init__.py +1 -2
  115. gobby/tasks/validation.py +24 -15
  116. gobby/tui/api_client.py +4 -7
  117. gobby/tui/app.py +5 -3
  118. gobby/tui/screens/orchestrator.py +1 -2
  119. gobby/tui/screens/tasks.py +2 -4
  120. gobby/tui/ws_client.py +1 -1
  121. gobby/utils/daemon_client.py +2 -2
  122. gobby/workflows/actions.py +84 -2
  123. gobby/workflows/context_actions.py +43 -0
  124. gobby/workflows/detection_helpers.py +115 -31
  125. gobby/workflows/engine.py +13 -2
  126. gobby/workflows/lifecycle_evaluator.py +29 -1
  127. gobby/workflows/loader.py +19 -6
  128. gobby/workflows/memory_actions.py +74 -0
  129. gobby/workflows/summary_actions.py +17 -0
  130. gobby/workflows/task_enforcement_actions.py +448 -6
  131. {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/METADATA +82 -21
  132. {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/RECORD +136 -107
  133. gobby/install/codex/prompts/forget.md +0 -7
  134. gobby/install/codex/prompts/memories.md +0 -7
  135. gobby/install/codex/prompts/recall.md +0 -7
  136. gobby/install/codex/prompts/remember.md +0 -13
  137. gobby/llm/gemini_executor.py +0 -339
  138. gobby/mcp_proxy/tools/task_expansion.py +0 -591
  139. gobby/tasks/context.py +0 -747
  140. gobby/tasks/criteria.py +0 -342
  141. gobby/tasks/expansion.py +0 -626
  142. gobby/tasks/prompts/expand.py +0 -327
  143. gobby/tasks/research.py +0 -421
  144. gobby/tasks/tdd.py +0 -352
  145. {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/WHEEL +0 -0
  146. {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/entry_points.txt +0 -0
  147. {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/licenses/LICENSE.md +0 -0
  148. {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,378 @@
1
+ """Local clone storage manager.
2
+
3
+ Manages local git clones for parallel development, distinct from worktrees.
4
+ Clones are full repository copies while worktrees share a single .git directory.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import logging
10
+ from dataclasses import dataclass
11
+ from datetime import UTC, datetime
12
+ from enum import Enum
13
+ from typing import Any
14
+
15
+ from gobby.storage.database import DatabaseProtocol
16
+ from gobby.utils.id import generate_prefixed_id
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
class CloneStatus(str, Enum):
    """Lifecycle states a clone record can be in.

    Inherits from ``str`` so members compare equal to their plain string
    values and store naturally in the TEXT status column.
    """

    ACTIVE = "active"      # clone is usable
    SYNCING = "syncing"    # a sync operation is in progress
    STALE = "stale"        # clone has fallen behind / is inactive
    CLEANUP = "cleanup"    # clone is flagged for removal
28
+
29
+
30
@dataclass
class Clone:
    """Row model for one entry in the ``clones`` table."""

    id: str
    project_id: str
    branch_name: str
    clone_path: str
    base_branch: str
    task_id: str | None
    agent_session_id: str | None
    status: str
    remote_url: str | None
    last_sync_at: str | None
    cleanup_after: str | None
    created_at: str
    updated_at: str

    # Canonical column order shared by row hydration and serialization.
    # (No annotation, so the dataclass machinery ignores it as a field.)
    _FIELDS = (
        "id",
        "project_id",
        "branch_name",
        "clone_path",
        "base_branch",
        "task_id",
        "agent_session_id",
        "status",
        "remote_url",
        "last_sync_at",
        "cleanup_after",
        "created_at",
        "updated_at",
    )

    @classmethod
    def from_row(cls, row: Any) -> Clone:
        """Hydrate a Clone from a mapping-style database row."""
        return cls(**{name: row[name] for name in cls._FIELDS})

    def to_dict(self) -> dict[str, Any]:
        """Serialize this clone to a plain dictionary (column order preserved)."""
        return {name: getattr(self, name) for name in self._FIELDS}
84
+
85
+
86
class LocalCloneManager:
    """Persistence layer for records in the ``clones`` table."""

    # Allowlist of mutable columns; update() rejects anything else so the
    # dynamically built SET clause can never carry attacker-chosen SQL.
    _VALID_UPDATE_FIELDS = frozenset(
        {
            "branch_name",
            "base_branch",
            "clone_path",
            "status",
            "agent_session_id",
            "task_id",
            "remote_url",
            "last_sync_at",
            "cleanup_after",
            "updated_at",
        }
    )

    def __init__(self, db: DatabaseProtocol):
        """Keep the database handle used for all queries."""
        self.db = db

    def create(
        self,
        project_id: str,
        branch_name: str,
        clone_path: str,
        base_branch: str = "main",
        task_id: str | None = None,
        agent_session_id: str | None = None,
        remote_url: str | None = None,
        cleanup_after: str | None = None,
    ) -> Clone:
        """
        Insert a new clone record and return it.

        Args:
            project_id: Owning project ID
            branch_name: Git branch checked out in the clone
            clone_path: Absolute path to the clone directory
            base_branch: Branch the clone was created from
            task_id: Optional task ID to link
            agent_session_id: Optional session ID that owns this clone
            remote_url: Optional remote URL of the repository
            cleanup_after: Optional ISO timestamp for automatic cleanup

        Returns:
            Created Clone instance (status starts as "active")
        """
        stamp = datetime.now(UTC).isoformat()
        # One ordered mapping drives both the INSERT parameters and the
        # returned model, so the two can never drift apart.
        record: dict[str, Any] = {
            "id": generate_prefixed_id("clone", length=6),
            "project_id": project_id,
            "branch_name": branch_name,
            "clone_path": clone_path,
            "base_branch": base_branch,
            "task_id": task_id,
            "agent_session_id": agent_session_id,
            "status": CloneStatus.ACTIVE.value,
            "remote_url": remote_url,
            "last_sync_at": None,
            "cleanup_after": cleanup_after,
            "created_at": stamp,
            "updated_at": stamp,
        }

        self.db.execute(
            """
            INSERT INTO clones (
                id, project_id, branch_name, clone_path, base_branch,
                task_id, agent_session_id, status, remote_url,
                last_sync_at, cleanup_after, created_at, updated_at
            )
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            tuple(record.values()),
        )

        return Clone(**record)

    def _fetch_single(self, query: str, params: tuple[Any, ...]) -> Clone | None:
        """Run a query expected to yield at most one row; wrap it if found."""
        row = self.db.fetchone(query, params)
        return Clone.from_row(row) if row else None

    def get(self, clone_id: str) -> Clone | None:
        """Get clone by ID."""
        return self._fetch_single("SELECT * FROM clones WHERE id = ?", (clone_id,))

    def get_by_task(self, task_id: str) -> Clone | None:
        """Get clone linked to a task."""
        return self._fetch_single("SELECT * FROM clones WHERE task_id = ?", (task_id,))

    def get_by_path(self, clone_path: str) -> Clone | None:
        """Get clone by filesystem path."""
        return self._fetch_single("SELECT * FROM clones WHERE clone_path = ?", (clone_path,))

    def get_by_branch(self, project_id: str, branch_name: str) -> Clone | None:
        """Get clone by project and branch name."""
        return self._fetch_single(
            "SELECT * FROM clones WHERE project_id = ? AND branch_name = ?",
            (project_id, branch_name),
        )

    def list_clones(
        self,
        project_id: str | None = None,
        status: str | None = None,
        agent_session_id: str | None = None,
        limit: int = 50,
    ) -> list[Clone]:
        """
        List clones, newest first, with optional filters.

        Args:
            project_id: Filter by project
            status: Filter by status
            agent_session_id: Filter by owning session
            limit: Maximum number of results

        Returns:
            List of Clone instances ordered by created_at descending
        """
        filters: list[tuple[str, Any]] = []
        if project_id:
            filters.append(("project_id = ?", project_id))
        if status:
            filters.append(("status = ?", status))
        if agent_session_id:
            filters.append(("agent_session_id = ?", agent_session_id))

        where_clause = " AND ".join(cond for cond, _ in filters) or "1=1"
        args = [value for _, value in filters]
        args.append(limit)

        # nosec B608: where_clause is assembled only from the hardcoded
        # condition strings above; all values travel as parameters.
        rows = self.db.fetchall(
            f"""
            SELECT * FROM clones
            WHERE {where_clause}
            ORDER BY created_at DESC
            LIMIT ?
            """,  # nosec B608
            tuple(args),
        )
        return [Clone.from_row(row) for row in rows]

    def update(self, clone_id: str, **fields: Any) -> Clone | None:
        """
        Apply field updates to a clone and return the refreshed row.

        Args:
            clone_id: Clone ID to update
            **fields: Fields to update (must be valid column names)

        Returns:
            Updated Clone or None if not found

        Raises:
            ValueError: If any field name is not in the allowlist
        """
        if not fields:
            return self.get(clone_id)

        # Reject any name outside the allowlist before touching SQL.
        unknown = set(fields.keys()) - self._VALID_UPDATE_FIELDS
        if unknown:
            raise ValueError(f"Invalid field names: {unknown}")

        # Every successful update also refreshes the modification stamp.
        fields["updated_at"] = datetime.now(UTC).isoformat()

        # nosec B608: keys were just validated against the allowlist.
        assignments = ", ".join(f"{key} = ?" for key in fields.keys())
        self.db.execute(
            f"UPDATE clones SET {assignments} WHERE id = ?",  # nosec B608
            (*fields.values(), clone_id),
        )

        return self.get(clone_id)

    def delete(self, clone_id: str) -> bool:
        """
        Delete a clone record.

        Args:
            clone_id: Clone ID to delete

        Returns:
            True if deleted, False if not found
        """
        result = self.db.execute("DELETE FROM clones WHERE id = ?", (clone_id,))
        return result.rowcount > 0

    # --- status transitions -------------------------------------------------

    def mark_syncing(self, clone_id: str) -> Clone | None:
        """Move the clone into the "syncing" state; None if not found."""
        return self.update(clone_id, status=CloneStatus.SYNCING.value)

    def mark_stale(self, clone_id: str) -> Clone | None:
        """Move the clone into the "stale" (inactive) state; None if not found."""
        return self.update(clone_id, status=CloneStatus.STALE.value)

    def mark_cleanup(self, clone_id: str) -> Clone | None:
        """Flag the clone for cleanup; None if not found."""
        return self.update(clone_id, status=CloneStatus.CLEANUP.value)

    def record_sync(self, clone_id: str) -> Clone | None:
        """Record a completed sync: back to "active" with a fresh sync time."""
        return self.update(
            clone_id,
            status=CloneStatus.ACTIVE.value,
            last_sync_at=datetime.now(UTC).isoformat(),
        )

    def claim(self, clone_id: str, session_id: str) -> Clone | None:
        """Assign ownership of the clone to an agent session; None if not found."""
        return self.update(clone_id, agent_session_id=session_id)

    def release(self, clone_id: str) -> Clone | None:
        """Clear the clone's owning session; None if not found."""
        return self.update(clone_id, agent_session_id=None)
gobby/storage/database.py CHANGED
@@ -21,7 +21,7 @@ from typing import TYPE_CHECKING, Any, Protocol, cast, runtime_checkable
21
21
 
22
22
def _adapt_datetime(val: datetime) -> str:
    """Adapt datetime to ISO format string for SQLite storage.

    Emits the default ISO-8601 form with a "T" date/time separator.
    NOTE(review): this previously used a space separator; rows written by
    older versions may still contain space-separated timestamps.
    """
    return val.isoformat()
25
25
 
26
26
 
27
27
  def _adapt_date(val: date) -> str:
gobby/storage/memories.py CHANGED
@@ -138,10 +138,18 @@ class LocalMemoryManager:
138
138
  tags: list[str] | None = None,
139
139
  media: str | None = None,
140
140
  ) -> Memory:
141
+ # Validate that content is not empty
142
+ if not content or not content.strip():
143
+ logger.warning("Skipping memory creation: empty content provided")
144
+ raise ValueError("Memory content cannot be empty")
145
+
141
146
  now = datetime.now(UTC).isoformat()
142
- # Ensure consistent ID for same content/project to avoid dupes?
143
- # Actually random/content-based might be better. Let's use content.
144
- memory_id = generate_prefixed_id("mm", content + str(project_id))
147
+ # Normalize content for consistent ID generation (avoid duplicates from
148
+ # whitespace differences or project_id inconsistency)
149
+ normalized_content = content.strip()
150
+ project_str = project_id if project_id else ""
151
+ # Use delimiter to prevent collisions (e.g., "abc" + "def" vs "abcd" + "ef")
152
+ memory_id = generate_prefixed_id("mm", f"{normalized_content}||{project_str}")
145
153
 
146
154
  # Check if memory already exists to avoid duplicate insert errors
147
155
  existing_row = self.db.fetchone("SELECT * FROM memories WHERE id = ?", (memory_id,))
@@ -190,18 +198,40 @@ class LocalMemoryManager:
190
198
 
191
199
  def content_exists(self, content: str, project_id: str | None = None) -> bool:
192
200
  """Check if a memory with identical content already exists."""
193
- if project_id:
194
- row = self.db.fetchone(
195
- "SELECT 1 FROM memories WHERE content = ? AND project_id = ?",
196
- (content, project_id),
197
- )
198
- else:
199
- row = self.db.fetchone(
200
- "SELECT 1 FROM memories WHERE content = ? AND project_id IS NULL",
201
- (content,),
202
- )
201
+ # Normalize content same way as ID generation in create_memory
202
+ normalized_content = content.strip()
203
+ project_str = project_id if project_id else ""
204
+ # Use delimiter to match create_memory ID generation
205
+ memory_id = generate_prefixed_id("mm", f"{normalized_content}||{project_str}")
206
+
207
+ # Check by ID (content-hash based) for consistent dedup
208
+ row = self.db.fetchone("SELECT 1 FROM memories WHERE id = ?", (memory_id,))
203
209
  return row is not None
204
210
 
211
+ def get_memory_by_content(self, content: str, project_id: str | None = None) -> Memory | None:
212
+ """Get a memory by its exact content, using the content-derived ID.
213
+
214
+ This provides a reliable way to fetch an existing memory without
215
+ relying on search result ordering.
216
+
217
+ Args:
218
+ content: The exact content to look up (will be normalized)
219
+ project_id: Optional project ID for scoping
220
+
221
+ Returns:
222
+ The Memory object if found, None otherwise
223
+ """
224
+ # Normalize content same way as ID generation in create_memory
225
+ normalized_content = content.strip()
226
+ project_str = project_id if project_id else ""
227
+ # Use delimiter to match create_memory ID generation
228
+ memory_id = generate_prefixed_id("mm", f"{normalized_content}||{project_str}")
229
+
230
+ try:
231
+ return self.get_memory(memory_id)
232
+ except ValueError:
233
+ return None
234
+
205
235
  def update_memory(
206
236
  self,
207
237
  memory_id: str,
@@ -358,7 +358,7 @@ CREATE TABLE tasks (
358
358
  agent_name TEXT,
359
359
  reference_doc TEXT,
360
360
  is_expanded INTEGER DEFAULT 0,
361
- is_tdd_applied INTEGER DEFAULT 0,
361
+ expansion_status TEXT DEFAULT 'none',
362
362
  requires_user_review INTEGER DEFAULT 0,
363
363
  accepted_by_user INTEGER DEFAULT 0,
364
364
  created_at TEXT NOT NULL,
@@ -606,7 +606,11 @@ def _migrate_add_reference_doc(db: LocalDatabase) -> None:
606
606
 
607
607
 
608
608
  def _migrate_add_boolean_columns(db: LocalDatabase) -> None:
609
- """Add is_enriched, is_expanded, is_tdd_applied columns to tasks table."""
609
+ """Add is_enriched and is_expanded columns to tasks table.
610
+
611
+ Note: is_tdd_applied was previously added here but is now deprecated and
612
+ removed in migration 74. This migration only adds is_enriched and is_expanded.
613
+ """
610
614
  row = db.fetchone("SELECT sql FROM sqlite_master WHERE type='table' AND name='tasks'")
611
615
  if not row:
612
616
  return
@@ -622,10 +626,6 @@ def _migrate_add_boolean_columns(db: LocalDatabase) -> None:
622
626
  db.execute("ALTER TABLE tasks ADD COLUMN is_expanded INTEGER DEFAULT 0")
623
627
  logger.info("Added is_expanded column to tasks table")
624
628
 
625
- if "is_tdd_applied" not in sql_lower:
626
- db.execute("ALTER TABLE tasks ADD COLUMN is_tdd_applied INTEGER DEFAULT 0")
627
- logger.info("Added is_tdd_applied column to tasks table")
628
-
629
629
 
630
630
  def _migrate_add_review_columns(db: LocalDatabase) -> None:
631
631
  """Add requires_user_review and accepted_by_user columns for review status support."""
@@ -718,6 +718,168 @@ def _migrate_add_media_column(db: LocalDatabase) -> None:
718
718
  logger.debug("media column already exists, skipping")
719
719
 
720
720
 
721
def _migrate_add_expansion_status(db: LocalDatabase) -> None:
    """Add the expansion_status column used by skill-based task expansion."""
    table = db.fetchone("SELECT sql FROM sqlite_master WHERE type='table' AND name='tasks'")
    # Guard: nothing to do when the table is missing or already migrated.
    if not table or "expansion_status" in table["sql"].lower():
        logger.debug("expansion_status column already exists, skipping")
        return
    db.execute("ALTER TABLE tasks ADD COLUMN expansion_status TEXT DEFAULT 'none'")
    logger.info("Added expansion_status column to tasks table")
+
730
+
731
def _migrate_add_skills_table(db: LocalDatabase) -> None:
    """Create the skills table for Agent Skills spec compliant storage.

    Skills provide structured instructions for AI agents following the
    Agent Skills specification (agentskills.io) with Gobby-specific
    extensions. No-op when the table already exists.
    """
    if db.fetchone("SELECT name FROM sqlite_master WHERE type='table' AND name='skills'"):
        logger.debug("skills table already exists, skipping")
        return

    db.execute("""
        CREATE TABLE skills (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            description TEXT NOT NULL,
            content TEXT NOT NULL,
            version TEXT,
            license TEXT,
            compatibility TEXT,
            allowed_tools TEXT,
            metadata TEXT,
            source_path TEXT,
            source_type TEXT,
            source_ref TEXT,
            enabled INTEGER DEFAULT 1,
            project_id TEXT REFERENCES projects(id) ON DELETE CASCADE,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
    """)

    # Lookup indexes, plus two uniqueness guarantees: name unique within a
    # project, and a partial index for global skills — needed because
    # NULL != NULL in SQL, so the composite index cannot cover
    # project_id IS NULL rows.
    for ddl in (
        "CREATE INDEX idx_skills_name ON skills(name)",
        "CREATE INDEX idx_skills_project_id ON skills(project_id)",
        "CREATE INDEX idx_skills_enabled ON skills(enabled)",
        "CREATE UNIQUE INDEX idx_skills_name_project ON skills(name, project_id)",
        "CREATE UNIQUE INDEX idx_skills_name_global ON skills(name) WHERE project_id IS NULL",
    ):
        db.execute(ddl)

    logger.info("Created skills table with indexes")
+
779
+
780
def _migrate_add_skills_global_unique_index(db: LocalDatabase) -> None:
    """Backfill the partial unique index for global skills (project_id IS NULL).

    idx_skills_name_project cannot enforce uniqueness for global skills
    because SQL treats NULL != NULL; this partial index closes that gap
    for databases created before the index existed (pre-v70 schemas).
    """
    # Fresh databases from v70+ already have the index.
    if db.fetchone(
        "SELECT name FROM sqlite_master WHERE type='index' AND name='idx_skills_name_global'"
    ):
        logger.debug("idx_skills_name_global index already exists, skipping")
        return

    # Old versions may never have created the skills table at all.
    if not db.fetchone("SELECT name FROM sqlite_master WHERE type='table' AND name='skills'"):
        logger.debug("skills table does not exist, skipping")
        return

    db.execute(
        "CREATE UNIQUE INDEX idx_skills_name_global ON skills(name) WHERE project_id IS NULL"
    )
    logger.debug("Added idx_skills_name_global partial unique index to skills table")
+
805
+
806
def _migrate_add_clones_table(db: LocalDatabase) -> None:
    """Create the clones table for local git clone management.

    Clones are full repository copies, distinct from worktrees which share
    a single .git directory; they enable parallel development across
    machines or isolated environments. No-op when the table exists.
    """
    if db.fetchone("SELECT name FROM sqlite_master WHERE type='table' AND name='clones'"):
        logger.debug("clones table already exists, skipping")
        return

    db.execute("""
        CREATE TABLE clones (
            id TEXT PRIMARY KEY,
            project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
            branch_name TEXT NOT NULL,
            clone_path TEXT NOT NULL,
            base_branch TEXT DEFAULT 'main',
            task_id TEXT REFERENCES tasks(id) ON DELETE SET NULL,
            agent_session_id TEXT REFERENCES sessions(id) ON DELETE SET NULL,
            status TEXT DEFAULT 'active',
            remote_url TEXT,
            last_sync_at TEXT,
            cleanup_after TEXT,
            created_at TEXT NOT NULL DEFAULT (datetime('now')),
            updated_at TEXT NOT NULL DEFAULT (datetime('now'))
        )
    """)

    # Lookup indexes plus a uniqueness guarantee on the filesystem path.
    for ddl in (
        "CREATE INDEX idx_clones_project ON clones(project_id)",
        "CREATE INDEX idx_clones_status ON clones(status)",
        "CREATE INDEX idx_clones_task ON clones(task_id)",
        "CREATE INDEX idx_clones_session ON clones(agent_session_id)",
        "CREATE UNIQUE INDEX idx_clones_path ON clones(clone_path)",
    ):
        db.execute(ddl)

    logger.debug("Created clones table with indexes")
+
847
+
848
def _migrate_add_model_column(db: LocalDatabase) -> None:
    """Add model column to sessions table for cost tracking by model.

    This enables the TokenTracker to aggregate usage by model and apply
    model-specific pricing for budget tracking.

    The column check uses SQLite's pragma_table_info table-valued function
    for an exact column-name match. A plain substring search of the
    CREATE TABLE SQL (the previous approach) would falsely treat the
    column as present if "model" appeared anywhere else in the DDL — e.g.
    inside another column name — and silently skip the migration.
    """
    # Nothing to alter if the sessions table itself is missing.
    table = db.fetchone(
        "SELECT name FROM sqlite_master WHERE type='table' AND name='sessions'"
    )
    if not table:
        logger.debug("sessions table does not exist, skipping")
        return

    # Exact column-name check (pragma_table_info requires SQLite >= 3.16).
    if db.fetchone("SELECT 1 FROM pragma_table_info('sessions') WHERE name = 'model'"):
        logger.debug("model column already exists, skipping")
        return

    db.execute("ALTER TABLE sessions ADD COLUMN model TEXT")
    logger.info("Added model column to sessions table")
+
861
+
862
def _migrate_drop_is_tdd_applied(db: LocalDatabase) -> None:
    """Drop the deprecated is_tdd_applied column from the tasks table.

    The flag became unused after the TDD sandwich pattern was removed;
    TDD instructions are now embedded directly in task descriptions for
    code/config categories. ALTER TABLE DROP COLUMN requires SQLite
    3.35.0+, so failures are logged and tolerated — a leftover unused
    column is harmless.
    """
    table = db.fetchone("SELECT sql FROM sqlite_master WHERE type='table' AND name='tasks'")
    if not table or "is_tdd_applied" not in table["sql"].lower():
        return

    try:
        db.execute("ALTER TABLE tasks DROP COLUMN is_tdd_applied")
    except Exception as e:
        # Older SQLite (< 3.35.0) cannot drop columns; leave it in place.
        logger.warning(f"Could not drop is_tdd_applied column (SQLite < 3.35?): {e}")
    else:
        logger.info("Dropped is_tdd_applied column from tasks table")
+
882
+
721
883
  MIGRATIONS: list[tuple[int, str, MigrationAction]] = [
722
884
  # TDD Expansion Restructure: Rename test_strategy to category
723
885
  (61, "Rename test_strategy to category", _migrate_test_strategy_to_category),
@@ -735,6 +897,18 @@ MIGRATIONS: list[tuple[int, str, MigrationAction]] = [
735
897
  (67, "Add inter_session_messages table", _migrate_add_inter_session_messages),
736
898
  # Memory V3 Phase 2: Add media column for multimodal support
737
899
  (68, "Add media column to memories", _migrate_add_media_column),
900
+ # Skill-based expansion: Add expansion_status column to tasks
901
+ (69, "Add expansion_status column to tasks", _migrate_add_expansion_status),
902
+ # Skills storage: Add skills table for Agent Skills spec
903
+ (70, "Add skills table", _migrate_add_skills_table),
904
+ # Skills: Add partial unique index for global skills
905
+ (71, "Add global skills unique index", _migrate_add_skills_global_unique_index),
906
+ # Local clones: Add table for git clone management
907
+ (72, "Add clones table", _migrate_add_clones_table),
908
+ # Token tracking: Add model column to sessions for cost tracking by model
909
+ (73, "Add model column to sessions", _migrate_add_model_column),
910
+ # TDD cleanup: Drop unused is_tdd_applied column from tasks
911
+ (74, "Drop is_tdd_applied column from tasks", _migrate_drop_is_tdd_applied),
738
912
  ]
739
913
 
740
914