intent-cli-python 1.2.0__tar.gz → 1.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41) hide show
  1. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/PKG-INFO +19 -1
  2. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/README.md +18 -0
  3. intent_cli_python-1.3.0/apps/__init__.py +1 -0
  4. intent_cli_python-1.3.0/apps/inthub_api/__init__.py +1 -0
  5. intent_cli_python-1.3.0/apps/inthub_api/__main__.py +4 -0
  6. intent_cli_python-1.3.0/apps/inthub_api/common.py +43 -0
  7. intent_cli_python-1.3.0/apps/inthub_api/db.py +47 -0
  8. intent_cli_python-1.3.0/apps/inthub_api/ingest.py +170 -0
  9. intent_cli_python-1.3.0/apps/inthub_api/queries.py +366 -0
  10. intent_cli_python-1.3.0/apps/inthub_api/server.py +168 -0
  11. intent_cli_python-1.3.0/apps/inthub_api/store.py +31 -0
  12. intent_cli_python-1.3.0/apps/inthub_web/__init__.py +1 -0
  13. intent_cli_python-1.3.0/apps/inthub_web/__main__.py +4 -0
  14. intent_cli_python-1.3.0/apps/inthub_web/server.py +87 -0
  15. intent_cli_python-1.3.0/apps/inthub_web/static/app.js +745 -0
  16. intent_cli_python-1.3.0/apps/inthub_web/static/index.html +123 -0
  17. intent_cli_python-1.3.0/apps/inthub_web/static/styles.css +490 -0
  18. {intent_cli_python-1.2.0/src → intent_cli_python-1.3.0}/intent_cli_python.egg-info/PKG-INFO +19 -1
  19. intent_cli_python-1.3.0/intent_cli_python.egg-info/SOURCES.txt +37 -0
  20. intent_cli_python-1.3.0/intent_cli_python.egg-info/entry_points.txt +4 -0
  21. {intent_cli_python-1.2.0/src → intent_cli_python-1.3.0}/intent_cli_python.egg-info/top_level.txt +1 -0
  22. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/pyproject.toml +8 -5
  23. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/tests/test_cli.py +42 -2
  24. intent_cli_python-1.2.0/src/intent_cli_python.egg-info/SOURCES.txt +0 -22
  25. intent_cli_python-1.2.0/src/intent_cli_python.egg-info/entry_points.txt +0 -2
  26. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/LICENSE +0 -0
  27. {intent_cli_python-1.2.0/src → intent_cli_python-1.3.0}/intent_cli_python.egg-info/dependency_links.txt +0 -0
  28. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/setup.cfg +0 -0
  29. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/__init__.py +0 -0
  30. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/__main__.py +0 -0
  31. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/cli.py +0 -0
  32. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/commands/__init__.py +0 -0
  33. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/commands/common.py +0 -0
  34. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/commands/core.py +0 -0
  35. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/commands/hub.py +0 -0
  36. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/hub/__init__.py +0 -0
  37. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/hub/client.py +0 -0
  38. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/hub/payload.py +0 -0
  39. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/hub/runtime.py +0 -0
  40. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/output.py +0 -0
  41. {intent_cli_python-1.2.0 → intent_cli_python-1.3.0}/src/intent_cli/store.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: intent-cli-python
3
- Version: 1.2.0
3
+ Version: 1.3.0
4
4
  Summary: Semantic history for agent-driven development. Records what you did and why.
5
5
  Author: Zeng Deyang
6
6
  License-Expression: MIT
@@ -72,6 +72,24 @@ pip install intent-cli-python
72
72
 
73
73
  Requires Python 3.9+ and Git.
74
74
 
75
+ ### Run the local IntHub shell
76
+
77
+ The first read-only IntHub shell now ships with the package:
78
+
79
+ ```bash
80
+ inthub-api --db-path .inthub/inthub.db
81
+ inthub-web --api-base-url http://127.0.0.1:8000
82
+ ```
83
+
84
+ If you are running from source instead of an installed package, the same entrypoints are available with:
85
+
86
+ ```bash
87
+ python -m apps.inthub_api --db-path .inthub/inthub.db
88
+ python -m apps.inthub_web --api-base-url http://127.0.0.1:8000
89
+ ```
90
+
91
+ Then use `itt hub login`, `itt hub link`, and `itt hub sync` from a local Intent workspace to populate the read-only IntHub project view.
92
+
75
93
  ### Install the skills.sh skill
76
94
 
77
95
  ```bash
@@ -48,6 +48,24 @@ pip install intent-cli-python
48
48
 
49
49
  Requires Python 3.9+ and Git.
50
50
 
51
+ ### Run the local IntHub shell
52
+
53
+ The first read-only IntHub shell now ships with the package:
54
+
55
+ ```bash
56
+ inthub-api --db-path .inthub/inthub.db
57
+ inthub-web --api-base-url http://127.0.0.1:8000
58
+ ```
59
+
60
+ If you are running from source instead of an installed package, the same entrypoints are available with:
61
+
62
+ ```bash
63
+ python -m apps.inthub_api --db-path .inthub/inthub.db
64
+ python -m apps.inthub_web --api-base-url http://127.0.0.1:8000
65
+ ```
66
+
67
+ Then use `itt hub login`, `itt hub link`, and `itt hub sync` from a local Intent workspace to populate the read-only IntHub project view.
68
+
51
69
  ### Install the skills.sh skill
52
70
 
53
71
  ```bash
@@ -0,0 +1 @@
1
+ """App packages for the Intent monorepo."""
@@ -0,0 +1 @@
1
+ """IntHub API package."""
@@ -0,0 +1,4 @@
1
"""Module entry point: ``python -m apps.inthub_api``."""

from apps.inthub_api.server import main

# Guard the call so importing this module never starts the server as a
# side effect; only `python -m apps.inthub_api` (or direct execution) runs it.
if __name__ == "__main__":
    main()
@@ -0,0 +1,43 @@
1
+ """Shared helpers for the IntHub API."""
2
+
3
+ import uuid
4
+ from datetime import datetime, timezone
5
+
6
+
7
def now_utc():
    """Return the current moment in UTC as an ISO-8601 timestamp string."""
    moment = datetime.now(tz=timezone.utc)
    return moment.isoformat()
9
+
10
+
11
def new_id(prefix):
    """Mint a short unique identifier shaped like ``<prefix>_<12 hex chars>``."""
    suffix = uuid.uuid4().hex[:12]
    return "_".join((prefix, suffix))
13
+
14
+
15
def make_remote_object_id(workspace_id, local_object_id):
    """Compose the globally unique remote ID for a workspace-local object."""
    return "{}__{}".format(workspace_id, local_object_id)
17
+
18
+
19
def split_remote_object_id(remote_object_id):
    """Split a remote object ID back into ``(workspace_id, local_object_id)``.

    Only the first ``__`` separates the halves, so local IDs containing
    ``__`` survive the round trip. Raises ValueError when no separator is
    present.
    """
    workspace_id, separator, local_object_id = remote_object_id.partition("__")
    if not separator:
        raise ValueError("Invalid remote object ID.")
    return workspace_id, local_object_id
24
+
25
+
26
class APIError(Exception):
    """Application error carrying a machine-readable code and HTTP status.

    Attributes:
        code: short machine-readable error code (e.g. "INVALID_INPUT").
        message: human-readable description (also the exception text).
        status: HTTP status code to respond with (default 400).
        details: optional extra context mapping (defaults to empty dict).
    """

    def __init__(self, code, message, status=400, details=None):
        super().__init__(message)
        self.code = code
        self.message = message
        self.status = status
        self.details = details or {}
33
+
34
+
35
def require_repo(repo):
    """Raise APIError("INVALID_INPUT") unless *repo* has all mandatory fields.

    Mandatory fields (all must be truthy): provider, repo_id, owner, name.
    """
    absent = [
        field
        for field in ("provider", "repo_id", "owner", "name")
        if not repo.get(field)
    ]
    if absent:
        raise APIError(
            "INVALID_INPUT",
            f"Missing repo fields: {', '.join(absent)}.",
            status=400,
        )
@@ -0,0 +1,47 @@
1
+ """SQLite helpers for the IntHub API."""
2
+
3
+ import sqlite3
4
+ from pathlib import Path
5
+
6
+
7
def connect(db_path):
    """Open the SQLite database at *db_path*, creating schema and parents.

    Rows come back as :class:`sqlite3.Row` mappings.
    NOTE(review): ``check_same_thread=False`` hands out a connection usable
    from any thread — presumably callers serialise access; confirm upstream.
    """
    location = Path(db_path)
    location.parent.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(location, check_same_thread=False)
    connection.row_factory = sqlite3.Row
    init_db(connection)
    return connection
14
+
15
+
16
def init_db(conn):
    """Create the IntHub tables on *conn* if absent, then commit.

    Idempotent: every statement uses CREATE TABLE IF NOT EXISTS.
    """
    schema = """
        CREATE TABLE IF NOT EXISTS projects (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            provider TEXT NOT NULL,
            repo_id TEXT NOT NULL UNIQUE,
            owner TEXT NOT NULL,
            repo_name TEXT NOT NULL,
            created_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS workspaces (
            id TEXT PRIMARY KEY,
            project_id TEXT NOT NULL,
            provider TEXT NOT NULL,
            repo_id TEXT NOT NULL,
            created_at TEXT NOT NULL
        );

        CREATE TABLE IF NOT EXISTS sync_batches (
            id TEXT PRIMARY KEY,
            project_id TEXT NOT NULL,
            workspace_id TEXT NOT NULL,
            generated_at TEXT NOT NULL,
            accepted_at TEXT NOT NULL,
            payload_json TEXT NOT NULL
        );
    """
    conn.executescript(schema)
    conn.commit()
@@ -0,0 +1,170 @@
1
+ """Write-path logic for the IntHub API."""
2
+
3
+ import json
4
+
5
+ from apps.inthub_api.common import APIError, new_id, now_utc, require_repo
6
+ from apps.inthub_api.db import connect
7
+
8
+
9
def link_project(db_path, project_name, repo, workspace_id):
    """Bind a workspace to the project for *repo*, creating both as needed.

    Parameters:
        db_path: path to the SQLite database file.
        project_name: display name for a newly created project; falls back
            to the repo name when empty.
        repo: mapping with provider/repo_id/owner/name (validated here).
        workspace_id: existing workspace ID, or falsy to mint a new one.

    Returns a dict with project_id, workspace_id and the repo binding.
    Raises APIError for unsupported providers or conflicting workspace links.
    """
    require_repo(repo)
    # Only GitHub-backed repos are supported for now.
    if repo.get("provider") != "github":
        raise APIError(
            "PROVIDER_UNSUPPORTED",
            f"Unsupported provider '{repo.get('provider')}'.",
            status=400,
        )

    # FIX: sqlite3's connection context manager only scopes a transaction;
    # it never closes the connection, so `with connect(...)` leaked a file
    # handle per call. Close explicitly while keeping the transaction scope.
    conn = connect(db_path)
    try:
        with conn:  # commit on success, roll back if an APIError escapes
            project = conn.execute(
                "SELECT * FROM projects WHERE provider = ? AND repo_id = ?",
                (repo["provider"], repo["repo_id"]),
            ).fetchone()

            if project is None:
                project_id = new_id("proj")
                conn.execute(
                    """
                    INSERT INTO projects (id, name, provider, repo_id, owner, repo_name, created_at)
                    VALUES (?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        project_id,
                        project_name or repo["name"],
                        repo["provider"],
                        repo["repo_id"],
                        repo["owner"],
                        repo["name"],
                        now_utc(),
                    ),
                )
            else:
                project_id = project["id"]

            if not workspace_id:
                workspace_id = new_id("wks")

            workspace = conn.execute(
                "SELECT * FROM workspaces WHERE id = ?",
                (workspace_id,),
            ).fetchone()
            if workspace is None:
                conn.execute(
                    """
                    INSERT INTO workspaces (id, project_id, provider, repo_id, created_at)
                    VALUES (?, ?, ?, ?, ?)
                    """,
                    (workspace_id, project_id, repo["provider"], repo["repo_id"], now_utc()),
                )
            elif workspace["project_id"] != project_id or workspace["repo_id"] != repo["repo_id"]:
                # A workspace may only ever belong to one project/repo pair.
                raise APIError(
                    "STATE_CONFLICT",
                    f"Workspace {workspace_id} is already linked to another project or repo.",
                    status=409,
                )

            conn.commit()
            return {
                "project_id": project_id,
                "workspace_id": workspace_id,
                "repo_binding": {
                    "provider": repo["provider"],
                    "repo_id": repo["repo_id"],
                    "owner": repo["owner"],
                    "name": repo["name"],
                },
            }
    finally:
        conn.close()
78
+
79
+
80
def store_sync_batch(db_path, payload):
    """Persist one sync batch for a linked workspace (idempotent).

    A batch whose sync_batch_id was already stored is acknowledged again
    with duplicate=True instead of being re-inserted. Raises APIError for
    missing fields, unknown project/workspace, repo mismatches, or an
    unsupported snapshot schema_version.
    """
    required = ("sync_batch_id", "project_id", "repo", "workspace", "snapshot")
    missing = [key for key in required if key not in payload]
    if missing:
        raise APIError(
            "INVALID_INPUT",
            f"Missing sync batch fields: {', '.join(missing)}.",
            status=400,
        )

    repo = payload["repo"]
    workspace = payload["workspace"]
    require_repo(repo)

    workspace_id = workspace.get("workspace_id")
    if not workspace_id:
        raise APIError("INVALID_INPUT", "Missing workspace.workspace_id.", status=400)

    # FIX: sqlite3's `with conn` only scopes a transaction and never closes
    # the connection; close explicitly so each call does not leak a handle.
    conn = connect(db_path)
    try:
        with conn:  # commit on success, roll back if an APIError escapes
            existing = conn.execute(
                "SELECT accepted_at FROM sync_batches WHERE id = ?",
                (payload["sync_batch_id"],),
            ).fetchone()
            if existing is not None:
                # Idempotency: acknowledge the already-stored batch as-is.
                return {
                    "sync_batch_id": payload["sync_batch_id"],
                    "project_id": payload["project_id"],
                    "workspace_id": workspace_id,
                    "accepted_at": existing["accepted_at"],
                    "duplicate": True,
                }

            project = conn.execute(
                "SELECT * FROM projects WHERE id = ?",
                (payload["project_id"],),
            ).fetchone()
            if project is None:
                raise APIError(
                    "OBJECT_NOT_FOUND",
                    f"Project {payload['project_id']} not found.",
                    status=404,
                )
            if project["provider"] != repo["provider"] or project["repo_id"] != repo["repo_id"]:
                raise APIError(
                    "STATE_CONFLICT",
                    "Sync batch repo does not match the linked project repo.",
                    status=409,
                )

            workspace_row = conn.execute(
                "SELECT * FROM workspaces WHERE id = ? AND project_id = ?",
                (workspace_id, payload["project_id"]),
            ).fetchone()
            if workspace_row is None:
                raise APIError(
                    "OBJECT_NOT_FOUND",
                    f"Workspace {workspace_id} is not linked to project {payload['project_id']}.",
                    status=404,
                )

            snapshot = payload["snapshot"]
            if snapshot.get("schema_version") != "1.0":
                raise APIError(
                    "SCHEMA_VERSION_MISMATCH",
                    f"Unsupported schema_version '{snapshot.get('schema_version')}'.",
                    status=400,
                )

            accepted_at = now_utc()
            conn.execute(
                """
                INSERT INTO sync_batches (id, project_id, workspace_id, generated_at, accepted_at, payload_json)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (
                    payload["sync_batch_id"],
                    payload["project_id"],
                    workspace_id,
                    payload.get("generated_at", accepted_at),
                    accepted_at,
                    json.dumps(payload, ensure_ascii=False),
                ),
            )
            conn.commit()
            return {
                "sync_batch_id": payload["sync_batch_id"],
                "project_id": payload["project_id"],
                "workspace_id": workspace_id,
                "accepted_at": accepted_at,
                "duplicate": False,
            }
    finally:
        conn.close()
@@ -0,0 +1,366 @@
1
+ """Read-model queries for the IntHub API."""
2
+
3
+ import json
4
+
5
+ from apps.inthub_api.common import APIError, make_remote_object_id, split_remote_object_id
6
+ from apps.inthub_api.db import connect
7
+
8
+
9
+ def _project_row(conn, project_id):
10
+ project = conn.execute(
11
+ "SELECT * FROM projects WHERE id = ?",
12
+ (project_id,),
13
+ ).fetchone()
14
+ if project is None:
15
+ raise APIError("OBJECT_NOT_FOUND", f"Project {project_id} not found.", status=404)
16
+ return project
17
+
18
+
19
+ def _latest_payloads(conn, project_id):
20
+ rows = conn.execute(
21
+ """
22
+ SELECT sb.payload_json
23
+ FROM sync_batches AS sb
24
+ JOIN (
25
+ SELECT workspace_id, MAX(rowid) AS max_rowid
26
+ FROM sync_batches
27
+ WHERE project_id = ?
28
+ GROUP BY workspace_id
29
+ ) latest
30
+ ON sb.rowid = latest.max_rowid
31
+ ORDER BY sb.accepted_at DESC
32
+ """,
33
+ (project_id,),
34
+ ).fetchall()
35
+ return [json.loads(row["payload_json"]) for row in rows]
36
+
37
+
38
+ def _workspace_state(payload):
39
+ return {
40
+ "workspace_id": payload["workspace"]["workspace_id"],
41
+ "last_synced_at": payload.get("generated_at"),
42
+ "branch": payload.get("git", {}).get("branch"),
43
+ "head_commit": payload.get("git", {}).get("head_commit"),
44
+ "dirty": payload.get("git", {}).get("dirty"),
45
+ }
46
+
47
+
48
+ def _latest_snap_for_intent(intent, snaps):
49
+ snap_ids = intent.get("snap_ids", [])
50
+ if not snap_ids:
51
+ return None
52
+ latest_id = snap_ids[-1]
53
+ for snap in snaps:
54
+ if snap["id"] == latest_id:
55
+ return snap
56
+ return None
57
+
58
+
59
+ def _latest_payload_for_workspace(conn, workspace_id):
60
+ row = conn.execute(
61
+ """
62
+ SELECT payload_json
63
+ FROM sync_batches
64
+ WHERE workspace_id = ?
65
+ ORDER BY rowid DESC
66
+ LIMIT 1
67
+ """,
68
+ (workspace_id,),
69
+ ).fetchone()
70
+ if row is None:
71
+ raise APIError(
72
+ "OBJECT_NOT_FOUND",
73
+ f"No sync batch found for workspace {workspace_id}.",
74
+ status=404,
75
+ )
76
+ return json.loads(row["payload_json"])
77
+
78
+
79
def project_overview(db_path, project_id):
    """Build the read-model overview for one project.

    Aggregates the latest payload of every workspace into per-workspace git
    state, active intents, active decisions, and the 10 newest snaps.
    Raises APIError("OBJECT_NOT_FOUND") for an unknown project.
    """
    # FIX: sqlite3's `with conn` only scopes a transaction and never closes
    # the handle; close explicitly so read endpoints do not leak connections.
    conn = connect(db_path)
    try:
        project = _project_row(conn, project_id)
        payloads = _latest_payloads(conn, project_id)
    finally:
        conn.close()

    active_intents = []
    active_decisions = []
    recent_snaps = []
    workspaces = []

    for payload in payloads:
        workspace_id = payload["workspace"]["workspace_id"]
        snapshot = payload.get("snapshot", {})
        git = payload.get("git", {})
        workspaces.append(_workspace_state(payload))

        intents = snapshot.get("intents", [])
        snaps = snapshot.get("snaps", [])
        decisions = snapshot.get("decisions", [])

        for intent in intents:
            if intent.get("status") != "active":
                continue
            snap_ids = intent.get("snap_ids")
            active_intents.append({
                "remote_id": make_remote_object_id(workspace_id, intent["id"]),
                "workspace_id": workspace_id,
                "id": intent["id"],
                "title": intent["title"],
                "status": intent["status"],
                "decision_ids": intent.get("decision_ids", []),
                "latest_snap_id": snap_ids[-1] if snap_ids else None,
                "branch": git.get("branch"),
                "head_commit": git.get("head_commit"),
                "dirty": git.get("dirty"),
            })

        for decision in decisions:
            if decision.get("status") != "active":
                continue
            active_decisions.append({
                "remote_id": make_remote_object_id(workspace_id, decision["id"]),
                "workspace_id": workspace_id,
                "id": decision["id"],
                "title": decision["title"],
                "status": decision["status"],
                "intent_ids": decision.get("intent_ids", []),
            })

        for snap in snaps:
            recent_snaps.append({
                "remote_id": make_remote_object_id(workspace_id, snap["id"]),
                "workspace_id": workspace_id,
                "id": snap["id"],
                "title": snap["title"],
                "intent_id": snap.get("intent_id"),
                "status": snap["status"],
                "summary": snap.get("summary", ""),
                "feedback": snap.get("feedback", ""),
                "created_at": snap.get("created_at"),
            })

    # Newest snaps first; missing created_at sorts last.
    recent_snaps.sort(key=lambda snap: snap.get("created_at", ""), reverse=True)
    return {
        "project": {
            "id": project["id"],
            "name": project["name"],
            "repo": {
                "provider": project["provider"],
                "repo_id": project["repo_id"],
                "owner": project["owner"],
                "name": project["repo_name"],
            },
        },
        "workspaces": workspaces,
        "active_intents": active_intents,
        "active_decisions": active_decisions,
        "recent_snaps": recent_snaps[:10],
    }
157
+
158
+
159
def list_projects(db_path):
    """List every project with its workspace count and last sync time.

    Ordered by most recent activity (falling back to creation time), then
    creation time, both descending.
    """
    # FIX: close the connection explicitly; sqlite3's `with conn` only
    # manages the transaction and would otherwise leak the handle.
    conn = connect(db_path)
    try:
        rows = conn.execute(
            """
            SELECT
                p.*,
                COUNT(DISTINCT w.id) AS workspace_count,
                MAX(sb.accepted_at) AS last_synced_at
            FROM projects AS p
            LEFT JOIN workspaces AS w
                ON w.project_id = p.id
            LEFT JOIN sync_batches AS sb
                ON sb.project_id = p.id
            GROUP BY p.id
            ORDER BY COALESCE(MAX(sb.accepted_at), p.created_at) DESC, p.created_at DESC
            """
        ).fetchall()
    finally:
        conn.close()

    return {
        "projects": [
            {
                "id": row["id"],
                "name": row["name"],
                "repo": {
                    "provider": row["provider"],
                    "repo_id": row["repo_id"],
                    "owner": row["owner"],
                    "name": row["repo_name"],
                },
                "workspace_count": row["workspace_count"],
                "last_synced_at": row["last_synced_at"],
                "created_at": row["created_at"],
            }
            for row in rows
        ]
    }
195
+
196
+
197
def project_handoff(db_path, project_id):
    """Build the handoff view: each active intent with its latest snap,
    plus all active decisions, from the newest payload of every workspace.

    Raises APIError("OBJECT_NOT_FOUND") for an unknown project.
    """
    # FIX: close the connection explicitly; sqlite3's `with conn` only
    # manages the transaction and would otherwise leak the handle.
    conn = connect(db_path)
    try:
        project = _project_row(conn, project_id)
        payloads = _latest_payloads(conn, project_id)
    finally:
        conn.close()

    intents_view = []
    active_decisions = []

    for payload in payloads:
        workspace_id = payload["workspace"]["workspace_id"]
        snapshot = payload.get("snapshot", {})
        git = payload.get("git", {})
        intents = snapshot.get("intents", [])
        snaps = snapshot.get("snaps", [])
        decisions = snapshot.get("decisions", [])

        for decision in decisions:
            if decision.get("status") == "active":
                active_decisions.append({
                    "remote_id": make_remote_object_id(workspace_id, decision["id"]),
                    "workspace_id": workspace_id,
                    "id": decision["id"],
                    "title": decision["title"],
                    "status": decision["status"],
                })

        for intent in intents:
            if intent.get("status") != "active":
                continue
            latest_snap = _latest_snap_for_intent(intent, snaps)
            intents_view.append({
                "remote_id": make_remote_object_id(workspace_id, intent["id"]),
                "workspace_id": workspace_id,
                "id": intent["id"],
                "title": intent["title"],
                "status": intent["status"],
                "source_query": intent.get("source_query", ""),
                "rationale": intent.get("rationale", ""),
                "decision_ids": intent.get("decision_ids", []),
                "latest_snap": latest_snap,
                "git": {
                    "branch": git.get("branch"),
                    "head_commit": git.get("head_commit"),
                    "dirty": git.get("dirty"),
                },
                "synced_at": payload.get("generated_at"),
            })

    return {
        "project": {
            "id": project["id"],
            "name": project["name"],
        },
        "active_decisions": active_decisions,
        "intents": intents_view,
    }
253
+
254
+
255
def get_intent_detail(db_path, remote_object_id):
    """Return one intent plus its snaps from the workspace's newest payload.

    Raises APIError("OBJECT_NOT_FOUND") when the workspace has no sync
    batch or the intent is absent from the latest snapshot; ValueError for
    a malformed remote ID.
    """
    workspace_id, local_object_id = split_remote_object_id(remote_object_id)
    # FIX: close the connection explicitly; sqlite3's `with conn` only
    # manages the transaction and would otherwise leak the handle.
    conn = connect(db_path)
    try:
        payload = _latest_payload_for_workspace(conn, workspace_id)
    finally:
        conn.close()

    snapshot = payload.get("snapshot", {})
    snaps = snapshot.get("snaps", [])
    for intent in snapshot.get("intents", []):
        if intent["id"] != local_object_id:
            continue
        related_snaps = [snap for snap in snaps if snap.get("intent_id") == local_object_id]
        return {
            "remote_id": remote_object_id,
            "workspace_id": workspace_id,
            "id": local_object_id,
            "intent": intent,
            "snaps": related_snaps,
            "git": payload.get("git", {}),
            "synced_at": payload.get("generated_at"),
        }
    raise APIError("OBJECT_NOT_FOUND", f"Intent {remote_object_id} not found.", status=404)
277
+
278
+
279
def get_decision_detail(db_path, remote_object_id):
    """Return one decision plus its linked intents from the workspace's
    newest payload.

    Raises APIError("OBJECT_NOT_FOUND") when no sync batch exists or the
    decision is absent; ValueError for a malformed remote ID.
    """
    workspace_id, local_object_id = split_remote_object_id(remote_object_id)
    # FIX: close the connection explicitly; sqlite3's `with conn` only
    # manages the transaction and would otherwise leak the handle.
    conn = connect(db_path)
    try:
        payload = _latest_payload_for_workspace(conn, workspace_id)
    finally:
        conn.close()

    snapshot = payload.get("snapshot", {})
    intents = snapshot.get("intents", [])
    for decision in snapshot.get("decisions", []):
        if decision["id"] != local_object_id:
            continue
        linked_ids = decision.get("intent_ids", [])
        related_intents = [intent for intent in intents if intent["id"] in linked_ids]
        return {
            "remote_id": remote_object_id,
            "workspace_id": workspace_id,
            "id": local_object_id,
            "decision": decision,
            "intents": related_intents,
            "synced_at": payload.get("generated_at"),
        }
    raise APIError("OBJECT_NOT_FOUND", f"Decision {remote_object_id} not found.", status=404)
300
+
301
+
302
def get_snap_detail(db_path, remote_object_id):
    """Return one snap plus its parent intent (if any) from the workspace's
    newest payload.

    Raises APIError("OBJECT_NOT_FOUND") when no sync batch exists or the
    snap is absent; ValueError for a malformed remote ID.
    """
    workspace_id, local_object_id = split_remote_object_id(remote_object_id)
    # FIX: close the connection explicitly; sqlite3's `with conn` only
    # manages the transaction and would otherwise leak the handle.
    conn = connect(db_path)
    try:
        payload = _latest_payload_for_workspace(conn, workspace_id)
    finally:
        conn.close()

    snapshot = payload.get("snapshot", {})
    intents = snapshot.get("intents", [])
    for snap in snapshot.get("snaps", []):
        if snap["id"] != local_object_id:
            continue
        parent_intent = None
        intent_id = snap.get("intent_id")
        if intent_id:
            # Resolve the owning intent from the same snapshot, if present.
            parent_intent = next((it for it in intents if it["id"] == intent_id), None)
        return {
            "remote_id": remote_object_id,
            "workspace_id": workspace_id,
            "id": local_object_id,
            "snap": snap,
            "intent": parent_intent,
            "git": payload.get("git", {}),
            "synced_at": payload.get("generated_at"),
        }
    raise APIError("OBJECT_NOT_FOUND", f"Snap {remote_object_id} not found.", status=404)
330
+
331
+
332
def search_project(db_path, project_id, query):
    """Case-insensitive substring search over the latest snapshot of every
    workspace in the project.

    Matches against title/query/source_query/summary/rationale of intents,
    snaps, and decisions. An empty or whitespace-only query yields no
    matches (the project is still validated first). Raises
    APIError("OBJECT_NOT_FOUND") for an unknown project.
    """
    q = (query or "").strip().lower()

    # FIX: close the connection explicitly; sqlite3's `with conn` only
    # manages the transaction and would otherwise leak the handle.
    conn = connect(db_path)
    try:
        _project_row(conn, project_id)
        # Perf: an empty query always produces zero matches, so skip
        # decoding every payload in that case.
        payloads = _latest_payloads(conn, project_id) if q else []
    finally:
        conn.close()

    matches = []
    for payload in payloads:
        workspace_id = payload["workspace"]["workspace_id"]
        snapshot = payload.get("snapshot", {})
        for object_type in ("intents", "snaps", "decisions"):
            for obj in snapshot.get(object_type, []):
                fields = [
                    obj.get("title", ""),
                    obj.get("query", ""),
                    obj.get("source_query", ""),
                    obj.get("summary", ""),
                    obj.get("rationale", ""),
                ]
                haystack = " ".join(fields).lower()
                if q not in haystack:
                    continue
                matches.append({
                    "object_type": object_type[:-1],  # "intents" -> "intent"
                    "remote_id": make_remote_object_id(workspace_id, obj["id"]),
                    "workspace_id": workspace_id,
                    "id": obj["id"],
                    "title": obj.get("title", ""),
                    "status": obj.get("status", ""),
                })

    return {"project_id": project_id, "query": query, "matches": matches}