@graph-tl/graph 0.1.10 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/{chunk-TWT5GUXW.js → chunk-CCGKUMCW.js} +63 -10
- package/dist/chunk-CCGKUMCW.js.map +1 -0
- package/dist/{chunk-ILTJI4ZN.js → chunk-JRMFXD5I.js} +20 -4
- package/dist/chunk-JRMFXD5I.js.map +1 -0
- package/dist/index.js +2 -2
- package/dist/{init-RQFGF5BB.js → init-VII7APUJ.js} +19 -4
- package/dist/init-VII7APUJ.js.map +1 -0
- package/dist/{nodes-4OJBNDHG.js → nodes-YNM6KEK2.js} +4 -2
- package/dist/{server-VKTFTBXC.js → server-X36DXLEG.js} +144 -37
- package/dist/server-X36DXLEG.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-ILTJI4ZN.js.map +0 -1
- package/dist/chunk-TWT5GUXW.js.map +0 -1
- package/dist/init-RQFGF5BB.js.map +0 -1
- package/dist/server-VKTFTBXC.js.map +0 -1
- package/dist/{nodes-4OJBNDHG.js.map → nodes-YNM6KEK2.js.map} +0 -0
package/README.md
CHANGED
@@ -156,6 +156,7 @@ Environment variables (all optional):
 | `GRAPH_AGENT` | `default-agent` | Agent identity for audit trail |
 | `GRAPH_DB` | `~/.graph/db/<hash>/graph.db` | Database path (per-project, outside your repo) |
 | `GRAPH_CLAIM_TTL` | `60` | Soft claim expiry in minutes |
+| `GRAPH_UPDATE_CHECK` | `0` | Set to `1` to check npm for newer versions on startup |
 
 ## Token efficiency
 
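The new `GRAPH_UPDATE_CHECK` flag is opt-in and off by default. A minimal sketch of enabling it, assuming you pass it through the MCP server entry that `init` writes (the object mirrors `MCP_CONFIG` from `src/init.ts` in this diff; the extra env var is the only addition):

```js
// Sketch only: the graph server entry for .mcp.json, with the opt-in
// startup update check enabled. GRAPH_UPDATE_CHECK defaults to "0".
const MCP_CONFIG = {
  command: "npx",
  args: ["-y", "@graph-tl/graph"],
  env: {
    GRAPH_AGENT: "claude-code",
    GRAPH_UPDATE_CHECK: "1",
  },
};
```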
package/dist/{chunk-TWT5GUXW.js → chunk-CCGKUMCW.js}
RENAMED
@@ -101,6 +101,13 @@ function migrate(db2) {
   if (hasDiscovery.cnt === 0) {
     db2.exec("ALTER TABLE nodes ADD COLUMN discovery TEXT DEFAULT NULL");
   }
+  const hasBlocked = db2.prepare(
+    "SELECT COUNT(*) as cnt FROM pragma_table_info('nodes') WHERE name = 'blocked'"
+  ).get();
+  if (hasBlocked.cnt === 0) {
+    db2.exec("ALTER TABLE nodes ADD COLUMN blocked INTEGER NOT NULL DEFAULT 0");
+    db2.exec("ALTER TABLE nodes ADD COLUMN blocked_reason TEXT DEFAULT NULL");
+  }
 }
 function checkpointDb() {
   if (db) {
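The migration above adds two columns, `blocked` and `blocked_reason`, to existing databases. A minimal sketch of setting them through the engine's `updateNode` (field names follow `UpdateNodeInput` in `src/nodes.ts`; the import path is written as it would look from inside the package source, and the id/agent values are placeholders):

```js
import { updateNode } from "./nodes.js";

// Mark a task as blocked with a human-readable reason. Both fields are
// optional on UpdateNodeInput and get recorded as field changes in the event log.
const node = updateNode({
  node_id: "abc123",            // hypothetical node id
  agent: "claude-code",
  blocked: true,
  blocked_reason: "Waiting on upstream API keys",
});
console.log(node.blocked, node.blocked_reason); // true "Waiting on upstream API keys"
```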
@@ -245,6 +252,8 @@ function rowToNode(row) {
     resolved: row.resolved === 1,
     depth: row.depth,
     discovery: row.discovery ?? null,
+    blocked: row.blocked === 1,
+    blocked_reason: row.blocked_reason ?? null,
     state: row.state ? JSON.parse(row.state) : null,
     properties: JSON.parse(row.properties),
     context_links: JSON.parse(row.context_links),
@@ -272,6 +281,8 @@ function createNode(input) {
     resolved: false,
     depth,
     discovery: input.discovery ?? null,
+    blocked: false,
+    blocked_reason: null,
     state: input.state ?? null,
     properties: input.properties ?? {},
     context_links: input.context_links ?? [],
@@ -281,8 +292,8 @@ function createNode(input) {
     updated_at: now
   };
   db2.prepare(`
-    INSERT INTO nodes (id, rev, parent, project, summary, resolved, depth, discovery, state, properties, context_links, evidence, created_by, created_at, updated_at)
-    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+    INSERT INTO nodes (id, rev, parent, project, summary, resolved, depth, discovery, blocked, blocked_reason, state, properties, context_links, evidence, created_by, created_at, updated_at)
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
   `).run(
     node.id,
     node.rev,
@@ -292,6 +303,8 @@ function createNode(input) {
     0,
     node.depth,
     node.discovery,
+    0,
+    null,
     node.state !== null ? JSON.stringify(node.state) : null,
     JSON.stringify(node.properties),
     JSON.stringify(node.context_links),
@@ -366,6 +379,8 @@ function updateNode(input) {
   const now = (/* @__PURE__ */ new Date()).toISOString();
   let newResolved = node.resolved;
   let newDiscovery = node.discovery;
+  let newBlocked = node.blocked;
+  let newBlockedReason = node.blocked_reason;
   let newState = node.state;
   let newSummary = node.summary;
   let newProperties = { ...node.properties };
@@ -389,6 +404,20 @@ function updateNode(input) {
     changes.push({ field: "discovery", before: node.discovery, after: input.discovery });
     newDiscovery = input.discovery;
   }
+  if (input.blocked !== void 0 && input.blocked !== node.blocked) {
+    changes.push({ field: "blocked", before: node.blocked, after: input.blocked });
+    newBlocked = input.blocked;
+    if (!input.blocked && input.blocked_reason === void 0) {
+      if (node.blocked_reason !== null) {
+        changes.push({ field: "blocked_reason", before: node.blocked_reason, after: null });
+      }
+      newBlockedReason = null;
+    }
+  }
+  if (input.blocked_reason !== void 0 && input.blocked_reason !== node.blocked_reason) {
+    changes.push({ field: "blocked_reason", before: node.blocked_reason, after: input.blocked_reason });
+    newBlockedReason = input.blocked_reason;
+  }
   if (input.state !== void 0) {
     changes.push({ field: "state", before: node.state, after: input.state });
     newState = input.state;
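Note the asymmetry in the hunk above: setting `blocked: false` without an explicit `blocked_reason` also clears the stored reason. A small sketch of that behavior (same placeholder id and agent as before):

```js
import { updateNode } from "./nodes.js";

// Unblocking without passing blocked_reason clears the old reason as well,
// and the cleared field is logged as its own change entry.
const unblocked = updateNode({ node_id: "abc123", agent: "claude-code", blocked: false });
console.log(unblocked.blocked);        // false
console.log(unblocked.blocked_reason); // null
```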
@@ -448,6 +477,8 @@ function updateNode(input) {
     rev = ?,
     resolved = ?,
     discovery = ?,
+    blocked = ?,
+    blocked_reason = ?,
     state = ?,
     summary = ?,
     properties = ?,
@@ -459,6 +490,8 @@ function updateNode(input) {
     newRev,
     newResolved ? 1 : 0,
     newDiscovery,
+    newBlocked ? 1 : 0,
+    newBlockedReason,
     newState !== null ? JSON.stringify(newState) : null,
     newSummary,
     JSON.stringify(newProperties),
@@ -471,6 +504,21 @@ function updateNode(input) {
   logEvent(input.node_id, input.agent, action, changes);
   return getNodeOrThrow(input.node_id);
 }
+function getSubtreeProgress(nodeId) {
+  const db2 = getDb();
+  const row = db2.prepare(
+    `WITH RECURSIVE descendants(id) AS (
+      SELECT id FROM nodes WHERE id = ?
+      UNION ALL
+      SELECT n.id FROM nodes n JOIN descendants d ON n.parent = d.id
+    )
+    SELECT
+      COUNT(*) as total,
+      SUM(CASE WHEN n.resolved = 1 THEN 1 ELSE 0 END) as resolved
+    FROM descendants d JOIN nodes n ON n.id = d.id`
+  ).get(nodeId);
+  return { resolved: row.resolved, total: row.total };
+}
 function getProjectSummary(project) {
   const db2 = getDb();
   const counts = db2.prepare(
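`getSubtreeProgress` is added in this diff and re-exported from the nodes entry point (see the nodes-YNM6KEK2.js hunks below). A minimal usage sketch, with a placeholder node id:

```js
import { getSubtreeProgress } from "./nodes.js";

// Walks the node plus all of its descendants via the recursive CTE above
// and returns resolved/total counts for that subtree.
const { resolved, total } = getSubtreeProgress("rootNodeId"); // hypothetical id
console.log(`${resolved}/${total} nodes resolved in this subtree`);
```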
@@ -480,15 +528,19 @@ function getProjectSummary(project) {
     FROM nodes WHERE project = ?`
   ).get(project);
   const blocked = db2.prepare(
-    `SELECT COUNT(DISTINCT
-
-
-
-
-
+    `SELECT COUNT(DISTINCT id) as count FROM (
+      SELECT n.id FROM nodes n
+      WHERE n.project = ? AND n.resolved = 0 AND n.blocked = 1
+      UNION
+      SELECT n.id FROM nodes n
+      JOIN edges e ON e.from_node = n.id AND e.type = 'depends_on'
+      JOIN nodes dep ON dep.id = e.to_node AND dep.resolved = 0
+      WHERE n.project = ? AND n.resolved = 0
+    )`
+  ).get(project, project);
   const actionable = db2.prepare(
     `SELECT COUNT(*) as count FROM nodes n
-    WHERE n.project = ? AND n.resolved = 0
+    WHERE n.project = ? AND n.resolved = 0 AND n.blocked = 0
     AND NOT EXISTS (
       SELECT 1 FROM nodes child WHERE child.parent = n.id AND child.resolved = 0
     )
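With this change, `blocked` counts unresolved nodes that are either manually flagged or waiting on an unresolved `depends_on` dependency, and `actionable` now excludes manually blocked leaves. A sketch of reading the summary (project name and counts are illustrative):

```js
import { getProjectSummary } from "./nodes.js";

// Return shape per getProjectSummary above; the numbers are made up.
const summary = getProjectSummary("my-project");
console.log(summary);
// { total: 12, resolved: 7, unresolved: 5, blocked: 2, actionable: 3 }
```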
@@ -529,6 +581,7 @@ export {
   getProjectRoot,
   listProjects,
   updateNode,
+  getSubtreeProgress,
   getProjectSummary
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-CCGKUMCW.js.map
package/dist/chunk-CCGKUMCW.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/nodes.ts","../src/db.ts","../src/events.ts","../src/validate.ts"],"sourcesContent":["import { nanoid } from \"nanoid\";\nimport { getDb } from \"./db.js\";\nimport { logEvent } from \"./events.js\";\nimport { EngineError } from \"./validate.js\";\nimport type { Node, NodeRow, Evidence, FieldChange } from \"./types.js\";\n\n// --- Row <-> Node conversion ---\n\nfunction rowToNode(row: NodeRow): Node {\n return {\n id: row.id,\n rev: row.rev,\n parent: row.parent,\n project: row.project,\n summary: row.summary,\n resolved: row.resolved === 1,\n depth: row.depth,\n discovery: row.discovery ?? null,\n blocked: row.blocked === 1,\n blocked_reason: row.blocked_reason ?? null,\n state: row.state ? JSON.parse(row.state) : null,\n properties: JSON.parse(row.properties),\n context_links: JSON.parse(row.context_links),\n evidence: JSON.parse(row.evidence),\n created_by: row.created_by,\n created_at: row.created_at,\n updated_at: row.updated_at,\n };\n}\n\n// --- Create ---\n\nexport interface CreateNodeInput {\n parent?: string;\n project: string;\n summary: string;\n discovery?: string | null;\n state?: unknown;\n properties?: Record<string, unknown>;\n context_links?: string[];\n agent: string;\n}\n\nexport function createNode(input: CreateNodeInput): Node {\n const db = getDb();\n const now = new Date().toISOString();\n const id = nanoid();\n\n // [sl:yBBVr4wcgVfWA_w8U8hQo] Compute depth from parent\n let depth = 0;\n if (input.parent) {\n const parentRow = db.prepare(\"SELECT depth FROM nodes WHERE id = ?\").get(input.parent) as { depth: number } | undefined;\n if (parentRow) depth = parentRow.depth + 1;\n }\n\n const node: Node = {\n id,\n rev: 1,\n parent: input.parent ?? null,\n project: input.project,\n summary: input.summary,\n resolved: false,\n depth,\n discovery: input.discovery ?? null,\n blocked: false,\n blocked_reason: null,\n state: input.state ?? null,\n properties: input.properties ?? {},\n context_links: input.context_links ?? [],\n evidence: [],\n created_by: input.agent,\n created_at: now,\n updated_at: now,\n };\n\n db.prepare(`\n INSERT INTO nodes (id, rev, parent, project, summary, resolved, depth, discovery, blocked, blocked_reason, state, properties, context_links, evidence, created_by, created_at, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `).run(\n node.id,\n node.rev,\n node.parent,\n node.project,\n node.summary,\n 0,\n node.depth,\n node.discovery,\n 0,\n null,\n node.state !== null ? JSON.stringify(node.state) : null,\n JSON.stringify(node.properties),\n JSON.stringify(node.context_links),\n JSON.stringify(node.evidence),\n node.created_by,\n node.created_at,\n node.updated_at\n );\n\n logEvent(node.id, input.agent, \"created\", [\n { field: \"summary\", before: null, after: node.summary },\n ]);\n\n return node;\n}\n\n// --- Read ---\n\nexport function getNode(id: string): Node | null {\n const db = getDb();\n const row = db.prepare(\"SELECT * FROM nodes WHERE id = ?\").get(id) as\n | NodeRow\n | undefined;\n return row ? rowToNode(row) : null;\n}\n\nexport function getNodeOrThrow(id: string): Node {\n const node = getNode(id);\n if (!node) {\n throw new EngineError(\"node_not_found\", `Node not found: ${id}. 
Verify the ID is correct and the node hasn't been deleted.`);\n }\n return node;\n}\n\nexport function getChildren(parentId: string): Node[] {\n const db = getDb();\n const rows = db\n .prepare(\"SELECT * FROM nodes WHERE parent = ?\")\n .all(parentId) as NodeRow[];\n return rows.map(rowToNode);\n}\n\nexport function getAncestors(nodeId: string): Array<{ id: string; summary: string; resolved: boolean }> {\n const ancestors: Array<{ id: string; summary: string; resolved: boolean }> = [];\n let current = getNode(nodeId);\n\n while (current?.parent) {\n const parent = getNode(current.parent);\n if (!parent) break;\n ancestors.unshift({ id: parent.id, summary: parent.summary, resolved: parent.resolved });\n current = parent;\n }\n\n return ancestors;\n}\n\nexport function getProjectRoot(project: string): Node | null {\n const db = getDb();\n const row = db\n .prepare(\"SELECT * FROM nodes WHERE project = ? AND parent IS NULL\")\n .get(project) as NodeRow | undefined;\n return row ? rowToNode(row) : null;\n}\n\nexport function listProjects(): Array<{\n project: string;\n id: string;\n summary: string;\n total: number;\n resolved: number;\n unresolved: number;\n updated_at: string;\n}> {\n const db = getDb();\n\n const roots = db\n .prepare(\"SELECT * FROM nodes WHERE parent IS NULL\")\n .all() as NodeRow[];\n\n return roots.map((root) => {\n const counts = db\n .prepare(\n `SELECT\n COUNT(*) as total,\n SUM(CASE WHEN resolved = 1 THEN 1 ELSE 0 END) as resolved\n FROM nodes WHERE project = ?`\n )\n .get(root.project) as { total: number; resolved: number };\n\n return {\n project: root.project,\n id: root.id,\n summary: root.summary,\n total: counts.total,\n resolved: counts.resolved,\n unresolved: counts.total - counts.resolved,\n updated_at: root.updated_at,\n };\n });\n}\n\n// --- Update ---\n\nexport interface UpdateNodeInput {\n node_id: string;\n agent: string;\n resolved?: boolean;\n discovery?: string | null;\n blocked?: boolean;\n blocked_reason?: string | null;\n state?: unknown;\n summary?: string;\n properties?: Record<string, unknown>;\n add_context_links?: string[];\n remove_context_links?: string[];\n add_evidence?: Array<{ type: string; ref: string }>;\n}\n\nexport function updateNode(input: UpdateNodeInput): Node {\n const db = getDb();\n const node = getNodeOrThrow(input.node_id);\n const changes: FieldChange[] = [];\n const now = new Date().toISOString();\n\n let newResolved = node.resolved;\n let newDiscovery = node.discovery;\n let newBlocked = node.blocked;\n let newBlockedReason = node.blocked_reason;\n let newState = node.state;\n let newSummary = node.summary;\n let newProperties = { ...node.properties };\n let newContextLinks = [...node.context_links];\n let newEvidence = [...node.evidence];\n\n // [sl:OZ0or-q5TserCEfWUeMVv] Require evidence when resolving\n if (input.resolved === true && !node.resolved) {\n const hasExistingEvidence = node.evidence.length > 0;\n const hasNewEvidence = input.add_evidence && input.add_evidence.length > 0;\n if (!hasExistingEvidence && !hasNewEvidence) {\n throw new EngineError(\n \"evidence_required\",\n `Cannot resolve node ${input.node_id} without evidence. Add at least one add_evidence entry (type: 'git', 'note', 'test', etc.) 
explaining what was done.`\n );\n }\n }\n\n if (input.resolved !== undefined && input.resolved !== node.resolved) {\n changes.push({ field: \"resolved\", before: node.resolved, after: input.resolved });\n newResolved = input.resolved;\n }\n\n if (input.discovery !== undefined && input.discovery !== node.discovery) {\n changes.push({ field: \"discovery\", before: node.discovery, after: input.discovery });\n newDiscovery = input.discovery;\n }\n\n if (input.blocked !== undefined && input.blocked !== node.blocked) {\n changes.push({ field: \"blocked\", before: node.blocked, after: input.blocked });\n newBlocked = input.blocked;\n // Clear blocked_reason when unblocking (unless explicitly set)\n if (!input.blocked && input.blocked_reason === undefined) {\n if (node.blocked_reason !== null) {\n changes.push({ field: \"blocked_reason\", before: node.blocked_reason, after: null });\n }\n newBlockedReason = null;\n }\n }\n\n if (input.blocked_reason !== undefined && input.blocked_reason !== node.blocked_reason) {\n changes.push({ field: \"blocked_reason\", before: node.blocked_reason, after: input.blocked_reason });\n newBlockedReason = input.blocked_reason;\n }\n\n if (input.state !== undefined) {\n changes.push({ field: \"state\", before: node.state, after: input.state });\n newState = input.state;\n }\n\n if (input.summary !== undefined && input.summary !== node.summary) {\n changes.push({ field: \"summary\", before: node.summary, after: input.summary });\n newSummary = input.summary;\n }\n\n if (input.properties) {\n for (const [key, value] of Object.entries(input.properties)) {\n if (value === null) {\n if (key in newProperties) {\n changes.push({ field: `properties.${key}`, before: newProperties[key], after: null });\n delete newProperties[key];\n }\n } else {\n changes.push({ field: `properties.${key}`, before: newProperties[key] ?? null, after: value });\n newProperties[key] = value;\n }\n }\n }\n\n if (input.add_context_links) {\n for (const link of input.add_context_links) {\n if (!newContextLinks.includes(link)) {\n newContextLinks.push(link);\n changes.push({ field: \"context_links\", before: null, after: link });\n }\n }\n }\n\n if (input.remove_context_links) {\n for (const link of input.remove_context_links) {\n const idx = newContextLinks.indexOf(link);\n if (idx !== -1) {\n newContextLinks.splice(idx, 1);\n changes.push({ field: \"context_links\", before: link, after: null });\n }\n }\n }\n\n if (input.add_evidence) {\n for (const ev of input.add_evidence) {\n const evidence: Evidence = {\n type: ev.type,\n ref: ev.ref,\n agent: input.agent,\n timestamp: now,\n };\n newEvidence.push(evidence);\n changes.push({ field: \"evidence\", before: null, after: evidence });\n }\n }\n\n if (changes.length === 0) {\n return node;\n }\n\n const newRev = node.rev + 1;\n\n db.prepare(`\n UPDATE nodes SET\n rev = ?,\n resolved = ?,\n discovery = ?,\n blocked = ?,\n blocked_reason = ?,\n state = ?,\n summary = ?,\n properties = ?,\n context_links = ?,\n evidence = ?,\n updated_at = ?\n WHERE id = ?\n `).run(\n newRev,\n newResolved ? 1 : 0,\n newDiscovery,\n newBlocked ? 1 : 0,\n newBlockedReason,\n newState !== null ? JSON.stringify(newState) : null,\n newSummary,\n JSON.stringify(newProperties),\n JSON.stringify(newContextLinks),\n JSON.stringify(newEvidence),\n now,\n input.node_id\n );\n\n const action = input.resolved === true ? 
\"resolved\" : \"updated\";\n logEvent(input.node_id, input.agent, action, changes);\n\n return getNodeOrThrow(input.node_id);\n}\n\n// --- Progress ---\n\nexport function getSubtreeProgress(nodeId: string): { resolved: number; total: number } {\n const db = getDb();\n const row = db.prepare(\n `WITH RECURSIVE descendants(id) AS (\n SELECT id FROM nodes WHERE id = ?\n UNION ALL\n SELECT n.id FROM nodes n JOIN descendants d ON n.parent = d.id\n )\n SELECT\n COUNT(*) as total,\n SUM(CASE WHEN n.resolved = 1 THEN 1 ELSE 0 END) as resolved\n FROM descendants d JOIN nodes n ON n.id = d.id`\n ).get(nodeId) as { total: number; resolved: number };\n return { resolved: row.resolved, total: row.total };\n}\n\n// --- Query helpers ---\n\nexport function getProjectSummary(project: string): {\n total: number;\n resolved: number;\n unresolved: number;\n blocked: number;\n actionable: number;\n} {\n const db = getDb();\n\n const counts = db\n .prepare(\n `SELECT\n COUNT(*) as total,\n SUM(CASE WHEN resolved = 1 THEN 1 ELSE 0 END) as resolved\n FROM nodes WHERE project = ?`\n )\n .get(project) as { total: number; resolved: number };\n\n // Blocked: unresolved nodes that are manually blocked OR have unresolved dependencies\n const blocked = db\n .prepare(\n `SELECT COUNT(DISTINCT id) as count FROM (\n SELECT n.id FROM nodes n\n WHERE n.project = ? AND n.resolved = 0 AND n.blocked = 1\n UNION\n SELECT n.id FROM nodes n\n JOIN edges e ON e.from_node = n.id AND e.type = 'depends_on'\n JOIN nodes dep ON dep.id = e.to_node AND dep.resolved = 0\n WHERE n.project = ? AND n.resolved = 0\n )`\n )\n .get(project, project) as { count: number };\n\n // Actionable: unresolved leaves (no unresolved children) with all deps resolved and not manually blocked\n const actionable = db\n .prepare(\n `SELECT COUNT(*) as count FROM nodes n\n WHERE n.project = ? AND n.resolved = 0 AND n.blocked = 0\n AND NOT EXISTS (\n SELECT 1 FROM nodes child WHERE child.parent = n.id AND child.resolved = 0\n )\n AND NOT EXISTS (\n SELECT 1 FROM edges e\n JOIN nodes dep ON dep.id = e.to_node AND dep.resolved = 0\n WHERE e.from_node = n.id AND e.type = 'depends_on'\n )`\n )\n .get(project) as { count: number };\n\n return {\n total: counts.total,\n resolved: counts.resolved,\n unresolved: counts.total - counts.resolved,\n blocked: blocked.count,\n actionable: actionable.count,\n };\n}\n","import Database from \"better-sqlite3\";\nimport path from \"path\";\n\nlet db: Database.Database;\nlet dbPath: string;\n\nexport function setDbPath(p: string): void {\n dbPath = p;\n}\n\nexport function getDb(): Database.Database {\n if (!db) {\n const resolvedPath = dbPath ?? 
path.resolve(\"graph.db\");\n db = new Database(resolvedPath);\n db.pragma(\"journal_mode = WAL\");\n db.pragma(\"synchronous = FULL\");\n db.pragma(\"foreign_keys = ON\");\n migrate(db);\n }\n return db;\n}\n\nexport function initDb(p?: string): Database.Database {\n // Close existing db if any (used by tests to reset state)\n if (db) {\n db.close();\n db = undefined!;\n }\n if (p) dbPath = p;\n return getDb();\n}\n\nfunction migrate(db: Database.Database): void {\n db.exec(`\n CREATE TABLE IF NOT EXISTS nodes (\n id TEXT PRIMARY KEY,\n rev INTEGER NOT NULL DEFAULT 1,\n parent TEXT REFERENCES nodes(id),\n project TEXT NOT NULL,\n summary TEXT NOT NULL,\n resolved INTEGER NOT NULL DEFAULT 0,\n depth INTEGER NOT NULL DEFAULT 0,\n state TEXT,\n properties TEXT NOT NULL DEFAULT '{}',\n context_links TEXT NOT NULL DEFAULT '[]',\n evidence TEXT NOT NULL DEFAULT '[]',\n created_by TEXT NOT NULL,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n );\n\n CREATE TABLE IF NOT EXISTS edges (\n id TEXT PRIMARY KEY,\n from_node TEXT NOT NULL REFERENCES nodes(id),\n to_node TEXT NOT NULL REFERENCES nodes(id),\n type TEXT NOT NULL,\n created_at TEXT NOT NULL,\n UNIQUE(from_node, to_node, type)\n );\n\n CREATE TABLE IF NOT EXISTS events (\n id TEXT PRIMARY KEY,\n node_id TEXT NOT NULL REFERENCES nodes(id),\n agent TEXT NOT NULL,\n action TEXT NOT NULL,\n changes TEXT NOT NULL,\n timestamp TEXT NOT NULL\n );\n\n CREATE INDEX IF NOT EXISTS idx_nodes_project ON nodes(project);\n CREATE INDEX IF NOT EXISTS idx_nodes_parent ON nodes(parent);\n CREATE INDEX IF NOT EXISTS idx_nodes_resolved ON nodes(project, resolved);\n CREATE INDEX IF NOT EXISTS idx_edges_from ON edges(from_node);\n CREATE INDEX IF NOT EXISTS idx_edges_to ON edges(to_node);\n CREATE INDEX IF NOT EXISTS idx_edges_type ON edges(from_node, type);\n CREATE INDEX IF NOT EXISTS idx_events_node ON events(node_id);\n\n CREATE TABLE IF NOT EXISTS knowledge (\n id TEXT PRIMARY KEY,\n project TEXT NOT NULL,\n key TEXT NOT NULL,\n content TEXT NOT NULL,\n created_by TEXT NOT NULL,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n UNIQUE(project, key)\n );\n\n CREATE INDEX IF NOT EXISTS idx_knowledge_project ON knowledge(project);\n `);\n\n // [sl:yBBVr4wcgVfWA_w8U8hQo] Migration: add depth column if it doesn't exist\n const hasDepth = db.prepare(\n \"SELECT COUNT(*) as cnt FROM pragma_table_info('nodes') WHERE name = 'depth'\"\n ).get() as { cnt: number };\n\n if (hasDepth.cnt === 0) {\n db.exec(\"ALTER TABLE nodes ADD COLUMN depth INTEGER NOT NULL DEFAULT 0\");\n // Backfill depths using recursive CTE\n db.exec(`\n WITH RECURSIVE tree(id, depth) AS (\n SELECT id, 0 FROM nodes WHERE parent IS NULL\n UNION ALL\n SELECT n.id, t.depth + 1\n FROM nodes n JOIN tree t ON n.parent = t.id\n )\n UPDATE nodes SET depth = (SELECT depth FROM tree WHERE tree.id = nodes.id)\n `);\n }\n\n // [sl:AOXqUIhpW2-gdMqWATf66] Migration: add discovery column if it doesn't exist\n const hasDiscovery = db.prepare(\n \"SELECT COUNT(*) as cnt FROM pragma_table_info('nodes') WHERE name = 'discovery'\"\n ).get() as { cnt: number };\n\n if (hasDiscovery.cnt === 0) {\n db.exec(\"ALTER TABLE nodes ADD COLUMN discovery TEXT DEFAULT NULL\");\n }\n\n // Migration: add blocked/blocked_reason columns if they don't exist\n const hasBlocked = db.prepare(\n \"SELECT COUNT(*) as cnt FROM pragma_table_info('nodes') WHERE name = 'blocked'\"\n ).get() as { cnt: number };\n\n if (hasBlocked.cnt === 0) {\n db.exec(\"ALTER TABLE nodes ADD COLUMN blocked INTEGER NOT NULL DEFAULT 0\");\n 
db.exec(\"ALTER TABLE nodes ADD COLUMN blocked_reason TEXT DEFAULT NULL\");\n }\n}\n\nexport function checkpointDb(): void {\n if (db) {\n db.pragma(\"wal_checkpoint(TRUNCATE)\");\n }\n}\n\nexport function closeDb(): void {\n if (db) {\n checkpointDb();\n db.close();\n }\n}\n","import { nanoid } from \"nanoid\";\nimport { getDb } from \"./db.js\";\nimport type { FieldChange, Event } from \"./types.js\";\n\nconst INSERT_EVENT = `\n INSERT INTO events (id, node_id, agent, action, changes, timestamp)\n VALUES (?, ?, ?, ?, ?, ?)\n`;\n\nexport function logEvent(\n nodeId: string,\n agent: string,\n action: string,\n changes: FieldChange[]\n): Event {\n const db = getDb();\n const event: Event = {\n id: nanoid(),\n node_id: nodeId,\n agent,\n action,\n changes,\n timestamp: new Date().toISOString(),\n };\n\n db.prepare(INSERT_EVENT).run(\n event.id,\n event.node_id,\n event.agent,\n event.action,\n JSON.stringify(event.changes),\n event.timestamp\n );\n\n return event;\n}\n\nexport function getEvents(\n nodeId: string,\n limit: number = 20,\n cursor?: string\n): { events: Event[]; next_cursor: string | null } {\n const db = getDb();\n\n let query: string;\n let params: unknown[];\n\n if (cursor) {\n query = `\n SELECT * FROM events\n WHERE node_id = ? AND timestamp < ?\n ORDER BY timestamp DESC\n LIMIT ?\n `;\n params = [nodeId, cursor, limit + 1];\n } else {\n query = `\n SELECT * FROM events\n WHERE node_id = ?\n ORDER BY timestamp DESC\n LIMIT ?\n `;\n params = [nodeId, limit + 1];\n }\n\n const rows = db.prepare(query).all(...params) as Array<{\n id: string;\n node_id: string;\n agent: string;\n action: string;\n changes: string;\n timestamp: string;\n }>;\n\n const hasMore = rows.length > limit;\n const slice = hasMore ? rows.slice(0, limit) : rows;\n\n const events: Event[] = slice.map((row) => ({\n id: row.id,\n node_id: row.node_id,\n agent: row.agent,\n action: row.action,\n changes: JSON.parse(row.changes),\n timestamp: row.timestamp,\n }));\n\n return {\n events,\n next_cursor: hasMore ? 
slice[slice.length - 1].timestamp : null,\n };\n}\n","export class ValidationError extends Error {\n code = \"validation_error\";\n constructor(message: string) {\n super(message);\n this.name = \"ValidationError\";\n }\n}\n\nexport class EngineError extends Error {\n code: string;\n constructor(code: string, message: string) {\n super(message);\n this.name = \"EngineError\";\n this.code = code;\n }\n}\n\nexport function requireString(value: unknown, field: string): string {\n if (typeof value !== \"string\" || value.trim().length === 0) {\n throw new ValidationError(`${field} is required and must be a non-empty string`);\n }\n return value.trim();\n}\n\nexport function optionalString(value: unknown, field: string): string | undefined {\n if (value === undefined || value === null) return undefined;\n if (typeof value !== \"string\") {\n throw new ValidationError(`${field} must be a string`);\n }\n return value;\n}\n\nexport function requireArray<T>(value: unknown, field: string): T[] {\n if (!Array.isArray(value) || value.length === 0) {\n throw new ValidationError(`${field} is required and must be a non-empty array`);\n }\n return value as T[];\n}\n\nexport function optionalArray<T>(value: unknown, field: string): T[] | undefined {\n if (value === undefined || value === null) return undefined;\n if (!Array.isArray(value)) {\n throw new ValidationError(`${field} must be an array`);\n }\n return value as T[];\n}\n\nexport function optionalNumber(value: unknown, field: string, min?: number, max?: number): number | undefined {\n if (value === undefined || value === null) return undefined;\n if (typeof value !== \"number\" || isNaN(value)) {\n throw new ValidationError(`${field} must be a number`);\n }\n if (min !== undefined && value < min) {\n throw new ValidationError(`${field} must be >= ${min}`);\n }\n if (max !== undefined && value > max) {\n throw new ValidationError(`${field} must be <= ${max}`);\n }\n return value;\n}\n\nexport function optionalBoolean(value: unknown, field: string): boolean | undefined {\n if (value === undefined || value === null) return undefined;\n if (typeof value !== \"boolean\") {\n throw new ValidationError(`${field} must be a boolean`);\n }\n return value;\n}\n\nexport function requireObject(value: unknown, field: string): Record<string, unknown> {\n if (value === null || typeof value !== \"object\" || Array.isArray(value)) {\n throw new ValidationError(`${field} is required and must be an object`);\n }\n return value as Record<string, 
unknown>;\n}\n"],"mappings":";;;AAAA,SAAS,UAAAA,eAAc;;;ACAvB,OAAO,cAAc;AACrB,OAAO,UAAU;AAEjB,IAAI;AACJ,IAAI;AAEG,SAAS,UAAU,GAAiB;AACzC,WAAS;AACX;AAEO,SAAS,QAA2B;AACzC,MAAI,CAAC,IAAI;AACP,UAAM,eAAe,UAAU,KAAK,QAAQ,UAAU;AACtD,SAAK,IAAI,SAAS,YAAY;AAC9B,OAAG,OAAO,oBAAoB;AAC9B,OAAG,OAAO,oBAAoB;AAC9B,OAAG,OAAO,mBAAmB;AAC7B,YAAQ,EAAE;AAAA,EACZ;AACA,SAAO;AACT;AAYA,SAAS,QAAQC,KAA6B;AAC5C,EAAAA,IAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAwDP;AAGD,QAAM,WAAWA,IAAG;AAAA,IAClB;AAAA,EACF,EAAE,IAAI;AAEN,MAAI,SAAS,QAAQ,GAAG;AACtB,IAAAA,IAAG,KAAK,+DAA+D;AAEvE,IAAAA,IAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAQP;AAAA,EACH;AAGA,QAAM,eAAeA,IAAG;AAAA,IACtB;AAAA,EACF,EAAE,IAAI;AAEN,MAAI,aAAa,QAAQ,GAAG;AAC1B,IAAAA,IAAG,KAAK,0DAA0D;AAAA,EACpE;AAGA,QAAM,aAAaA,IAAG;AAAA,IACpB;AAAA,EACF,EAAE,IAAI;AAEN,MAAI,WAAW,QAAQ,GAAG;AACxB,IAAAA,IAAG,KAAK,iEAAiE;AACzE,IAAAA,IAAG,KAAK,+DAA+D;AAAA,EACzE;AACF;AAEO,SAAS,eAAqB;AACnC,MAAI,IAAI;AACN,OAAG,OAAO,0BAA0B;AAAA,EACtC;AACF;AAEO,SAAS,UAAgB;AAC9B,MAAI,IAAI;AACN,iBAAa;AACb,OAAG,MAAM;AAAA,EACX;AACF;;;AC7IA,SAAS,cAAc;AAIvB,IAAM,eAAe;AAAA;AAAA;AAAA;AAKd,SAAS,SACd,QACA,OACA,QACA,SACO;AACP,QAAMC,MAAK,MAAM;AACjB,QAAM,QAAe;AAAA,IACnB,IAAI,OAAO;AAAA,IACX,SAAS;AAAA,IACT;AAAA,IACA;AAAA,IACA;AAAA,IACA,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,EACpC;AAEA,EAAAA,IAAG,QAAQ,YAAY,EAAE;AAAA,IACvB,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,MAAM;AAAA,IACN,KAAK,UAAU,MAAM,OAAO;AAAA,IAC5B,MAAM;AAAA,EACR;AAEA,SAAO;AACT;AAEO,SAAS,UACd,QACA,QAAgB,IAChB,QACiD;AACjD,QAAMA,MAAK,MAAM;AAEjB,MAAI;AACJ,MAAI;AAEJ,MAAI,QAAQ;AACV,YAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAMR,aAAS,CAAC,QAAQ,QAAQ,QAAQ,CAAC;AAAA,EACrC,OAAO;AACL,YAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAMR,aAAS,CAAC,QAAQ,QAAQ,CAAC;AAAA,EAC7B;AAEA,QAAM,OAAOA,IAAG,QAAQ,KAAK,EAAE,IAAI,GAAG,MAAM;AAS5C,QAAM,UAAU,KAAK,SAAS;AAC9B,QAAM,QAAQ,UAAU,KAAK,MAAM,GAAG,KAAK,IAAI;AAE/C,QAAM,SAAkB,MAAM,IAAI,CAAC,SAAS;AAAA,IAC1C,IAAI,IAAI;AAAA,IACR,SAAS,IAAI;AAAA,IACb,OAAO,IAAI;AAAA,IACX,QAAQ,IAAI;AAAA,IACZ,SAAS,KAAK,MAAM,IAAI,OAAO;AAAA,IAC/B,WAAW,IAAI;AAAA,EACjB,EAAE;AAEF,SAAO;AAAA,IACL;AAAA,IACA,aAAa,UAAU,MAAM,MAAM,SAAS,CAAC,EAAE,YAAY;AAAA,EAC7D;AACF;;;AC1FO,IAAM,kBAAN,cAA8B,MAAM;AAAA,EACzC,OAAO;AAAA,EACP,YAAY,SAAiB;AAC3B,UAAM,OAAO;AACb,SAAK,OAAO;AAAA,EACd;AACF;AAEO,IAAM,cAAN,cAA0B,MAAM;AAAA,EACrC;AAAA,EACA,YAAY,MAAc,SAAiB;AACzC,UAAM,OAAO;AACb,SAAK,OAAO;AACZ,SAAK,OAAO;AAAA,EACd;AACF;AAEO,SAAS,cAAc,OAAgB,OAAuB;AACnE,MAAI,OAAO,UAAU,YAAY,MAAM,KAAK,EAAE,WAAW,GAAG;AAC1D,UAAM,IAAI,gBAAgB,GAAG,KAAK,6CAA6C;AAAA,EACjF;AACA,SAAO,MAAM,KAAK;AACpB;AAEO,SAAS,eAAe,OAAgB,OAAmC;AAChF,MAAI,UAAU,UAAa,UAAU,KAAM,QAAO;AAClD,MAAI,OAAO,UAAU,UAAU;AAC7B,UAAM,IAAI,gBAAgB,GAAG,KAAK,mBAAmB;AAAA,EACvD;AACA,SAAO;AACT;AAEO,SAAS,aAAgB,OAAgB,OAAoB;AAClE,MAAI,CAAC,MAAM,QAAQ,KAAK,KAAK,MAAM,WAAW,GAAG;AAC/C,UAAM,IAAI,gBAAgB,GAAG,KAAK,4CAA4C;AAAA,EAChF;AACA,SAAO;AACT;AAUO,SAAS,eAAe,OAAgB,OAAe,KAAc,KAAkC;AAC5G,MAAI,UAAU,UAAa,UAAU,KAAM,QAAO;AAClD,MAAI,OAAO,UAAU,YAAY,MAAM,KAAK,GAAG;AAC7C,UAAM,IAAI,gBAAgB,GAAG,KAAK,mBAAmB;AAAA,EACvD;AACA,MAAI,QAAQ,UAAa,QAAQ,KAAK;AACpC,UAAM,IAAI,gBAAgB,GAAG,KAAK,eAAe,GAAG,EAAE;AAAA,EACxD;AACA,MAAI,QAAQ,UAAa,QAAQ,KAAK;AACpC,UAAM,IAAI,gBAAgB,GAAG,KAAK,eAAe,GAAG,EAAE;AAAA,EACxD;AACA,SAAO;AACT;AAEO,SAAS,gBAAgB,OAAgB,OAAoC;AAClF,MAAI,UAAU,UAAa,UAAU,KAAM,QAAO;AAClD,MAAI,OAAO,UAAU,WAAW;AAC9B,UAAM,IAAI,gBAAgB,GAAG,KAAK,oBAAoB;AAAA,EACxD;AACA,SAAO;AACT;;;AH3DA,SAAS,UAAU
,KAAoB;AACrC,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,KAAK,IAAI;AAAA,IACT,QAAQ,IAAI;AAAA,IACZ,SAAS,IAAI;AAAA,IACb,SAAS,IAAI;AAAA,IACb,UAAU,IAAI,aAAa;AAAA,IAC3B,OAAO,IAAI;AAAA,IACX,WAAW,IAAI,aAAa;AAAA,IAC5B,SAAS,IAAI,YAAY;AAAA,IACzB,gBAAgB,IAAI,kBAAkB;AAAA,IACtC,OAAO,IAAI,QAAQ,KAAK,MAAM,IAAI,KAAK,IAAI;AAAA,IAC3C,YAAY,KAAK,MAAM,IAAI,UAAU;AAAA,IACrC,eAAe,KAAK,MAAM,IAAI,aAAa;AAAA,IAC3C,UAAU,KAAK,MAAM,IAAI,QAAQ;AAAA,IACjC,YAAY,IAAI;AAAA,IAChB,YAAY,IAAI;AAAA,IAChB,YAAY,IAAI;AAAA,EAClB;AACF;AAeO,SAAS,WAAW,OAA8B;AACvD,QAAMC,MAAK,MAAM;AACjB,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AACnC,QAAM,KAAKC,QAAO;AAGlB,MAAI,QAAQ;AACZ,MAAI,MAAM,QAAQ;AAChB,UAAM,YAAYD,IAAG,QAAQ,sCAAsC,EAAE,IAAI,MAAM,MAAM;AACrF,QAAI,UAAW,SAAQ,UAAU,QAAQ;AAAA,EAC3C;AAEA,QAAM,OAAa;AAAA,IACjB;AAAA,IACA,KAAK;AAAA,IACL,QAAQ,MAAM,UAAU;AAAA,IACxB,SAAS,MAAM;AAAA,IACf,SAAS,MAAM;AAAA,IACf,UAAU;AAAA,IACV;AAAA,IACA,WAAW,MAAM,aAAa;AAAA,IAC9B,SAAS;AAAA,IACT,gBAAgB;AAAA,IAChB,OAAO,MAAM,SAAS;AAAA,IACtB,YAAY,MAAM,cAAc,CAAC;AAAA,IACjC,eAAe,MAAM,iBAAiB,CAAC;AAAA,IACvC,UAAU,CAAC;AAAA,IACX,YAAY,MAAM;AAAA,IAClB,YAAY;AAAA,IACZ,YAAY;AAAA,EACd;AAEA,EAAAA,IAAG,QAAQ;AAAA;AAAA;AAAA,GAGV,EAAE;AAAA,IACD,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL;AAAA,IACA;AAAA,IACA,KAAK,UAAU,OAAO,KAAK,UAAU,KAAK,KAAK,IAAI;AAAA,IACnD,KAAK,UAAU,KAAK,UAAU;AAAA,IAC9B,KAAK,UAAU,KAAK,aAAa;AAAA,IACjC,KAAK,UAAU,KAAK,QAAQ;AAAA,IAC5B,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAEA,WAAS,KAAK,IAAI,MAAM,OAAO,WAAW;AAAA,IACxC,EAAE,OAAO,WAAW,QAAQ,MAAM,OAAO,KAAK,QAAQ;AAAA,EACxD,CAAC;AAED,SAAO;AACT;AAIO,SAAS,QAAQ,IAAyB;AAC/C,QAAMA,MAAK,MAAM;AACjB,QAAM,MAAMA,IAAG,QAAQ,kCAAkC,EAAE,IAAI,EAAE;AAGjE,SAAO,MAAM,UAAU,GAAG,IAAI;AAChC;AAEO,SAAS,eAAe,IAAkB;AAC/C,QAAM,OAAO,QAAQ,EAAE;AACvB,MAAI,CAAC,MAAM;AACT,UAAM,IAAI,YAAY,kBAAkB,mBAAmB,EAAE,8DAA8D;AAAA,EAC7H;AACA,SAAO;AACT;AAEO,SAAS,YAAY,UAA0B;AACpD,QAAMA,MAAK,MAAM;AACjB,QAAM,OAAOA,IACV,QAAQ,sCAAsC,EAC9C,IAAI,QAAQ;AACf,SAAO,KAAK,IAAI,SAAS;AAC3B;AAEO,SAAS,aAAa,QAA2E;AACtG,QAAM,YAAuE,CAAC;AAC9E,MAAI,UAAU,QAAQ,MAAM;AAE5B,SAAO,SAAS,QAAQ;AACtB,UAAM,SAAS,QAAQ,QAAQ,MAAM;AACrC,QAAI,CAAC,OAAQ;AACb,cAAU,QAAQ,EAAE,IAAI,OAAO,IAAI,SAAS,OAAO,SAAS,UAAU,OAAO,SAAS,CAAC;AACvF,cAAU;AAAA,EACZ;AAEA,SAAO;AACT;AAEO,SAAS,eAAe,SAA8B;AAC3D,QAAMA,MAAK,MAAM;AACjB,QAAM,MAAMA,IACT,QAAQ,0DAA0D,EAClE,IAAI,OAAO;AACd,SAAO,MAAM,UAAU,GAAG,IAAI;AAChC;AAEO,SAAS,eAQb;AACD,QAAMA,MAAK,MAAM;AAEjB,QAAM,QAAQA,IACX,QAAQ,0CAA0C,EAClD,IAAI;AAEP,SAAO,MAAM,IAAI,CAAC,SAAS;AACzB,UAAM,SAASA,IACZ;AAAA,MACC;AAAA;AAAA;AAAA;AAAA,IAIF,EACC,IAAI,KAAK,OAAO;AAEnB,WAAO;AAAA,MACL,SAAS,KAAK;AAAA,MACd,IAAI,KAAK;AAAA,MACT,SAAS,KAAK;AAAA,MACd,OAAO,OAAO;AAAA,MACd,UAAU,OAAO;AAAA,MACjB,YAAY,OAAO,QAAQ,OAAO;AAAA,MAClC,YAAY,KAAK;AAAA,IACnB;AAAA,EACF,CAAC;AACH;AAmBO,SAAS,WAAW,OAA8B;AACvD,QAAMA,MAAK,MAAM;AACjB,QAAM,OAAO,eAAe,MAAM,OAAO;AACzC,QAAM,UAAyB,CAAC;AAChC,QAAM,OAAM,oBAAI,KAAK,GAAE,YAAY;AAEnC,MAAI,cAAc,KAAK;AACvB,MAAI,eAAe,KAAK;AACxB,MAAI,aAAa,KAAK;AACtB,MAAI,mBAAmB,KAAK;AAC5B,MAAI,WAAW,KAAK;AACpB,MAAI,aAAa,KAAK;AACtB,MAAI,gBAAgB,EAAE,GAAG,KAAK,WAAW;AACzC,MAAI,kBAAkB,CAAC,GAAG,KAAK,aAAa;AAC5C,MAAI,cAAc,CAAC,GAAG,KAAK,QAAQ;AAGnC,MAAI,MAAM,aAAa,QAAQ,CAAC,KAAK,UAAU;AAC7C,UAAM,sBAAsB,KAAK,SAAS,SAAS;AACnD,UAAM,iBAAiB,MAAM,gBAAgB,MAAM,aAAa,SAAS;AACzE,QAAI,CAAC,uBAAuB,CAAC,gBAAgB;AAC3C,YAAM,IAAI;AAAA,QACR;AAAA,QACA,uBAAuB,MAAM,OAAO;AAAA,MACtC;AAAA,IACF;AAAA,EACF;AAEA,MAAI,MAAM,aAAa,UAAa,MAAM,aAAa,KAAK,UAAU;AACpE,YAAQ,KAAK,EAAE,OAAO,YAAY,QAAQ,KAAK,UAAU,OAAO,MAAM,SAAS,CAAC;AAChF,kBAAc,MAAM;AAAA,EACtB;AAEA,MAAI,MAAM,cAAc,UAAa,MAAM,cAAc,KAAK,WAAW;AACvE,YAAQ,KAAK,EAAE,OAAO,aAAa,
QAAQ,KAAK,WAAW,OAAO,MAAM,UAAU,CAAC;AACnF,mBAAe,MAAM;AAAA,EACvB;AAEA,MAAI,MAAM,YAAY,UAAa,MAAM,YAAY,KAAK,SAAS;AACjE,YAAQ,KAAK,EAAE,OAAO,WAAW,QAAQ,KAAK,SAAS,OAAO,MAAM,QAAQ,CAAC;AAC7E,iBAAa,MAAM;AAEnB,QAAI,CAAC,MAAM,WAAW,MAAM,mBAAmB,QAAW;AACxD,UAAI,KAAK,mBAAmB,MAAM;AAChC,gBAAQ,KAAK,EAAE,OAAO,kBAAkB,QAAQ,KAAK,gBAAgB,OAAO,KAAK,CAAC;AAAA,MACpF;AACA,yBAAmB;AAAA,IACrB;AAAA,EACF;AAEA,MAAI,MAAM,mBAAmB,UAAa,MAAM,mBAAmB,KAAK,gBAAgB;AACtF,YAAQ,KAAK,EAAE,OAAO,kBAAkB,QAAQ,KAAK,gBAAgB,OAAO,MAAM,eAAe,CAAC;AAClG,uBAAmB,MAAM;AAAA,EAC3B;AAEA,MAAI,MAAM,UAAU,QAAW;AAC7B,YAAQ,KAAK,EAAE,OAAO,SAAS,QAAQ,KAAK,OAAO,OAAO,MAAM,MAAM,CAAC;AACvE,eAAW,MAAM;AAAA,EACnB;AAEA,MAAI,MAAM,YAAY,UAAa,MAAM,YAAY,KAAK,SAAS;AACjE,YAAQ,KAAK,EAAE,OAAO,WAAW,QAAQ,KAAK,SAAS,OAAO,MAAM,QAAQ,CAAC;AAC7E,iBAAa,MAAM;AAAA,EACrB;AAEA,MAAI,MAAM,YAAY;AACpB,eAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,MAAM,UAAU,GAAG;AAC3D,UAAI,UAAU,MAAM;AAClB,YAAI,OAAO,eAAe;AACxB,kBAAQ,KAAK,EAAE,OAAO,cAAc,GAAG,IAAI,QAAQ,cAAc,GAAG,GAAG,OAAO,KAAK,CAAC;AACpF,iBAAO,cAAc,GAAG;AAAA,QAC1B;AAAA,MACF,OAAO;AACL,gBAAQ,KAAK,EAAE,OAAO,cAAc,GAAG,IAAI,QAAQ,cAAc,GAAG,KAAK,MAAM,OAAO,MAAM,CAAC;AAC7F,sBAAc,GAAG,IAAI;AAAA,MACvB;AAAA,IACF;AAAA,EACF;AAEA,MAAI,MAAM,mBAAmB;AAC3B,eAAW,QAAQ,MAAM,mBAAmB;AAC1C,UAAI,CAAC,gBAAgB,SAAS,IAAI,GAAG;AACnC,wBAAgB,KAAK,IAAI;AACzB,gBAAQ,KAAK,EAAE,OAAO,iBAAiB,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AAEA,MAAI,MAAM,sBAAsB;AAC9B,eAAW,QAAQ,MAAM,sBAAsB;AAC7C,YAAM,MAAM,gBAAgB,QAAQ,IAAI;AACxC,UAAI,QAAQ,IAAI;AACd,wBAAgB,OAAO,KAAK,CAAC;AAC7B,gBAAQ,KAAK,EAAE,OAAO,iBAAiB,QAAQ,MAAM,OAAO,KAAK,CAAC;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AAEA,MAAI,MAAM,cAAc;AACtB,eAAW,MAAM,MAAM,cAAc;AACnC,YAAM,WAAqB;AAAA,QACzB,MAAM,GAAG;AAAA,QACT,KAAK,GAAG;AAAA,QACR,OAAO,MAAM;AAAA,QACb,WAAW;AAAA,MACb;AACA,kBAAY,KAAK,QAAQ;AACzB,cAAQ,KAAK,EAAE,OAAO,YAAY,QAAQ,MAAM,OAAO,SAAS,CAAC;AAAA,IACnE;AAAA,EACF;AAEA,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO;AAAA,EACT;AAEA,QAAM,SAAS,KAAK,MAAM;AAE1B,EAAAA,IAAG,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAcV,EAAE;AAAA,IACD;AAAA,IACA,cAAc,IAAI;AAAA,IAClB;AAAA,IACA,aAAa,IAAI;AAAA,IACjB;AAAA,IACA,aAAa,OAAO,KAAK,UAAU,QAAQ,IAAI;AAAA,IAC/C;AAAA,IACA,KAAK,UAAU,aAAa;AAAA,IAC5B,KAAK,UAAU,eAAe;AAAA,IAC9B,KAAK,UAAU,WAAW;AAAA,IAC1B;AAAA,IACA,MAAM;AAAA,EACR;AAEA,QAAM,SAAS,MAAM,aAAa,OAAO,aAAa;AACtD,WAAS,MAAM,SAAS,MAAM,OAAO,QAAQ,OAAO;AAEpD,SAAO,eAAe,MAAM,OAAO;AACrC;AAIO,SAAS,mBAAmB,QAAqD;AACtF,QAAMA,MAAK,MAAM;AACjB,QAAM,MAAMA,IAAG;AAAA,IACb;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASF,EAAE,IAAI,MAAM;AACZ,SAAO,EAAE,UAAU,IAAI,UAAU,OAAO,IAAI,MAAM;AACpD;AAIO,SAAS,kBAAkB,SAMhC;AACA,QAAMA,MAAK,MAAM;AAEjB,QAAM,SAASA,IACZ;AAAA,IACC;AAAA;AAAA;AAAA;AAAA,EAIF,EACC,IAAI,OAAO;AAGd,QAAM,UAAUA,IACb;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASF,EACC,IAAI,SAAS,OAAO;AAGvB,QAAM,aAAaA,IAChB;AAAA,IACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUF,EACC,IAAI,OAAO;AAEd,SAAO;AAAA,IACL,OAAO,OAAO;AAAA,IACd,UAAU,OAAO;AAAA,IACjB,YAAY,OAAO,QAAQ,OAAO;AAAA,IAClC,SAAS,QAAQ;AAAA,IACjB,YAAY,WAAW;AAAA,EACzB;AACF;","names":["nanoid","db","db","db","nanoid"]}
package/dist/{chunk-ILTJI4ZN.js → chunk-JRMFXD5I.js}
RENAMED
@@ -1,8 +1,10 @@
 #!/usr/bin/env node
 
 // src/tools/agent-config.ts
-
+function agentPrompt(version) {
+  return `---
 name: graph
+version: ${version}
 description: Use this agent for tasks tracked in Graph. Enforces the claim-work-resolve workflow \u2014 always checks graph_next before working, adds new work to the graph before executing, and resolves with evidence.
 tools: Read, Edit, Write, Bash, Glob, Grep, Task(Explore), AskUserQuestion
 model: sonnet
@@ -98,6 +100,19 @@ The user controls the pace. Do not auto-claim the next task.
 - NEVER skip discovery on nodes with discovery:pending \u2014 the system will block you from decomposing
 - If you're approaching context limits, ensure your current task's state is captured (update with evidence even if not fully resolved) so the next agent can pick up where you left off
 
+# Record observations proactively
+
+Graph is the project memory across sessions. If something isn't in Graph, it's effectively forgotten. While working, record things you notice \u2014 even if they're not part of your current task:
+
+- **Warnings & errors**: CI failures, deprecation warnings, security vulnerabilities, linter issues
+- **Tech debt**: Code smells, outdated dependencies, missing tests, hardcoded values
+- **Broken things**: Flaky tests, dead links, misconfigured environments
+- **Ideas & improvements**: Performance opportunities, UX issues, missing features
+
+Use \`graph_plan\` to add observation nodes under the project root. Keep them lightweight \u2014 a clear summary is enough. They can always be dropped later if irrelevant.
+
+Default to "if in doubt, add a node." It's cheap to create and the next session will thank you.
+
 # Common mistakes to avoid
 
 - Setting dependencies on parent nodes instead of leaf nodes
@@ -108,9 +123,10 @@ The user controls the pace. Do not auto-claim the next task.
 - Trying to decompose a node without completing discovery first
 - Not writing knowledge entries during discovery \u2014 future agents need this context
 `;
-
+}
+function handleAgentConfig(version) {
   return {
-    agent_file:
+    agent_file: agentPrompt(version),
     install_path: ".claude/agents/graph.md",
     instructions: "Save the agent_file content to .claude/agents/graph.md in your project root. Claude Code will automatically discover it and use it when tasks match the agent description."
   };
@@ -119,4 +135,4 @@ function handleAgentConfig() {
 export {
   handleAgentConfig
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-JRMFXD5I.js.map
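The agent prompt is now generated per call and stamped with the running package version. A small sketch of the result shape (the version string is illustrative):

```js
import { handleAgentConfig } from "./tools/agent-config.js";

// handleAgentConfig(version) embeds the version in the agent file's front
// matter and returns where to install it.
const { agent_file, install_path, instructions } = handleAgentConfig("0.1.14");
console.log(install_path);                            // ".claude/agents/graph.md"
console.log(agent_file.includes("version: 0.1.14"));  // true
```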
package/dist/chunk-JRMFXD5I.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/tools/agent-config.ts"],"sourcesContent":["// [sl:fV9I7Vel3xT5d_Ws2YHul] Subagent delivery — free for all (retention hook)\n\nfunction agentPrompt(version: string): string {\n return `---\nname: graph\nversion: ${version}\ndescription: Use this agent for tasks tracked in Graph. Enforces the claim-work-resolve workflow — always checks graph_next before working, adds new work to the graph before executing, and resolves with evidence.\ntools: Read, Edit, Write, Bash, Glob, Grep, Task(Explore), AskUserQuestion\nmodel: sonnet\n---\n\nYou are a graph-optimized agent. You execute tasks tracked in a Graph project. Follow this workflow strictly. The human directs, you execute through the graph.\n\n# Workflow\n\n## 1. ORIENT\nOn your first call, orient yourself:\n\\`\\`\\`\ngraph_onboard({ project: \"<project-name>\" })\n\\`\\`\\`\nRead the \\`hint\\` field first — it tells you exactly what to do next. Then read the summary, evidence, knowledge, and actionable tasks.\n\n**First-run:** If the tree is empty and discovery is \\`\"pending\"\\`, this is a brand new project. Jump directly to DISCOVER below. Do not call graph_next on an empty project.\n\n## 2. DISCOVER (when discovery is pending)\nIf the project root or a task node has \\`discovery: \"pending\"\\`, you must complete discovery before decomposing it. Discovery is an interview with the user to understand what needs to happen.\n\nUse AskUserQuestion to cover these areas (adapt to what's relevant — skip what's obvious):\n- **Scope** — What exactly needs to happen? What's explicitly out of scope?\n- **Existing patterns** — How does the codebase currently handle similar things? (explore first, then confirm)\n- **Technical approach** — What libraries, APIs, or patterns should we use?\n- **Acceptance criteria** — How will we know it's done? What does success look like?\n\nAfter the interview:\n1. Write findings as knowledge: \\`graph_knowledge_write({ project, key: \"discovery-<topic>\", content: \"...\" })\\`\n2. Flip discovery to done: \\`graph_update({ updates: [{ node_id: \"<id>\", discovery: \"done\" }] })\\`\n3. NOW decompose with graph_plan\n\nDo NOT skip discovery. If you try to add children to a node with \\`discovery: \"pending\"\\`, graph_plan will reject it.\n\n## 3. CLAIM\nGet your next task:\n\\`\\`\\`\ngraph_next({ project: \"<project-name>\", claim: true })\n\\`\\`\\`\nRead the task summary, ancestor chain (for scope), resolved dependencies (for context on what was done before you), and context links (for files to look at).\n\n## 4. PLAN\nIf you discover work that isn't in the graph, add it BEFORE executing:\n\\`\\`\\`\ngraph_plan({ nodes: [{ ref: \"new-work\", parent_ref: \"<parent-id>\", summary: \"...\" }] })\n\\`\\`\\`\nNever execute ad-hoc work. The graph is the source of truth.\n\nWhen decomposing work:\n- Set dependencies on LEAF nodes, not parent nodes. If \"Page A\" depends on \"Layout\", the dependency is from \"Page A\" to \"Layout\", not from the \"Pages\" parent to \"Layout\".\n- Keep tasks small and specific. A task should be completable in one session.\n- Parent nodes are organizational — they resolve when all children resolve. Don't put work in parent nodes.\n\n## 5. WORK\nExecute the claimed task. While working:\n- Annotate key code changes with \\`// [sl:nodeId]\\` where nodeId is the task you're working on\n- This creates a traceable link from code back to the task, its evidence, and its history\n- Build and run tests before considering a task done\n\n## 6. 
RESOLVE\nWhen done, resolve the task with evidence:\n\\`\\`\\`\ngraph_update({ updates: [{\n node_id: \"<task-id>\",\n resolved: true,\n add_evidence: [\n { type: \"note\", ref: \"What you did and why\" },\n { type: \"git\", ref: \"<commit-hash> — <summary>\" },\n { type: \"test\", ref: \"Test results\" }\n ],\n add_context_links: [\"path/to/files/you/touched\"]\n}] })\n\\`\\`\\`\nEvidence is mandatory. At minimum, include one note explaining what you did.\n\n## 7. PAUSE\nAfter resolving a task, STOP. Tell the user:\n- What you just completed\n- What the next actionable task is\n- Wait for the user to say \"continue\" before claiming the next task\n\nThe user controls the pace. Do not auto-claim the next task.\n\n# Rules\n\n- NEVER start work without a claimed task\n- NEVER resolve without evidence\n- NEVER execute ad-hoc work — add it to the graph first via graph_plan\n- NEVER auto-continue to the next task — pause and let the user decide\n- ALWAYS build and test before resolving\n- ALWAYS include context_links for files you modified when resolving\n- Parent nodes auto-resolve when all their children are resolved — you don't need to manually resolve them\n- NEVER skip discovery on nodes with discovery:pending — the system will block you from decomposing\n- If you're approaching context limits, ensure your current task's state is captured (update with evidence even if not fully resolved) so the next agent can pick up where you left off\n\n# Record observations proactively\n\nGraph is the project memory across sessions. If something isn't in Graph, it's effectively forgotten. While working, record things you notice — even if they're not part of your current task:\n\n- **Warnings & errors**: CI failures, deprecation warnings, security vulnerabilities, linter issues\n- **Tech debt**: Code smells, outdated dependencies, missing tests, hardcoded values\n- **Broken things**: Flaky tests, dead links, misconfigured environments\n- **Ideas & improvements**: Performance opportunities, UX issues, missing features\n\nUse \\`graph_plan\\` to add observation nodes under the project root. Keep them lightweight — a clear summary is enough. They can always be dropped later if irrelevant.\n\nDefault to \"if in doubt, add a node.\" It's cheap to create and the next session will thank you.\n\n# Common mistakes to avoid\n\n- Setting dependencies on parent nodes instead of leaf nodes\n- Running project scaffolding tools (create-next-app, etc.) before planning in the graph\n- Resolving tasks without running tests\n- Doing work that isn't tracked in the graph\n- Continuing to the next task without pausing for user review\n- Trying to decompose a node without completing discovery first\n- Not writing knowledge entries during discovery — future agents need this context\n`;\n}\n\nexport interface AgentConfigResult {\n agent_file: string;\n install_path: string;\n instructions: string;\n}\n\nexport function handleAgentConfig(version: string): AgentConfigResult {\n return {\n agent_file: agentPrompt(version),\n install_path: \".claude/agents/graph.md\",\n instructions:\n \"Save the agent_file content to .claude/agents/graph.md in your project root. 
\" +\n \"Claude Code will automatically discover it and use it when tasks match the agent description.\",\n };\n}\n"],"mappings":";;;AAEA,SAAS,YAAY,SAAyB;AAC5C,SAAO;AAAA;AAAA,WAEE,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAuHlB;AAQO,SAAS,kBAAkB,SAAoC;AACpE,SAAO;AAAA,IACL,YAAY,YAAY,OAAO;AAAA,IAC/B,cAAc;AAAA,IACd,cACE;AAAA,EAEJ;AACF;","names":[]}
package/dist/index.js
CHANGED
@@ -6,10 +6,10 @@ if (args[0] === "activate") {
   const { activate } = await import("./activate-DSDTR2EJ.js");
   activate(args[1]);
 } else if (args[0] === "init") {
-  const { init } = await import("./init-
+  const { init } = await import("./init-VII7APUJ.js");
   init();
 } else {
-  const { startServer } = await import("./server-
+  const { startServer } = await import("./server-X36DXLEG.js");
   startServer().catch((error) => {
     console.error("Failed to start graph:", error);
     process.exit(1);
package/dist/{init-RQFGF5BB.js → init-VII7APUJ.js}
RENAMED
@@ -1,11 +1,19 @@
 #!/usr/bin/env node
 import {
   handleAgentConfig
-} from "./chunk-
+} from "./chunk-JRMFXD5I.js";
 
 // src/init.ts
 import { readFileSync, writeFileSync, existsSync, mkdirSync } from "fs";
 import { join, dirname } from "path";
+import { fileURLToPath } from "url";
+var PKG_VERSION = "0.0.0";
+try {
+  const __dirname = dirname(fileURLToPath(import.meta.url));
+  const pkg = JSON.parse(readFileSync(join(__dirname, "..", "package.json"), "utf-8"));
+  PKG_VERSION = pkg.version;
+} catch {
+}
 var MCP_CONFIG = {
   command: "npx",
   args: ["-y", "@graph-tl/graph"],
@@ -39,10 +47,17 @@ function init() {
     wrote = true;
   }
   const agentPath = join(cwd, ".claude", "agents", "graph.md");
+  const { agent_file } = handleAgentConfig(PKG_VERSION);
   if (existsSync(agentPath)) {
-
+    const current = readFileSync(agentPath, "utf8");
+    if (current === agent_file) {
+      console.log("\u2713 .claude/agents/graph.md \u2014 already up to date");
+    } else {
+      writeFileSync(agentPath, agent_file, "utf8");
+      console.log("\u2713 .claude/agents/graph.md \u2014 updated");
+      wrote = true;
+    }
   } else {
-    const { agent_file } = handleAgentConfig();
     mkdirSync(dirname(agentPath), { recursive: true });
     writeFileSync(agentPath, agent_file, "utf8");
     console.log("\u2713 .claude/agents/graph.md \u2014 created graph workflow agent");
@@ -61,4 +76,4 @@ function init() {
 export {
   init
 };
-//# sourceMappingURL=init-
+//# sourceMappingURL=init-VII7APUJ.js.map
package/dist/init-VII7APUJ.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/init.ts"],"sourcesContent":["import { readFileSync, writeFileSync, existsSync, mkdirSync } from \"fs\";\nimport { join, dirname } from \"path\";\nimport { fileURLToPath } from \"url\";\nimport { handleAgentConfig } from \"./tools/agent-config.js\";\n\n// [sl:hy8oXisWnrZN1BfkonUqd] npx @graph-tl/graph init — zero friction onboarding\n\nlet PKG_VERSION = \"0.0.0\";\ntry {\n const __dirname = dirname(fileURLToPath(import.meta.url));\n const pkg = JSON.parse(readFileSync(join(__dirname, \"..\", \"package.json\"), \"utf-8\"));\n PKG_VERSION = pkg.version;\n} catch {}\n\nconst MCP_CONFIG = {\n command: \"npx\",\n args: [\"-y\", \"@graph-tl/graph\"],\n env: {\n GRAPH_AGENT: \"claude-code\",\n },\n};\n\nexport function init(): void {\n const cwd = process.cwd();\n let wrote = false;\n\n // 1. Write .mcp.json\n const configPath = join(cwd, \".mcp.json\");\n if (existsSync(configPath)) {\n try {\n const config = JSON.parse(readFileSync(configPath, \"utf8\"));\n if (config.mcpServers?.graph) {\n console.log(\"✓ .mcp.json — graph already configured\");\n } else {\n config.mcpServers = config.mcpServers ?? {};\n config.mcpServers.graph = MCP_CONFIG;\n writeFileSync(configPath, JSON.stringify(config, null, 2) + \"\\n\", \"utf8\");\n console.log(\"✓ .mcp.json — added graph server\");\n wrote = true;\n }\n } catch {\n console.error(`✗ .mcp.json exists but is not valid JSON — skipping`);\n }\n } else {\n const config = { mcpServers: { graph: MCP_CONFIG } };\n writeFileSync(configPath, JSON.stringify(config, null, 2) + \"\\n\", \"utf8\");\n console.log(\"✓ .mcp.json — created with graph server\");\n wrote = true;\n }\n\n // 2. Write .claude/agents/graph.md\n const agentPath = join(cwd, \".claude\", \"agents\", \"graph.md\");\n const { agent_file } = handleAgentConfig(PKG_VERSION);\n if (existsSync(agentPath)) {\n const current = readFileSync(agentPath, \"utf8\");\n if (current === agent_file) {\n console.log(\"✓ .claude/agents/graph.md — already up to date\");\n } else {\n writeFileSync(agentPath, agent_file, \"utf8\");\n console.log(\"✓ .claude/agents/graph.md — updated\");\n wrote = true;\n }\n } else {\n mkdirSync(dirname(agentPath), { recursive: true });\n writeFileSync(agentPath, agent_file, \"utf8\");\n console.log(\"✓ .claude/agents/graph.md — created graph workflow agent\");\n wrote = true;\n }\n\n // 3. Summary\n console.log(\"\");\n if (wrote) {\n console.log(\"Graph is ready. 
Restart Claude Code to load the MCP server.\");\n console.log(\"\");\n console.log(\"Then try:\");\n console.log(' \"Use graph to plan building a REST API with auth and tests.\"');\n } else {\n console.log(\"Graph is already set up — nothing to do.\");\n }\n}\n"],"mappings":";;;;;;AAAA,SAAS,cAAc,eAAe,YAAY,iBAAiB;AACnE,SAAS,MAAM,eAAe;AAC9B,SAAS,qBAAqB;AAK9B,IAAI,cAAc;AAClB,IAAI;AACF,QAAM,YAAY,QAAQ,cAAc,YAAY,GAAG,CAAC;AACxD,QAAM,MAAM,KAAK,MAAM,aAAa,KAAK,WAAW,MAAM,cAAc,GAAG,OAAO,CAAC;AACnF,gBAAc,IAAI;AACpB,QAAQ;AAAC;AAET,IAAM,aAAa;AAAA,EACjB,SAAS;AAAA,EACT,MAAM,CAAC,MAAM,iBAAiB;AAAA,EAC9B,KAAK;AAAA,IACH,aAAa;AAAA,EACf;AACF;AAEO,SAAS,OAAa;AAC3B,QAAM,MAAM,QAAQ,IAAI;AACxB,MAAI,QAAQ;AAGZ,QAAM,aAAa,KAAK,KAAK,WAAW;AACxC,MAAI,WAAW,UAAU,GAAG;AAC1B,QAAI;AACF,YAAM,SAAS,KAAK,MAAM,aAAa,YAAY,MAAM,CAAC;AAC1D,UAAI,OAAO,YAAY,OAAO;AAC5B,gBAAQ,IAAI,kDAAwC;AAAA,MACtD,OAAO;AACL,eAAO,aAAa,OAAO,cAAc,CAAC;AAC1C,eAAO,WAAW,QAAQ;AAC1B,sBAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,MAAM,MAAM;AACxE,gBAAQ,IAAI,4CAAkC;AAC9C,gBAAQ;AAAA,MACV;AAAA,IACF,QAAQ;AACN,cAAQ,MAAM,+DAAqD;AAAA,IACrE;AAAA,EACF,OAAO;AACL,UAAM,SAAS,EAAE,YAAY,EAAE,OAAO,WAAW,EAAE;AACnD,kBAAc,YAAY,KAAK,UAAU,QAAQ,MAAM,CAAC,IAAI,MAAM,MAAM;AACxE,YAAQ,IAAI,mDAAyC;AACrD,YAAQ;AAAA,EACV;AAGA,QAAM,YAAY,KAAK,KAAK,WAAW,UAAU,UAAU;AAC3D,QAAM,EAAE,WAAW,IAAI,kBAAkB,WAAW;AACpD,MAAI,WAAW,SAAS,GAAG;AACzB,UAAM,UAAU,aAAa,WAAW,MAAM;AAC9C,QAAI,YAAY,YAAY;AAC1B,cAAQ,IAAI,0DAAgD;AAAA,IAC9D,OAAO;AACL,oBAAc,WAAW,YAAY,MAAM;AAC3C,cAAQ,IAAI,+CAAqC;AACjD,cAAQ;AAAA,IACV;AAAA,EACF,OAAO;AACL,cAAU,QAAQ,SAAS,GAAG,EAAE,WAAW,KAAK,CAAC;AACjD,kBAAc,WAAW,YAAY,MAAM;AAC3C,YAAQ,IAAI,oEAA0D;AACtE,YAAQ;AAAA,EACV;AAGA,UAAQ,IAAI,EAAE;AACd,MAAI,OAAO;AACT,YAAQ,IAAI,6DAA6D;AACzE,YAAQ,IAAI,EAAE;AACd,YAAQ,IAAI,WAAW;AACvB,YAAQ,IAAI,gEAAgE;AAAA,EAC9E,OAAO;AACL,YAAQ,IAAI,+CAA0C;AAAA,EACxD;AACF;","names":[]}
package/dist/{nodes-4OJBNDHG.js → nodes-YNM6KEK2.js}
RENAMED
@@ -7,9 +7,10 @@ import {
   getNodeOrThrow,
   getProjectRoot,
   getProjectSummary,
+  getSubtreeProgress,
   listProjects,
   updateNode
-} from "./chunk-
+} from "./chunk-CCGKUMCW.js";
 export {
   createNode,
   getAncestors,
@@ -18,7 +19,8 @@ export {
   getNodeOrThrow,
   getProjectRoot,
   getProjectSummary,
+  getSubtreeProgress,
   listProjects,
   updateNode
 };
-//# sourceMappingURL=nodes-
+//# sourceMappingURL=nodes-YNM6KEK2.js.map