@goondocks/myco 0.9.0 → 0.10.0
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/.claude-plugin/marketplace.json +1 -4
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +19 -2
- package/dist/{agent-run-EFICNTAU.js → agent-run-CGXF5PPC.js} +7 -7
- package/dist/{agent-tasks-RXJ7Z5NG.js → agent-tasks-T7NVI3R7.js} +7 -7
- package/dist/{chunk-JMJJEQ3P.js → chunk-5LPERML5.js} +3 -3
- package/dist/{chunk-RJ6ZQKG5.js → chunk-5QERXFH7.js} +2 -2
- package/dist/{chunk-UBZPD4HN.js → chunk-5SDH75YC.js} +2 -2
- package/dist/{chunk-5VZ52A4T.js → chunk-76ZO5RGT.js} +16 -2
- package/dist/{chunk-5VZ52A4T.js.map → chunk-76ZO5RGT.js.map} +1 -1
- package/dist/{chunk-46PWOKSI.js → chunk-AEJS57ZK.js} +2 -2
- package/dist/{chunk-DCXRSSBP.js → chunk-C3AEZ3BZ.js} +3 -3
- package/dist/{chunk-4LPQ26CK.js → chunk-CUDM5YJY.js} +25 -8
- package/dist/chunk-CUDM5YJY.js.map +1 -0
- package/dist/{chunk-YDN4OM33.js → chunk-D6DXYAFK.js} +20 -7
- package/dist/chunk-D6DXYAFK.js.map +1 -0
- package/dist/chunk-ENWBFX7F.js +50 -0
- package/dist/chunk-ENWBFX7F.js.map +1 -0
- package/dist/{chunk-OXZSXYAT.js → chunk-FFQES5MC.js} +48 -21
- package/dist/chunk-FFQES5MC.js.map +1 -0
- package/dist/{chunk-U3IBO3O3.js → chunk-FMIWFRAM.js} +3 -3
- package/dist/{chunk-KYLDNM7H.js → chunk-FPMEIN2W.js} +2 -2
- package/dist/{chunk-PB6TOLRQ.js → chunk-G2LQBFE3.js} +2 -2
- package/dist/{chunk-XNOCTDHF.js → chunk-J4RVYUH4.js} +2 -2
- package/dist/{chunk-MHSCMET3.js → chunk-MAZOVVDU.js} +33 -3
- package/dist/chunk-MAZOVVDU.js.map +1 -0
- package/dist/{chunk-JYOOJCPQ.js → chunk-MKKXCCQ5.js} +5 -5
- package/dist/{chunk-QIK2XSDQ.js → chunk-MSXYUXZR.js} +4 -4
- package/dist/{chunk-FFAYUQ5N.js → chunk-RJMXDUMA.js} +2 -1
- package/dist/{chunk-WGTCA2NU.js → chunk-S6I62FAH.js} +10 -2
- package/dist/{chunk-WGTCA2NU.js.map → chunk-S6I62FAH.js.map} +1 -1
- package/dist/{chunk-3K5WGSJ4.js → chunk-U7UUJ4FD.js} +23 -8
- package/dist/chunk-U7UUJ4FD.js.map +1 -0
- package/dist/{chunk-PT5IC642.js → chunk-W6HI4CCS.js} +2 -2
- package/dist/{chunk-KB4DGYIY.js → chunk-WXSJKESH.js} +12 -7
- package/dist/{chunk-KB4DGYIY.js.map → chunk-WXSJKESH.js.map} +1 -1
- package/dist/{chunk-KV4OC4H3.js → chunk-WZZH3YXJ.js} +119 -16
- package/dist/chunk-WZZH3YXJ.js.map +1 -0
- package/dist/chunk-XLY3REL3.js +165 -0
- package/dist/chunk-XLY3REL3.js.map +1 -0
- package/dist/{chunk-TRUJLI6K.js → chunk-YZMNEIFI.js} +9 -5
- package/dist/chunk-YZMNEIFI.js.map +1 -0
- package/dist/{chunk-2T7RPVPP.js → chunk-ZESTWGJT.js} +2 -2
- package/dist/{chunk-BUSP3OJB.js → chunk-ZMW6KQX2.js} +3 -3
- package/dist/{cli-ODLFRIYS.js → cli-6CPFJGRZ.js} +47 -36
- package/dist/cli-6CPFJGRZ.js.map +1 -0
- package/dist/client-B27SN5QG.js +15 -0
- package/dist/{config-UR5BSGVX.js → config-G3CSGI7P.js} +2 -2
- package/dist/{detect-providers-Q42OD4OS.js → detect-providers-AZ6DEQU7.js} +5 -5
- package/dist/{doctor-JLKTXDEH.js → doctor-RHHWJTMB.js} +10 -10
- package/dist/{executor-ONSDHPGX.js → executor-A5C5KDLP.js} +33 -20
- package/dist/executor-A5C5KDLP.js.map +1 -0
- package/dist/{init-6GWY345B.js → init-ARJROOWV.js} +15 -15
- package/dist/{init-wizard-UONLDYLI.js → init-wizard-XNFOZCEB.js} +8 -8
- package/dist/llm-XJFHRFHB.js +17 -0
- package/dist/{loader-SH67XD54.js → loader-GKXR5ONU.js} +4 -4
- package/dist/{loader-XVXKZZDH.js → loader-PZ7ZRSA4.js} +8 -4
- package/dist/{logs-QZVYF6FP.js → logs-LXHPDKUA.js} +3 -3
- package/dist/machine-id-RCM7TXPJ.js +13 -0
- package/dist/{main-BMCL7CPO.js → main-PVX6R3I6.js} +752 -80
- package/dist/main-PVX6R3I6.js.map +1 -0
- package/dist/{openai-embeddings-C265WRNK.js → openai-embeddings-ST3B6GW7.js} +5 -5
- package/dist/{openrouter-U6VFCRX2.js → openrouter-HJHOO3EO.js} +5 -5
- package/dist/{post-compact-OWFSOITU.js → post-compact-LR3DSGT3.js} +7 -7
- package/dist/{post-tool-use-DOUM7CGQ.js → post-tool-use-SOFVNFU3.js} +6 -6
- package/dist/{post-tool-use-failure-SG3C7PE6.js → post-tool-use-failure-2CZZZASB.js} +7 -7
- package/dist/{pre-compact-3J33CHXQ.js → pre-compact-3E3D6565.js} +7 -7
- package/dist/{provider-check-3WBPZADE.js → provider-check-SOTDYLJE.js} +5 -5
- package/dist/{registry-J4XTWARS.js → registry-WVZG6R2R.js} +5 -5
- package/dist/{resolution-events-TFEQPVKS.js → resolution-events-UPHJJLDQ.js} +5 -2
- package/dist/{restart-2VM33WOB.js → restart-XIUFVS33.js} +8 -8
- package/dist/{search-ZGQR5MDE.js → search-VB6Z2ZXV.js} +8 -8
- package/dist/{server-6KMBJCHZ.js → server-AKPBRP6Z.js} +5 -5
- package/dist/{session-Z2FXDDG6.js → session-UVZS6CY5.js} +9 -8
- package/dist/{session-Z2FXDDG6.js.map → session-UVZS6CY5.js.map} +1 -1
- package/dist/{session-end-FLVX32LE.js → session-end-YMQ44U6Z.js} +6 -6
- package/dist/{session-start-UCLK7PXE.js → session-start-3754HF3N.js} +11 -10
- package/dist/{session-start-UCLK7PXE.js.map → session-start-3754HF3N.js.map} +1 -1
- package/dist/{setup-llm-GKMCHURK.js → setup-llm-NWHOPJUV.js} +8 -8
- package/dist/src/cli.js +1 -1
- package/dist/src/daemon/main.js +1 -1
- package/dist/src/hooks/post-tool-use.js +1 -1
- package/dist/src/hooks/session-end.js +1 -1
- package/dist/src/hooks/session-start.js +1 -1
- package/dist/src/hooks/stop.js +1 -1
- package/dist/src/hooks/user-prompt-submit.js +1 -1
- package/dist/src/mcp/server.js +1 -1
- package/dist/{stats-IUJPZSVZ.js → stats-CDQXOTEC.js} +9 -9
- package/dist/{stop-XRQLLXST.js → stop-WSFGRPXZ.js} +6 -6
- package/dist/{stop-failure-2CAJJKRG.js → stop-failure-4FR7574F.js} +7 -7
- package/dist/{subagent-start-MWWQTZMQ.js → subagent-start-7SGBXJYP.js} +7 -7
- package/dist/{subagent-stop-PJXYGRXB.js → subagent-stop-MRVTNX3V.js} +7 -7
- package/dist/{task-completed-4LFRJVGI.js → task-completed-XXPYPSRV.js} +7 -7
- package/dist/team-XMHYCKFF.js +251 -0
- package/dist/team-XMHYCKFF.js.map +1 -0
- package/dist/ui/assets/index-BGbil7f1.css +1 -0
- package/dist/ui/assets/index-CPA_uq_j.js +794 -0
- package/dist/ui/index.html +2 -2
- package/dist/update-W3UFZU4G.js +79 -0
- package/dist/update-W3UFZU4G.js.map +1 -0
- package/dist/{user-prompt-submit-KSM3AR6P.js → user-prompt-submit-LSWCYUW3.js} +6 -6
- package/dist/{verify-UDAYVX37.js → verify-O7TQ5DDY.js} +9 -9
- package/dist/{version-KLBN4HZT.js → version-VWWY7SPQ.js} +2 -2
- package/dist/version-VWWY7SPQ.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-3K5WGSJ4.js.map +0 -1
- package/dist/chunk-4LPQ26CK.js.map +0 -1
- package/dist/chunk-KV4OC4H3.js.map +0 -1
- package/dist/chunk-MHSCMET3.js.map +0 -1
- package/dist/chunk-OXZSXYAT.js.map +0 -1
- package/dist/chunk-TRUJLI6K.js.map +0 -1
- package/dist/chunk-YDN4OM33.js.map +0 -1
- package/dist/cli-ODLFRIYS.js.map +0 -1
- package/dist/client-MXRNQ5FI.js +0 -13
- package/dist/executor-ONSDHPGX.js.map +0 -1
- package/dist/llm-BV3QNVRD.js +0 -17
- package/dist/main-BMCL7CPO.js.map +0 -1
- package/dist/ui/assets/index-DZrElonz.js +0 -744
- package/dist/ui/assets/index-TkeiYbZB.css +0 -1
- /package/dist/{agent-run-EFICNTAU.js.map → agent-run-CGXF5PPC.js.map} +0 -0
- /package/dist/{agent-tasks-RXJ7Z5NG.js.map → agent-tasks-T7NVI3R7.js.map} +0 -0
- /package/dist/{chunk-JMJJEQ3P.js.map → chunk-5LPERML5.js.map} +0 -0
- /package/dist/{chunk-RJ6ZQKG5.js.map → chunk-5QERXFH7.js.map} +0 -0
- /package/dist/{chunk-UBZPD4HN.js.map → chunk-5SDH75YC.js.map} +0 -0
- /package/dist/{chunk-46PWOKSI.js.map → chunk-AEJS57ZK.js.map} +0 -0
- /package/dist/{chunk-DCXRSSBP.js.map → chunk-C3AEZ3BZ.js.map} +0 -0
- /package/dist/{chunk-U3IBO3O3.js.map → chunk-FMIWFRAM.js.map} +0 -0
- /package/dist/{chunk-KYLDNM7H.js.map → chunk-FPMEIN2W.js.map} +0 -0
- /package/dist/{chunk-PB6TOLRQ.js.map → chunk-G2LQBFE3.js.map} +0 -0
- /package/dist/{chunk-XNOCTDHF.js.map → chunk-J4RVYUH4.js.map} +0 -0
- /package/dist/{chunk-JYOOJCPQ.js.map → chunk-MKKXCCQ5.js.map} +0 -0
- /package/dist/{chunk-QIK2XSDQ.js.map → chunk-MSXYUXZR.js.map} +0 -0
- /package/dist/{chunk-FFAYUQ5N.js.map → chunk-RJMXDUMA.js.map} +0 -0
- /package/dist/{chunk-PT5IC642.js.map → chunk-W6HI4CCS.js.map} +0 -0
- /package/dist/{chunk-2T7RPVPP.js.map → chunk-ZESTWGJT.js.map} +0 -0
- /package/dist/{chunk-BUSP3OJB.js.map → chunk-ZMW6KQX2.js.map} +0 -0
- /package/dist/{client-MXRNQ5FI.js.map → client-B27SN5QG.js.map} +0 -0
- /package/dist/{config-UR5BSGVX.js.map → config-G3CSGI7P.js.map} +0 -0
- /package/dist/{detect-providers-Q42OD4OS.js.map → detect-providers-AZ6DEQU7.js.map} +0 -0
- /package/dist/{doctor-JLKTXDEH.js.map → doctor-RHHWJTMB.js.map} +0 -0
- /package/dist/{init-6GWY345B.js.map → init-ARJROOWV.js.map} +0 -0
- /package/dist/{init-wizard-UONLDYLI.js.map → init-wizard-XNFOZCEB.js.map} +0 -0
- /package/dist/{llm-BV3QNVRD.js.map → llm-XJFHRFHB.js.map} +0 -0
- /package/dist/{loader-SH67XD54.js.map → loader-GKXR5ONU.js.map} +0 -0
- /package/dist/{loader-XVXKZZDH.js.map → loader-PZ7ZRSA4.js.map} +0 -0
- /package/dist/{logs-QZVYF6FP.js.map → logs-LXHPDKUA.js.map} +0 -0
- /package/dist/{openai-embeddings-C265WRNK.js.map → machine-id-RCM7TXPJ.js.map} +0 -0
- /package/dist/{openrouter-U6VFCRX2.js.map → openai-embeddings-ST3B6GW7.js.map} +0 -0
- /package/dist/{provider-check-3WBPZADE.js.map → openrouter-HJHOO3EO.js.map} +0 -0
- /package/dist/{post-compact-OWFSOITU.js.map → post-compact-LR3DSGT3.js.map} +0 -0
- /package/dist/{post-tool-use-DOUM7CGQ.js.map → post-tool-use-SOFVNFU3.js.map} +0 -0
- /package/dist/{post-tool-use-failure-SG3C7PE6.js.map → post-tool-use-failure-2CZZZASB.js.map} +0 -0
- /package/dist/{pre-compact-3J33CHXQ.js.map → pre-compact-3E3D6565.js.map} +0 -0
- /package/dist/{registry-J4XTWARS.js.map → provider-check-SOTDYLJE.js.map} +0 -0
- /package/dist/{resolution-events-TFEQPVKS.js.map → registry-WVZG6R2R.js.map} +0 -0
- /package/dist/{version-KLBN4HZT.js.map → resolution-events-UPHJJLDQ.js.map} +0 -0
- /package/dist/{restart-2VM33WOB.js.map → restart-XIUFVS33.js.map} +0 -0
- /package/dist/{search-ZGQR5MDE.js.map → search-VB6Z2ZXV.js.map} +0 -0
- /package/dist/{server-6KMBJCHZ.js.map → server-AKPBRP6Z.js.map} +0 -0
- /package/dist/{session-end-FLVX32LE.js.map → session-end-YMQ44U6Z.js.map} +0 -0
- /package/dist/{setup-llm-GKMCHURK.js.map → setup-llm-NWHOPJUV.js.map} +0 -0
- /package/dist/{stats-IUJPZSVZ.js.map → stats-CDQXOTEC.js.map} +0 -0
- /package/dist/{stop-XRQLLXST.js.map → stop-WSFGRPXZ.js.map} +0 -0
- /package/dist/{stop-failure-2CAJJKRG.js.map → stop-failure-4FR7574F.js.map} +0 -0
- /package/dist/{subagent-start-MWWQTZMQ.js.map → subagent-start-7SGBXJYP.js.map} +0 -0
- /package/dist/{subagent-stop-PJXYGRXB.js.map → subagent-stop-MRVTNX3V.js.map} +0 -0
- /package/dist/{task-completed-4LFRJVGI.js.map → task-completed-XXPYPSRV.js.map} +0 -0
- /package/dist/{user-prompt-submit-KSM3AR6P.js.map → user-prompt-submit-LSWCYUW3.js.map} +0 -0
- /package/dist/{verify-UDAYVX37.js.map → verify-O7TQ5DDY.js.map} +0 -0
package/dist/{chunk-OXZSXYAT.js → chunk-FFQES5MC.js}
@@ -1,8 +1,12 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
+import {
+  syncRow
+} from "./chunk-XLY3REL3.js";
 import {
   getDatabase
 } from "./chunk-MYX5NCRH.js";
 import {
+  DEFAULT_MACHINE_ID,
   DIGEST_TIERS,
   EDGE_TYPE_DERIVED_FROM,
   EDGE_TYPE_EXTRACTED_FROM,
@@ -10,7 +14,7 @@ import {
   EDGE_TYPE_HAS_BATCH,
   GRAPH_EDGE_DEFAULT_CONFIDENCE,
   QUERY_DEFAULT_LIST_LIMIT
-} from "./chunk-5VZ52A4T.js";
+} from "./chunk-76ZO5RGT.js";
 
 // src/db/queries/batches.ts
 var DEFAULT_UNPROCESSED_LIMIT = 100;
@@ -34,7 +38,9 @@ var BATCH_COLUMNS = [
   "activity_count",
   "processed",
   "content_hash",
-  "created_at"
+  "created_at",
+  "machine_id",
+  "synced_at"
 ];
 var SELECT_COLUMNS = BATCH_COLUMNS.join(", ");
 function toBatchRow(row) {
@@ -51,7 +57,9 @@ function toBatchRow(row) {
     activity_count: row.activity_count,
     processed: row.processed,
     content_hash: row.content_hash ?? null,
-    created_at: row.created_at
+    created_at: row.created_at,
+    machine_id: row.machine_id ?? DEFAULT_MACHINE_ID,
+    synced_at: row.synced_at ?? null
   };
 }
 function populateBatchResponses(sessionId, responses) {
@@ -129,13 +137,13 @@ function insertBatchStateless(data) {
     `INSERT INTO prompt_batches (
       session_id, prompt_number, user_prompt, response_summary,
       classification, started_at, ended_at, status,
-      activity_count, processed, content_hash, created_at
+      activity_count, processed, content_hash, created_at, machine_id
     ) VALUES (
       ?,
       (SELECT COALESCE(MAX(prompt_number), 0) + 1 FROM prompt_batches WHERE session_id = ?),
       ?, NULL,
       NULL, ?, NULL, ?,
-      ?, ?, NULL, ?
+      ?, ?, NULL, ?, ?
     )`
   ).run(
     data.session_id,
@@ -145,7 +153,8 @@ function insertBatchStateless(data) {
     data.status ?? DEFAULT_STATUS,
     DEFAULT_ACTIVITY_COUNT,
     DEFAULT_PROCESSED,
-    data.created_at
+    data.created_at,
+    DEFAULT_MACHINE_ID
   );
   const batchId = Number(info.lastInsertRowid);
   const userPrompt = data.user_prompt ?? null;
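The batches hunks above ride on a trick already present in 0.9.0: insertBatchStateless derives prompt_number inside the INSERT itself with COALESCE(MAX(prompt_number), 0) + 1, so the daemon keeps no in-memory counter, and 0.10.0 now stamps each row with a machine id as well. Below is a minimal runnable sketch of that pattern using better-sqlite3 (the driver the package's own source comments name); the trimmed schema and the "local" machine id are illustrative assumptions, not the package's real values. The file diff continues after the sketch.

import Database from "better-sqlite3";

const DEFAULT_MACHINE_ID = "local"; // assumed placeholder, not the package's actual default
const db = new Database(":memory:");
db.exec(`CREATE TABLE prompt_batches (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  session_id TEXT NOT NULL,
  prompt_number INTEGER,
  user_prompt TEXT,
  created_at INTEGER NOT NULL,
  machine_id TEXT
)`);

// prompt_number comes from an inline scalar subquery over the same table,
// evaluated against the pre-insert state, so each session gets its own
// 1, 2, 3, ... sequence without any counter held in application memory.
const insert = db.prepare(`INSERT INTO prompt_batches (
  session_id, prompt_number, user_prompt, created_at, machine_id
) VALUES (
  ?,
  (SELECT COALESCE(MAX(prompt_number), 0) + 1 FROM prompt_batches WHERE session_id = ?),
  ?, ?, ?
)`);

function insertBatchStateless(sessionId: string, userPrompt: string): number {
  const info = insert.run(sessionId, sessionId, userPrompt, Date.now(), DEFAULT_MACHINE_ID);
  return Number(info.lastInsertRowid);
}

insertBatchStateless("s1", "first");  // prompt_number 1
insertBatchStateless("s1", "second"); // prompt_number 2
insertBatchStateless("s2", "other");  // prompt_number 1 (independent session)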
@@ -508,7 +517,9 @@ var GRAPH_EDGE_COLUMNS = [
   "session_id",
   "confidence",
   "properties",
-  "created_at"
+  "created_at",
+  "machine_id",
+  "synced_at"
 ];
 var SELECT_COLUMNS5 = GRAPH_EDGE_COLUMNS.join(", ");
 function toGraphEdgeRow(row) {
@@ -523,7 +534,9 @@ function toGraphEdgeRow(row) {
     session_id: row.session_id ?? null,
     confidence: row.confidence,
     properties: row.properties ?? null,
-    created_at: row.created_at
+    created_at: row.created_at,
+    machine_id: row.machine_id ?? DEFAULT_MACHINE_ID,
+    synced_at: row.synced_at ?? null
   };
 }
 function insertGraphEdge(data) {
@@ -532,8 +545,8 @@ function insertGraphEdge(data) {
   db.prepare(
     `INSERT INTO graph_edges (
       id, agent_id, source_id, source_type, target_id, target_type,
-      type, session_id, confidence, properties, created_at
-    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
+      type, session_id, confidence, properties, created_at, machine_id
+    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
   ).run(
     id,
     data.agent_id,
@@ -545,11 +558,14 @@ function insertGraphEdge(data) {
     data.session_id ?? null,
     data.confidence ?? GRAPH_EDGE_DEFAULT_CONFIDENCE,
     data.properties ?? null,
-    data.created_at
+    data.created_at,
+    data.machine_id ?? DEFAULT_MACHINE_ID
   );
-  return toGraphEdgeRow(
+  const row = toGraphEdgeRow(
     db.prepare(`SELECT ${SELECT_COLUMNS5} FROM graph_edges WHERE id = ?`).get(id)
   );
+  syncRow("graph_edges", row);
+  return row;
 }
 function listGraphEdges(options = {}) {
   const db = getDatabase();
@@ -688,7 +704,9 @@ var ENTITY_COLUMNS = [
   "properties",
   "first_seen",
   "last_seen",
-  "status"
+  "status",
+  "machine_id",
+  "synced_at"
 ];
 var SELECT_COLUMNS6 = ENTITY_COLUMNS.join(", ");
 function toEntityRow(row) {
@@ -700,14 +718,16 @@ function toEntityRow(row) {
     properties: row.properties ?? null,
     first_seen: row.first_seen,
     last_seen: row.last_seen,
-    status: row.status ?? "active"
+    status: row.status ?? "active",
+    machine_id: row.machine_id ?? DEFAULT_MACHINE_ID,
+    synced_at: row.synced_at ?? null
   };
 }
 function insertEntity(data) {
   const db = getDatabase();
   db.prepare(
-    `INSERT INTO entities (id, agent_id, type, name, properties, first_seen, last_seen)
-     VALUES (?, ?, ?, ?, ?, ?, ?)
+    `INSERT INTO entities (id, agent_id, type, name, properties, first_seen, last_seen, machine_id)
+     VALUES (?, ?, ?, ?, ?, ?, ?, ?)
     ON CONFLICT (agent_id, type, name) DO UPDATE SET
       properties = COALESCE(EXCLUDED.properties, entities.properties),
       last_seen = EXCLUDED.last_seen`
@@ -718,15 +738,18 @@ function insertEntity(data) {
     data.name,
     data.properties ?? null,
     data.first_seen,
-    data.last_seen
+    data.last_seen,
+    data.machine_id ?? DEFAULT_MACHINE_ID
   );
-  return toEntityRow(
+  const row = toEntityRow(
     db.prepare(`SELECT ${SELECT_COLUMNS6} FROM entities WHERE agent_id = ? AND type = ? AND name = ?`).get(
       data.agent_id,
       data.type,
       data.name
     )
   );
+  syncRow("entities", row);
+  return row;
 }
 function getEntity(id) {
   const db = getDatabase();
@@ -789,7 +812,9 @@ var EXTRACT_COLUMNS = [
   "tier",
   "content",
   "substrate_hash",
-  "generated_at"
+  "generated_at",
+  "machine_id",
+  "synced_at"
 ];
 var SELECT_COLUMNS7 = EXTRACT_COLUMNS.join(", ");
 function toDigestExtractRow(row) {
@@ -799,7 +824,9 @@ function toDigestExtractRow(row) {
     tier: row.tier,
     content: row.content,
     substrate_hash: row.substrate_hash ?? null,
-    generated_at: row.generated_at
+    generated_at: row.generated_at,
+    machine_id: row.machine_id ?? DEFAULT_MACHINE_ID,
+    synced_at: row.synced_at ?? null
   };
 }
 function upsertDigestExtract(data) {
@@ -874,4 +901,4 @@ export {
   createSporeLineage,
   createBatchLineage
 };
-//# sourceMappingURL=chunk-OXZSXYAT.js.map
+//# sourceMappingURL=chunk-FFQES5MC.js.map
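The same three-step shape repeats across graph_edges, entities, and digest extracts in the diff above: stamp the insert with machine_id ?? DEFAULT_MACHINE_ID, re-select the stored row, and hand it to syncRow from the new chunk-XLY3REL3.js before returning it. syncRow's body is not shown in this diff; the sketch below stubs it as an outbox write, which is one plausible reading of the "team-outbox" source path, and every schema detail here is an assumption for illustration.

import Database from "better-sqlite3";
import crypto from "node:crypto";

const DEFAULT_MACHINE_ID = "local"; // assumed placeholder value
const db = new Database(":memory:");
db.exec(`CREATE TABLE graph_edges (
  id TEXT PRIMARY KEY, agent_id TEXT, source_id TEXT, target_id TEXT,
  type TEXT, created_at INTEGER, machine_id TEXT, synced_at INTEGER
);
CREATE TABLE sync_outbox (
  seq INTEGER PRIMARY KEY AUTOINCREMENT, tbl TEXT, row_id TEXT, payload TEXT
)`);

// Hypothetical stand-in for the real syncRow in chunk-XLY3REL3.js:
// queue the freshly written row so a background worker could replicate it.
function syncRow(table: string, row: { id: string }): void {
  db.prepare(`INSERT INTO sync_outbox (tbl, row_id, payload) VALUES (?, ?, ?)`)
    .run(table, row.id, JSON.stringify(row));
}

function insertGraphEdge(data: {
  agent_id: string; source_id: string; target_id: string;
  type: string; created_at: number; machine_id?: string;
}) {
  const id = crypto.randomUUID();
  db.prepare(`INSERT INTO graph_edges (
    id, agent_id, source_id, target_id, type, created_at, machine_id
  ) VALUES (?, ?, ?, ?, ?, ?, ?)`).run(
    id, data.agent_id, data.source_id, data.target_id,
    data.type, data.created_at, data.machine_id ?? DEFAULT_MACHINE_ID,
  );
  // Re-select so the returned row reflects what was actually stored,
  // then hand it to the sync layer: the pattern this release introduces.
  const row = db.prepare(`SELECT * FROM graph_edges WHERE id = ?`).get(id) as { id: string };
  syncRow("graph_edges", row);
  return row;
}

insertGraphEdge({ agent_id: "a1", source_id: "session-1", target_id: "batch-1",
  type: "HAS_BATCH", created_at: Date.now() });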
package/dist/chunk-FFQES5MC.js.map
@@ -0,0 +1 @@
+{"version":3,"sources":["../src/db/queries/batches.ts","../src/utils/error-message.ts","../src/db/queries/turns.ts","../src/db/queries/runs.ts","../src/db/queries/reports.ts","../src/db/queries/graph-edges.ts","../src/db/queries/lineage.ts","../src/db/queries/entities.ts","../src/db/queries/digest-extracts.ts"],"sourcesContent":["/**\n * Prompt batch CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_MACHINE_ID } from '@myco/constants.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of unprocessed batches returned when no limit given. */\nconst DEFAULT_UNPROCESSED_LIMIT = 100;\n\n/** Default number of batches returned by listBatchesBySession when no limit given. */\nexport const BATCHES_DEFAULT_LIMIT = 200;\n\n/** Batch status value when a batch is closed normally. */\nconst STATUS_COMPLETED = 'completed';\n\n/** Default batch status for new batches. */\nconst DEFAULT_STATUS = 'active';\n\n/** Default activity count for new batches. */\nconst DEFAULT_ACTIVITY_COUNT = 0;\n\n/** Default processed flag for new batches. */\nconst DEFAULT_PROCESSED = 0;\n\n/** Processed flag value indicating a batch has been processed. */\nconst PROCESSED_FLAG = 1;\n\n/** Number of characters used for prompt prefix matching. */\nconst PROMPT_PREFIX_MATCH_CHARS = 60;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Filter options for `listBatchesBySession`. */\nexport interface ListBatchesBySessionOptions {\n limit?: number;\n offset?: number;\n}\n\n/** Fields required (or optional) when inserting a prompt batch. */\nexport interface BatchInsert {\n session_id: string;\n created_at: number;\n prompt_number?: number | null;\n user_prompt?: string | null;\n response_summary?: string | null;\n classification?: string | null;\n started_at?: number | null;\n ended_at?: number | null;\n status?: string;\n activity_count?: number;\n processed?: number;\n content_hash?: string | null;\n machine_id?: string;\n}\n\n/** Row shape returned from batch queries. 
*/\nexport interface BatchRow {\n id: number;\n session_id: string;\n prompt_number: number | null;\n user_prompt: string | null;\n response_summary: string | null;\n classification: string | null;\n started_at: number | null;\n ended_at: number | null;\n status: string;\n activity_count: number;\n processed: number;\n content_hash: string | null;\n created_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst BATCH_COLUMNS = [\n 'id',\n 'session_id',\n 'prompt_number',\n 'user_prompt',\n 'response_summary',\n 'classification',\n 'started_at',\n 'ended_at',\n 'status',\n 'activity_count',\n 'processed',\n 'content_hash',\n 'created_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = BATCH_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed BatchRow. */\nfunction toBatchRow(row: Record<string, unknown>): BatchRow {\n return {\n id: row.id as number,\n session_id: row.session_id as string,\n prompt_number: (row.prompt_number as number) ?? null,\n user_prompt: (row.user_prompt as string) ?? null,\n response_summary: (row.response_summary as string) ?? null,\n classification: (row.classification as string) ?? null,\n started_at: (row.started_at as number) ?? null,\n ended_at: (row.ended_at as number) ?? null,\n status: row.status as string,\n activity_count: row.activity_count as number,\n processed: row.processed as number,\n content_hash: (row.content_hash as string) ?? null,\n created_at: row.created_at as number,\n machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new prompt batch.\n *\n * The `id` is auto-generated by the INTEGER PRIMARY KEY (AUTOINCREMENT).\n * FTS5 index is kept in sync via a follow-up INSERT into prompt_batches_fts.\n */\nexport function insertBatch(data: BatchInsert): BatchRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO prompt_batches (\n session_id, prompt_number, user_prompt, response_summary,\n classification, started_at, ended_at, status,\n activity_count, processed, content_hash, created_at, machine_id\n ) VALUES (\n ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?, ?, ?, ?\n )`,\n ).run(\n data.session_id,\n data.prompt_number ?? null,\n data.user_prompt ?? null,\n data.response_summary ?? null,\n data.classification ?? null,\n data.started_at ?? null,\n data.ended_at ?? null,\n data.status ?? DEFAULT_STATUS,\n data.activity_count ?? DEFAULT_ACTIVITY_COUNT,\n data.processed ?? DEFAULT_PROCESSED,\n data.content_hash ?? null,\n data.created_at,\n data.machine_id ?? DEFAULT_MACHINE_ID,\n );\n\n const batchId = Number(info.lastInsertRowid);\n\n // FTS5 sync\n const userPrompt = data.user_prompt ?? 
null;\n if (userPrompt) {\n db.prepare('INSERT INTO prompt_batches_fts(rowid, user_prompt) VALUES (?, ?)').run(batchId, userPrompt);\n }\n\n const row = toBatchRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM prompt_batches WHERE id = ?`).get(batchId) as Record<string, unknown>,\n );\n\n syncRow('prompt_batches', row);\n\n return row;\n}\n\n/**\n * Close a batch — set status to 'completed' and record the end time.\n *\n * @returns the updated row, or null if the batch does not exist.\n */\nexport function closeBatch(\n id: number,\n endedAt: number,\n): BatchRow | null {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE prompt_batches\n SET status = ?, ended_at = ?\n WHERE id = ?`,\n ).run(STATUS_COMPLETED, endedAt, id);\n\n if (info.changes === 0) return null;\n\n return toBatchRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM prompt_batches WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * Populate response_summary on batches from transcript turns.\n *\n * Matches transcript turns (ordered by position) to batches (ordered by id ASC).\n * This is resilient to prompt_number duplicates caused by daemon restarts.\n * Only updates batches that don't already have a response_summary.\n *\n * @param sessionId — the session to update\n * @param responses — array of { response } ordered by turn position (1-indexed)\n */\nexport function populateBatchResponses(\n sessionId: string,\n responses: Array<{ turnIndex: number; response: string }>,\n): void {\n const db = getDatabase();\n\n // Get all batches for this session ordered by id (insertion order = true order)\n const batches = db.prepare(\n `SELECT id FROM prompt_batches WHERE session_id = ? ORDER BY id ASC`,\n ).all(sessionId) as Array<{ id: number }>;\n\n // Map each response to the batch at the same position\n for (const { turnIndex, response } of responses) {\n const batchIndex = turnIndex - 1; // turns are 1-indexed\n if (batchIndex >= 0 && batchIndex < batches.length) {\n const batchId = batches[batchIndex].id;\n db.prepare(\n `UPDATE prompt_batches SET response_summary = ? WHERE id = ? AND response_summary IS NULL`,\n ).run(response, batchId);\n }\n }\n}\n\n/**\n * Get unprocessed batches, ordered by id ASC (insertion order).\n *\n * Supports cursor-based pagination via `after_id` and a `limit` cap.\n */\nexport function getUnprocessedBatches(\n options: { after_id?: number; limit?: number } = {},\n): BatchRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [`processed = ?`];\n const params: unknown[] = [DEFAULT_PROCESSED];\n\n if (options.after_id !== undefined) {\n conditions.push(`id > ?`);\n params.push(options.after_id);\n }\n\n const limit = options.limit ?? 
DEFAULT_UNPROCESSED_LIMIT;\n params.push(limit);\n\n const where = conditions.join(' AND ');\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM prompt_batches\n WHERE ${where}\n ORDER BY id ASC\n LIMIT ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toBatchRow);\n}\n\n/**\n * Increment the activity_count for a batch by 1.\n *\n * @returns the updated row, or null if the batch does not exist.\n */\nexport function incrementActivityCount(\n id: number,\n): BatchRow | null {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE prompt_batches\n SET activity_count = activity_count + 1\n WHERE id = ?`,\n ).run(id);\n\n if (info.changes === 0) return null;\n\n return toBatchRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM prompt_batches WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * Mark a batch as processed (processed = 1).\n *\n * @returns the updated row, or null if the batch does not exist.\n */\nexport function markBatchProcessed(\n id: number,\n): BatchRow | null {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE prompt_batches\n SET processed = ?\n WHERE id = ?`,\n ).run(PROCESSED_FLAG, id);\n\n if (info.changes === 0) return null;\n\n return toBatchRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM prompt_batches WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * Get a batch's ID by session and prompt number.\n * Used to link attachments to their prompt batch at stop time.\n */\nexport function getBatchIdByPromptNumber(\n sessionId: string,\n promptNumber: number,\n): number | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT id FROM prompt_batches WHERE session_id = ? AND prompt_number = ? LIMIT 1`,\n ).get(sessionId, promptNumber) as { id: number } | undefined;\n\n return row ? row.id : null;\n}\n\n/**\n * Find a batch by matching the start of its user_prompt text.\n * Used for attachment matching after transcript compaction where turn indices no longer\n * align with prompt_numbers.\n */\nexport function findBatchByPromptPrefix(\n sessionId: string,\n promptPrefix: string,\n): { id: number; prompt_number: number } | null {\n const db = getDatabase();\n // Match first N chars — enough to be unique, tolerant of minor differences\n const prefix = promptPrefix.slice(0, PROMPT_PREFIX_MATCH_CHARS);\n const row = db.prepare(\n `SELECT id, prompt_number FROM prompt_batches\n WHERE session_id = ? AND user_prompt LIKE ? || '%'\n LIMIT 1`,\n ).get(sessionId, prefix) as { id: number; prompt_number: number } | undefined;\n return row ?? null;\n}\n\n/** Fields required when inserting a batch statelessly (prompt_number derived from DB). */\nexport interface StatelessBatchInsert {\n session_id: string;\n created_at: number;\n user_prompt?: string | null;\n started_at?: number | null;\n status?: string;\n}\n\n/**\n * Insert a new prompt batch with prompt_number derived from an inline subquery.\n *\n * The prompt_number is set to `COALESCE(MAX(prompt_number), 0) + 1` for the\n * session, so the caller never needs a separate SELECT. 
This makes the insert\n * stateless — no in-memory counter required.\n *\n * FTS5 index is kept in sync via a follow-up INSERT into prompt_batches_fts.\n */\nexport function insertBatchStateless(data: StatelessBatchInsert): BatchRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO prompt_batches (\n session_id, prompt_number, user_prompt, response_summary,\n classification, started_at, ended_at, status,\n activity_count, processed, content_hash, created_at, machine_id\n ) VALUES (\n ?,\n (SELECT COALESCE(MAX(prompt_number), 0) + 1 FROM prompt_batches WHERE session_id = ?),\n ?, NULL,\n NULL, ?, NULL, ?,\n ?, ?, NULL, ?, ?\n )`,\n ).run(\n data.session_id,\n data.session_id,\n data.user_prompt ?? null,\n data.started_at ?? null,\n data.status ?? DEFAULT_STATUS,\n DEFAULT_ACTIVITY_COUNT,\n DEFAULT_PROCESSED,\n data.created_at,\n DEFAULT_MACHINE_ID,\n );\n\n const batchId = Number(info.lastInsertRowid);\n\n // FTS5 sync\n const userPrompt = data.user_prompt ?? null;\n if (userPrompt) {\n db.prepare('INSERT INTO prompt_batches_fts(rowid, user_prompt) VALUES (?, ?)').run(batchId, userPrompt);\n }\n\n return toBatchRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM prompt_batches WHERE id = ?`).get(batchId) as Record<string, unknown>,\n );\n}\n\n/**\n * Close all open batches for a session — blind UPDATE, no prior SELECT needed.\n *\n * Sets `status = 'completed'` and `ended_at` on every batch that has no\n * `ended_at` value yet. Returns the number of batches closed.\n */\nexport function closeOpenBatches(\n sessionId: string,\n endedAt: number,\n): number {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE prompt_batches\n SET status = ?, ended_at = ?\n WHERE session_id = ? AND ended_at IS NULL`,\n ).run(STATUS_COMPLETED, endedAt, sessionId);\n\n return info.changes;\n}\n\n/**\n * Set response_summary on a batch if it doesn't already have one.\n *\n * Idempotent — only updates NULL response_summary.\n */\nexport function setResponseSummary(\n batchId: number,\n summary: string,\n): void {\n const db = getDatabase();\n db.prepare(\n `UPDATE prompt_batches SET response_summary = ? WHERE id = ? AND response_summary IS NULL`,\n ).run(summary, batchId);\n}\n\n/**\n * Get the most recent batch for a session (by id DESC), regardless of status.\n *\n * Used by processStopEvent to attach the AI response and images to the\n * correct batch without positional turn mapping.\n */\nexport function getLatestBatch(\n sessionId: string,\n): BatchRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM prompt_batches\n WHERE session_id = ?\n ORDER BY id DESC LIMIT 1`,\n ).get(sessionId) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toBatchRow(row);\n}\n\nexport function listBatchesBySession(\n sessionId: string,\n options: ListBatchesBySessionOptions = {},\n): BatchRow[] {\n const db = getDatabase();\n\n const limit = options.limit ?? BATCHES_DEFAULT_LIMIT;\n const offset = options.offset ?? 0;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM prompt_batches\n WHERE session_id = ?\n ORDER BY prompt_number ASC\n LIMIT ?\n OFFSET ?`,\n ).all(sessionId, limit, offset) as Record<string, unknown>[];\n\n return rows.map(toBatchRow);\n}\n","/**\n * Extract a human-readable error message from an unknown thrown value.\n *\n * Handles Error instances, strings, and arbitrary objects. 
Never throws.\n */\nexport function errorMessage(err: unknown): string {\n if (err instanceof Error) return err.message || err.constructor.name || 'Error';\n if (typeof err === 'string') return err || 'Empty string error';\n try { return JSON.stringify(err); } catch { return 'Unserializable error'; }\n}\n","/**\n * Agent turn CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a turn. */\nexport interface TurnInsert {\n run_id: string;\n agent_id: string;\n turn_number: number;\n tool_name: string;\n tool_input?: string | null;\n tool_output_summary?: string | null;\n started_at?: number | null;\n completed_at?: number | null;\n}\n\n/** Row shape returned from agent_turns queries (all columns). */\nexport interface TurnRow {\n id: number;\n run_id: string;\n agent_id: string;\n turn_number: number;\n tool_name: string;\n tool_input: string | null;\n tool_output_summary: string | null;\n started_at: number | null;\n completed_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst TURN_COLUMNS = [\n 'id',\n 'run_id',\n 'agent_id',\n 'turn_number',\n 'tool_name',\n 'tool_input',\n 'tool_output_summary',\n 'started_at',\n 'completed_at',\n] as const;\n\nconst SELECT_COLUMNS = TURN_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed TurnRow. */\nfunction toTurnRow(row: Record<string, unknown>): TurnRow {\n return {\n id: row.id as number,\n run_id: row.run_id as string,\n agent_id: row.agent_id as string,\n turn_number: row.turn_number as number,\n tool_name: row.tool_name as string,\n tool_input: (row.tool_input as string) ?? null,\n tool_output_summary: (row.tool_output_summary as string) ?? null,\n started_at: (row.started_at as number) ?? null,\n completed_at: (row.completed_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new agent turn.\n *\n * The `id` is auto-generated by the INTEGER PRIMARY KEY (AUTOINCREMENT).\n */\nexport function insertTurn(data: TurnInsert): TurnRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO agent_turns (\n run_id, agent_id, turn_number, tool_name,\n tool_input, tool_output_summary, started_at, completed_at\n ) VALUES (\n ?, ?, ?, ?,\n ?, ?, ?, ?\n )`,\n ).run(\n data.run_id,\n data.agent_id,\n data.turn_number,\n data.tool_name,\n data.tool_input ?? null,\n data.tool_output_summary ?? null,\n data.started_at ?? null,\n data.completed_at ?? 
null,\n );\n\n const turnId = Number(info.lastInsertRowid);\n\n return toTurnRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_turns WHERE id = ?`).get(turnId) as Record<string, unknown>,\n );\n}\n\n/**\n * List all turns for a specific run, ordered by turn_number ASC.\n */\nexport function listTurns(runId: string): TurnRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_turns\n WHERE run_id = ?\n ORDER BY turn_number ASC`,\n ).all(runId) as Record<string, unknown>[];\n\n return rows.map(toTurnRow);\n}\n\n/**\n * List all agent turns for a run, ordered by turn_number ASC.\n *\n * Alias for `listTurns` with an explicit \"by run\" naming convention used\n * by the dashboard API layer.\n */\nexport function listTurnsByRun(runId: string): TurnRow[] {\n return listTurns(runId);\n}\n","/**\n * Agent run CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of runs returned by listRuns when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n/** Default run status for new runs. */\nconst DEFAULT_STATUS = 'pending';\n\n/** Run status indicating the run is currently executing. */\nexport const STATUS_RUNNING = 'running';\n\n/** Run status for a successfully completed run. */\nexport const STATUS_COMPLETED = 'completed';\n\n/** Run status for a run that encountered an error. */\nexport const STATUS_FAILED = 'failed';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a run. */\nexport interface RunInsert {\n id: string;\n agent_id: string;\n task?: string | null;\n instruction?: string | null;\n status?: string;\n started_at?: number | null;\n completed_at?: number | null;\n tokens_used?: number | null;\n cost_usd?: number | null;\n actions_taken?: string | null;\n error?: string | null;\n}\n\n/** Row shape returned from agent_runs queries (all columns). */\nexport interface RunRow {\n id: string;\n agent_id: string;\n task: string | null;\n instruction: string | null;\n status: string;\n started_at: number | null;\n completed_at: number | null;\n tokens_used: number | null;\n cost_usd: number | null;\n actions_taken: string | null;\n error: string | null;\n}\n\n/** Completion data passed to updateRunStatus. */\nexport interface RunCompletion {\n completed_at?: number;\n tokens_used?: number;\n cost_usd?: number;\n actions_taken?: string;\n error?: string;\n}\n\n/** Filter options for `listRuns`. 
*/\nexport interface ListRunsOptions {\n limit?: number;\n offset?: number;\n agent_id?: string;\n status?: string;\n task?: string;\n search?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst RUN_COLUMNS = [\n 'id',\n 'agent_id',\n 'task',\n 'instruction',\n 'status',\n 'started_at',\n 'completed_at',\n 'tokens_used',\n 'cost_usd',\n 'actions_taken',\n 'error',\n] as const;\n\nconst SELECT_COLUMNS = RUN_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed RunRow. */\nfunction toRunRow(row: Record<string, unknown>): RunRow {\n return {\n id: row.id as string,\n agent_id: row.agent_id as string,\n task: (row.task as string) ?? null,\n instruction: (row.instruction as string) ?? null,\n status: row.status as string,\n started_at: (row.started_at as number) ?? null,\n completed_at: (row.completed_at as number) ?? null,\n tokens_used: (row.tokens_used as number) ?? null,\n cost_usd: (row.cost_usd as number) ?? null,\n actions_taken: (row.actions_taken as string) ?? null,\n error: (row.error as string) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new agent run.\n */\nexport function insertRun(data: RunInsert): RunRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO agent_runs (\n id, agent_id, task, instruction, status,\n started_at, completed_at, tokens_used, cost_usd,\n actions_taken, error\n ) VALUES (\n ?, ?, ?, ?, ?,\n ?, ?, ?, ?,\n ?, ?\n )`,\n ).run(\n data.id,\n data.agent_id,\n data.task ?? null,\n data.instruction ?? null,\n data.status ?? DEFAULT_STATUS,\n data.started_at ?? null,\n data.completed_at ?? null,\n data.tokens_used ?? null,\n data.cost_usd ?? null,\n data.actions_taken ?? null,\n data.error ?? null,\n );\n\n return toRunRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_runs WHERE id = ?`).get(data.id) as Record<string, unknown>,\n );\n}\n\n/**\n * Retrieve a single run by id.\n *\n * @returns the run row, or null if not found.\n */\nexport function getRun(id: string): RunRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM agent_runs WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toRunRow(row);\n}\n\n/** Build a WHERE clause and params array from ListRunsOptions filter fields. */\nfunction buildRunsWhere(\n options: Omit<ListRunsOptions, 'limit' | 'offset'>,\n): { where: string; params: unknown[] } {\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.agent_id !== undefined) {\n conditions.push(`agent_id = ?`);\n params.push(options.agent_id);\n }\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n }\n if (options.task !== undefined) {\n conditions.push(`task = ?`);\n params.push(options.task);\n }\n if (options.search !== undefined && options.search.length > 0) {\n conditions.push(`task LIKE ?`);\n params.push(`%${options.search}%`);\n }\n\n return {\n where: conditions.length > 0 ? 
`WHERE ${conditions.join(' AND ')}` : '',\n params,\n };\n}\n\n/**\n * List runs with optional filters, ordered by started_at DESC (nulls last).\n */\nexport function listRuns(\n options: ListRunsOptions = {},\n): RunRow[] {\n const db = getDatabase();\n const { where, params } = buildRunsWhere(options);\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n const offset = options.offset ?? 0;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_runs\n ${where}\n ORDER BY started_at DESC NULLS LAST\n LIMIT ?\n OFFSET ?`,\n ).all(...params, limit, offset) as Record<string, unknown>[];\n\n return rows.map(toRunRow);\n}\n\n/**\n * Count runs matching the given filters (no limit/offset).\n */\nexport function countRuns(\n options: Omit<ListRunsOptions, 'limit' | 'offset'> = {},\n): number {\n const db = getDatabase();\n const { where, params } = buildRunsWhere(options);\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM agent_runs ${where}`,\n ).get(...params) as { count: number };\n\n return row.count;\n}\n\n/**\n * Update a run's status, with optional completion data.\n *\n * @returns the updated row, or null if the run does not exist.\n */\nexport function updateRunStatus(\n id: string,\n status: string,\n completion?: RunCompletion,\n): RunRow | null {\n const db = getDatabase();\n\n const setClauses: string[] = ['status = ?'];\n const params: unknown[] = [status];\n\n if (completion?.completed_at !== undefined) {\n setClauses.push(`completed_at = ?`);\n params.push(completion.completed_at);\n }\n\n if (completion?.tokens_used !== undefined) {\n setClauses.push(`tokens_used = ?`);\n params.push(completion.tokens_used);\n }\n\n if (completion?.cost_usd !== undefined) {\n setClauses.push(`cost_usd = ?`);\n params.push(completion.cost_usd);\n }\n\n if (completion?.actions_taken !== undefined) {\n setClauses.push(`actions_taken = ?`);\n params.push(completion.actions_taken);\n }\n\n if (completion?.error !== undefined) {\n setClauses.push(`error = ?`);\n params.push(completion.error);\n }\n\n params.push(id);\n\n const info = db.prepare(\n `UPDATE agent_runs\n SET ${setClauses.join(', ')}\n WHERE id = ?`,\n ).run(...params);\n\n if (info.changes === 0) return null;\n\n return toRunRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_runs WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * Get the currently running run for an agent, if any.\n *\n * @returns the running run row, or null if no run is active.\n */\nexport function getRunningRun(\n agentId: string,\n): RunRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_runs\n WHERE agent_id = ? AND status = ?\n ORDER BY started_at DESC NULLS LAST\n LIMIT 1`,\n ).get(agentId, STATUS_RUNNING) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toRunRow(row);\n}\n","/**\n * Agent report CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of reports returned by list queries when no limit given. 
*/\nconst DEFAULT_LIST_LIMIT = 100;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting a report. */\nexport interface ReportInsert {\n run_id: string;\n agent_id: string;\n action: string;\n summary: string;\n details?: string | null;\n created_at: number;\n}\n\n/** Row shape returned from agent_reports queries (all columns). */\nexport interface ReportRow {\n id: number;\n run_id: string;\n agent_id: string;\n action: string;\n summary: string;\n details: string | null;\n created_at: number;\n}\n\n/** Filter options for `listReportsByAgent`. */\nexport interface ListReportsByAgentOptions {\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst REPORT_COLUMNS = [\n 'id',\n 'run_id',\n 'agent_id',\n 'action',\n 'summary',\n 'details',\n 'created_at',\n] as const;\n\nconst SELECT_COLUMNS = REPORT_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed ReportRow. */\nfunction toReportRow(row: Record<string, unknown>): ReportRow {\n return {\n id: row.id as number,\n run_id: row.run_id as string,\n agent_id: row.agent_id as string,\n action: row.action as string,\n summary: row.summary as string,\n details: (row.details as string) ?? null,\n created_at: row.created_at as number,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new agent report.\n *\n * The `id` is auto-generated by the INTEGER PRIMARY KEY (AUTOINCREMENT).\n */\nexport function insertReport(data: ReportInsert): ReportRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO agent_reports (\n run_id, agent_id, action, summary, details, created_at\n ) VALUES (\n ?, ?, ?, ?, ?, ?\n )`,\n ).run(\n data.run_id,\n data.agent_id,\n data.action,\n data.summary,\n data.details ?? null,\n data.created_at,\n );\n\n const reportId = Number(info.lastInsertRowid);\n\n return toReportRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM agent_reports WHERE id = ?`).get(reportId) as Record<string, unknown>,\n );\n}\n\n/**\n * List all reports for a specific run, ordered by created_at ASC.\n */\nexport function listReports(runId: string): ReportRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_reports\n WHERE run_id = ?\n ORDER BY created_at ASC`,\n ).all(runId) as Record<string, unknown>[];\n\n return rows.map(toReportRow);\n}\n\n/**\n * List reports by agent, ordered by created_at DESC.\n */\nexport function listReportsByAgent(\n agentId: string,\n options: ListReportsByAgentOptions = {},\n): ReportRow[] {\n const db = getDatabase();\n\n const limit = options.limit ?? 
DEFAULT_LIST_LIMIT;\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM agent_reports\n WHERE agent_id = ?\n ORDER BY created_at DESC\n LIMIT ?`,\n ).all(agentId, limit) as Record<string, unknown>[];\n\n return rows.map(toReportRow);\n}\n","/**\n * Graph edge CRUD query helpers.\n *\n * Unlike the `edges` table (which has FK constraints to entities), `graph_edges`\n * supports edges between any node types (session, batch, spore, entity).\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport crypto from 'node:crypto';\nimport { getDatabase } from '@myco/db/client.js';\nimport { QUERY_DEFAULT_LIST_LIMIT, GRAPH_EDGE_DEFAULT_CONFIDENCE, DEFAULT_MACHINE_ID } from '@myco/constants.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default BFS traversal depth. */\nconst DEFAULT_BFS_DEPTH = 2;\n\n/** Maximum BFS traversal depth (capped for performance). */\nconst MAX_BFS_DEPTH = 5;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Valid node types in the graph. */\nexport type GraphNodeType = 'session' | 'batch' | 'spore' | 'entity';\n\n/** Lineage edge types (auto-created by daemon, no LLM). */\nexport type LineageEdgeType = 'FROM_SESSION' | 'EXTRACTED_FROM' | 'DERIVED_FROM' | 'HAS_BATCH';\n\n/** Semantic edge types (created by intelligence agent, LLM-driven). */\nexport type SemanticEdgeType = 'RELATES_TO' | 'SUPERSEDED_BY' | 'REFERENCES' | 'DEPENDS_ON' | 'AFFECTS';\n\n/** All valid graph edge types. */\nexport type GraphEdgeType = LineageEdgeType | SemanticEdgeType;\n\n/** Fields required (or optional) when inserting a graph edge. */\nexport interface GraphEdgeInsert {\n agent_id: string;\n source_id: string;\n source_type: GraphNodeType;\n target_id: string;\n target_type: GraphNodeType;\n type: GraphEdgeType;\n created_at: number;\n session_id?: string;\n confidence?: number;\n properties?: string;\n machine_id?: string;\n}\n\n/** Row shape returned from graph edge queries. */\nexport interface GraphEdgeRow {\n id: string;\n agent_id: string;\n source_id: string;\n source_type: string;\n target_id: string;\n target_type: string;\n type: string;\n session_id: string | null;\n confidence: number;\n properties: string | null;\n created_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Filter options for `listGraphEdges`. 
*/\nexport interface ListGraphEdgesOptions {\n sourceId?: string;\n targetId?: string;\n type?: string;\n agentId?: string;\n limit?: number;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst GRAPH_EDGE_COLUMNS = [\n 'id',\n 'agent_id',\n 'source_id',\n 'source_type',\n 'target_id',\n 'target_type',\n 'type',\n 'session_id',\n 'confidence',\n 'properties',\n 'created_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = GRAPH_EDGE_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed GraphEdgeRow. */\nfunction toGraphEdgeRow(row: Record<string, unknown>): GraphEdgeRow {\n return {\n id: row.id as string,\n agent_id: row.agent_id as string,\n source_id: row.source_id as string,\n source_type: row.source_type as string,\n target_id: row.target_id as string,\n target_type: row.target_type as string,\n type: row.type as string,\n session_id: (row.session_id as string) ?? null,\n confidence: row.confidence as number,\n properties: (row.properties as string) ?? null,\n created_at: row.created_at as number,\n machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert a new graph edge.\n *\n * Generates a UUID id automatically.\n */\nexport function insertGraphEdge(data: GraphEdgeInsert): GraphEdgeRow {\n const db = getDatabase();\n const id = crypto.randomUUID();\n\n db.prepare(\n `INSERT INTO graph_edges (\n id, agent_id, source_id, source_type, target_id, target_type,\n type, session_id, confidence, properties, created_at, machine_id\n ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,\n ).run(\n id,\n data.agent_id,\n data.source_id,\n data.source_type,\n data.target_id,\n data.target_type,\n data.type,\n data.session_id ?? null,\n data.confidence ?? GRAPH_EDGE_DEFAULT_CONFIDENCE,\n data.properties ?? null,\n data.created_at,\n data.machine_id ?? DEFAULT_MACHINE_ID,\n );\n\n const row = toGraphEdgeRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM graph_edges WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n\n syncRow('graph_edges', row);\n\n return row;\n}\n\n/**\n * List graph edges with optional filters, ordered by created_at DESC.\n */\nexport function listGraphEdges(\n options: ListGraphEdgesOptions = {},\n): GraphEdgeRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.sourceId !== undefined) {\n conditions.push(`source_id = ?`);\n params.push(options.sourceId);\n }\n\n if (options.targetId !== undefined) {\n conditions.push(`target_id = ?`);\n params.push(options.targetId);\n }\n\n if (options.type !== undefined) {\n conditions.push(`type = ?`);\n params.push(options.type);\n }\n\n if (options.agentId !== undefined) {\n conditions.push(`agent_id = ?`);\n params.push(options.agentId);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limit = options.limit ?? 
QUERY_DEFAULT_LIST_LIMIT;\n\n params.push(limit);\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM graph_edges\n ${where}\n ORDER BY created_at DESC\n LIMIT ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toGraphEdgeRow);\n}\n\n/**\n * BFS traversal from a node across graph edges.\n *\n * Returns all edges reachable within `depth` hops from the starting node.\n *\n * @param nodeId - The starting node ID.\n * @param nodeType - The starting node type.\n * @param options - Optional depth limit (default 2, max 5).\n */\nexport function getGraphForNode(\n nodeId: string,\n nodeType: GraphNodeType,\n options?: { depth?: number },\n): { edges: GraphEdgeRow[] } {\n const db = getDatabase();\n const depth = Math.min(Math.max(options?.depth ?? DEFAULT_BFS_DEPTH, 1), MAX_BFS_DEPTH);\n\n const seenEdgeIds = new Set<string>();\n const collectedEdges: GraphEdgeRow[] = [];\n const visited = new Set<string>([`${nodeType}:${nodeId}`]);\n let frontier = new Set<string>([nodeId]);\n\n for (let hop = 0; hop < depth; hop++) {\n if (frontier.size === 0) break;\n\n const frontierArray = Array.from(frontier);\n const placeholders = frontierArray.map(() => `?`).join(', ');\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM graph_edges\n WHERE source_id IN (${placeholders}) OR target_id IN (${placeholders})`,\n ).all(...frontierArray, ...frontierArray) as Record<string, unknown>[];\n\n const nextFrontier = new Set<string>();\n\n for (const row of rows) {\n const edge = toGraphEdgeRow(row);\n if (!seenEdgeIds.has(edge.id)) {\n seenEdgeIds.add(edge.id);\n collectedEdges.push(edge);\n }\n const sourceKey = `${edge.source_type}:${edge.source_id}`;\n const targetKey = `${edge.target_type}:${edge.target_id}`;\n if (!visited.has(sourceKey)) {\n visited.add(sourceKey);\n nextFrontier.add(edge.source_id);\n }\n if (!visited.has(targetKey)) {\n visited.add(targetKey);\n nextFrontier.add(edge.target_id);\n }\n }\n\n frontier = nextFrontier;\n }\n\n return { edges: collectedEdges };\n}\n","/**\n * Lineage edge creation helpers.\n *\n * Creates automatic graph edges when spores and batches are inserted.\n * These are structural (no LLM needed) — the daemon layer calls them.\n *\n * Edge types created:\n * - FROM_SESSION: spore -> session (the session it was extracted from)\n * - EXTRACTED_FROM: spore -> batch (the prompt batch it was extracted from)\n * - DERIVED_FROM: wisdom spore -> source spore (consolidation provenance)\n * - HAS_BATCH: session -> batch (prompt batch belongs to session)\n */\n\nimport { insertGraphEdge } from './graph-edges.js';\nimport {\n EDGE_TYPE_FROM_SESSION,\n EDGE_TYPE_EXTRACTED_FROM,\n EDGE_TYPE_DERIVED_FROM,\n EDGE_TYPE_HAS_BATCH,\n} from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Spore lineage\n// ---------------------------------------------------------------------------\n\n/** Create lineage edges for a newly inserted spore. 
*/\nexport function createSporeLineage(spore: {\n id: string;\n agent_id: string;\n session_id?: string | null;\n prompt_batch_id?: number | null;\n observation_type?: string;\n properties?: string | null;\n created_at: number;\n}): void {\n if (spore.session_id) {\n insertGraphEdge({\n agent_id: spore.agent_id,\n source_id: spore.id,\n source_type: 'spore',\n target_id: spore.session_id,\n target_type: 'session',\n type: EDGE_TYPE_FROM_SESSION,\n created_at: spore.created_at,\n });\n }\n\n if (spore.prompt_batch_id != null) {\n insertGraphEdge({\n agent_id: spore.agent_id,\n source_id: spore.id,\n source_type: 'spore',\n target_id: String(spore.prompt_batch_id),\n target_type: 'batch',\n type: EDGE_TYPE_EXTRACTED_FROM,\n created_at: spore.created_at,\n });\n }\n\n // DERIVED_FROM edges for wisdom spores\n if (spore.observation_type === 'wisdom' && spore.properties) {\n try {\n const props = JSON.parse(spore.properties);\n if (Array.isArray(props.consolidated_from)) {\n for (const sourceId of props.consolidated_from) {\n insertGraphEdge({\n agent_id: spore.agent_id,\n source_id: spore.id,\n source_type: 'spore',\n target_id: sourceId,\n target_type: 'spore',\n type: EDGE_TYPE_DERIVED_FROM,\n created_at: spore.created_at,\n });\n }\n }\n } catch { /* ignore malformed properties */ }\n }\n}\n\n// ---------------------------------------------------------------------------\n// Batch lineage\n// ---------------------------------------------------------------------------\n\n/** Create a HAS_BATCH lineage edge from session to batch. */\nexport function createBatchLineage(\n agentId: string,\n sessionId: string,\n batchId: number,\n createdAt: number,\n): void {\n insertGraphEdge({\n agent_id: agentId,\n source_id: sessionId,\n source_type: 'session',\n target_id: String(batchId),\n target_type: 'batch',\n type: EDGE_TYPE_HAS_BATCH,\n created_at: createdAt,\n });\n}\n","/**\n * Entity CRUD query helpers for the knowledge graph.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { DEFAULT_MACHINE_ID } from '@myco/constants.js';\nimport { getGraphForNode, type GraphEdgeRow } from '@myco/db/queries/graph-edges.js';\nimport { syncRow } from '@myco/db/queries/team-outbox.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Default number of entities returned by listEntities when no limit given. */\nconst DEFAULT_LIST_LIMIT = 100;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required (or optional) when inserting an entity. */\nexport interface EntityInsert {\n id: string;\n agent_id: string;\n type: string;\n name: string;\n first_seen: number;\n last_seen: number;\n properties?: string | null;\n machine_id?: string;\n}\n\n/** Row shape returned from entity queries (all columns). */\nexport interface EntityRow {\n id: string;\n agent_id: string;\n type: string;\n name: string;\n properties: string | null;\n first_seen: number;\n last_seen: number;\n status: string;\n machine_id: string;\n synced_at: number | null;\n}\n\n/** Filter options for `listEntities`. 
*/\nexport interface ListEntitiesOptions {\n agent_id?: string;\n type?: string;\n /** Filter by exact entity name. */\n name?: string;\n /** Filter by status (default 'active'). */\n status?: string;\n /** Filter by entity_mentions subquery — must be paired with note_type. */\n mentioned_in?: string;\n /** Required when mentioned_in is provided. */\n note_type?: string;\n limit?: number;\n offset?: number;\n}\n\n/** Return type for `getEntityWithEdges`. */\nexport interface EntityGraph {\n center: EntityRow;\n nodes: EntityRow[];\n edges: GraphEdgeRow[];\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst ENTITY_COLUMNS = [\n 'id',\n 'agent_id',\n 'type',\n 'name',\n 'properties',\n 'first_seen',\n 'last_seen',\n 'status',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = ENTITY_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed EntityRow. */\nfunction toEntityRow(row: Record<string, unknown>): EntityRow {\n return {\n id: row.id as string,\n agent_id: row.agent_id as string,\n type: row.type as string,\n name: row.name as string,\n properties: (row.properties as string) ?? null,\n first_seen: row.first_seen as number,\n last_seen: row.last_seen as number,\n status: (row.status as string) ?? 'active',\n machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Insert or update an entity. Uses UPSERT on (agent_id, type, name).\n *\n * On conflict, updates properties (if provided) and last_seen.\n */\nexport function insertEntity(data: EntityInsert): EntityRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO entities (id, agent_id, type, name, properties, first_seen, last_seen, machine_id)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n ON CONFLICT (agent_id, type, name) DO UPDATE SET\n properties = COALESCE(EXCLUDED.properties, entities.properties),\n last_seen = EXCLUDED.last_seen`,\n ).run(\n data.id,\n data.agent_id,\n data.type,\n data.name,\n data.properties ?? null,\n data.first_seen,\n data.last_seen,\n data.machine_id ?? DEFAULT_MACHINE_ID,\n );\n\n // On conflict, the passed-in id may not be the actual row id. Look up by unique key.\n const row = toEntityRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM entities WHERE agent_id = ? AND type = ? AND name = ?`).get(\n data.agent_id,\n data.type,\n data.name,\n ) as Record<string, unknown>,\n );\n\n syncRow('entities', row);\n\n return row;\n}\n\n/**\n * Retrieve a single entity by id.\n *\n * @returns the entity row, or null if not found.\n */\nexport function getEntity(id: string): EntityRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM entities WHERE id = ?`,\n ).get(id) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toEntityRow(row);\n}\n\n/**\n * List entities with optional filters, ordered by last_seen DESC.\n *\n * Defaults to `status = 'active'` — archived entities are excluded unless\n * `status` is explicitly provided. 
Pass `status: undefined` in options to\n * get only active entities (the default), or set a specific status string.\n *\n * When both `mentioned_in` and `note_type` are provided, filters to entities\n * referenced in a specific note via the entity_mentions subquery.\n */\nexport function listEntities(\n options: ListEntitiesOptions = {},\n): EntityRow[] {\n const db = getDatabase();\n\n const conditions: string[] = [];\n const params: unknown[] = [];\n\n if (options.agent_id !== undefined) {\n conditions.push(`agent_id = ?`);\n params.push(options.agent_id);\n }\n\n if (options.type !== undefined) {\n conditions.push(`type = ?`);\n params.push(options.type);\n }\n\n if (options.name !== undefined) {\n conditions.push(`name = ?`);\n params.push(options.name);\n }\n\n // Default: only show active entities (status column added in v5)\n if (options.status !== undefined) {\n conditions.push(`status = ?`);\n params.push(options.status);\n } else {\n conditions.push(`status = ?`);\n params.push('active');\n }\n\n if (options.mentioned_in !== undefined && options.note_type !== undefined) {\n conditions.push(\n `id IN (SELECT entity_id FROM entity_mentions WHERE note_id = ? AND note_type = ?)`,\n );\n params.push(options.mentioned_in);\n params.push(options.note_type);\n }\n\n const where = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';\n const limit = options.limit ?? DEFAULT_LIST_LIMIT;\n const offset = options.offset ?? 0;\n\n params.push(limit);\n params.push(offset);\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM entities\n ${where}\n ORDER BY last_seen DESC\n LIMIT ?\n OFFSET ?`,\n ).all(...params) as Record<string, unknown>[];\n\n return rows.map(toEntityRow);\n}\n\n/**\n * Fetch an entity and its surrounding graph via BFS traversal.\n *\n * Delegates to `getGraphForNode` (graph_edges table) for the BFS,\n * then fetches entity rows for all connected entity nodes.\n *\n * @param entityId - The center entity to expand from.\n * @param depth - Number of hops to traverse (1-3, default 1).\n * @returns `{ center, nodes, edges }` where nodes are all connected entities\n * (excluding center) and edges are deduplicated across BFS iterations.\n */\nexport function getEntityWithEdges(\n entityId: string,\n depth = 1,\n): EntityGraph | null {\n const db = getDatabase();\n\n const center = getEntity(entityId);\n if (center === null) return null;\n\n const clampedDepth = Math.min(Math.max(depth, 1), 3);\n const graph = getGraphForNode(entityId, 'entity', { depth: clampedDepth });\n\n // Collect all entity node IDs from edges (excluding center)\n const nodeIdSet = new Set<string>();\n for (const edge of graph.edges) {\n if (edge.source_type === 'entity' && edge.source_id !== entityId) nodeIdSet.add(edge.source_id);\n if (edge.target_type === 'entity' && edge.target_id !== entityId) nodeIdSet.add(edge.target_id);\n }\n\n // Fetch all connected entity nodes\n const nodeIds = Array.from(nodeIdSet);\n let nodes: EntityRow[] = [];\n if (nodeIds.length > 0) {\n const placeholders = nodeIds.map(() => `?`).join(', ');\n const nodeRows = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM entities WHERE id IN (${placeholders})`,\n ).all(...nodeIds) as Record<string, unknown>[];\n nodes = nodeRows.map(toEntityRow);\n }\n\n return { center, nodes, edges: graph.edges };\n}\n","/**\n * Digest extract CRUD query helpers.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { 
getDatabase } from '@myco/db/client.js';\nimport { DIGEST_TIERS, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when upserting a digest extract. */\nexport interface DigestExtractUpsert {\n agent_id: string;\n tier: number;\n content: string;\n generated_at: number;\n machine_id?: string;\n}\n\n/** Row shape returned from digest_extracts queries (all columns). */\nexport interface DigestExtractRow {\n id: number;\n agent_id: string;\n tier: number;\n content: string;\n substrate_hash: string | null;\n generated_at: number;\n machine_id: string;\n synced_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst EXTRACT_COLUMNS = [\n 'id',\n 'agent_id',\n 'tier',\n 'content',\n 'substrate_hash',\n 'generated_at',\n 'machine_id',\n 'synced_at',\n] as const;\n\nconst SELECT_COLUMNS = EXTRACT_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed DigestExtractRow. */\nfunction toDigestExtractRow(row: Record<string, unknown>): DigestExtractRow {\n return {\n id: row.id as number,\n agent_id: row.agent_id as string,\n tier: row.tier as number,\n content: row.content as string,\n substrate_hash: (row.substrate_hash as string) ?? null,\n generated_at: row.generated_at as number,\n machine_id: (row.machine_id as string) ?? DEFAULT_MACHINE_ID,\n synced_at: (row.synced_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Upsert a digest extract. Uses ON CONFLICT on (agent_id, tier).\n *\n * Creates or updates the extract for the given agent and token tier.\n * Uses lastInsertRowid for SERIAL PK on insert, or falls back to\n * SELECT for the conflict (update) case.\n */\nexport function upsertDigestExtract(\n data: DigestExtractUpsert,\n): DigestExtractRow {\n const db = getDatabase();\n\n db.prepare(\n `INSERT INTO digest_extracts (agent_id, tier, content, generated_at)\n VALUES (?, ?, ?, ?)\n ON CONFLICT (agent_id, tier) DO UPDATE SET\n content = EXCLUDED.content,\n generated_at = EXCLUDED.generated_at`,\n ).run(data.agent_id, data.tier, data.content, data.generated_at);\n\n // Always look up by composite unique key — works for both insert and update cases.\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts WHERE agent_id = ? AND tier = ?`,\n ).get(data.agent_id, data.tier);\n\n return toDigestExtractRow(row as Record<string, unknown>);\n}\n\n/**\n * Get a digest extract for a specific agent and tier.\n *\n * @returns the extract row, or null if not found.\n */\nexport function getDigestExtract(\n agentId: string,\n tier: number,\n): DigestExtractRow | null {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT ${SELECT_COLUMNS} FROM digest_extracts\n WHERE agent_id = ? 
AND tier = ?`,\n ).get(agentId, tier) as Record<string, unknown> | undefined;\n\n if (!row) return null;\n return toDigestExtractRow(row);\n}\n\n/**\n * List digest extracts for an agent, filtered to configured tiers, ordered by tier ASC.\n */\nexport function listDigestExtracts(\n agentId: string,\n): DigestExtractRow[] {\n const db = getDatabase();\n const tierPlaceholders = DIGEST_TIERS.map(() => '?').join(', ');\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM digest_extracts\n WHERE agent_id = ? AND tier IN (${tierPlaceholders})\n ORDER BY tier ASC`,\n ).all(agentId, ...DIGEST_TIERS) as Record<string, unknown>[];\n\n return rows.map(toDigestExtractRow);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAgBA,IAAM,4BAA4B;AAG3B,IAAM,wBAAwB;AAGrC,IAAM,mBAAmB;AAGzB,IAAM,iBAAiB;AAGvB,IAAM,yBAAyB;AAG/B,IAAM,oBAAoB;AAG1B,IAAM,iBAAiB;AAGvB,IAAM,4BAA4B;AAoDlC,IAAM,gBAAgB;AAAA,EACpB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,cAAc,KAAK,IAAI;AAO9C,SAAS,WAAW,KAAwC;AAC1D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,eAAgB,IAAI,iBAA4B;AAAA,IAChD,aAAc,IAAI,eAA0B;AAAA,IAC5C,kBAAmB,IAAI,oBAA+B;AAAA,IACtD,gBAAiB,IAAI,kBAA6B;AAAA,IAClD,YAAa,IAAI,cAAyB;AAAA,IAC1C,UAAW,IAAI,YAAuB;AAAA,IACtC,QAAQ,IAAI;AAAA,IACZ,gBAAgB,IAAI;AAAA,IACpB,WAAW,IAAI;AAAA,IACf,cAAe,IAAI,gBAA2B;AAAA,IAC9C,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AA4FO,SAAS,uBACd,WACA,WACM;AACN,QAAM,KAAK,YAAY;AAGvB,QAAM,UAAU,GAAG;AAAA,IACjB;AAAA,EACF,EAAE,IAAI,SAAS;AAGf,aAAW,EAAE,WAAW,SAAS,KAAK,WAAW;AAC/C,UAAM,aAAa,YAAY;AAC/B,QAAI,cAAc,KAAK,aAAa,QAAQ,QAAQ;AAClD,YAAM,UAAU,QAAQ,UAAU,EAAE;AACpC,SAAG;AAAA,QACD;AAAA,MACF,EAAE,IAAI,UAAU,OAAO;AAAA,IACzB;AAAA,EACF;AACF;AAOO,SAAS,sBACd,UAAiD,CAAC,GACtC;AACZ,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC,eAAe;AAC7C,QAAM,SAAoB,CAAC,iBAAiB;AAE5C,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,QAAQ;AACxB,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AAEA,QAAM,QAAQ,QAAQ,SAAS;AAC/B,SAAO,KAAK,KAAK;AAEjB,QAAM,QAAQ,WAAW,KAAK,OAAO;AAErC,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA,aAEf,KAAK;AAAA;AAAA;AAAA,EAGhB,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,UAAU;AAC5B;AAOO,SAAS,uBACd,IACiB;AACjB,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,EAAE;AAER,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,mCAAmC,EAAE,IAAI,EAAE;AAAA,EAChF;AACF;AAOO,SAAS,mBACd,IACiB;AACjB,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,gBAAgB,EAAE;AAExB,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,mCAAmC,EAAE,IAAI,EAAE;AAAA,EAChF;AACF;AAwBO,SAAS,wBACd,WACA,cAC8C;AAC9C,QAAM,KAAK,YAAY;AAEvB,QAAM,SAAS,aAAa,MAAM,GAAG,yBAAyB;AAC9D,QAAM,MAAM,GAAG;AAAA,IACb;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,WAAW,MAAM;AACvB,SAAO,OAAO;AAChB;AAoBO,SAAS,qBAAqB,MAAsC;AACzE,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAWF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,eAAe;AAAA,IACpB,KAAK,cAAc;AAAA,IACnB,KAAK,UAAU;AAAA,IACf;AAAA,IACA;AAAA,IACA,KAAK;AAAA,IACL;AAAA,EACF;AAEA,QAAM,UAAU,OAAO,KAAK,eAAe;AAG3C,QAAM,aAAa,KAAK,eAAe;AACvC,MAAI,YAAY;AACd,OAAG,QAAQ,kEAAkE,EAAE,IAAI,SAAS,UAAU;AAAA,EACxG;AAEA,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,mCAAmC,EAAE,IAAI,OAAO;AAAA,EACrF;AACF;AAQO,SAAS,iBACd,WACA,SACQ;AACR,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB,SAAS,SAAS;AAE1C,SAAO,KAAK;AACd;AAOO,SAAS,mBACd,SACA,SACM;AACN,QAAM,KAAK,YAAY;AACvB,KAAG;AAAA,IACD;AAAA,EACF,EAAE,IAAI,SAAS,
OAAO;AACxB;AAQO,SAAS,eACd,WACiB;AACjB,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAU,cAAc;AAAA;AAAA;AAAA,EAG1B,EAAE,IAAI,SAAS;AAEf,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,WAAW,GAAG;AACvB;AAEO,SAAS,qBACd,WACA,UAAuC,CAAC,GAC5B;AACZ,QAAM,KAAK,YAAY;AAEvB,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAM1B,EAAE,IAAI,WAAW,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,UAAU;AAC5B;;;AC7eO,SAAS,aAAa,KAAsB;AACjD,MAAI,eAAe,MAAO,QAAO,IAAI,WAAW,IAAI,YAAY,QAAQ;AACxE,MAAI,OAAO,QAAQ,SAAU,QAAO,OAAO;AAC3C,MAAI;AAAE,WAAO,KAAK,UAAU,GAAG;AAAA,EAAG,QAAQ;AAAE,WAAO;AAAA,EAAwB;AAC7E;;;ACiCA,IAAM,eAAe;AAAA,EACnB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMA,kBAAiB,aAAa,KAAK,IAAI;AAO7C,SAAS,UAAU,KAAuC;AACxD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,QAAQ,IAAI;AAAA,IACZ,UAAU,IAAI;AAAA,IACd,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,YAAa,IAAI,cAAyB;AAAA,IAC1C,qBAAsB,IAAI,uBAAkC;AAAA,IAC5D,YAAa,IAAI,cAAyB;AAAA,IAC1C,cAAe,IAAI,gBAA2B;AAAA,EAChD;AACF;AAWO,SAAS,WAAW,MAA2B;AACpD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,uBAAuB;AAAA,IAC5B,KAAK,cAAc;AAAA,IACnB,KAAK,gBAAgB;AAAA,EACvB;AAEA,QAAM,SAAS,OAAO,KAAK,eAAe;AAE1C,SAAO;AAAA,IACL,GAAG,QAAQ,UAAUA,eAAc,gCAAgC,EAAE,IAAI,MAAM;AAAA,EACjF;AACF;AAKO,SAAS,UAAU,OAA0B;AAClD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,KAAK;AAEX,SAAO,KAAK,IAAI,SAAS;AAC3B;AAQO,SAAS,eAAe,OAA0B;AACvD,SAAO,UAAU,KAAK;AACxB;;;AC3HA,IAAM,qBAAqB;AAG3B,IAAMC,kBAAiB;AAGhB,IAAM,iBAAiB;AAGvB,IAAMC,oBAAmB;AAGzB,IAAM,gBAAgB;AA2D7B,IAAM,cAAc;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,YAAY,KAAK,IAAI;AAO5C,SAAS,SAAS,KAAsC;AACtD,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,MAAO,IAAI,QAAmB;AAAA,IAC9B,aAAc,IAAI,eAA0B;AAAA,IAC5C,QAAQ,IAAI;AAAA,IACZ,YAAa,IAAI,cAAyB;AAAA,IAC1C,cAAe,IAAI,gBAA2B;AAAA,IAC9C,aAAc,IAAI,eAA0B;AAAA,IAC5C,UAAW,IAAI,YAAuB;AAAA,IACtC,eAAgB,IAAI,iBAA4B;AAAA,IAChD,OAAQ,IAAI,SAAoB;AAAA,EAClC;AACF;AASO,SAAS,UAAU,MAAyB;AACjD,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EASF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,QAAQ;AAAA,IACb,KAAK,eAAe;AAAA,IACpB,KAAK,UAAUF;AAAA,IACf,KAAK,cAAc;AAAA,IACnB,KAAK,gBAAgB;AAAA,IACrB,KAAK,eAAe;AAAA,IACpB,KAAK,YAAY;AAAA,IACjB,KAAK,iBAAiB;AAAA,IACtB,KAAK,SAAS;AAAA,EAChB;AAEA,SAAO;AAAA,IACL,GAAG,QAAQ,UAAUE,eAAc,+BAA+B,EAAE,IAAI,KAAK,EAAE;AAAA,EACjF;AACF;AAOO,SAAS,OAAO,IAA2B;AAChD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,SAAS,GAAG;AACrB;AAGA,SAAS,eACP,SACsC;AACtC,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AACA,MAAI,QAAQ,WAAW,QAAW;AAChC,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B;AACA,MAAI,QAAQ,SAAS,QAAW;AAC9B,eAAW,KAAK,UAAU;AAC1B,WAAO,KAAK,QAAQ,IAAI;AAAA,EAC1B;AACA,MAAI,QAAQ,WAAW,UAAa,QAAQ,OAAO,SAAS,GAAG;AAC7D,eAAW,KAAK,aAAa;AAC7B,WAAO,KAAK,IAAI,QAAQ,MAAM,GAAG;AAAA,EACnC;AAEA,SAAO;AAAA,IACL,OAAO,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAAA,IACrE;AAAA,EACF;AACF;AAKO,SAAS,SACd,UAA2B,CAAC,GAClB;AACV,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,eAAe,OAAO;AAChD,QAAM,QAAQ,QAAQ,SAAS;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EA
AE,IAAI,GAAG,QAAQ,OAAO,MAAM;AAE9B,SAAO,KAAK,IAAI,QAAQ;AAC1B;AAKO,SAAS,UACd,UAAqD,CAAC,GAC9C;AACR,QAAM,KAAK,YAAY;AACvB,QAAM,EAAE,OAAO,OAAO,IAAI,eAAe,OAAO;AAEhD,QAAM,MAAM,GAAG;AAAA,IACb,4CAA4C,KAAK;AAAA,EACnD,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,IAAI;AACb;AAOO,SAAS,gBACd,IACA,QACA,YACe;AACf,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC,YAAY;AAC1C,QAAM,SAAoB,CAAC,MAAM;AAEjC,MAAI,YAAY,iBAAiB,QAAW;AAC1C,eAAW,KAAK,kBAAkB;AAClC,WAAO,KAAK,WAAW,YAAY;AAAA,EACrC;AAEA,MAAI,YAAY,gBAAgB,QAAW;AACzC,eAAW,KAAK,iBAAiB;AACjC,WAAO,KAAK,WAAW,WAAW;AAAA,EACpC;AAEA,MAAI,YAAY,aAAa,QAAW;AACtC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,WAAW,QAAQ;AAAA,EACjC;AAEA,MAAI,YAAY,kBAAkB,QAAW;AAC3C,eAAW,KAAK,mBAAmB;AACnC,WAAO,KAAK,WAAW,aAAa;AAAA,EACtC;AAEA,MAAI,YAAY,UAAU,QAAW;AACnC,eAAW,KAAK,WAAW;AAC3B,WAAO,KAAK,WAAW,KAAK;AAAA,EAC9B;AAEA,SAAO,KAAK,EAAE;AAEd,QAAM,OAAO,GAAG;AAAA,IACd;AAAA,WACO,WAAW,KAAK,IAAI,CAAC;AAAA;AAAA,EAE9B,EAAE,IAAI,GAAG,MAAM;AAEf,MAAI,KAAK,YAAY,EAAG,QAAO;AAE/B,SAAO;AAAA,IACL,GAAG,QAAQ,UAAUA,eAAc,+BAA+B,EAAE,IAAI,EAAE;AAAA,EAC5E;AACF;AAOO,SAAS,cACd,SACe;AACf,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA;AAAA;AAAA;AAAA;AAAA,EAK1B,EAAE,IAAI,SAAS,cAAc;AAE7B,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,SAAS,GAAG;AACrB;;;AC/QA,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,eAAe,KAAK,IAAI;AAO/C,SAAS,YAAY,KAAyC;AAC5D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,QAAQ,IAAI;AAAA,IACZ,UAAU,IAAI;AAAA,IACd,QAAQ,IAAI;AAAA,IACZ,SAAS,IAAI;AAAA,IACb,SAAU,IAAI,WAAsB;AAAA,IACpC,YAAY,IAAI;AAAA,EAClB;AACF;AAWO,SAAS,aAAa,MAA+B;AAC1D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,WAAW;AAAA,IAChB,KAAK;AAAA,EACP;AAEA,QAAM,WAAW,OAAO,KAAK,eAAe;AAE5C,SAAO;AAAA,IACL,GAAG,QAAQ,UAAUA,eAAc,kCAAkC,EAAE,IAAI,QAAQ;AAAA,EACrF;AACF;AAKO,SAAS,YAAY,OAA4B;AACtD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA;AAAA;AAAA,EAI1B,EAAE,IAAI,KAAK;AAEX,SAAO,KAAK,IAAI,WAAW;AAC7B;;;ACrHA,OAAO,YAAY;AAUnB,IAAM,oBAAoB;AAG1B,IAAM,gBAAgB;AA+DtB,IAAM,qBAAqB;AAAA,EACzB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,mBAAmB,KAAK,IAAI;AAOnD,SAAS,eAAe,KAA4C;AAClE,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,IACjB,WAAW,IAAI;AAAA,IACf,aAAa,IAAI;AAAA,IACjB,MAAM,IAAI;AAAA,IACV,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAY,IAAI;AAAA,IAChB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAWO,SAAS,gBAAgB,MAAqC;AACnE,QAAM,KAAK,YAAY;AACvB,QAAM,KAAK,OAAO,WAAW;AAE7B,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA,EAIF,EAAE;AAAA,IACA;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK,cAAc;AAAA,IACnB,KAAK,cAAc;AAAA,IACnB,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,EACrB;AAEA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUA,eAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AAEA,UAAQ,eAAe,GAAG;AAE1B,SAAO;AACT;AAKO,SAAS,eACd,UAAiC,CAAC,GAClB;AAChB,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,eAAe;AAC/B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AAEA,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,eAAe;AAC/B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AAEA,MAAI,QAAQ,SAAS,QAAW;AAC9B,eAAW,KAAK,UAAU;AAC1B,WAAO,KAAK,QAAQ,IAAI;AAAA,EAC1B;AAEA,MAAI,QAAQ,YAAY,QAAW;AACjC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,OAAO;AAAA,EAC7B;AAEA,QAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAC5E,QAAM,QAAQ,QAAQ,SAAS;AAE/B,SAAO,KA
AK,KAAK;AAEjB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA,EAGV,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,cAAc;AAChC;AAWO,SAAS,gBACd,QACA,UACA,SAC2B;AAC3B,QAAM,KAAK,YAAY;AACvB,QAAM,QAAQ,KAAK,IAAI,KAAK,IAAI,SAAS,SAAS,mBAAmB,CAAC,GAAG,aAAa;AAEtF,QAAM,cAAc,oBAAI,IAAY;AACpC,QAAM,iBAAiC,CAAC;AACxC,QAAM,UAAU,oBAAI,IAAY,CAAC,GAAG,QAAQ,IAAI,MAAM,EAAE,CAAC;AACzD,MAAI,WAAW,oBAAI,IAAY,CAAC,MAAM,CAAC;AAEvC,WAAS,MAAM,GAAG,MAAM,OAAO,OAAO;AACpC,QAAI,SAAS,SAAS,EAAG;AAEzB,UAAM,gBAAgB,MAAM,KAAK,QAAQ;AACzC,UAAM,eAAe,cAAc,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAE3D,UAAM,OAAO,GAAG;AAAA,MACd,UAAUA,eAAc;AAAA;AAAA,6BAED,YAAY,sBAAsB,YAAY;AAAA,IACvE,EAAE,IAAI,GAAG,eAAe,GAAG,aAAa;AAExC,UAAM,eAAe,oBAAI,IAAY;AAErC,eAAW,OAAO,MAAM;AACtB,YAAM,OAAO,eAAe,GAAG;AAC/B,UAAI,CAAC,YAAY,IAAI,KAAK,EAAE,GAAG;AAC7B,oBAAY,IAAI,KAAK,EAAE;AACvB,uBAAe,KAAK,IAAI;AAAA,MAC1B;AACA,YAAM,YAAY,GAAG,KAAK,WAAW,IAAI,KAAK,SAAS;AACvD,YAAM,YAAY,GAAG,KAAK,WAAW,IAAI,KAAK,SAAS;AACvD,UAAI,CAAC,QAAQ,IAAI,SAAS,GAAG;AAC3B,gBAAQ,IAAI,SAAS;AACrB,qBAAa,IAAI,KAAK,SAAS;AAAA,MACjC;AACA,UAAI,CAAC,QAAQ,IAAI,SAAS,GAAG;AAC3B,gBAAQ,IAAI,SAAS;AACrB,qBAAa,IAAI,KAAK,SAAS;AAAA,MACjC;AAAA,IACF;AAEA,eAAW;AAAA,EACb;AAEA,SAAO,EAAE,OAAO,eAAe;AACjC;;;ACxPO,SAAS,mBAAmB,OAQ1B;AACP,MAAI,MAAM,YAAY;AACpB,oBAAgB;AAAA,MACd,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,MACjB,aAAa;AAAA,MACb,WAAW,MAAM;AAAA,MACjB,aAAa;AAAA,MACb,MAAM;AAAA,MACN,YAAY,MAAM;AAAA,IACpB,CAAC;AAAA,EACH;AAEA,MAAI,MAAM,mBAAmB,MAAM;AACjC,oBAAgB;AAAA,MACd,UAAU,MAAM;AAAA,MAChB,WAAW,MAAM;AAAA,MACjB,aAAa;AAAA,MACb,WAAW,OAAO,MAAM,eAAe;AAAA,MACvC,aAAa;AAAA,MACb,MAAM;AAAA,MACN,YAAY,MAAM;AAAA,IACpB,CAAC;AAAA,EACH;AAGA,MAAI,MAAM,qBAAqB,YAAY,MAAM,YAAY;AAC3D,QAAI;AACF,YAAM,QAAQ,KAAK,MAAM,MAAM,UAAU;AACzC,UAAI,MAAM,QAAQ,MAAM,iBAAiB,GAAG;AAC1C,mBAAW,YAAY,MAAM,mBAAmB;AAC9C,0BAAgB;AAAA,YACd,UAAU,MAAM;AAAA,YAChB,WAAW,MAAM;AAAA,YACjB,aAAa;AAAA,YACb,WAAW;AAAA,YACX,aAAa;AAAA,YACb,MAAM;AAAA,YACN,YAAY,MAAM;AAAA,UACpB,CAAC;AAAA,QACH;AAAA,MACF;AAAA,IACF,QAAQ;AAAA,IAAoC;AAAA,EAC9C;AACF;AAOO,SAAS,mBACd,SACA,WACA,SACA,WACM;AACN,kBAAgB;AAAA,IACd,UAAU;AAAA,IACV,WAAW;AAAA,IACX,aAAa;AAAA,IACb,WAAW,OAAO,OAAO;AAAA,IACzB,aAAa;AAAA,IACb,MAAM;AAAA,IACN,YAAY;AAAA,EACd,CAAC;AACH;;;ACnFA,IAAMC,sBAAqB;AA2D3B,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,eAAe,KAAK,IAAI;AAO/C,SAAS,YAAY,KAAyC;AAC5D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,MAAM,IAAI;AAAA,IACV,MAAM,IAAI;AAAA,IACV,YAAa,IAAI,cAAyB;AAAA,IAC1C,YAAY,IAAI;AAAA,IAChB,WAAW,IAAI;AAAA,IACf,QAAS,IAAI,UAAqB;AAAA,IAClC,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAWO,SAAS,aAAa,MAA+B;AAC1D,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,IACnB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,cAAc;AAAA,EACrB;AAGA,QAAM,MAAM;AAAA,IACV,GAAG,QAAQ,UAAUA,eAAc,6DAA6D,EAAE;AAAA,MAChG,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,IACP;AAAA,EACF;AAEA,UAAQ,YAAY,GAAG;AAEvB,SAAO;AACT;AAOO,SAAS,UAAU,IAA8B;AACtD,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,EAAE;AAER,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,YAAY,GAAG;AACxB;AAYO,SAAS,aACd,UAA+B,CAAC,GACnB;AACb,QAAM,KAAK,YAAY;AAEvB,QAAM,aAAuB,CAAC;AAC9B,QAAM,SAAoB,CAAC;AAE3B,MAAI,QAAQ,aAAa,QAAW;AAClC,eAAW,KAAK,cAAc;AAC9B,WAAO,KAAK,QAAQ,QAAQ;AAAA,EAC9B;AAEA,MAAI,QAAQ,SAAS,QAAW;AAC9B,eAAW,KAAK,UAAU;AAC1B,WAAO,KAAK,QAAQ,IAAI;AAAA,EAC1B;AAEA,MAAI,QAAQ,SAAS,QAAW;AAC9B,eAAW,KAAK,UAAU;AAC1B,WAAO,KAAK,QAAQ,IAAI;AAAA,EAC1B;AAGA,MAAI,QAAQ,WAAW,QAAW;AAChC
,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ,MAAM;AAAA,EAC5B,OAAO;AACL,eAAW,KAAK,YAAY;AAC5B,WAAO,KAAK,QAAQ;AAAA,EACtB;AAEA,MAAI,QAAQ,iBAAiB,UAAa,QAAQ,cAAc,QAAW;AACzE,eAAW;AAAA,MACT;AAAA,IACF;AACA,WAAO,KAAK,QAAQ,YAAY;AAChC,WAAO,KAAK,QAAQ,SAAS;AAAA,EAC/B;AAEA,QAAM,QAAQ,WAAW,SAAS,IAAI,SAAS,WAAW,KAAK,OAAO,CAAC,KAAK;AAC5E,QAAM,QAAQ,QAAQ,SAASD;AAC/B,QAAM,SAAS,QAAQ,UAAU;AAEjC,SAAO,KAAK,KAAK;AACjB,SAAO,KAAK,MAAM;AAElB,QAAM,OAAO,GAAG;AAAA,IACd,UAAUC,eAAc;AAAA;AAAA,OAErB,KAAK;AAAA;AAAA;AAAA;AAAA,EAIV,EAAE,IAAI,GAAG,MAAM;AAEf,SAAO,KAAK,IAAI,WAAW;AAC7B;;;ACtMA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAMC,kBAAiB,gBAAgB,KAAK,IAAI;AAOhD,SAAS,mBAAmB,KAAgD;AAC1E,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,UAAU,IAAI;AAAA,IACd,MAAM,IAAI;AAAA,IACV,SAAS,IAAI;AAAA,IACb,gBAAiB,IAAI,kBAA6B;AAAA,IAClD,cAAc,IAAI;AAAA,IAClB,YAAa,IAAI,cAAyB;AAAA,IAC1C,WAAY,IAAI,aAAwB;AAAA,EAC1C;AACF;AAaO,SAAS,oBACd,MACkB;AAClB,QAAM,KAAK,YAAY;AAEvB,KAAG;AAAA,IACD;AAAA;AAAA;AAAA;AAAA;AAAA,EAKF,EAAE,IAAI,KAAK,UAAU,KAAK,MAAM,KAAK,SAAS,KAAK,YAAY;AAG/D,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA,EAC1B,EAAE,IAAI,KAAK,UAAU,KAAK,IAAI;AAE9B,SAAO,mBAAmB,GAA8B;AAC1D;AAOO,SAAS,iBACd,SACA,MACyB;AACzB,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb,UAAUA,eAAc;AAAA;AAAA,EAE1B,EAAE,IAAI,SAAS,IAAI;AAEnB,MAAI,CAAC,IAAK,QAAO;AACjB,SAAO,mBAAmB,GAAG;AAC/B;AAKO,SAAS,mBACd,SACoB;AACpB,QAAM,KAAK,YAAY;AACvB,QAAM,mBAAmB,aAAa,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAE9D,QAAM,OAAO,GAAG;AAAA,IACd,UAAUA,eAAc;AAAA;AAAA,uCAEW,gBAAgB;AAAA;AAAA,EAErD,EAAE,IAAI,SAAS,GAAG,YAAY;AAE9B,SAAO,KAAK,IAAI,kBAAkB;AACpC;","names":["SELECT_COLUMNS","DEFAULT_STATUS","STATUS_COMPLETED","SELECT_COLUMNS","SELECT_COLUMNS","SELECT_COLUMNS","DEFAULT_LIST_LIMIT","SELECT_COLUMNS","SELECT_COLUMNS"]}
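Note: the source map above embeds the TypeScript for the graph-edge query helpers, including insertGraphEdge and a BFS traversal (getGraphForNode) that clamps depth to a 1-5 range and deduplicates edges across hops. A minimal usage sketch, assuming the import paths and constants shown in the map's sourcesContent:

```ts
// Sketch only; paths, constants, and field names follow the embedded sources above.
import { insertGraphEdge, getGraphForNode } from '@myco/db/queries/graph-edges.js';
import { EDGE_TYPE_FROM_SESSION } from '@myco/constants.js';

// Link a spore to the session it was extracted from (IDs are illustrative).
insertGraphEdge({
  agent_id: 'agent-1',
  source_id: 'spore-abc',
  source_type: 'spore',
  target_id: 'session-xyz',
  target_type: 'session',
  type: EDGE_TYPE_FROM_SESSION,
  created_at: Date.now(),
});

// Expand the neighborhood around that session: BFS up to 2 hops (clamped to 1-5).
const { edges } = getGraphForNode('session-xyz', 'session', { depth: 2 });
```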
@@ -4,10 +4,10 @@ import {
 } from "./chunk-V7XG6V6C.js";
 import {
   readStdin
-} from "./chunk-
+} from "./chunk-J4RVYUH4.js";
 import {
   DaemonClient
-} from "./chunk-
+} from "./chunk-YZMNEIFI.js";
 import {
   resolveVaultDir
 } from "./chunk-JTYZRPX5.js";
@@ -38,4 +38,4 @@ async function sendEvent(hookName, buildEvent) {
 export {
   sendEvent
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-FMIWFRAM.js.map
@@ -2,7 +2,7 @@ import { createRequire as __cr } from 'node:module'; const require = __cr(import
 import {
   EMBEDDING_REQUEST_TIMEOUT_MS,
   PROVIDER_DETECT_TIMEOUT_MS
-} from "./chunk-
+} from "./chunk-76ZO5RGT.js";
 
 // src/cli/providers/cloud-embedding-base.ts
 var ERROR_BODY_PREVIEW_CHARS = 500;
@@ -63,4 +63,4 @@ var CloudEmbeddingBase = class {
 export {
   CloudEmbeddingBase
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-FPMEIN2W.js.map
@@ -11,7 +11,7 @@ var cached;
 function getPluginVersion() {
   if (cached) return cached;
   if (true) {
-    cached = "0.
+    cached = "0.10.0";
     return cached;
   }
   const root = findPackageRoot(path.dirname(fileURLToPath(import.meta.url)));
@@ -32,4 +32,4 @@ function getPluginVersion() {
 export {
   getPluginVersion
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-G2LQBFE3.js.map
@@ -1,7 +1,7 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
   STDIN_TIMEOUT_MS
-} from "./chunk-
+} from "./chunk-76ZO5RGT.js";
 
 // src/hooks/read-stdin.ts
 function readStdin() {
@@ -18,4 +18,4 @@ function readStdin() {
 export {
   readStdin
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-J4RVYUH4.js.map
@@ -72,6 +72,20 @@ var AgentSchema = external_exports.object({
   /** Per-task overrides keyed by task name. */
   tasks: external_exports.record(external_exports.string(), TaskProviderOverrideSchema).optional()
 });
+var BackupSchema = external_exports.object({
+  /** Override directory for backup files (absolute path). When unset, defaults to {vaultDir}/backups. */
+  dir: external_exports.string().optional()
+});
+var TeamSchema = external_exports.object({
+  /** Whether team sync is enabled. */
+  enabled: external_exports.boolean().default(false),
+  /** Cloudflare Worker URL for team sync. */
+  worker_url: external_exports.string().url().optional(),
+  /** Team identifier for sync grouping. */
+  team_id: external_exports.string().optional(),
+  /** Sync interval in minutes. */
+  interval_minutes: external_exports.number().int().min(1).max(1440).default(15)
+});
 var MycoConfigSchema = external_exports.preprocess(
   (raw) => {
     if (raw && typeof raw === "object" && "curation" in raw && !("agent" in raw)) {
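Note: both new schemas reuse the package's existing default-filling pattern, Schema.default(() => Schema.parse({})). A quick sketch of the unbundled TeamSchema (restated here from this hunk, not the package's own export) showing what parsing an empty object yields:

```ts
import { z } from 'zod';

// Restated from the diff in unbundled form.
const TeamSchema = z.object({
  enabled: z.boolean().default(false),
  worker_url: z.string().url().optional(),
  team_id: z.string().optional(),
  interval_minutes: z.number().int().min(1).max(1440).default(15),
});

// Defaults fill in, optional fields stay absent:
const team = TeamSchema.parse({});
// => { enabled: false, interval_minutes: 15 }
```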
@@ -87,7 +101,9 @@ var MycoConfigSchema = external_exports.preprocess(
     daemon: DaemonSchema.default(() => DaemonSchema.parse({})),
     capture: CaptureSchema.default(() => CaptureSchema.parse({})),
     agent: AgentSchema.default(() => AgentSchema.parse({})),
-    context: ContextSchema.default(() => ContextSchema.parse({}))
+    context: ContextSchema.default(() => ContextSchema.parse({})),
+    backup: BackupSchema.default(() => BackupSchema.parse({})),
+    team: TeamSchema.default(() => TeamSchema.parse({}))
   })
 );
 
@@ -264,12 +280,26 @@ function updateConfig(vaultDir, fn) {
   saveConfig(vaultDir, updated);
   return updated;
 }
+function updateBackupConfig(vaultDir, backup) {
+  return updateConfig(vaultDir, (config) => ({
+    ...config,
+    backup: { ...config.backup, ...backup }
+  }));
+}
+function updateTeamConfig(vaultDir, team) {
+  return updateConfig(vaultDir, (config) => ({
+    ...config,
+    team: { ...config.team, ...team }
+  }));
+}
 
 export {
   MycoConfigSchema,
   CONFIG_FILENAME,
   loadConfig,
   saveConfig,
-  updateConfig
+  updateConfig,
+  updateBackupConfig,
+  updateTeamConfig
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-MAZOVVDU.js.map
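Note: a minimal usage sketch for the two new exports. Each loads myco.yaml, shallow-merges the partial section, and writes the result back through updateConfig. The import path and vault path are illustrative:

```ts
// Illustrative import path; the bundled exports live in chunk-MAZOVVDU.js.
import { updateBackupConfig, updateTeamConfig } from './dist/chunk-MAZOVVDU.js';

const vaultDir = '/path/to/vault'; // illustrative

// Override where backups are written (defaults to {vaultDir}/backups when unset).
updateBackupConfig(vaultDir, { dir: '/backups/myco' });

// Turn on team sync with a 30-minute interval.
updateTeamConfig(vaultDir, { enabled: true, team_id: 'platform', interval_minutes: 30 });
```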
@@ -0,0 +1 @@
+
{"version":3,"sources":["../src/config/loader.ts","../src/config/schema.ts","../src/config/migrations.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport YAML from 'yaml';\nimport { MycoConfigSchema, type MycoConfig, type BackupConfig, type TeamConfig } from './schema.js';\nimport { runMigrations, CURRENT_MIGRATION_VERSION } from './migrations.js';\n\nexport const CONFIG_FILENAME = 'myco.yaml';\n\nexport function loadConfig(vaultDir: string): MycoConfig {\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n\n if (!fs.existsSync(configPath)) {\n throw new Error(`myco.yaml not found in ${vaultDir}`);\n }\n\n const raw = fs.readFileSync(configPath, 'utf-8');\n const parsed = YAML.parse(raw) as Record<string, unknown>;\n\n // Detect v1 config and guide migration\n if (parsed.version === 1 || (parsed.intelligence as Record<string, unknown>)?.backend) {\n throw new Error(\n 'Myco config uses v1 format. Run /myco:setup-llm to reconfigure for v2.',\n );\n }\n\n // --- v2 → v3 migration ---\n let v2Migrated = false;\n if (parsed.version === 2) {\n // Extract intelligence.embedding to top-level embedding\n const intel = parsed.intelligence as Record<string, unknown> | undefined;\n const embeddingConfig = intel?.embedding as Record<string, unknown> | undefined;\n if (embeddingConfig && !parsed.embedding) {\n // Map v2 'lm-studio' to v3 'openai-compatible' for embedding provider\n if (embeddingConfig.provider === 'lm-studio') {\n embeddingConfig.provider = 'openai-compatible';\n }\n parsed.embedding = embeddingConfig;\n }\n\n // Keep daemon.port and daemon.log_level, drop grace_period and max_log_size\n const daemon = parsed.daemon as Record<string, unknown> | undefined;\n if (daemon) {\n const { port, log_level } = daemon;\n parsed.daemon = { port: port ?? null, log_level: log_level ?? 'info' };\n }\n\n // Keep capture basics, drop token-related fields; migrate artifact_watch → plan_dirs\n const capture = parsed.capture as Record<string, unknown> | undefined;\n if (capture) {\n const { transcript_paths, artifact_watch, plan_dirs, artifact_extensions, buffer_max_events } = capture;\n parsed.capture = {\n transcript_paths,\n plan_dirs: plan_dirs ?? artifact_watch,\n artifact_extensions,\n buffer_max_events,\n };\n }\n\n // Drop removed top-level sections\n delete parsed.intelligence;\n delete parsed.context;\n delete parsed.team;\n delete parsed.digest;\n delete parsed.pipeline;\n\n // Set version to 3\n parsed.version = 3;\n v2Migrated = true;\n\n process.stderr.write('[myco migration] Migrated config from v2 to v3\\n');\n }\n\n // Run numbered migrations (for v3+ forward migrations)\n const migrationsRan = runMigrations(parsed, vaultDir, (msg) => {\n process.stderr.write(`[myco migration] ${msg}\\n`);\n });\n\n // Parse with Zod to fill in defaults for new config sections\n const config = MycoConfigSchema.parse(parsed);\n\n // Write back if v2→v3 migration ran, numbered migrations ran, or new defaults were added\n const needsWrite = v2Migrated\n || migrationsRan\n || (parsed.config_version as number ?? 
0) < CURRENT_MIGRATION_VERSION\n || parsed.version !== config.version;\n\n if (needsWrite) {\n const fullConfig = JSON.parse(JSON.stringify(config)) as Record<string, unknown>;\n fs.writeFileSync(configPath, YAML.stringify(fullConfig), 'utf-8');\n }\n\n return config;\n}\n\nexport function saveConfig(vaultDir: string, config: MycoConfig): void {\n // Validate before writing — OAK lesson: validate on write, not just read\n const validated = MycoConfigSchema.parse(config);\n\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n fs.mkdirSync(vaultDir, { recursive: true });\n fs.writeFileSync(configPath, YAML.stringify(validated), 'utf-8');\n}\n\nexport function updateConfig(\n vaultDir: string,\n fn: (config: MycoConfig) => MycoConfig,\n): MycoConfig {\n const current = loadConfig(vaultDir);\n const updated = fn(current);\n saveConfig(vaultDir, updated);\n return updated;\n}\n\nexport function updateBackupConfig(\n vaultDir: string,\n backup: Partial<BackupConfig>,\n): MycoConfig {\n return updateConfig(vaultDir, (config) => ({\n ...config,\n backup: { ...config.backup, ...backup },\n }));\n}\n\nexport function updateTeamConfig(\n vaultDir: string,\n team: Partial<TeamConfig>,\n): MycoConfig {\n return updateConfig(vaultDir, (config) => ({\n ...config,\n team: { ...config.team, ...team },\n }));\n}\n","import { z } from 'zod';\n\nconst EmbeddingProviderSchema = z.object({\n provider: z.enum(['ollama', 'openai-compatible', 'openrouter', 'openai']).default('ollama'),\n model: z.string().default('bge-m3'),\n base_url: z.string().url().optional(),\n});\n\nconst DaemonSchema = z.object({\n port: z.number().int().min(1024).max(65535).nullable().default(null),\n log_level: z.enum(['debug', 'info', 'warn', 'error']).default('info'),\n log_retention_days: z.number().int().min(1).max(365).default(30),\n});\n\nconst CaptureSchema = z.object({\n transcript_paths: z.array(z.string()).default([]),\n plan_dirs: z.array(z.string()).default([]),\n artifact_extensions: z.array(z.string()).default(['.md']),\n buffer_max_events: z.number().int().positive().default(500),\n});\n\n/** Provider config shape used in both task-level and phase-level overrides. */\nconst ProviderOverrideSchema = z.object({\n type: z.enum(['cloud', 'ollama', 'lmstudio']),\n base_url: z.string().optional(),\n model: z.string().optional(),\n /** Context window size for local models (Ollama num_ctx, LM Studio context_length). */\n context_length: z.number().int().positive().optional(),\n});\n\n/** Per-phase overrides within a task — keyed by phase name. */\nconst PhaseOverrideSchema = z.object({\n provider: ProviderOverrideSchema.optional(),\n model: z.string().optional(),\n maxTurns: z.number().int().positive().optional(),\n});\n\n/** Per-task config override — stored in myco.yaml under agent.tasks. */\nconst TaskProviderOverrideSchema = z.object({\n provider: ProviderOverrideSchema.optional(),\n model: z.string().optional(),\n maxTurns: z.number().int().positive().optional(),\n timeoutSeconds: z.number().int().positive().optional(),\n phases: z.record(z.string(), PhaseOverrideSchema).optional(),\n});\n\nconst ContextSchema = z.object({\n /** Which digest tier to inject at session start. */\n digest_tier: z.number().int().default(5000),\n /** Enable semantic spore search on each user prompt. */\n prompt_search: z.boolean().default(true),\n /** Max spores to inject per prompt (0-10). 
*/\n prompt_max_spores: z.number().int().min(0).max(10).default(3),\n});\n\nconst AgentSchema = z.object({\n /** Whether the daemon automatically runs the agent on unprocessed batches. */\n auto_run: z.boolean().default(true),\n /** Seconds between agent timer checks. */\n interval_seconds: z.number().int().positive().default(300),\n /** Number of batches between event-driven summary triggers (0 to disable). */\n summary_batch_interval: z.number().int().min(0).default(5),\n /** Global default provider — applies to all tasks unless overridden per-task. */\n provider: ProviderOverrideSchema.optional(),\n /** Global default model — applies to all tasks unless overridden per-task. */\n model: z.string().optional(),\n /** Per-task overrides keyed by task name. */\n tasks: z.record(z.string(), TaskProviderOverrideSchema).optional(),\n});\n\nconst BackupSchema = z.object({\n /** Override directory for backup files (absolute path). When unset, defaults to {vaultDir}/backups. */\n dir: z.string().optional(),\n});\n\nconst TeamSchema = z.object({\n /** Whether team sync is enabled. */\n enabled: z.boolean().default(false),\n /** Cloudflare Worker URL for team sync. */\n worker_url: z.string().url().optional(),\n /** Team identifier for sync grouping. */\n team_id: z.string().optional(),\n /** Sync interval in minutes. */\n interval_minutes: z.number().int().min(1).max(1440).default(15),\n});\n\nexport const MycoConfigSchema = z.preprocess(\n (raw: unknown) => {\n if (raw && typeof raw === 'object' && 'curation' in raw && !('agent' in raw)) {\n const { curation, ...rest } = raw as Record<string, unknown>;\n return { ...rest, agent: curation };\n }\n return raw;\n },\n z.object({\n version: z.literal(3),\n config_version: z.number().int().nonnegative().default(0),\n embedding: EmbeddingProviderSchema.default(() => EmbeddingProviderSchema.parse({})),\n daemon: DaemonSchema.default(() => DaemonSchema.parse({})),\n capture: CaptureSchema.default(() => CaptureSchema.parse({})),\n agent: AgentSchema.default(() => AgentSchema.parse({})),\n context: ContextSchema.default(() => ContextSchema.parse({})),\n backup: BackupSchema.default(() => BackupSchema.parse({})),\n team: TeamSchema.default(() => TeamSchema.parse({})),\n }),\n);\n\nexport type MycoConfig = z.output<typeof MycoConfigSchema>;\nexport type EmbeddingProviderConfig = z.infer<typeof EmbeddingProviderSchema>;\nexport type TaskProviderOverride = z.infer<typeof TaskProviderOverrideSchema>;\nexport type PhaseOverride = z.infer<typeof PhaseOverrideSchema>;\nexport type ContextConfig = z.infer<typeof ContextSchema>;\nexport type BackupConfig = z.infer<typeof BackupSchema>;\nexport type TeamConfig = z.infer<typeof TeamSchema>;\n","/**\n * Config and vault migrations — run once per version, tracked by config_version.\n *\n * Each migration has a version number, a name, and a function that receives\n * the raw parsed YAML doc and the vault directory. Migrations run in order\n * and are skipped if config_version is already past them.\n *\n * To add a new migration:\n * 1. Add an entry to MIGRATIONS with the next version number\n * 2. Write the migrate function — it receives the mutable doc and vaultDir\n * 3. 
The framework handles version tracking and writing the config back\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nexport interface Migration {\n version: number;\n name: string;\n migrate: (doc: Record<string, unknown>, vaultDir: string) => void;\n}\n\n/** Regex matching both quoted and unquoted YAML: type: memory, type: \"memory\", type: 'memory' */\nconst MEMORY_TYPE_PATTERN = /type:\\s*[\"']?memory[\"']?/g;\n\nexport const MIGRATIONS: Migration[] = [\n {\n version: 1,\n name: 'rename-memories-to-spores',\n migrate: (doc, vaultDir) => {\n // Config: rename context.layers.memories → context.layers.spores\n const context = doc.context as Record<string, unknown> | undefined;\n const layers = context?.layers as Record<string, unknown> | undefined;\n if (layers && 'memories' in layers && !('spores' in layers)) {\n layers.spores = layers.memories;\n delete layers.memories;\n }\n\n // Vault: rename memories/ directory → spores/\n const memoriesDir = path.join(vaultDir, 'memories');\n const sporesDir = path.join(vaultDir, 'spores');\n\n if (!fs.existsSync(memoriesDir)) return;\n\n if (fs.existsSync(sporesDir)) {\n // Both exist (interrupted migration) — merge remaining files\n const moveRemaining = (srcDir: string, destDir: string): void => {\n for (const entry of fs.readdirSync(srcDir, { withFileTypes: true })) {\n const srcPath = path.join(srcDir, entry.name);\n const destPath = path.join(destDir, entry.name);\n if (entry.isDirectory()) {\n if (!fs.existsSync(destPath)) fs.mkdirSync(destPath, { recursive: true });\n moveRemaining(srcPath, destPath);\n } else if (!fs.existsSync(destPath)) {\n fs.renameSync(srcPath, destPath);\n }\n }\n };\n moveRemaining(memoriesDir, sporesDir);\n fs.rmSync(memoriesDir, { recursive: true, force: true });\n } else {\n fs.renameSync(memoriesDir, sporesDir);\n }\n\n // Update frontmatter type: memory → type: spore (handles quoted and unquoted)\n const walkUpdate = (dir: string): void => {\n for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) { walkUpdate(fullPath); continue; }\n if (!entry.name.endsWith('.md')) continue;\n const content = fs.readFileSync(fullPath, 'utf-8');\n MEMORY_TYPE_PATTERN.lastIndex = 0;\n if (MEMORY_TYPE_PATTERN.test(content)) {\n MEMORY_TYPE_PATTERN.lastIndex = 0;\n fs.writeFileSync(fullPath, content.replace(MEMORY_TYPE_PATTERN, 'type: spore'));\n }\n }\n };\n walkUpdate(sporesDir);\n\n // Legacy: update wikilink references in Markdown files (pre-SQLite migration): [[memories/...]] → [[spores/...]]\n const walkLinks = (dir: string): void => {\n for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) { walkLinks(fullPath); continue; }\n if (!entry.name.endsWith('.md')) continue;\n const content = fs.readFileSync(fullPath, 'utf-8');\n if (content.includes('memories/')) {\n fs.writeFileSync(fullPath, content.replace(/memories\\//g, 'spores/'));\n }\n }\n };\n walkLinks(vaultDir);\n },\n },\n {\n version: 2,\n name: 'consolidation-boolean-to-object',\n migrate: (doc) => {\n const digest = doc.digest as Record<string, unknown> | undefined;\n if (!digest) return;\n\n const consolidation = digest.consolidation;\n if (typeof consolidation === 'boolean') {\n digest.consolidation = { enabled: consolidation, max_tokens: 2048 };\n }\n },\n },\n];\n\n/** Current migration version — the highest version in MIGRATIONS. 
*/\nexport const CURRENT_MIGRATION_VERSION = MIGRATIONS[MIGRATIONS.length - 1]?.version ?? 0;\n\n/**\n * Run all pending migrations on the raw config doc.\n * Returns true if any migrations ran (caller should reindex).\n */\nexport function runMigrations(\n doc: Record<string, unknown>,\n vaultDir: string,\n log?: (message: string) => void,\n): boolean {\n const currentVersion = (doc.config_version as number) ?? 0;\n let ran = false;\n\n for (const migration of MIGRATIONS) {\n if (migration.version <= currentVersion) continue;\n\n migration.migrate(doc, vaultDir);\n doc.config_version = migration.version;\n ran = true;\n }\n\n if (ran) {\n const from = currentVersion;\n const to = (doc.config_version as number) ?? 0;\n log?.(`Migrated config from v${from} to v${to}`);\n }\n\n return ran;\n}\n"],"mappings":";;;;;;;;;;;;AAEA,kBAAiB;AAFjB,OAAOA,SAAQ;AACf,OAAOC,WAAU;;;ACCjB,IAAM,0BAA0B,iBAAE,OAAO;AAAA,EACvC,UAAU,iBAAE,KAAK,CAAC,UAAU,qBAAqB,cAAc,QAAQ,CAAC,EAAE,QAAQ,QAAQ;AAAA,EAC1F,OAAO,iBAAE,OAAO,EAAE,QAAQ,QAAQ;AAAA,EAClC,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AACtC,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA,EAC5B,MAAM,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,IAAI,EAAE,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA,EACnE,WAAW,iBAAE,KAAK,CAAC,SAAS,QAAQ,QAAQ,OAAO,CAAC,EAAE,QAAQ,MAAM;AAAA,EACpE,oBAAoB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG,EAAE,QAAQ,EAAE;AACjE,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA,EAC7B,kBAAkB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EAChD,WAAW,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EACzC,qBAAqB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,KAAK,CAAC;AAAA,EACxD,mBAAmB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAG;AAC5D,CAAC;AAGD,IAAM,yBAAyB,iBAAE,OAAO;AAAA,EACtC,MAAM,iBAAE,KAAK,CAAC,SAAS,UAAU,UAAU,CAAC;AAAA,EAC5C,UAAU,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AACvD,CAAC;AAGD,IAAM,sBAAsB,iBAAE,OAAO;AAAA,EACnC,UAAU,uBAAuB,SAAS;AAAA,EAC1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AACjD,CAAC;AAGD,IAAM,6BAA6B,iBAAE,OAAO;AAAA,EAC1C,UAAU,uBAAuB,SAAS;AAAA,EAC1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EAC/C,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EACrD,QAAQ,iBAAE,OAAO,iBAAE,OAAO,GAAG,mBAAmB,EAAE,SAAS;AAC7D,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA;AAAA,EAE7B,aAAa,iBAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,GAAI;AAAA;AAAA,EAE1C,eAAe,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAEvC,mBAAmB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,EAAE,QAAQ,CAAC;AAC9D,CAAC;AAED,IAAM,cAAc,iBAAE,OAAO;AAAA;AAAA,EAE3B,UAAU,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAElC,kBAAkB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAG;AAAA;AAAA,EAEzD,wBAAwB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,QAAQ,CAAC;AAAA;AAAA,EAEzD,UAAU,uBAAuB,SAAS;AAAA;AAAA,EAE1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,OAAO,iBAAE,OAAO,iBAAE,OAAO,GAAG,0BAA0B,EAAE,SAAS;AACnE,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA;AAAA,EAE5B,KAAK,iBAAE,OAAO,EAAE,SAAS;AAC3B,CAAC;AAED,IAAM,aAAa,iBAAE,OAAO;AAAA;AAAA,EAE1B,SAAS,iBAAE,QAAQ,EAAE,QAAQ,KAAK;AAAA;AAAA,EAElC,YAAY,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,EAEtC,SAAS,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE7B,kBAAkB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,QAAQ,EAAE;AAChE,CAAC;AAEM,IAAM,mBAAmB,iBAAE;AAAA,EAChC,CAAC,QAAiB;AAChB,QAAI,OAAO,OAAO,QAAQ,YAAY,cAAc,OAAO,EAAE,WAAW,MAAM;AAC5E,YAAM,EAAE,UAAU,GAAG,KAAK,IAAI;AAC9B,aAAO,EAAE,GAAG,MAAM,OAAO,SAAS;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EACA,iBAAE,OAAO;AAAA,IACP,SAAS,iBAAE,QAAQ,CAAC;AAAA,IACpB,gBAAgB,
iBAAE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,CAAC;AAAA,IACxD,WAAW,wBAAwB,QAAQ,MAAM,wBAAwB,MAAM,CAAC,CAAC,CAAC;AAAA,IAClF,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,SAAS,cAAc,QAAQ,MAAM,cAAc,MAAM,CAAC,CAAC,CAAC;AAAA,IAC5D,OAAO,YAAY,QAAQ,MAAM,YAAY,MAAM,CAAC,CAAC,CAAC;AAAA,IACtD,SAAS,cAAc,QAAQ,MAAM,cAAc,MAAM,CAAC,CAAC,CAAC;AAAA,IAC5D,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,MAAM,WAAW,QAAQ,MAAM,WAAW,MAAM,CAAC,CAAC,CAAC;AAAA,EACrD,CAAC;AACH;;;AC5FA,OAAO,QAAQ;AACf,OAAO,UAAU;AASjB,IAAM,sBAAsB;AAErB,IAAM,aAA0B;AAAA,EACrC;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,SAAS,CAAC,KAAK,aAAa;AAE1B,YAAM,UAAU,IAAI;AACpB,YAAM,SAAS,SAAS;AACxB,UAAI,UAAU,cAAc,UAAU,EAAE,YAAY,SAAS;AAC3D,eAAO,SAAS,OAAO;AACvB,eAAO,OAAO;AAAA,MAChB;AAGA,YAAM,cAAc,KAAK,KAAK,UAAU,UAAU;AAClD,YAAM,YAAY,KAAK,KAAK,UAAU,QAAQ;AAE9C,UAAI,CAAC,GAAG,WAAW,WAAW,EAAG;AAEjC,UAAI,GAAG,WAAW,SAAS,GAAG;AAE5B,cAAM,gBAAgB,CAAC,QAAgB,YAA0B;AAC/D,qBAAW,SAAS,GAAG,YAAY,QAAQ,EAAE,eAAe,KAAK,CAAC,GAAG;AACnE,kBAAM,UAAU,KAAK,KAAK,QAAQ,MAAM,IAAI;AAC5C,kBAAM,WAAW,KAAK,KAAK,SAAS,MAAM,IAAI;AAC9C,gBAAI,MAAM,YAAY,GAAG;AACvB,kBAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,IAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AACxE,4BAAc,SAAS,QAAQ;AAAA,YACjC,WAAW,CAAC,GAAG,WAAW,QAAQ,GAAG;AACnC,iBAAG,WAAW,SAAS,QAAQ;AAAA,YACjC;AAAA,UACF;AAAA,QACF;AACA,sBAAc,aAAa,SAAS;AACpC,WAAG,OAAO,aAAa,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,MACzD,OAAO;AACL,WAAG,WAAW,aAAa,SAAS;AAAA,MACtC;AAGA,YAAM,aAAa,CAAC,QAAsB;AACxC,mBAAW,SAAS,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC,GAAG;AAChE,gBAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAI,MAAM,YAAY,GAAG;AAAE,uBAAW,QAAQ;AAAG;AAAA,UAAU;AAC3D,cAAI,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACjC,gBAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,8BAAoB,YAAY;AAChC,cAAI,oBAAoB,KAAK,OAAO,GAAG;AACrC,gCAAoB,YAAY;AAChC,eAAG,cAAc,UAAU,QAAQ,QAAQ,qBAAqB,aAAa,CAAC;AAAA,UAChF;AAAA,QACF;AAAA,MACF;AACA,iBAAW,SAAS;AAGpB,YAAM,YAAY,CAAC,QAAsB;AACvC,mBAAW,SAAS,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC,GAAG;AAChE,gBAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAI,MAAM,YAAY,GAAG;AAAE,sBAAU,QAAQ;AAAG;AAAA,UAAU;AAC1D,cAAI,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACjC,gBAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,cAAI,QAAQ,SAAS,WAAW,GAAG;AACjC,eAAG,cAAc,UAAU,QAAQ,QAAQ,eAAe,SAAS,CAAC;AAAA,UACtE;AAAA,QACF;AAAA,MACF;AACA,gBAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,SAAS,CAAC,QAAQ;AAChB,YAAM,SAAS,IAAI;AACnB,UAAI,CAAC,OAAQ;AAEb,YAAM,gBAAgB,OAAO;AAC7B,UAAI,OAAO,kBAAkB,WAAW;AACtC,eAAO,gBAAgB,EAAE,SAAS,eAAe,YAAY,KAAK;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AACF;AAGO,IAAM,4BAA4B,WAAW,WAAW,SAAS,CAAC,GAAG,WAAW;AAMhF,SAAS,cACd,KACA,UACA,KACS;AACT,QAAM,iBAAkB,IAAI,kBAA6B;AACzD,MAAI,MAAM;AAEV,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,WAAW,eAAgB;AAEzC,cAAU,QAAQ,KAAK,QAAQ;AAC/B,QAAI,iBAAiB,UAAU;AAC/B,UAAM;AAAA,EACR;AAEA,MAAI,KAAK;AACP,UAAM,OAAO;AACb,UAAM,KAAM,IAAI,kBAA6B;AAC7C,UAAM,yBAAyB,IAAI,QAAQ,EAAE,EAAE;AAAA,EACjD;AAEA,SAAO;AACT;;;AFtIO,IAAM,kBAAkB;AAExB,SAAS,WAAW,UAA8B;AACvD,QAAM,aAAaC,MAAK,KAAK,UAAU,eAAe;AAEtD,MAAI,CAACC,IAAG,WAAW,UAAU,GAAG;AAC9B,UAAM,IAAI,MAAM,0BAA0B,QAAQ,EAAE;AAAA,EACtD;AAEA,QAAM,MAAMA,IAAG,aAAa,YAAY,OAAO;AAC/C,QAAM,SAAS,YAAAC,QAAK,MAAM,GAAG;AAG7B,MAAI,OAAO,YAAY,KAAM,OAAO,cAA0C,SAAS;AACrF,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAGA,MAAI,aAAa;AACjB,MAAI,OAAO,YAAY,GAAG;AAExB,UAAM,QAAQ,OAAO;AACrB,UAAM,kBAAkB,OAAO;AAC/B,QAAI,mBAAmB,CAAC,OAAO,WAAW;AAExC,UAAI,gBAAgB,aAAa,aAAa;AAC5C,wBAAgB,WAAW;AAAA,MAC7B;AACA,aAAO,YAAY;AAAA,IACrB;AAGA,UAAM,SAAS,OAAO;AACtB,QAAI,QAAQ;AACV,YAAM,EAAE,MAAM,UAAU,IAAI;AAC5B,aAAO,SAAS,EAAE,MAAM,QAAQ,MAAM,WAAW,aAAa,OAAO;AAAA,IACvE;AAGA,UAAM,UAAU,OAAO;AACvB,QAAI,SAAS;AACX,YAAM,EAAE,kBAAkB,gBAAgB,WAAW,qBAAqB,kBAAk
B,IAAI;AAChG,aAAO,UAAU;AAAA,QACf;AAAA,QACA,WAAW,aAAa;AAAA,QACxB;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AAGd,WAAO,UAAU;AACjB,iBAAa;AAEb,YAAQ,OAAO,MAAM,kDAAkD;AAAA,EACzE;AAGA,QAAM,gBAAgB,cAAc,QAAQ,UAAU,CAAC,QAAQ;AAC7D,YAAQ,OAAO,MAAM,oBAAoB,GAAG;AAAA,CAAI;AAAA,EAClD,CAAC;AAGD,QAAM,SAAS,iBAAiB,MAAM,MAAM;AAG5C,QAAM,aAAa,cACd,kBACC,OAAO,kBAA4B,KAAK,6BACzC,OAAO,YAAY,OAAO;AAE/B,MAAI,YAAY;AACd,UAAM,aAAa,KAAK,MAAM,KAAK,UAAU,MAAM,CAAC;AACpD,IAAAD,IAAG,cAAc,YAAY,YAAAC,QAAK,UAAU,UAAU,GAAG,OAAO;AAAA,EAClE;AAEA,SAAO;AACT;AAEO,SAAS,WAAW,UAAkB,QAA0B;AAErE,QAAM,YAAY,iBAAiB,MAAM,MAAM;AAE/C,QAAM,aAAaF,MAAK,KAAK,UAAU,eAAe;AACtD,EAAAC,IAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAC1C,EAAAA,IAAG,cAAc,YAAY,YAAAC,QAAK,UAAU,SAAS,GAAG,OAAO;AACjE;AAEO,SAAS,aACd,UACA,IACY;AACZ,QAAM,UAAU,WAAW,QAAQ;AACnC,QAAM,UAAU,GAAG,OAAO;AAC1B,aAAW,UAAU,OAAO;AAC5B,SAAO;AACT;AAEO,SAAS,mBACd,UACA,QACY;AACZ,SAAO,aAAa,UAAU,CAAC,YAAY;AAAA,IACzC,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,OAAO,QAAQ,GAAG,OAAO;AAAA,EACxC,EAAE;AACJ;AAEO,SAAS,iBACd,UACA,MACY;AACZ,SAAO,aAAa,UAAU,CAAC,YAAY;AAAA,IACzC,GAAG;AAAA,IACH,MAAM,EAAE,GAAG,OAAO,MAAM,GAAG,KAAK;AAAA,EAClC,EAAE;AACJ;","names":["fs","path","path","fs","YAML"]}
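Note: the migrations framework embedded in this map documents its own recipe for adding a migration (next version number, a migrate function, automatic version tracking). A hedged sketch of what a next entry could look like; the version number, name, and key being dropped are hypothetical:

```ts
import type { Migration } from './migrations.js'; // type exported by src/config/migrations.ts

// Hypothetical migration 3, illustrating the framework's contract only.
const exampleMigration: Migration = {
  version: 3,
  name: 'drop-legacy-key',
  migrate: (doc, _vaultDir) => {
    // Mutate the raw YAML doc in place; runMigrations sets config_version
    // past this entry and the loader writes the file back.
    delete doc.some_legacy_key;
  },
};
```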
@@ -1,17 +1,17 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
   OpenRouterEmbeddingProvider
-} from "./chunk-
+} from "./chunk-AEJS57ZK.js";
 import {
   OpenAIEmbeddingProvider
-} from "./chunk-
+} from "./chunk-5QERXFH7.js";
 import {
   LmStudioBackend,
   OllamaBackend
-} from "./chunk-
+} from "./chunk-5SDH75YC.js";
 import {
   LLM_REQUEST_TIMEOUT_MS
-} from "./chunk-
+} from "./chunk-76ZO5RGT.js";
 
 // node_modules/@anthropic-ai/sdk/internal/tslib.mjs
 function __classPrivateFieldSet(receiver, state, value, kind, f) {
@@ -4929,4 +4929,4 @@ export {
   createLlmProvider,
   createEmbeddingProvider
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-MKKXCCQ5.js.map
@@ -1,16 +1,16 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
   isProcessAlive
-} from "./chunk-
+} from "./chunk-WXSJKESH.js";
 import {
   loadConfig
-} from "./chunk-
+} from "./chunk-MAZOVVDU.js";
 import {
   getDatabase
 } from "./chunk-MYX5NCRH.js";
 import {
   DIGEST_TIERS
-} from "./chunk-
+} from "./chunk-76ZO5RGT.js";
 
 // src/db/queries/embeddings.ts
 var EMBEDDABLE_TABLES = ["sessions", "spores", "plans", "artifacts"];
@@ -184,4 +184,4 @@ export {
   getEmbeddingQueueDepth,
   gatherStats
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-MSXYUXZR.js.map
@@ -70,7 +70,15 @@ var LOG_KINDS = {
   // MCP
   MCP_EVENT: "mcp.event",
   // Log retention
-  LOG_RETENTION: "log.retention"
+  LOG_RETENTION: "log.retention",
+  // Backup
+  BACKUP_START: "backup.start",
+  BACKUP_COMPLETE: "backup.complete",
+  BACKUP_ERROR: "backup.error",
+  // Team sync
+  TEAM_SYNC_START: "team-sync.start",
+  TEAM_SYNC_COMPLETE: "team-sync.complete",
+  TEAM_SYNC_ERROR: "team-sync.error"
 };
 function kindToComponent(kind) {
   const dot = kind.indexOf(".");
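Note: only the first line of kindToComponent is visible in this hunk, but kind.indexOf(".") suggests the component is the prefix before the first dot, so the new kinds route to backup and team-sync log components. A presumed reconstruction:

```ts
// Presumed shape, reconstructed from the visible first line; the tail is an assumption.
function kindToComponent(kind: string): string {
  const dot = kind.indexOf(".");
  return dot === -1 ? kind : kind.slice(0, dot);
}

kindToComponent("backup.complete"); // "backup"
kindToComponent("team-sync.error"); // "team-sync"
```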
@@ -81,4 +89,4 @@ export {
   LOG_KINDS,
   kindToComponent
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-S6I62FAH.js.map