@goondocks/myco 0.9.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/marketplace.json +1 -4
- package/.claude-plugin/plugin.json +1 -1
- package/README.md +19 -2
- package/dist/{agent-run-EFICNTAU.js → agent-run-CGXF5PPC.js} +7 -7
- package/dist/{agent-tasks-RXJ7Z5NG.js → agent-tasks-T7NVI3R7.js} +7 -7
- package/dist/{chunk-JMJJEQ3P.js → chunk-5LPERML5.js} +3 -3
- package/dist/{chunk-RJ6ZQKG5.js → chunk-5QERXFH7.js} +2 -2
- package/dist/{chunk-UBZPD4HN.js → chunk-5SDH75YC.js} +2 -2
- package/dist/{chunk-5VZ52A4T.js → chunk-76ZO5RGT.js} +16 -2
- package/dist/{chunk-5VZ52A4T.js.map → chunk-76ZO5RGT.js.map} +1 -1
- package/dist/{chunk-46PWOKSI.js → chunk-AEJS57ZK.js} +2 -2
- package/dist/{chunk-DCXRSSBP.js → chunk-C3AEZ3BZ.js} +3 -3
- package/dist/{chunk-4LPQ26CK.js → chunk-CUDM5YJY.js} +25 -8
- package/dist/chunk-CUDM5YJY.js.map +1 -0
- package/dist/{chunk-YDN4OM33.js → chunk-D6DXYAFK.js} +20 -7
- package/dist/chunk-D6DXYAFK.js.map +1 -0
- package/dist/chunk-ENWBFX7F.js +50 -0
- package/dist/chunk-ENWBFX7F.js.map +1 -0
- package/dist/{chunk-OXZSXYAT.js → chunk-FFQES5MC.js} +48 -21
- package/dist/chunk-FFQES5MC.js.map +1 -0
- package/dist/{chunk-U3IBO3O3.js → chunk-FMIWFRAM.js} +3 -3
- package/dist/{chunk-KYLDNM7H.js → chunk-FPMEIN2W.js} +2 -2
- package/dist/{chunk-PB6TOLRQ.js → chunk-G2LQBFE3.js} +2 -2
- package/dist/{chunk-XNOCTDHF.js → chunk-J4RVYUH4.js} +2 -2
- package/dist/{chunk-MHSCMET3.js → chunk-MAZOVVDU.js} +33 -3
- package/dist/chunk-MAZOVVDU.js.map +1 -0
- package/dist/{chunk-JYOOJCPQ.js → chunk-MKKXCCQ5.js} +5 -5
- package/dist/{chunk-QIK2XSDQ.js → chunk-MSXYUXZR.js} +4 -4
- package/dist/{chunk-FFAYUQ5N.js → chunk-RJMXDUMA.js} +2 -1
- package/dist/{chunk-WGTCA2NU.js → chunk-S6I62FAH.js} +10 -2
- package/dist/{chunk-WGTCA2NU.js.map → chunk-S6I62FAH.js.map} +1 -1
- package/dist/{chunk-3K5WGSJ4.js → chunk-U7UUJ4FD.js} +23 -8
- package/dist/chunk-U7UUJ4FD.js.map +1 -0
- package/dist/{chunk-PT5IC642.js → chunk-W6HI4CCS.js} +2 -2
- package/dist/{chunk-KB4DGYIY.js → chunk-WXSJKESH.js} +12 -7
- package/dist/{chunk-KB4DGYIY.js.map → chunk-WXSJKESH.js.map} +1 -1
- package/dist/{chunk-KV4OC4H3.js → chunk-WZZH3YXJ.js} +119 -16
- package/dist/chunk-WZZH3YXJ.js.map +1 -0
- package/dist/chunk-XLY3REL3.js +165 -0
- package/dist/chunk-XLY3REL3.js.map +1 -0
- package/dist/{chunk-TRUJLI6K.js → chunk-YZMNEIFI.js} +9 -5
- package/dist/chunk-YZMNEIFI.js.map +1 -0
- package/dist/{chunk-2T7RPVPP.js → chunk-ZESTWGJT.js} +2 -2
- package/dist/{chunk-BUSP3OJB.js → chunk-ZMW6KQX2.js} +3 -3
- package/dist/{cli-ODLFRIYS.js → cli-6CPFJGRZ.js} +47 -36
- package/dist/cli-6CPFJGRZ.js.map +1 -0
- package/dist/client-B27SN5QG.js +15 -0
- package/dist/{config-UR5BSGVX.js → config-G3CSGI7P.js} +2 -2
- package/dist/{detect-providers-Q42OD4OS.js → detect-providers-AZ6DEQU7.js} +5 -5
- package/dist/{doctor-JLKTXDEH.js → doctor-RHHWJTMB.js} +10 -10
- package/dist/{executor-ONSDHPGX.js → executor-A5C5KDLP.js} +33 -20
- package/dist/executor-A5C5KDLP.js.map +1 -0
- package/dist/{init-6GWY345B.js → init-ARJROOWV.js} +15 -15
- package/dist/{init-wizard-UONLDYLI.js → init-wizard-XNFOZCEB.js} +8 -8
- package/dist/llm-XJFHRFHB.js +17 -0
- package/dist/{loader-SH67XD54.js → loader-GKXR5ONU.js} +4 -4
- package/dist/{loader-XVXKZZDH.js → loader-PZ7ZRSA4.js} +8 -4
- package/dist/{logs-QZVYF6FP.js → logs-LXHPDKUA.js} +3 -3
- package/dist/machine-id-RCM7TXPJ.js +13 -0
- package/dist/{main-BMCL7CPO.js → main-PVX6R3I6.js} +752 -80
- package/dist/main-PVX6R3I6.js.map +1 -0
- package/dist/{openai-embeddings-C265WRNK.js → openai-embeddings-ST3B6GW7.js} +5 -5
- package/dist/{openrouter-U6VFCRX2.js → openrouter-HJHOO3EO.js} +5 -5
- package/dist/{post-compact-OWFSOITU.js → post-compact-LR3DSGT3.js} +7 -7
- package/dist/{post-tool-use-DOUM7CGQ.js → post-tool-use-SOFVNFU3.js} +6 -6
- package/dist/{post-tool-use-failure-SG3C7PE6.js → post-tool-use-failure-2CZZZASB.js} +7 -7
- package/dist/{pre-compact-3J33CHXQ.js → pre-compact-3E3D6565.js} +7 -7
- package/dist/{provider-check-3WBPZADE.js → provider-check-SOTDYLJE.js} +5 -5
- package/dist/{registry-J4XTWARS.js → registry-WVZG6R2R.js} +5 -5
- package/dist/{resolution-events-TFEQPVKS.js → resolution-events-UPHJJLDQ.js} +5 -2
- package/dist/{restart-2VM33WOB.js → restart-XIUFVS33.js} +8 -8
- package/dist/{search-ZGQR5MDE.js → search-VB6Z2ZXV.js} +8 -8
- package/dist/{server-6KMBJCHZ.js → server-AKPBRP6Z.js} +5 -5
- package/dist/{session-Z2FXDDG6.js → session-UVZS6CY5.js} +9 -8
- package/dist/{session-Z2FXDDG6.js.map → session-UVZS6CY5.js.map} +1 -1
- package/dist/{session-end-FLVX32LE.js → session-end-YMQ44U6Z.js} +6 -6
- package/dist/{session-start-UCLK7PXE.js → session-start-3754HF3N.js} +11 -10
- package/dist/{session-start-UCLK7PXE.js.map → session-start-3754HF3N.js.map} +1 -1
- package/dist/{setup-llm-GKMCHURK.js → setup-llm-NWHOPJUV.js} +8 -8
- package/dist/src/cli.js +1 -1
- package/dist/src/daemon/main.js +1 -1
- package/dist/src/hooks/post-tool-use.js +1 -1
- package/dist/src/hooks/session-end.js +1 -1
- package/dist/src/hooks/session-start.js +1 -1
- package/dist/src/hooks/stop.js +1 -1
- package/dist/src/hooks/user-prompt-submit.js +1 -1
- package/dist/src/mcp/server.js +1 -1
- package/dist/{stats-IUJPZSVZ.js → stats-CDQXOTEC.js} +9 -9
- package/dist/{stop-XRQLLXST.js → stop-WSFGRPXZ.js} +6 -6
- package/dist/{stop-failure-2CAJJKRG.js → stop-failure-4FR7574F.js} +7 -7
- package/dist/{subagent-start-MWWQTZMQ.js → subagent-start-7SGBXJYP.js} +7 -7
- package/dist/{subagent-stop-PJXYGRXB.js → subagent-stop-MRVTNX3V.js} +7 -7
- package/dist/{task-completed-4LFRJVGI.js → task-completed-XXPYPSRV.js} +7 -7
- package/dist/team-XMHYCKFF.js +251 -0
- package/dist/team-XMHYCKFF.js.map +1 -0
- package/dist/ui/assets/index-BGbil7f1.css +1 -0
- package/dist/ui/assets/index-CPA_uq_j.js +794 -0
- package/dist/ui/index.html +2 -2
- package/dist/update-W3UFZU4G.js +79 -0
- package/dist/update-W3UFZU4G.js.map +1 -0
- package/dist/{user-prompt-submit-KSM3AR6P.js → user-prompt-submit-LSWCYUW3.js} +6 -6
- package/dist/{verify-UDAYVX37.js → verify-O7TQ5DDY.js} +9 -9
- package/dist/{version-KLBN4HZT.js → version-VWWY7SPQ.js} +2 -2
- package/dist/version-VWWY7SPQ.js.map +1 -0
- package/package.json +1 -1
- package/dist/chunk-3K5WGSJ4.js.map +0 -1
- package/dist/chunk-4LPQ26CK.js.map +0 -1
- package/dist/chunk-KV4OC4H3.js.map +0 -1
- package/dist/chunk-MHSCMET3.js.map +0 -1
- package/dist/chunk-OXZSXYAT.js.map +0 -1
- package/dist/chunk-TRUJLI6K.js.map +0 -1
- package/dist/chunk-YDN4OM33.js.map +0 -1
- package/dist/cli-ODLFRIYS.js.map +0 -1
- package/dist/client-MXRNQ5FI.js +0 -13
- package/dist/executor-ONSDHPGX.js.map +0 -1
- package/dist/llm-BV3QNVRD.js +0 -17
- package/dist/main-BMCL7CPO.js.map +0 -1
- package/dist/ui/assets/index-DZrElonz.js +0 -744
- package/dist/ui/assets/index-TkeiYbZB.css +0 -1
- /package/dist/{agent-run-EFICNTAU.js.map → agent-run-CGXF5PPC.js.map} +0 -0
- /package/dist/{agent-tasks-RXJ7Z5NG.js.map → agent-tasks-T7NVI3R7.js.map} +0 -0
- /package/dist/{chunk-JMJJEQ3P.js.map → chunk-5LPERML5.js.map} +0 -0
- /package/dist/{chunk-RJ6ZQKG5.js.map → chunk-5QERXFH7.js.map} +0 -0
- /package/dist/{chunk-UBZPD4HN.js.map → chunk-5SDH75YC.js.map} +0 -0
- /package/dist/{chunk-46PWOKSI.js.map → chunk-AEJS57ZK.js.map} +0 -0
- /package/dist/{chunk-DCXRSSBP.js.map → chunk-C3AEZ3BZ.js.map} +0 -0
- /package/dist/{chunk-U3IBO3O3.js.map → chunk-FMIWFRAM.js.map} +0 -0
- /package/dist/{chunk-KYLDNM7H.js.map → chunk-FPMEIN2W.js.map} +0 -0
- /package/dist/{chunk-PB6TOLRQ.js.map → chunk-G2LQBFE3.js.map} +0 -0
- /package/dist/{chunk-XNOCTDHF.js.map → chunk-J4RVYUH4.js.map} +0 -0
- /package/dist/{chunk-JYOOJCPQ.js.map → chunk-MKKXCCQ5.js.map} +0 -0
- /package/dist/{chunk-QIK2XSDQ.js.map → chunk-MSXYUXZR.js.map} +0 -0
- /package/dist/{chunk-FFAYUQ5N.js.map → chunk-RJMXDUMA.js.map} +0 -0
- /package/dist/{chunk-PT5IC642.js.map → chunk-W6HI4CCS.js.map} +0 -0
- /package/dist/{chunk-2T7RPVPP.js.map → chunk-ZESTWGJT.js.map} +0 -0
- /package/dist/{chunk-BUSP3OJB.js.map → chunk-ZMW6KQX2.js.map} +0 -0
- /package/dist/{client-MXRNQ5FI.js.map → client-B27SN5QG.js.map} +0 -0
- /package/dist/{config-UR5BSGVX.js.map → config-G3CSGI7P.js.map} +0 -0
- /package/dist/{detect-providers-Q42OD4OS.js.map → detect-providers-AZ6DEQU7.js.map} +0 -0
- /package/dist/{doctor-JLKTXDEH.js.map → doctor-RHHWJTMB.js.map} +0 -0
- /package/dist/{init-6GWY345B.js.map → init-ARJROOWV.js.map} +0 -0
- /package/dist/{init-wizard-UONLDYLI.js.map → init-wizard-XNFOZCEB.js.map} +0 -0
- /package/dist/{llm-BV3QNVRD.js.map → llm-XJFHRFHB.js.map} +0 -0
- /package/dist/{loader-SH67XD54.js.map → loader-GKXR5ONU.js.map} +0 -0
- /package/dist/{loader-XVXKZZDH.js.map → loader-PZ7ZRSA4.js.map} +0 -0
- /package/dist/{logs-QZVYF6FP.js.map → logs-LXHPDKUA.js.map} +0 -0
- /package/dist/{openai-embeddings-C265WRNK.js.map → machine-id-RCM7TXPJ.js.map} +0 -0
- /package/dist/{openrouter-U6VFCRX2.js.map → openai-embeddings-ST3B6GW7.js.map} +0 -0
- /package/dist/{provider-check-3WBPZADE.js.map → openrouter-HJHOO3EO.js.map} +0 -0
- /package/dist/{post-compact-OWFSOITU.js.map → post-compact-LR3DSGT3.js.map} +0 -0
- /package/dist/{post-tool-use-DOUM7CGQ.js.map → post-tool-use-SOFVNFU3.js.map} +0 -0
- /package/dist/{post-tool-use-failure-SG3C7PE6.js.map → post-tool-use-failure-2CZZZASB.js.map} +0 -0
- /package/dist/{pre-compact-3J33CHXQ.js.map → pre-compact-3E3D6565.js.map} +0 -0
- /package/dist/{registry-J4XTWARS.js.map → provider-check-SOTDYLJE.js.map} +0 -0
- /package/dist/{resolution-events-TFEQPVKS.js.map → registry-WVZG6R2R.js.map} +0 -0
- /package/dist/{version-KLBN4HZT.js.map → resolution-events-UPHJJLDQ.js.map} +0 -0
- /package/dist/{restart-2VM33WOB.js.map → restart-XIUFVS33.js.map} +0 -0
- /package/dist/{search-ZGQR5MDE.js.map → search-VB6Z2ZXV.js.map} +0 -0
- /package/dist/{server-6KMBJCHZ.js.map → server-AKPBRP6Z.js.map} +0 -0
- /package/dist/{session-end-FLVX32LE.js.map → session-end-YMQ44U6Z.js.map} +0 -0
- /package/dist/{setup-llm-GKMCHURK.js.map → setup-llm-NWHOPJUV.js.map} +0 -0
- /package/dist/{stats-IUJPZSVZ.js.map → stats-CDQXOTEC.js.map} +0 -0
- /package/dist/{stop-XRQLLXST.js.map → stop-WSFGRPXZ.js.map} +0 -0
- /package/dist/{stop-failure-2CAJJKRG.js.map → stop-failure-4FR7574F.js.map} +0 -0
- /package/dist/{subagent-start-MWWQTZMQ.js.map → subagent-start-7SGBXJYP.js.map} +0 -0
- /package/dist/{subagent-stop-PJXYGRXB.js.map → subagent-stop-MRVTNX3V.js.map} +0 -0
- /package/dist/{task-completed-4LFRJVGI.js.map → task-completed-XXPYPSRV.js.map} +0 -0
- /package/dist/{user-prompt-submit-KSM3AR6P.js.map → user-prompt-submit-LSWCYUW3.js.map} +0 -0
- /package/dist/{verify-UDAYVX37.js.map → verify-O7TQ5DDY.js.map} +0 -0
@@ -0,0 +1 @@
{"version":3,"sources":["../src/db/schema.ts"],"sourcesContent":["/**\n * SQLite database schema -- all capture, intelligence, and agent state tables.\n *\n * Uses `CREATE TABLE IF NOT EXISTS` and `CREATE INDEX IF NOT EXISTS` throughout\n * for idempotency. Running `createSchema()` multiple times is always safe.\n *\n * Timestamp convention: all timestamps are INTEGER (Unix epoch seconds).\n * Content hashing: all `content_hash` columns are TEXT with UNIQUE constraint.\n * Embedding dimensions: 1024 (bge-m3 default) -- used by external sqlite-vec store.\n *\n * Vector columns live in a separate sqlite-vec virtual table, not inline.\n * Tables that participate in vector search carry an `embedded INTEGER DEFAULT 0`\n * flag so the embedder knows which rows still need vectors.\n */\n\nimport type { Database } from 'better-sqlite3';\nimport { epochSeconds, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n/** Current schema version -- fresh start for the SQLite era. */\nexport const SCHEMA_VERSION = 4;\n\n// Re-export for backwards compat (other modules import from schema.ts)\nexport { DEFAULT_MACHINE_ID };\n\n/** Embedding vector dimensions (bge-m3 default). */\nexport const EMBEDDING_DIMENSIONS = 1024;\n\n// ---------------------------------------------------------------------------\n// DDL statements\n// ---------------------------------------------------------------------------\n\nconst SCHEMA_VERSION_TABLE = `\n CREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at INTEGER NOT NULL\n )`;\n\n// -- Capture Layer ----------------------------------------------------------\n\nconst SESSIONS_TABLE = `\n CREATE TABLE IF NOT EXISTS sessions (\n id TEXT PRIMARY KEY,\n agent TEXT NOT NULL,\n \"user\" TEXT,\n project_root TEXT,\n branch TEXT,\n started_at INTEGER NOT NULL,\n ended_at INTEGER,\n status TEXT DEFAULT 'active',\n prompt_count INTEGER DEFAULT 0,\n tool_count INTEGER DEFAULT 0,\n title TEXT,\n summary TEXT,\n transcript_path TEXT,\n parent_session_id TEXT,\n parent_session_reason TEXT,\n processed INTEGER DEFAULT 0,\n content_hash TEXT UNIQUE,\n created_at INTEGER NOT NULL,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst PROMPT_BATCHES_TABLE = `\n CREATE TABLE IF NOT EXISTS prompt_batches (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL REFERENCES sessions(id),\n prompt_number INTEGER,\n user_prompt TEXT,\n response_summary TEXT,\n classification TEXT,\n started_at INTEGER,\n ended_at INTEGER,\n status TEXT DEFAULT 'active',\n activity_count INTEGER DEFAULT 0,\n processed INTEGER DEFAULT 0,\n content_hash TEXT UNIQUE,\n created_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ACTIVITIES_TABLE = `\n CREATE TABLE IF NOT EXISTS activities (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL REFERENCES sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n tool_name TEXT NOT NULL,\n tool_input TEXT,\n tool_output_summary TEXT,\n file_path TEXT,\n files_affected TEXT,\n duration_ms INTEGER,\n success INTEGER DEFAULT 1,\n error_message TEXT,\n timestamp INTEGER NOT NULL,\n processed INTEGER DEFAULT 0,\n content_hash TEXT UNIQUE,\n created_at INTEGER NOT NULL\n )`;\n\nconst PLANS_TABLE = `\n CREATE TABLE IF NOT EXISTS plans (\n id TEXT PRIMARY KEY,\n status TEXT DEFAULT 'active',\n author TEXT,\n title TEXT,\n content TEXT,\n source_path TEXT,\n tags TEXT,\n session_id TEXT REFERENCES 
sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n content_hash TEXT,\n processed INTEGER DEFAULT 0,\n created_at INTEGER NOT NULL,\n updated_at INTEGER,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ARTIFACTS_TABLE = `\n CREATE TABLE IF NOT EXISTS artifacts (\n id TEXT PRIMARY KEY,\n artifact_type TEXT,\n source_path TEXT NOT NULL,\n title TEXT NOT NULL,\n content TEXT,\n last_captured_by TEXT,\n tags TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst TEAM_MEMBERS_TABLE = `\n CREATE TABLE IF NOT EXISTS team_members (\n id TEXT PRIMARY KEY,\n \"user\" TEXT NOT NULL,\n role TEXT,\n joined TEXT,\n tags TEXT,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ATTACHMENTS_TABLE = `\n CREATE TABLE IF NOT EXISTS attachments (\n id TEXT PRIMARY KEY,\n session_id TEXT REFERENCES sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n file_path TEXT NOT NULL,\n media_type TEXT,\n description TEXT,\n data BLOB,\n content_hash TEXT,\n created_at INTEGER NOT NULL\n )`;\n\n// -- Intelligence Layer -----------------------------------------------------\n\nconst AGENTS_TABLE = `\n CREATE TABLE IF NOT EXISTS agents (\n id TEXT PRIMARY KEY,\n name TEXT NOT NULL,\n provider TEXT,\n model TEXT,\n system_prompt_hash TEXT,\n config TEXT,\n source TEXT NOT NULL DEFAULT 'built-in',\n system_prompt TEXT,\n max_turns INTEGER,\n timeout_seconds INTEGER,\n tool_access TEXT,\n enabled INTEGER NOT NULL DEFAULT 1,\n created_at INTEGER NOT NULL,\n updated_at INTEGER\n )`;\n\nconst SPORES_TABLE = `\n CREATE TABLE IF NOT EXISTS spores (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n session_id TEXT REFERENCES sessions(id),\n prompt_batch_id INTEGER REFERENCES prompt_batches(id),\n observation_type TEXT NOT NULL,\n status TEXT DEFAULT 'active',\n content TEXT NOT NULL,\n context TEXT,\n importance INTEGER DEFAULT 5,\n file_path TEXT,\n tags TEXT,\n content_hash TEXT UNIQUE,\n properties TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER,\n embedded INTEGER DEFAULT 0,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ENTITIES_TABLE = `\n CREATE TABLE IF NOT EXISTS entities (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n type TEXT NOT NULL,\n name TEXT NOT NULL,\n properties TEXT,\n first_seen INTEGER NOT NULL,\n last_seen INTEGER NOT NULL,\n status TEXT DEFAULT 'active',\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER,\n UNIQUE (agent_id, type, name)\n )`;\n\nconst GRAPH_EDGES_TABLE = `\n CREATE TABLE IF NOT EXISTS graph_edges (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n source_id TEXT NOT NULL,\n source_type TEXT NOT NULL,\n target_id TEXT NOT NULL,\n target_type TEXT NOT NULL,\n type TEXT NOT NULL,\n session_id TEXT,\n confidence REAL DEFAULT 1.0,\n properties TEXT,\n created_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst ENTITY_MENTIONS_TABLE = `\n CREATE TABLE IF NOT EXISTS entity_mentions (\n entity_id TEXT NOT NULL REFERENCES entities(id),\n note_id TEXT NOT NULL,\n note_type TEXT NOT NULL,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER,\n UNIQUE (entity_id, note_id, note_type, agent_id)\n )`;\n\nconst RESOLUTION_EVENTS_TABLE = 
`\n CREATE TABLE IF NOT EXISTS resolution_events (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n spore_id TEXT NOT NULL REFERENCES spores(id),\n action TEXT NOT NULL,\n new_spore_id TEXT,\n reason TEXT,\n session_id TEXT,\n created_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER\n )`;\n\nconst DIGEST_EXTRACTS_TABLE = `\n CREATE TABLE IF NOT EXISTS digest_extracts (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n tier INTEGER NOT NULL,\n content TEXT NOT NULL,\n substrate_hash TEXT,\n generated_at INTEGER NOT NULL,\n machine_id TEXT NOT NULL DEFAULT 'local',\n synced_at INTEGER,\n UNIQUE (agent_id, tier)\n )`;\n\n// -- Agent State Layer ------------------------------------------------------\n\nconst AGENT_RUNS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_runs (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n task TEXT,\n instruction TEXT,\n status TEXT DEFAULT 'pending',\n started_at INTEGER,\n completed_at INTEGER,\n tokens_used INTEGER,\n cost_usd REAL,\n actions_taken TEXT,\n error TEXT\n )`;\n\nconst AGENT_REPORTS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_reports (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n run_id TEXT NOT NULL REFERENCES agent_runs(id),\n agent_id TEXT NOT NULL REFERENCES agents(id),\n action TEXT NOT NULL,\n summary TEXT NOT NULL,\n details TEXT,\n created_at INTEGER NOT NULL\n )`;\n\nconst AGENT_TURNS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_turns (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n run_id TEXT NOT NULL REFERENCES agent_runs(id),\n agent_id TEXT NOT NULL REFERENCES agents(id),\n turn_number INTEGER NOT NULL,\n tool_name TEXT NOT NULL,\n tool_input TEXT,\n tool_output_summary TEXT,\n started_at INTEGER,\n completed_at INTEGER\n )`;\n\nconst AGENT_TASKS_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_tasks (\n id TEXT PRIMARY KEY,\n agent_id TEXT NOT NULL REFERENCES agents(id),\n source TEXT NOT NULL DEFAULT 'built-in',\n display_name TEXT,\n description TEXT,\n prompt TEXT NOT NULL,\n is_default INTEGER DEFAULT 0,\n tool_overrides TEXT,\n model TEXT,\n config TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER\n )`;\n\nconst AGENT_STATE_TABLE = `\n CREATE TABLE IF NOT EXISTS agent_state (\n agent_id TEXT NOT NULL REFERENCES agents(id),\n key TEXT NOT NULL,\n value TEXT NOT NULL,\n updated_at INTEGER NOT NULL,\n PRIMARY KEY (agent_id, key)\n )`;\n\n// -- Sync Layer -------------------------------------------------------------\n\nconst TEAM_OUTBOX_TABLE = `\n CREATE TABLE IF NOT EXISTS team_outbox (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n table_name TEXT NOT NULL,\n row_id TEXT NOT NULL,\n operation TEXT NOT NULL DEFAULT 'upsert',\n payload TEXT NOT NULL,\n machine_id TEXT NOT NULL,\n created_at INTEGER NOT NULL,\n sent_at INTEGER\n )`;\n\n// -- Logging Layer ----------------------------------------------------------\n\nconst LOG_ENTRIES_TABLE = `\n CREATE TABLE IF NOT EXISTS log_entries (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n timestamp TEXT NOT NULL,\n level TEXT NOT NULL,\n component TEXT NOT NULL,\n kind TEXT NOT NULL,\n message TEXT NOT NULL,\n data TEXT,\n session_id TEXT\n )`;\n\n// -- FTS5 Virtual Tables ----------------------------------------------------\n\nconst FTS_TABLES = [\n `CREATE VIRTUAL TABLE IF NOT EXISTS prompt_batches_fts\n USING fts5(user_prompt, content='prompt_batches', content_rowid='id')`,\n\n `CREATE VIRTUAL TABLE IF NOT EXISTS activities_fts\n USING fts5(tool_name, tool_input, file_path, 
content='activities', content_rowid='id')`,\n\n `CREATE VIRTUAL TABLE IF NOT EXISTS log_entries_fts\n USING fts5(message, content='log_entries', content_rowid='id')`,\n\n // FTS5 sync triggers for log_entries (external-content table)\n `CREATE TRIGGER IF NOT EXISTS log_entries_ai AFTER INSERT ON log_entries BEGIN\n INSERT INTO log_entries_fts(rowid, message) VALUES (new.id, new.message);\n END`,\n\n `CREATE TRIGGER IF NOT EXISTS log_entries_ad AFTER DELETE ON log_entries BEGIN\n INSERT INTO log_entries_fts(log_entries_fts, rowid, message) VALUES('delete', old.id, old.message);\n END`,\n];\n\n// -- Indexes ----------------------------------------------------------------\n\nconst SECONDARY_INDEXES = [\n // Sessions\n 'CREATE INDEX IF NOT EXISTS idx_sessions_status ON sessions (status)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_processed ON sessions (processed)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_started_at ON sessions (started_at)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_agent ON sessions (agent)',\n\n // Prompt batches\n 'CREATE INDEX IF NOT EXISTS idx_prompt_batches_session_id ON prompt_batches (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_prompt_batches_processed ON prompt_batches (processed)',\n 'CREATE INDEX IF NOT EXISTS idx_prompt_batches_status ON prompt_batches (status)',\n\n // Activities\n 'CREATE INDEX IF NOT EXISTS idx_activities_session_id ON activities (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_prompt_batch_id ON activities (prompt_batch_id)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_tool_name ON activities (tool_name)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_timestamp ON activities (timestamp)',\n 'CREATE INDEX IF NOT EXISTS idx_activities_processed ON activities (processed)',\n\n // Spores\n 'CREATE INDEX IF NOT EXISTS idx_spores_agent_id ON spores (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_session_id ON spores (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_status ON spores (status)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_observation_type ON spores (observation_type)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_created_at ON spores (created_at)',\n\n // Entities\n 'CREATE INDEX IF NOT EXISTS idx_entities_agent_id ON entities (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_entities_type ON entities (type)',\n\n // Graph edges\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_source ON graph_edges (source_id, source_type)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_target ON graph_edges (target_id, target_type)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_type ON graph_edges (type)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_agent ON graph_edges (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_source_type ON graph_edges (source_id, type)',\n\n // Entity mentions\n 'CREATE INDEX IF NOT EXISTS idx_entity_mentions_entity_id ON entity_mentions (entity_id)',\n 'CREATE INDEX IF NOT EXISTS idx_entity_mentions_agent_id ON entity_mentions (agent_id)',\n\n // Resolution events\n 'CREATE INDEX IF NOT EXISTS idx_resolution_events_spore_id ON resolution_events (spore_id)',\n 'CREATE INDEX IF NOT EXISTS idx_resolution_events_agent_id ON resolution_events (agent_id)',\n\n // Digest extracts\n 'CREATE INDEX IF NOT EXISTS idx_digest_extracts_agent_id ON digest_extracts (agent_id)',\n\n // Agent runs\n 'CREATE INDEX IF NOT EXISTS idx_agent_runs_agent_id ON agent_runs (agent_id)',\n 'CREATE INDEX IF NOT EXISTS idx_agent_runs_status ON agent_runs (status)',\n 'CREATE INDEX IF NOT EXISTS idx_agent_runs_agent_status 
ON agent_runs (agent_id, status)',\n\n // Agent reports\n 'CREATE INDEX IF NOT EXISTS idx_agent_reports_run_id ON agent_reports (run_id)',\n\n // Agent turns\n 'CREATE INDEX IF NOT EXISTS idx_agent_turns_run_id ON agent_turns (run_id)',\n\n // Agent tasks\n 'CREATE INDEX IF NOT EXISTS idx_agent_tasks_agent_id ON agent_tasks (agent_id)',\n\n // Plans\n 'CREATE INDEX IF NOT EXISTS idx_plans_session_id ON plans (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_source_path ON plans (source_path)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_content_hash ON plans (content_hash)',\n // Attachments\n 'CREATE INDEX IF NOT EXISTS idx_attachments_file_path ON attachments (file_path)',\n\n // Team outbox\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_pending ON team_outbox (sent_at, created_at)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_table_name ON team_outbox (table_name)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_row_lookup ON team_outbox (table_name, row_id)',\n\n // Machine ID (synced tables)\n 'CREATE INDEX IF NOT EXISTS idx_sessions_machine_id ON sessions (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_machine_id ON spores (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_machine_id ON graph_edges (machine_id)',\n\n // Log entries\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_timestamp ON log_entries (timestamp)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_level ON log_entries (level)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_component ON log_entries (component)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_kind ON log_entries (kind)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_session_id ON log_entries (session_id)',\n];\n\n// -- Ordered table creation -------------------------------------------------\n\nconst TABLE_DDLS = [\n SCHEMA_VERSION_TABLE,\n // Capture layer (order matters for FK references)\n SESSIONS_TABLE,\n PROMPT_BATCHES_TABLE,\n ACTIVITIES_TABLE,\n PLANS_TABLE,\n ARTIFACTS_TABLE,\n TEAM_MEMBERS_TABLE,\n ATTACHMENTS_TABLE,\n // Intelligence layer\n AGENTS_TABLE,\n SPORES_TABLE,\n ENTITIES_TABLE,\n GRAPH_EDGES_TABLE,\n ENTITY_MENTIONS_TABLE,\n RESOLUTION_EVENTS_TABLE,\n DIGEST_EXTRACTS_TABLE,\n // Agent state layer\n AGENT_RUNS_TABLE,\n AGENT_REPORTS_TABLE,\n AGENT_TURNS_TABLE,\n AGENT_TASKS_TABLE,\n AGENT_STATE_TABLE,\n // Sync layer\n TEAM_OUTBOX_TABLE,\n // Logging layer\n LOG_ENTRIES_TABLE,\n];\n\n// ---------------------------------------------------------------------------\n// Migrations\n// ---------------------------------------------------------------------------\n\n/**\n * Migrate a version-1 database to version-2.\n *\n * Version 2 adds:\n * - plans.session_id, plans.prompt_batch_id, plans.content_hash\n * - attachments.data, attachments.content_hash\n * - indexes: idx_plans_session_id, idx_plans_source_path, idx_plans_content_hash\n *\n * Each ALTER TABLE is wrapped in try/catch so re-running is safe -- SQLite\n * throws \"duplicate column name\" if the column already exists, which we ignore.\n */\nfunction migrateV1ToV2(db: Database): void {\n db.exec('BEGIN');\n try {\n const alterStatements = [\n 'ALTER TABLE plans ADD COLUMN session_id TEXT REFERENCES sessions(id)',\n 'ALTER TABLE plans ADD COLUMN prompt_batch_id INTEGER REFERENCES prompt_batches(id)',\n 'ALTER TABLE plans ADD COLUMN content_hash TEXT',\n 'ALTER TABLE attachments ADD COLUMN data BLOB',\n 'ALTER TABLE attachments ADD COLUMN content_hash TEXT',\n ];\n\n for (const stmt of alterStatements) {\n try {\n db.exec(stmt);\n } catch {\n // Column already exists -- 
safe to ignore on re-run\n }\n }\n\n // Indexes use IF NOT EXISTS so they are idempotent\n const newIndexes = [\n 'CREATE INDEX IF NOT EXISTS idx_plans_session_id ON plans (session_id)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_source_path ON plans (source_path)',\n 'CREATE INDEX IF NOT EXISTS idx_plans_content_hash ON plans (content_hash)',\n 'CREATE INDEX IF NOT EXISTS idx_attachments_file_path ON attachments (file_path)',\n ];\n\n for (const idx of newIndexes) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(2, epochSeconds());\n\n db.exec('COMMIT');\n } catch (err) {\n db.exec('ROLLBACK');\n throw err;\n }\n}\n\n/**\n * Migrate a version-2 database to version-3.\n *\n * Version 3 adds:\n * - log_entries table\n * - log_entries_fts virtual table (FTS5)\n * - indexes: idx_log_entries_timestamp, _level, _component, _kind, _session_id\n *\n * Uses `CREATE ... IF NOT EXISTS` throughout for idempotency.\n */\nfunction migrateV2ToV3(db: Database): void {\n db.exec('BEGIN');\n try {\n db.exec(LOG_ENTRIES_TABLE);\n\n db.exec(\n `CREATE VIRTUAL TABLE IF NOT EXISTS log_entries_fts\n USING fts5(message, content='log_entries', content_rowid='id')`\n );\n\n // FTS5 sync triggers for log_entries\n db.exec(\n `CREATE TRIGGER IF NOT EXISTS log_entries_ai AFTER INSERT ON log_entries BEGIN\n INSERT INTO log_entries_fts(rowid, message) VALUES (new.id, new.message);\n END`\n );\n db.exec(\n `CREATE TRIGGER IF NOT EXISTS log_entries_ad AFTER DELETE ON log_entries BEGIN\n INSERT INTO log_entries_fts(log_entries_fts, rowid, message) VALUES('delete', old.id, old.message);\n END`\n );\n\n const newIndexes = [\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_timestamp ON log_entries (timestamp)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_level ON log_entries (level)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_component ON log_entries (component)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_kind ON log_entries (kind)',\n 'CREATE INDEX IF NOT EXISTS idx_log_entries_session_id ON log_entries (session_id)',\n ];\n\n for (const idx of newIndexes) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(3, epochSeconds());\n\n db.exec('COMMIT');\n } catch (err) {\n db.exec('ROLLBACK');\n throw err;\n }\n}\n\n/**\n * Migrate a version-3 database to version-4.\n *\n * Version 4 adds multi-machine support:\n * - machine_id TEXT NOT NULL DEFAULT 'local' on all synced tables\n * - synced_at INTEGER on all synced tables\n * - team_outbox table + indexes\n * - machine_id indexes on high-traffic tables\n *\n * Backfills existing rows with the provided machineId.\n */\nfunction migrateV3ToV4(db: Database, machineId: string): void {\n db.exec('BEGIN');\n try {\n // Tables that need machine_id + synced_at columns\n const syncedTables = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'entity_mentions',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n 'team_members',\n ];\n\n for (const table of syncedTables) {\n try {\n db.exec(`ALTER TABLE ${table} ADD COLUMN machine_id TEXT NOT NULL DEFAULT 'local'`);\n } catch {\n // Column already exists -- safe to ignore on re-run\n }\n try {\n db.exec(`ALTER TABLE ${table} ADD COLUMN synced_at INTEGER`);\n } catch {\n // Column already exists -- safe to ignore on re-run\n }\n }\n\n // Backfill machine_id on existing rows\n for (const 
table of syncedTables) {\n db.prepare(`UPDATE ${table} SET machine_id = ? WHERE machine_id = 'local'`).run(machineId);\n }\n\n // Create team_outbox table\n db.exec(TEAM_OUTBOX_TABLE);\n\n // Create new indexes (IF NOT EXISTS for idempotency)\n const newIndexes = [\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_pending ON team_outbox (sent_at, created_at)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_table_name ON team_outbox (table_name)',\n 'CREATE INDEX IF NOT EXISTS idx_team_outbox_row_lookup ON team_outbox (table_name, row_id)',\n 'CREATE INDEX IF NOT EXISTS idx_sessions_machine_id ON sessions (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_spores_machine_id ON spores (machine_id)',\n 'CREATE INDEX IF NOT EXISTS idx_graph_edges_machine_id ON graph_edges (machine_id)',\n ];\n\n for (const idx of newIndexes) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(4, epochSeconds());\n\n db.exec('COMMIT');\n } catch (err) {\n db.exec('ROLLBACK');\n throw err;\n }\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Create all database tables, indexes, and record the schema version.\n *\n * Fully idempotent -- safe to call on every startup. Uses `IF NOT EXISTS`\n * for all DDL and `ON CONFLICT DO NOTHING` for the version row.\n *\n * @param db — better-sqlite3 Database instance.\n * @param machineId — machine identifier for backfilling existing rows during\n * v3→v4 migration. Defaults to `'local'` (tests, init).\n */\nexport function createSchema(db: Database, machineId: string = DEFAULT_MACHINE_ID): void {\n // Fast-path: skip if already at current version\n try {\n const row = db.prepare(\n 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1'\n ).get() as { version: number } | undefined;\n if (row?.version === SCHEMA_VERSION) return;\n // Migration path: version 1 → 2 (then fall through to check for 2 → 3)\n if (row?.version === 1) {\n migrateV1ToV2(db);\n }\n // Migration path: version 2 → 3\n const afterV1Migration = (db.prepare(\n 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1'\n ).get() as { version: number } | undefined)?.version ?? 0;\n if (afterV1Migration < 3) {\n migrateV2ToV3(db);\n }\n // Migration path: version 3 → 4\n const afterV2Migration = (db.prepare(\n 'SELECT version FROM schema_version ORDER BY version DESC LIMIT 1'\n ).get() as { version: number } | undefined)?.version ?? 
0;\n if (afterV2Migration < 4) {\n migrateV3ToV4(db, machineId);\n }\n return;\n } catch {\n // Table doesn't exist yet -- first run\n }\n\n for (const ddl of TABLE_DDLS) {\n db.exec(ddl);\n }\n\n for (const ddl of FTS_TABLES) {\n db.exec(ddl);\n }\n\n for (const idx of SECONDARY_INDEXES) {\n db.exec(idx);\n }\n\n db.prepare(\n `INSERT INTO schema_version (version, applied_at)\n VALUES (?, ?)\n ON CONFLICT (version) DO NOTHING`\n ).run(SCHEMA_VERSION, epochSeconds());\n}\n"],"mappings":";;;;;;;AAmBO,IAAM,iBAAiB;AAMvB,IAAM,uBAAuB;AAMpC,IAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAQ7B,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAyBvB,IAAM,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmB7B,IAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAmBzB,IAAM,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBpB,IAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgBxB,IAAM,qBAAqB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW3B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAe1B,IAAM,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAkBrB,IAAM,eAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAsBrB,IAAM,iBAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAevB,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAiB1B,IAAM,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW9B,IAAM,0BAA0B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAchC,IAAM,wBAAwB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAe9B,IAAM,mBAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAezB,IAAM,sBAAsB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW5B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAa1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgB1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAW1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc1B,IAAM,oBAAoB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc1B,IAAM,aAAa;AAAA,EACjB;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA;AAAA,EAIA;AAAA;AAAA;AAAA,EAIA;AAAA;AAAA;AAGF;AAIA,IAAM,oBAAoB;AAAA;AAAA,EAExB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAGA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAIA,IAAM,aAAa;AAAA,EACjB;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA;AAAA,EAEA;AAAA;AAAA,EAEA;AACF;AAiBA,SAAS,cAAc,IAAoB;AACzC,KAAG,KAAK,OAAO;AACf,MAAI;AACF,UAAM,kBAAkB;AAAA,MACtB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,QAAQ,iBAAiB;AAClC,UAAI;AACF,WAAG,KAAK,IAAI;AAAA,MACd,QAAQ;AAAA,MAER;AAAA,I
ACF;AAGA,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,OAAO,YAAY;AAC5B,SAAG,KAAK,GAAG;AAAA,IACb;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,GAAG,aAAa,CAAC;AAEvB,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,KAAK;AACZ,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAYA,SAAS,cAAc,IAAoB;AACzC,KAAG,KAAK,OAAO;AACf,MAAI;AACF,OAAG,KAAK,iBAAiB;AAEzB,OAAG;AAAA,MACD;AAAA;AAAA,IAEF;AAGA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF;AACA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF;AAEA,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,OAAO,YAAY;AAC5B,SAAG,KAAK,GAAG;AAAA,IACb;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,GAAG,aAAa,CAAC;AAEvB,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,KAAK;AACZ,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAaA,SAAS,cAAc,IAAc,WAAyB;AAC5D,KAAG,KAAK,OAAO;AACf,MAAI;AAEF,UAAM,eAAe;AAAA,MACnB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,SAAS,cAAc;AAChC,UAAI;AACF,WAAG,KAAK,eAAe,KAAK,sDAAsD;AAAA,MACpF,QAAQ;AAAA,MAER;AACA,UAAI;AACF,WAAG,KAAK,eAAe,KAAK,+BAA+B;AAAA,MAC7D,QAAQ;AAAA,MAER;AAAA,IACF;AAGA,eAAW,SAAS,cAAc;AAChC,SAAG,QAAQ,UAAU,KAAK,gDAAgD,EAAE,IAAI,SAAS;AAAA,IAC3F;AAGA,OAAG,KAAK,iBAAiB;AAGzB,UAAM,aAAa;AAAA,MACjB;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,IACF;AAEA,eAAW,OAAO,YAAY;AAC5B,SAAG,KAAK,GAAG;AAAA,IACb;AAEA,OAAG;AAAA,MACD;AAAA;AAAA;AAAA,IAGF,EAAE,IAAI,GAAG,aAAa,CAAC;AAEvB,OAAG,KAAK,QAAQ;AAAA,EAClB,SAAS,KAAK;AACZ,OAAG,KAAK,UAAU;AAClB,UAAM;AAAA,EACR;AACF;AAgBO,SAAS,aAAa,IAAc,YAAoB,oBAA0B;AAEvF,MAAI;AACF,UAAM,MAAM,GAAG;AAAA,MACb;AAAA,IACF,EAAE,IAAI;AACN,QAAI,KAAK,YAAY,eAAgB;AAErC,QAAI,KAAK,YAAY,GAAG;AACtB,oBAAc,EAAE;AAAA,IAClB;AAEA,UAAM,mBAAoB,GAAG;AAAA,MAC3B;AAAA,IACF,EAAE,IAAI,GAAuC,WAAW;AACxD,QAAI,mBAAmB,GAAG;AACxB,oBAAc,EAAE;AAAA,IAClB;AAEA,UAAM,mBAAoB,GAAG;AAAA,MAC3B;AAAA,IACF,EAAE,IAAI,GAAuC,WAAW;AACxD,QAAI,mBAAmB,GAAG;AACxB,oBAAc,IAAI,SAAS;AAAA,IAC7B;AACA;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,aAAW,OAAO,YAAY;AAC5B,OAAG,KAAK,GAAG;AAAA,EACb;AAEA,aAAW,OAAO,YAAY;AAC5B,OAAG,KAAK,GAAG;AAAA,EACb;AAEA,aAAW,OAAO,mBAAmB;AACnC,OAAG,KAAK,GAAG;AAAA,EACb;AAEA,KAAG;AAAA,IACD;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,gBAAgB,aAAa,CAAC;AACtC;","names":[]}
@@ -0,0 +1,165 @@
+import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
+import {
+  getDatabase
+} from "./chunk-MYX5NCRH.js";
+import {
+  DEFAULT_MACHINE_ID
+} from "./chunk-76ZO5RGT.js";
+
+// src/daemon/team-context.ts
+var teamSyncEnabled = false;
+var teamMachineId = DEFAULT_MACHINE_ID;
+function initTeamContext(enabled, machineId) {
+  teamSyncEnabled = enabled;
+  teamMachineId = machineId;
+}
+function isTeamSyncEnabled() {
+  return teamSyncEnabled;
+}
+function getTeamMachineId() {
+  return teamMachineId;
+}
+
+// src/db/queries/team-outbox.ts
+var BURST_BATCH_SIZE = 200;
+var SENT_PRUNE_AGE_SECONDS = 86400;
+var MS_PER_SECOND = 1e3;
+var OUTBOX_COLUMNS = [
+  "id",
+  "table_name",
+  "row_id",
+  "operation",
+  "payload",
+  "machine_id",
+  "created_at",
+  "sent_at"
+];
+var SELECT_COLUMNS = OUTBOX_COLUMNS.join(", ");
+function toOutboxRow(row) {
+  return {
+    id: row.id,
+    table_name: row.table_name,
+    row_id: row.row_id,
+    operation: row.operation,
+    payload: row.payload,
+    machine_id: row.machine_id,
+    created_at: row.created_at,
+    sent_at: row.sent_at ?? null
+  };
+}
+function syncRow(tableName, row) {
+  if (!isTeamSyncEnabled()) return;
+  enqueueOutbox({
+    table_name: tableName,
+    row_id: String(row.id),
+    payload: JSON.stringify(row),
+    machine_id: getTeamMachineId(),
+    created_at: row.created_at ?? Math.floor(Date.now() / 1e3)
+  });
+}
+function enqueueOutbox(data) {
+  const db = getDatabase();
+  const info = db.prepare(
+    `INSERT INTO team_outbox (
+      table_name, row_id, operation, payload, machine_id, created_at
+    ) VALUES (?, ?, ?, ?, ?, ?)`
+  ).run(
+    data.table_name,
+    data.row_id,
+    data.operation ?? "upsert",
+    data.payload,
+    data.machine_id,
+    data.created_at
+  );
+  const id = Number(info.lastInsertRowid);
+  return toOutboxRow(
+    db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id)
+  );
+}
+function listPending(limit) {
+  const db = getDatabase();
+  const rows = db.prepare(
+    `SELECT ${SELECT_COLUMNS}
+     FROM team_outbox
+     WHERE sent_at IS NULL
+     ORDER BY created_at ASC
+     LIMIT ?`
+  ).all(limit ?? BURST_BATCH_SIZE);
+  return rows.map(toOutboxRow);
+}
+function markSent(ids, sentAt) {
+  if (ids.length === 0) return;
+  const db = getDatabase();
+  const placeholders = ids.map(() => "?").join(", ");
+  db.prepare(
+    `UPDATE team_outbox
+     SET sent_at = ?
+     WHERE id IN (${placeholders})`
+  ).run(sentAt, ...ids);
+}
+function pruneOld() {
+  const db = getDatabase();
+  const cutoff = Math.floor(Date.now() / MS_PER_SECOND) - SENT_PRUNE_AGE_SECONDS;
+  const info = db.prepare(
+    `DELETE FROM team_outbox
+     WHERE sent_at IS NOT NULL AND sent_at < ?`
+  ).run(cutoff);
+  return info.changes;
+}
+function countPending() {
+  const db = getDatabase();
+  const row = db.prepare(
+    `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL`
+  ).get();
+  return row.count;
+}
+var BACKFILL_TABLES = [
+  "sessions",
+  "prompt_batches",
+  "spores",
+  "entities",
+  "graph_edges",
+  "resolution_events",
+  "plans",
+  "artifacts",
+  "digest_extracts"
+];
+function backfillUnsynced(machineId) {
+  const db = getDatabase();
+  let total = 0;
+  const now = Math.floor(Date.now() / MS_PER_SECOND);
+  for (const table of BACKFILL_TABLES) {
+    const rows = db.prepare(
+      `SELECT * FROM ${table}
+       WHERE synced_at IS NULL
+       AND NOT EXISTS (
+         SELECT 1 FROM team_outbox
+         WHERE team_outbox.table_name = ? AND team_outbox.row_id = CAST(${table}.id AS TEXT)
+       )`
+    ).all(table);
+    if (rows.length === 0) continue;
+    const insertBatch = db.transaction((batchRows) => {
+      const stmt = db.prepare(
+        `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)
+         VALUES (?, ?, 'upsert', ?, ?, ?)`
+      );
+      for (const row of batchRows) {
+        stmt.run(table, String(row.id), JSON.stringify(row), machineId, now);
+      }
+    });
+    insertBatch(rows);
+    total += rows.length;
+  }
+  return total;
+}
+
+export {
+  initTeamContext,
+  syncRow,
+  listPending,
+  markSent,
+  pruneOld,
+  countPending,
+  backfillUnsynced
+};
+//# sourceMappingURL=chunk-XLY3REL3.js.map
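The new chunk bundles `src/daemon/team-context.ts` and `src/db/queries/team-outbox.ts`: write paths enqueue rows into `team_outbox`, and a sync client later drains pending rows in batches. A rough consumer-side sketch of that drain loop; the worker URL, request shape, and direct chunk import are hypothetical, only the exported helpers and their signatures come from the diff above:

```ts
// Hypothetical flush loop; listPending/markSent/pruneOld are the real exports above.
import { listPending, markSent, pruneOld } from './chunk-XLY3REL3.js';

async function flushOutbox(workerUrl: string): Promise<void> {
  // Oldest-first pending batch, capped at BURST_BATCH_SIZE (200) when no limit is given.
  const pending = listPending();
  if (pending.length === 0) return;

  const res = await fetch(workerUrl, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(pending.map((r) => ({ table: r.table_name, row: JSON.parse(r.payload) }))),
  });

  if (res.ok) {
    // Mark the whole batch sent, then drop sent rows older than 24 hours.
    markSent(pending.map((r) => r.id), Math.floor(Date.now() / 1000));
    pruneOld();
  }
}
```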
@@ -0,0 +1 @@
{"version":3,"sources":["../src/daemon/team-context.ts","../src/db/queries/team-outbox.ts"],"sourcesContent":["/**\n * Module-level state for team sync.\n *\n * Initialized once by the daemon on startup. Query modules import\n * `isTeamSyncEnabled()` and `getTeamMachineId()` to decide whether\n * to enqueue outbox records on write.\n */\n\nimport { SYNC_PROTOCOL_VERSION, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Module state\n// ---------------------------------------------------------------------------\n\nlet teamSyncEnabled = false;\nlet teamMachineId = DEFAULT_MACHINE_ID;\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Initialize team context. Called once on daemon startup.\n */\nexport function initTeamContext(enabled: boolean, machineId: string): void {\n teamSyncEnabled = enabled;\n teamMachineId = machineId;\n}\n\n/**\n * Whether team sync is currently enabled.\n *\n * Query modules check this before enqueuing outbox records.\n */\nexport function isTeamSyncEnabled(): boolean {\n return teamSyncEnabled;\n}\n\n/**\n * The machine ID for this instance.\n */\nexport function getTeamMachineId(): string {\n return teamMachineId;\n}\n\n/**\n * The sync protocol version in use.\n */\nexport function getTeamSyncProtocolVersion(): number {\n return SYNC_PROTOCOL_VERSION;\n}\n\n/**\n * Reset team context (for testing).\n */\nexport function resetTeamContext(): void {\n teamSyncEnabled = false;\n teamMachineId = DEFAULT_MACHINE_ID;\n}\n","/**\n * Team outbox CRUD query helpers.\n *\n * The outbox pattern: write paths enqueue records here when team sync is enabled.\n * The sync client flushes pending records in batches to the Cloudflare Worker.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { isTeamSyncEnabled, getTeamMachineId } from '@myco/daemon/team-context.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Max records returned per listPending call. */\nconst BURST_BATCH_SIZE = 200;\n\n/** Age in seconds after which sent records are pruned (24 hours). */\nconst SENT_PRUNE_AGE_SECONDS = 86_400;\n\n/** Milliseconds-per-second multiplier for epoch math. */\nconst MS_PER_SECOND = 1000;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when enqueuing an outbox record. */\nexport interface OutboxInsert {\n table_name: string;\n row_id: string;\n operation?: string;\n payload: string;\n machine_id: string;\n created_at: number;\n}\n\n/** Row shape returned from outbox queries. 
*/\nexport interface OutboxRow {\n id: number;\n table_name: string;\n row_id: string;\n operation: string;\n payload: string;\n machine_id: string;\n created_at: number;\n sent_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst OUTBOX_COLUMNS = [\n 'id',\n 'table_name',\n 'row_id',\n 'operation',\n 'payload',\n 'machine_id',\n 'created_at',\n 'sent_at',\n] as const;\n\nconst SELECT_COLUMNS = OUTBOX_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed OutboxRow. */\nfunction toOutboxRow(row: Record<string, unknown>): OutboxRow {\n return {\n id: row.id as number,\n table_name: row.table_name as string,\n row_id: row.row_id as string,\n operation: row.operation as string,\n payload: row.payload as string,\n machine_id: row.machine_id as string,\n created_at: row.created_at as number,\n sent_at: (row.sent_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Convenience helper — used by query modules\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a row for team sync if sync is enabled.\n *\n * Centralizes the if-enabled / enqueue / serialize pattern that every\n * write-path query module previously duplicated inline.\n */\nexport function syncRow(tableName: string, row: { id: string | number; created_at?: number }): void {\n if (!isTeamSyncEnabled()) return;\n enqueueOutbox({\n table_name: tableName,\n row_id: String(row.id),\n payload: JSON.stringify(row),\n machine_id: getTeamMachineId(),\n created_at: row.created_at ?? Math.floor(Date.now() / 1000),\n });\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a record into the team outbox for later sync.\n *\n * Inserted with `sent_at = NULL` (pending).\n */\nexport function enqueueOutbox(data: OutboxInsert): OutboxRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO team_outbox (\n table_name, row_id, operation, payload, machine_id, created_at\n ) VALUES (?, ?, ?, ?, ?, ?)`,\n ).run(\n data.table_name,\n data.row_id,\n data.operation ?? 'upsert',\n data.payload,\n data.machine_id,\n data.created_at,\n );\n\n const id = Number(info.lastInsertRowid);\n\n return toOutboxRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * List pending outbox records (oldest-first).\n *\n * Uses burst sizing: fetches BURST_BATCH_SIZE rows and returns them all.\n * If fewer than BURST_THRESHOLD rows come back, callers get a normal-size\n * batch; if more, the full burst. This avoids a separate COUNT query.\n */\nexport function listPending(limit?: number): OutboxRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM team_outbox\n WHERE sent_at IS NULL\n ORDER BY created_at ASC\n LIMIT ?`,\n ).all(limit ?? 
BURST_BATCH_SIZE) as Record<string, unknown>[];\n\n return rows.map(toOutboxRow);\n}\n\n/**\n * Mark outbox records as sent by setting sent_at.\n */\nexport function markSent(ids: number[], sentAt: number): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = ?\n WHERE id IN (${placeholders})`,\n ).run(sentAt, ...ids);\n}\n\n/**\n * Reset sent_at to NULL for records that need to be retried.\n *\n * This allows the sync client to re-enqueue specific records for retry.\n */\nexport function markForRetry(ids: number[]): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = NULL\n WHERE id IN (${placeholders})`,\n ).run(...ids);\n}\n\n/**\n * Prune old outbox records.\n *\n * Removes sent records older than 24 hours.\n *\n * @returns the number of records deleted.\n */\nexport function pruneOld(): number {\n const db = getDatabase();\n const cutoff = Math.floor(Date.now() / MS_PER_SECOND) - SENT_PRUNE_AGE_SECONDS;\n\n const info = db.prepare(\n `DELETE FROM team_outbox\n WHERE sent_at IS NOT NULL AND sent_at < ?`,\n ).run(cutoff);\n\n return info.changes;\n}\n\n/**\n * Count pending (unsent) outbox records.\n */\nexport function countPending(): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL`,\n ).get() as { count: number };\n\n return row.count;\n}\n\n// ---------------------------------------------------------------------------\n// Backfill\n// ---------------------------------------------------------------------------\n\n/** Tables to backfill (must have id, machine_id, synced_at columns). */\nconst BACKFILL_TABLES = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n] as const;\n// entity_mentions excluded — no `id` column (composite key entity_id+note_id+note_type)\n\n/**\n * Enqueue all unsynced records across all synced tables into the outbox.\n *\n * Scans each table for rows where `synced_at IS NULL`, serializes the full\n * row as JSON, and inserts into the outbox. Idempotent — re-running only\n * picks up rows not yet in the outbox (checked via existing outbox entries).\n *\n * @returns the total number of records enqueued.\n */\nexport function backfillUnsynced(machineId: string): number {\n const db = getDatabase();\n let total = 0;\n\n const now = Math.floor(Date.now() / MS_PER_SECOND);\n\n // Process one table at a time in separate transactions to avoid long locks\n for (const table of BACKFILL_TABLES) {\n const rows = db.prepare(\n `SELECT * FROM ${table}\n WHERE synced_at IS NULL\n AND NOT EXISTS (\n SELECT 1 FROM team_outbox\n WHERE team_outbox.table_name = ? 
AND team_outbox.row_id = CAST(${table}.id AS TEXT)\n )`,\n ).all(table) as Record<string, unknown>[];\n\n if (rows.length === 0) continue;\n\n const insertBatch = db.transaction((batchRows: Record<string, unknown>[]) => {\n const stmt = db.prepare(\n `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)\n VALUES (?, ?, 'upsert', ?, ?, ?)`,\n );\n for (const row of batchRows) {\n stmt.run(table, String(row.id), JSON.stringify(row), machineId, now);\n }\n });\n\n insertBatch(rows);\n total += rows.length;\n }\n\n return total;\n}\n\n"],"mappings":";;;;;;;;;AAcA,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AASb,SAAS,gBAAgB,SAAkB,WAAyB;AACzE,oBAAkB;AAClB,kBAAgB;AAClB;AAOO,SAAS,oBAA6B;AAC3C,SAAO;AACT;AAKO,SAAS,mBAA2B;AACzC,SAAO;AACT;;;ACzBA,IAAM,mBAAmB;AAGzB,IAAM,yBAAyB;AAG/B,IAAM,gBAAgB;AAgCtB,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,eAAe,KAAK,IAAI;AAO/C,SAAS,YAAY,KAAyC;AAC5D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,QAAQ,IAAI;AAAA,IACZ,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,YAAY,IAAI;AAAA,IAChB,YAAY,IAAI;AAAA,IAChB,SAAU,IAAI,WAAsB;AAAA,EACtC;AACF;AAYO,SAAS,QAAQ,WAAmB,KAAyD;AAClG,MAAI,CAAC,kBAAkB,EAAG;AAC1B,gBAAc;AAAA,IACZ,YAAY;AAAA,IACZ,QAAQ,OAAO,IAAI,EAAE;AAAA,IACrB,SAAS,KAAK,UAAU,GAAG;AAAA,IAC3B,YAAY,iBAAiB;AAAA,IAC7B,YAAY,IAAI,cAAc,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAAA,EAC5D,CAAC;AACH;AAWO,SAAS,cAAc,MAA+B;AAC3D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,aAAa;AAAA,IAClB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAEA,QAAM,KAAK,OAAO,KAAK,eAAe;AAEtC,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AACF;AASO,SAAS,YAAY,OAA6B;AACvD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA,EAK1B,EAAE,IAAI,SAAS,gBAAgB;AAE/B,SAAO,KAAK,IAAI,WAAW;AAC7B;AAKO,SAAS,SAAS,KAAe,QAAsB;AAC5D,MAAI,IAAI,WAAW,EAAG;AAEtB,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,QAAQ,GAAG,GAAG;AACtB;AA2BO,SAAS,WAAmB;AACjC,QAAM,KAAK,YAAY;AACvB,QAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa,IAAI;AAExD,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,EAEF,EAAE,IAAI,MAAM;AAEZ,SAAO,KAAK;AACd;AAKO,SAAS,eAAuB;AACrC,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI;AAEN,SAAO,IAAI;AACb;AAOA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAYO,SAAS,iBAAiB,WAA2B;AAC1D,QAAM,KAAK,YAAY;AACvB,MAAI,QAAQ;AAEZ,QAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa;AAGjD,aAAW,SAAS,iBAAiB;AACnC,UAAM,OAAO,GAAG;AAAA,MACd,iBAAiB,KAAK;AAAA;AAAA;AAAA;AAAA,0EAI8C,KAAK;AAAA;AAAA,IAE3E,EAAE,IAAI,KAAK;AAEX,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,cAAc,GAAG,YAAY,CAAC,cAAyC;AAC3E,YAAM,OAAO,GAAG;AAAA,QACd;AAAA;AAAA,MAEF;AACA,iBAAW,OAAO,WAAW;AAC3B,aAAK,IAAI,OAAO,OAAO,IAAI,EAAE,GAAG,KAAK,UAAU,GAAG,GAAG,WAAW,GAAG;AAAA,MACrE;AAAA,IACF,CAAC;AAED,gBAAY,IAAI;AAChB,aAAS,KAAK;AAAA,EAChB;AAEA,SAAO;AACT;","names":[]}
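The embedded `team-outbox.ts` source notes that `syncRow()` centralizes the "if-enabled / serialize / enqueue" step that write-path query modules previously duplicated inline, and that it stays a no-op until the daemon calls `initTeamContext(true, machineId)`. A producer-side sketch of that pattern; `createSpore` and its column subset are illustrative, only `syncRow` and the `spores` columns come from this diff:

```ts
// Hypothetical write path -- createSpore is illustrative; syncRow is the real helper.
import { getDatabase } from './chunk-MYX5NCRH.js';
import { syncRow } from './chunk-XLY3REL3.js';

export function createSpore(spore: { id: string; agent_id: string; content: string }): void {
  const db = getDatabase();
  const created_at = Math.floor(Date.now() / 1000);

  db.prepare(
    'INSERT INTO spores (id, agent_id, observation_type, content, created_at) VALUES (?, ?, ?, ?, ?)'
  ).run(spore.id, spore.agent_id, 'note', spore.content, created_at);

  // No-op unless team sync was enabled via initTeamContext at daemon startup.
  syncRow('spores', { ...spore, created_at });
}
```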
@@ -4,15 +4,18 @@ import {
 DAEMON_HEALTH_CHECK_TIMEOUT_MS,
 DAEMON_HEALTH_RETRY_DELAYS,
 DAEMON_STALE_GRACE_PERIOD_MS
-} from "./chunk-
+} from "./chunk-76ZO5RGT.js";
 import {
 getPluginVersion
-} from "./chunk-
+} from "./chunk-G2LQBFE3.js";
 
 // src/hooks/client.ts
 import fs from "fs";
 import path from "path";
 import { spawn } from "child_process";
+function resolveCliEntryPath() {
+return { execPath: process.execPath, cliEntry: process.argv[1] };
+}
 var DaemonClient = class {
 vaultDir;
 constructor(vaultDir) {
@@ -141,8 +144,8 @@ var DaemonClient = class {
 return false;
 }
 spawnDaemon() {
-const
-const child = spawn(
+const { execPath, cliEntry } = resolveCliEntryPath();
+const child = spawn(execPath, [cliEntry, "daemon", "--vault", this.vaultDir], {
 detached: true,
 stdio: "ignore"
 });
@@ -162,6 +165,7 @@ var DaemonClient = class {
 };
 
 export {
+resolveCliEntryPath,
 DaemonClient
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-YZMNEIFI.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../src/hooks/client.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport { spawn } from 'node:child_process';\nimport { DAEMON_CLIENT_TIMEOUT_MS, DAEMON_HEALTH_CHECK_TIMEOUT_MS, DAEMON_HEALTH_RETRY_DELAYS, DAEMON_STALE_GRACE_PERIOD_MS } from '../constants.js';\nimport { getPluginVersion } from '../version.js';\n\ninterface DaemonInfo {\n pid: number;\n port: number;\n}\n\n/**\n * Resolve the CLI entry point for spawning daemon processes.\n * Uses process.argv[1] so the daemon restarts from the same binary\n * (myco-dev vs global myco) that launched the current process.\n */\nexport function resolveCliEntryPath(): { execPath: string; cliEntry: string } {\n return { execPath: process.execPath, cliEntry: process.argv[1] };\n}\n\ninterface HealthResponse {\n myco: boolean;\n version?: string;\n}\n\ninterface ClientResult {\n ok: boolean;\n data?: any;\n}\n\nexport class DaemonClient {\n private vaultDir: string;\n\n constructor(vaultDir: string) {\n this.vaultDir = vaultDir;\n }\n\n async post(endpoint: string, body: unknown): Promise<ClientResult> {\n try {\n const info = this.readDaemonJson();\n if (!info) return { ok: false };\n\n const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {\n method: 'POST',\n headers: { 'Content-Type': 'application/json' },\n body: JSON.stringify(body),\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n\n if (!res.ok) return { ok: false };\n const data = await res.json();\n return { ok: true, data };\n } catch {\n return { ok: false };\n }\n }\n\n async get(endpoint: string): Promise<ClientResult> {\n try {\n const info = this.readDaemonJson();\n if (!info) return { ok: false };\n\n const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n\n if (!res.ok) return { ok: false };\n const data = await res.json();\n return { ok: true, data };\n } catch {\n return { ok: false };\n }\n }\n\n async delete(endpoint: string): Promise<ClientResult> {\n try {\n const info = this.readDaemonJson();\n if (!info) return { ok: false };\n\n const res = await fetch(`http://127.0.0.1:${info.port}${endpoint}`, {\n method: 'DELETE',\n signal: AbortSignal.timeout(DAEMON_CLIENT_TIMEOUT_MS),\n });\n\n if (!res.ok) return { ok: false };\n const data = await res.json();\n return { ok: true, data };\n } catch {\n return { ok: false };\n }\n }\n\n async isHealthy(cachedInfo?: DaemonInfo | null): Promise<boolean> {\n try {\n const info = cachedInfo ?? 
this.readDaemonJson();\n if (!info) return false;\n\n const res = await fetch(`http://127.0.0.1:${info.port}/health`, {\n signal: AbortSignal.timeout(DAEMON_HEALTH_CHECK_TIMEOUT_MS),\n });\n if (!res.ok) return false;\n const data = await res.json() as HealthResponse;\n return data.myco === true;\n } catch {\n return false;\n }\n }\n\n /**\n * Check if the daemon is running a stale version.\n * Returns true if the daemon's version doesn't match the current plugin version.\n * Skips the check if daemon.json was written recently (grace period) to prevent\n * rapid restart loops from concurrent hooks or session reloads.\n */\n private async isStale(info: DaemonInfo): Promise<boolean> {\n try {\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n const stat = fs.statSync(jsonPath);\n if (Date.now() - stat.mtimeMs < DAEMON_STALE_GRACE_PERIOD_MS) {\n return false;\n }\n\n const res = await fetch(`http://127.0.0.1:${info.port}/health`, {\n signal: AbortSignal.timeout(DAEMON_HEALTH_CHECK_TIMEOUT_MS),\n });\n if (!res.ok) return false;\n const data = await res.json() as HealthResponse;\n if (!data.myco) return false;\n\n // No version in response = old daemon that predates this check\n if (!data.version) return true;\n\n return data.version !== getPluginVersion();\n } catch {\n return false;\n }\n }\n\n /**\n * Kill the running daemon process.\n */\n private killDaemon(info: DaemonInfo | null): void {\n try {\n if (!info) return;\n process.kill(info.pid, 'SIGTERM');\n } catch { /* already dead */ }\n try {\n fs.unlinkSync(path.join(this.vaultDir, 'daemon.json'));\n } catch { /* already gone */ }\n }\n\n /**\n * Ensure the daemon is running. Spawns it if unhealthy.\n * When checkStale is true (default), also restarts a healthy daemon if its\n * version doesn't match the current plugin version. Use checkStale: false\n * for hooks that just need the daemon alive (e.g., stop) without triggering\n * version-driven restarts.\n */\n async ensureRunning(opts?: { checkStale?: boolean }): Promise<boolean> {\n const checkStale = opts?.checkStale ?? 
true;\n const info = this.readDaemonJson();\n\n if (checkStale && info && await this.isStale(info)) {\n this.killDaemon(info);\n // Brief pause for port release\n await new Promise((r) => setTimeout(r, 200));\n } else if (await this.isHealthy(info)) {\n return true;\n }\n\n this.spawnDaemon();\n\n for (const delay of DAEMON_HEALTH_RETRY_DELAYS) {\n await new Promise((r) => setTimeout(r, delay));\n if (await this.isHealthy()) return true;\n }\n return false;\n }\n\n spawnDaemon(): void {\n const { execPath, cliEntry } = resolveCliEntryPath();\n const child = spawn(execPath, [cliEntry, 'daemon', '--vault', this.vaultDir], {\n detached: true,\n stdio: 'ignore',\n });\n child.unref();\n }\n\n private readDaemonJson(): DaemonInfo | null {\n try {\n const jsonPath = path.join(this.vaultDir, 'daemon.json');\n const content = fs.readFileSync(jsonPath, 'utf-8');\n const info = JSON.parse(content);\n if (typeof info.port !== 'number') return null;\n return info as DaemonInfo;\n } catch {\n return null;\n }\n }\n}\n"],"mappings":";;;;;;;;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,SAAS,aAAa;AAcf,SAAS,sBAA8D;AAC5E,SAAO,EAAE,UAAU,QAAQ,UAAU,UAAU,QAAQ,KAAK,CAAC,EAAE;AACjE;AAYO,IAAM,eAAN,MAAmB;AAAA,EAChB;AAAA,EAER,YAAY,UAAkB;AAC5B,SAAK,WAAW;AAAA,EAClB;AAAA,EAEA,MAAM,KAAK,UAAkB,MAAsC;AACjE,QAAI;AACF,YAAM,OAAO,KAAK,eAAe;AACjC,UAAI,CAAC,KAAM,QAAO,EAAE,IAAI,MAAM;AAE9B,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,GAAG,QAAQ,IAAI;AAAA,QAClE,QAAQ;AAAA,QACR,SAAS,EAAE,gBAAgB,mBAAmB;AAAA,QAC9C,MAAM,KAAK,UAAU,IAAI;AAAA,QACzB,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AAED,UAAI,CAAC,IAAI,GAAI,QAAO,EAAE,IAAI,MAAM;AAChC,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,EAAE,IAAI,MAAM,KAAK;AAAA,IAC1B,QAAQ;AACN,aAAO,EAAE,IAAI,MAAM;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,UAAyC;AACjD,QAAI;AACF,YAAM,OAAO,KAAK,eAAe;AACjC,UAAI,CAAC,KAAM,QAAO,EAAE,IAAI,MAAM;AAE9B,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,GAAG,QAAQ,IAAI;AAAA,QAClE,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AAED,UAAI,CAAC,IAAI,GAAI,QAAO,EAAE,IAAI,MAAM;AAChC,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,EAAE,IAAI,MAAM,KAAK;AAAA,IAC1B,QAAQ;AACN,aAAO,EAAE,IAAI,MAAM;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,MAAM,OAAO,UAAyC;AACpD,QAAI;AACF,YAAM,OAAO,KAAK,eAAe;AACjC,UAAI,CAAC,KAAM,QAAO,EAAE,IAAI,MAAM;AAE9B,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,GAAG,QAAQ,IAAI;AAAA,QAClE,QAAQ;AAAA,QACR,QAAQ,YAAY,QAAQ,wBAAwB;AAAA,MACtD,CAAC;AAED,UAAI,CAAC,IAAI,GAAI,QAAO,EAAE,IAAI,MAAM;AAChC,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,EAAE,IAAI,MAAM,KAAK;AAAA,IAC1B,QAAQ;AACN,aAAO,EAAE,IAAI,MAAM;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,MAAM,UAAU,YAAkD;AAChE,QAAI;AACF,YAAM,OAAO,cAAc,KAAK,eAAe;AAC/C,UAAI,CAAC,KAAM,QAAO;AAElB,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,WAAW;AAAA,QAC9D,QAAQ,YAAY,QAAQ,8BAA8B;AAAA,MAC5D,CAAC;AACD,UAAI,CAAC,IAAI,GAAI,QAAO;AACpB,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,aAAO,KAAK,SAAS;AAAA,IACvB,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAc,QAAQ,MAAoC;AACxD,QAAI;AACF,YAAM,WAAW,KAAK,KAAK,KAAK,UAAU,aAAa;AACvD,YAAM,OAAO,GAAG,SAAS,QAAQ;AACjC,UAAI,KAAK,IAAI,IAAI,KAAK,UAAU,8BAA8B;AAC5D,eAAO;AAAA,MACT;AAEA,YAAM,MAAM,MAAM,MAAM,oBAAoB,KAAK,IAAI,WAAW;AAAA,QAC9D,QAAQ,YAAY,QAAQ,8BAA8B;AAAA,MAC5D,CAAC;AACD,UAAI,CAAC,IAAI,GAAI,QAAO;AACpB,YAAM,OAAO,MAAM,IAAI,KAAK;AAC5B,UAAI,CAAC,KAAK,KAAM,QAAO;AAGvB,UAAI,CAAC,KAAK,QAAS,QAAO;AAE1B,aAAO,KAAK,YAAY,iBAAiB;AAAA,IAC3C,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKQ,WAAW,MAA+B;AAChD,QAAI;AACF,UAAI,CAAC,KAAM;AACX,cAAQ,KAAK,KAAK,KAAK,SAAS;AAAA,IAClC,QAAQ;AAAA,IAAqB;AAC7B,QAAI;AACF,SAAG,WAAW,KAAK,KAAK,KAAK,UAAU,aAAa,CAAC;AAAA,IACvD,QAAQ;AAAA,IAAqB;AAAA,EAC/B;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAA
A;AAAA,EASA,MAAM,cAAc,MAAmD;AACrE,UAAM,aAAa,MAAM,cAAc;AACvC,UAAM,OAAO,KAAK,eAAe;AAEjC,QAAI,cAAc,QAAQ,MAAM,KAAK,QAAQ,IAAI,GAAG;AAClD,WAAK,WAAW,IAAI;AAEpB,YAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,GAAG,CAAC;AAAA,IAC7C,WAAW,MAAM,KAAK,UAAU,IAAI,GAAG;AACrC,aAAO;AAAA,IACT;AAEA,SAAK,YAAY;AAEjB,eAAW,SAAS,4BAA4B;AAC9C,YAAM,IAAI,QAAQ,CAAC,MAAM,WAAW,GAAG,KAAK,CAAC;AAC7C,UAAI,MAAM,KAAK,UAAU,EAAG,QAAO;AAAA,IACrC;AACA,WAAO;AAAA,EACT;AAAA,EAEA,cAAoB;AAClB,UAAM,EAAE,UAAU,SAAS,IAAI,oBAAoB;AACnD,UAAM,QAAQ,MAAM,UAAU,CAAC,UAAU,UAAU,WAAW,KAAK,QAAQ,GAAG;AAAA,MAC5E,UAAU;AAAA,MACV,OAAO;AAAA,IACT,CAAC;AACD,UAAM,MAAM;AAAA,EACd;AAAA,EAEQ,iBAAoC;AAC1C,QAAI;AACF,YAAM,WAAW,KAAK,KAAK,KAAK,UAAU,aAAa;AACvD,YAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,YAAM,OAAO,KAAK,MAAM,OAAO;AAC/B,UAAI,OAAO,KAAK,SAAS,SAAU,QAAO;AAC1C,aAAO;AAAA,IACT,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF;AACF;","names":[]}
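The chunk above (and its embedded source) introduces `resolveCliEntryPath` so the daemon is respawned from the same Node binary and CLI entry that launched the current process. A minimal self-contained sketch of that spawn path follows; `spawnDetachedDaemon` is a hypothetical free-function stand-in for the `DaemonClient.spawnDaemon` method shown in the source.

```ts
// Sketch of the new daemon spawn path: re-run the current Node binary with the
// current CLI entry, so a dev checkout restarts its own daemon rather than a global one.
import { spawn } from 'node:child_process';

export function resolveCliEntryPath(): { execPath: string; cliEntry: string } {
  return { execPath: process.execPath, cliEntry: process.argv[1] };
}

// Hypothetical free-function form of DaemonClient.spawnDaemon from the source above.
export function spawnDetachedDaemon(vaultDir: string): void {
  const { execPath, cliEntry } = resolveCliEntryPath();
  const child = spawn(execPath, [cliEntry, 'daemon', '--vault', vaultDir], {
    detached: true,
    stdio: 'ignore',
  });
  child.unref(); // let the parent process exit without waiting on the daemon
}
```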
@@ -1,7 +1,7 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
 kindToComponent
-} from "./chunk-
+} from "./chunk-S6I62FAH.js";
 
 // src/daemon/logger.ts
 import fs from "fs";
@@ -113,4 +113,4 @@ export {
 LEVEL_ORDER,
 DaemonLogger
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-ZESTWGJT.js.map
@@ -3,7 +3,7 @@ import {
 AgentTaskSchema,
 loadAgentTasks,
 taskFromParsed
-} from "./chunk-
+} from "./chunk-5LPERML5.js";
 import {
 require_dist
 } from "./chunk-D7TYRPRM.js";
@@ -13,7 +13,7 @@ import {
 TASK_NAME_PATTERN,
 USER_TASKS_DIR,
 USER_TASK_SOURCE
-} from "./chunk-
+} from "./chunk-76ZO5RGT.js";
 import {
 __toESM
 } from "./chunk-PZUWP5VK.js";
@@ -100,4 +100,4 @@ export {
 deleteUserTask,
 copyTaskToUser
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-ZMW6KQX2.js.map
@@ -2,14 +2,14 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
 loadEnv
-} from "./chunk-
+} from "./chunk-WXSJKESH.js";
 import "./chunk-SAKJMNSR.js";
-import "./chunk-
+import "./chunk-5SDH75YC.js";
 import "./chunk-MYX5NCRH.js";
-import "./chunk-
-import "./chunk-
-import "./chunk-
-import "./chunk-
+import "./chunk-YZMNEIFI.js";
+import "./chunk-76ZO5RGT.js";
+import "./chunk-S6I62FAH.js";
+import "./chunk-G2LQBFE3.js";
 import "./chunk-LPUQPDC2.js";
 import {
 resolveVaultDir
@@ -24,6 +24,7 @@ var USAGE = `Usage: myco <command> [args]
 
 Commands:
 init [options] Initialize a new vault
+update Update vault files and symbiont plugins
 config <get|set> [args] Get or set vault config values
 detect-providers Detect available LLM/embedding providers (JSON)
 verify Test LLM and embedding connectivity
@@ -36,6 +37,7 @@ Commands:
 setup-digest [options] Configure digest and capture settings
 agent [options] Run the intelligence agent
 task <subcommand> Manage agent task definitions
+team <init|upgrade> Provision or upgrade team sync infrastructure
 doctor [--fix] Check vault health and repair issues
 restart Restart the daemon
 version Show plugin version
@@ -49,29 +51,29 @@ async function main() {
 process.stdout.write(USAGE);
 return;
 }
-if (cmd === "init") return (await import("./init-
-if (cmd === "detect-providers") return (await import("./detect-providers-
+if (cmd === "init") return (await import("./init-ARJROOWV.js")).run(args);
+if (cmd === "detect-providers") return (await import("./detect-providers-AZ6DEQU7.js")).run(args);
 if (cmd === "version" || cmd === "--version" || cmd === "-v") {
-const { getPluginVersion } = await import("./version-
+const { getPluginVersion } = await import("./version-VWWY7SPQ.js");
 console.log(getPluginVersion());
 return;
 }
-if (cmd === "mcp") return (await import("./server-
+if (cmd === "mcp") return (await import("./server-AKPBRP6Z.js")).main();
 if (cmd === "hook") {
 const hookName = args[0];
 const HOOK_DISPATCH = {
-"session-start": () => import("./session-start-
-"session-end": () => import("./session-end-
-"stop": () => import("./stop-
-"user-prompt-submit": () => import("./user-prompt-submit-
-"post-tool-use": () => import("./post-tool-use-
-"post-tool-use-failure": () => import("./post-tool-use-failure-
-"subagent-start": () => import("./subagent-start-
-"subagent-stop": () => import("./subagent-stop-
-"stop-failure": () => import("./stop-failure-
-"task-completed": () => import("./task-completed-
-"pre-compact": () => import("./pre-compact-
-"post-compact": () => import("./post-compact-
+"session-start": () => import("./session-start-3754HF3N.js"),
+"session-end": () => import("./session-end-YMQ44U6Z.js"),
+"stop": () => import("./stop-WSFGRPXZ.js"),
+"user-prompt-submit": () => import("./user-prompt-submit-LSWCYUW3.js"),
+"post-tool-use": () => import("./post-tool-use-SOFVNFU3.js"),
+"post-tool-use-failure": () => import("./post-tool-use-failure-2CZZZASB.js"),
+"subagent-start": () => import("./subagent-start-7SGBXJYP.js"),
+"subagent-stop": () => import("./subagent-stop-MRVTNX3V.js"),
+"stop-failure": () => import("./stop-failure-4FR7574F.js"),
+"task-completed": () => import("./task-completed-XXPYPSRV.js"),
+"pre-compact": () => import("./pre-compact-3E3D6565.js"),
+"post-compact": () => import("./post-compact-LR3DSGT3.js")
 };
 const loader = HOOK_DISPATCH[hookName];
 if (!loader) {
@@ -80,11 +82,12 @@ async function main() {
 }
 return (await loader()).main();
 }
-if (cmd === "daemon") return (await import("./main-
+if (cmd === "daemon") return (await import("./main-PVX6R3I6.js")).main();
 if (cmd === "doctor") {
 const vaultDir2 = resolveVaultDir();
-return (await import("./doctor-
+return (await import("./doctor-RHHWJTMB.js")).run(args, vaultDir2);
 }
+if (cmd === "update") return (await import("./update-W3UFZU4G.js")).run(args);
 const vaultDir = resolveVaultDir();
 if (!fs.existsSync(path.join(vaultDir, "myco.yaml"))) {
 console.error(`No myco.yaml found in ${vaultDir}. Run 'myco init' first.`);
@@ -92,29 +95,37 @@ async function main() {
 }
 switch (cmd) {
 case "config":
-return (await import("./config-
+return (await import("./config-G3CSGI7P.js")).run(args, vaultDir);
 case "verify":
-return (await import("./verify-
+return (await import("./verify-O7TQ5DDY.js")).run(args, vaultDir);
 case "stats":
-return (await import("./stats-
+return (await import("./stats-CDQXOTEC.js")).run(args, vaultDir);
 case "search":
-return (await import("./search-
+return (await import("./search-VB6Z2ZXV.js")).run(args, vaultDir);
 case "vectors":
-return (await import("./search-
+return (await import("./search-VB6Z2ZXV.js")).runVectors(args, vaultDir);
 case "session":
-return (await import("./session-
+return (await import("./session-UVZS6CY5.js")).run(args, vaultDir);
 case "setup-llm":
-return (await import("./setup-llm-
+return (await import("./setup-llm-NWHOPJUV.js")).run(args, vaultDir);
 case "setup-digest":
 return (await import("./setup-digest-4KDSXAIV.js")).run(args, vaultDir);
 case "agent":
-return (await import("./agent-run-
+return (await import("./agent-run-CGXF5PPC.js")).run(args, vaultDir);
 case "task":
-return (await import("./agent-tasks-
+return (await import("./agent-tasks-T7NVI3R7.js")).run(args, vaultDir);
+case "team": {
+const sub = args[0];
+if (sub === "init") return (await import("./team-XMHYCKFF.js")).teamInit(vaultDir);
+if (sub === "upgrade") return (await import("./team-XMHYCKFF.js")).teamUpgrade(vaultDir);
+console.error("Usage: myco team <init|upgrade>");
+process.exit(1);
+break;
+}
 case "restart":
-return (await import("./restart-
+return (await import("./restart-XIUFVS33.js")).run(args, vaultDir);
 case "logs":
-return (await import("./logs-
+return (await import("./logs-LXHPDKUA.js")).run(args, vaultDir);
 default:
 console.error(`Unknown command: ${cmd}`);
 process.stdout.write(USAGE);
@@ -125,4 +136,4 @@ main().catch((err) => {
 console.error(`myco: ${err.message}`);
 process.exit(1);
 });
-//# sourceMappingURL=cli-
+//# sourceMappingURL=cli-6CPFJGRZ.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\nimport { loadEnv } from './cli/shared.js';\nimport { resolveVaultDir } from './vault/resolve.js';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nloadEnv();\n\nconst USAGE = `Usage: myco <command> [args]\n\nCommands:\n init [options] Initialize a new vault\n update Update vault files and symbiont plugins\n config <get|set> [args] Get or set vault config values\n detect-providers Detect available LLM/embedding providers (JSON)\n verify Test LLM and embedding connectivity\n stats Vault health, index counts, vector count\n search <query> Combined FTS + vector search with scores\n vectors <query> Raw vector search with similarity scores\n session [id|latest] Show a session\n logs [options] View daemon logs\n setup-llm [options] Configure LLM and embedding providers\n setup-digest [options] Configure digest and capture settings\n agent [options] Run the intelligence agent\n task <subcommand> Manage agent task definitions\n team <init|upgrade> Provision or upgrade team sync infrastructure\n doctor [--fix] Check vault health and repair issues\n restart Restart the daemon\n version Show plugin version\n mcp Start the MCP stdio server\n hook <name> Run a hook (session-start, session-end, stop, user-prompt-submit, post-tool-use, post-tool-use-failure, subagent-start, subagent-stop, stop-failure, task-completed, pre-compact, post-compact)\n daemon --vault <dir> Start the daemon process\n`;\n\nasync function main(): Promise<void> {\n const [cmd, ...args] = process.argv.slice(2);\n if (!cmd || cmd === '--help' || cmd === '-h') {\n process.stdout.write(USAGE);\n return;\n }\n\n if (cmd === 'init') return (await import('./cli/init.js')).run(args);\n if (cmd === 'detect-providers') return (await import('./cli/detect-providers.js')).run(args);\n if (cmd === 'version' || cmd === '--version' || cmd === '-v') {\n const { getPluginVersion } = await import('./version.js');\n console.log(getPluginVersion());\n return;\n }\n if (cmd === 'mcp') return (await import('./mcp/server.js')).main();\n if (cmd === 'hook') {\n const hookName = args[0];\n const HOOK_DISPATCH: Record<string, () => Promise<{ main: () => Promise<void> }>> = {\n 'session-start': () => import('./hooks/session-start.js'),\n 'session-end': () => import('./hooks/session-end.js'),\n 'stop': () => import('./hooks/stop.js'),\n 'user-prompt-submit': () => import('./hooks/user-prompt-submit.js'),\n 'post-tool-use': () => import('./hooks/post-tool-use.js'),\n 'post-tool-use-failure': () => import('./hooks/post-tool-use-failure.js'),\n 'subagent-start': () => import('./hooks/subagent-start.js'),\n 'subagent-stop': () => import('./hooks/subagent-stop.js'),\n 'stop-failure': () => import('./hooks/stop-failure.js'),\n 'task-completed': () => import('./hooks/task-completed.js'),\n 'pre-compact': () => import('./hooks/pre-compact.js'),\n 'post-compact': () => import('./hooks/post-compact.js'),\n };\n const loader = HOOK_DISPATCH[hookName];\n if (!loader) {\n console.error(`Unknown hook: ${hookName}. 
Available: ${Object.keys(HOOK_DISPATCH).join(', ')}`);\n process.exit(1);\n }\n return (await loader()).main();\n }\n if (cmd === 'daemon') return (await import('./daemon/main.js')).main();\n\n if (cmd === 'doctor') {\n const vaultDir = resolveVaultDir();\n return (await import('./cli/doctor.js')).run(args, vaultDir);\n }\n\n if (cmd === 'update') return (await import('./cli/update.js')).run(args);\n\n const vaultDir = resolveVaultDir();\n if (!fs.existsSync(path.join(vaultDir, 'myco.yaml'))) {\n console.error(`No myco.yaml found in ${vaultDir}. Run 'myco init' first.`);\n process.exit(1);\n }\n\n switch (cmd) {\n case 'config': return (await import('./cli/config.js')).run(args, vaultDir);\n case 'verify': return (await import('./cli/verify.js')).run(args, vaultDir);\n case 'stats': return (await import('./cli/stats.js')).run(args, vaultDir);\n case 'search': return (await import('./cli/search.js')).run(args, vaultDir);\n case 'vectors': return (await import('./cli/search.js')).runVectors(args, vaultDir);\n case 'session': return (await import('./cli/session.js')).run(args, vaultDir);\n case 'setup-llm': return (await import('./cli/setup-llm.js')).run(args, vaultDir);\n case 'setup-digest': return (await import('./cli/setup-digest.js')).run(args, vaultDir);\n case 'agent': return (await import('./cli/agent-run.js')).run(args, vaultDir);\n case 'task': return (await import('./cli/agent-tasks.js')).run(args, vaultDir);\n case 'team': {\n const sub = args[0];\n if (sub === 'init') return (await import('./cli/team.js')).teamInit(vaultDir);\n if (sub === 'upgrade') return (await import('./cli/team.js')).teamUpgrade(vaultDir);\n console.error('Usage: myco team <init|upgrade>');\n process.exit(1);\n break;\n }\n case 'restart': return (await import('./cli/restart.js')).run(args, vaultDir);\n case 'logs': return (await import('./cli/logs.js')).run(args, vaultDir);\n default:\n console.error(`Unknown command: ${cmd}`);\n process.stdout.write(USAGE);\n process.exit(1);\n }\n}\n\nmain().catch((err) => {\n console.error(`myco: ${(err as Error).message}`);\n 
process.exit(1);\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAGA,OAAO,QAAQ;AACf,OAAO,UAAU;AAEjB,QAAQ;AAER,IAAM,QAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AA0Bd,eAAe,OAAsB;AACnC,QAAM,CAAC,KAAK,GAAG,IAAI,IAAI,QAAQ,KAAK,MAAM,CAAC;AAC3C,MAAI,CAAC,OAAO,QAAQ,YAAY,QAAQ,MAAM;AAC5C,YAAQ,OAAO,MAAM,KAAK;AAC1B;AAAA,EACF;AAEA,MAAI,QAAQ,OAAQ,SAAQ,MAAM,OAAO,oBAAe,GAAG,IAAI,IAAI;AACnE,MAAI,QAAQ,mBAAoB,SAAQ,MAAM,OAAO,gCAA2B,GAAG,IAAI,IAAI;AAC3F,MAAI,QAAQ,aAAa,QAAQ,eAAe,QAAQ,MAAM;AAC5D,UAAM,EAAE,iBAAiB,IAAI,MAAM,OAAO,uBAAc;AACxD,YAAQ,IAAI,iBAAiB,CAAC;AAC9B;AAAA,EACF;AACA,MAAI,QAAQ,MAAO,SAAQ,MAAM,OAAO,sBAAiB,GAAG,KAAK;AACjE,MAAI,QAAQ,QAAQ;AAClB,UAAM,WAAW,KAAK,CAAC;AACvB,UAAM,gBAA8E;AAAA,MAClF,iBAAiB,MAAM,OAAO,6BAA0B;AAAA,MACxD,eAAe,MAAM,OAAO,2BAAwB;AAAA,MACpD,QAAQ,MAAM,OAAO,oBAAiB;AAAA,MACtC,sBAAsB,MAAM,OAAO,kCAA+B;AAAA,MAClE,iBAAiB,MAAM,OAAO,6BAA0B;AAAA,MACxD,yBAAyB,MAAM,OAAO,qCAAkC;AAAA,MACxE,kBAAkB,MAAM,OAAO,8BAA2B;AAAA,MAC1D,iBAAiB,MAAM,OAAO,6BAA0B;AAAA,MACxD,gBAAgB,MAAM,OAAO,4BAAyB;AAAA,MACtD,kBAAkB,MAAM,OAAO,8BAA2B;AAAA,MAC1D,eAAe,MAAM,OAAO,2BAAwB;AAAA,MACpD,gBAAgB,MAAM,OAAO,4BAAyB;AAAA,IACxD;AACA,UAAM,SAAS,cAAc,QAAQ;AACrC,QAAI,CAAC,QAAQ;AACX,cAAQ,MAAM,iBAAiB,QAAQ,gBAAgB,OAAO,KAAK,aAAa,EAAE,KAAK,IAAI,CAAC,EAAE;AAC9F,cAAQ,KAAK,CAAC;AAAA,IAChB;AACA,YAAQ,MAAM,OAAO,GAAG,KAAK;AAAA,EAC/B;AACA,MAAI,QAAQ,SAAU,SAAQ,MAAM,OAAO,oBAAkB,GAAG,KAAK;AAErE,MAAI,QAAQ,UAAU;AACpB,UAAMA,YAAW,gBAAgB;AACjC,YAAQ,MAAM,OAAO,sBAAiB,GAAG,IAAI,MAAMA,SAAQ;AAAA,EAC7D;AAEA,MAAI,QAAQ,SAAU,SAAQ,MAAM,OAAO,sBAAiB,GAAG,IAAI,IAAI;AAEvE,QAAM,WAAW,gBAAgB;AACjC,MAAI,CAAC,GAAG,WAAW,KAAK,KAAK,UAAU,WAAW,CAAC,GAAG;AACpD,YAAQ,MAAM,yBAAyB,QAAQ,0BAA0B;AACzE,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,UAAQ,KAAK;AAAA,IACX,KAAK;AAAU,cAAQ,MAAM,OAAO,sBAAiB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC1E,KAAK;AAAU,cAAQ,MAAM,OAAO,sBAAiB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC1E,KAAK;AAAS,cAAQ,MAAM,OAAO,qBAAgB,GAAG,IAAI,MAAM,QAAQ;AAAA,IACxE,KAAK;AAAU,cAAQ,MAAM,OAAO,sBAAiB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC1E,KAAK;AAAW,cAAQ,MAAM,OAAO,sBAAiB,GAAG,WAAW,MAAM,QAAQ;AAAA,IAClF,KAAK;AAAW,cAAQ,MAAM,OAAO,uBAAkB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC5E,KAAK;AAAa,cAAQ,MAAM,OAAO,yBAAoB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAChF,KAAK;AAAgB,cAAQ,MAAM,OAAO,4BAAuB,GAAG,IAAI,MAAM,QAAQ;AAAA,IACtF,KAAK;AAAS,cAAQ,MAAM,OAAO,yBAAoB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC5E,KAAK;AAAQ,cAAQ,MAAM,OAAO,2BAAsB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC7E,KAAK,QAAQ;AACX,YAAM,MAAM,KAAK,CAAC;AAClB,UAAI,QAAQ,OAAQ,SAAQ,MAAM,OAAO,oBAAe,GAAG,SAAS,QAAQ;AAC5E,UAAI,QAAQ,UAAW,SAAQ,MAAM,OAAO,oBAAe,GAAG,YAAY,QAAQ;AAClF,cAAQ,MAAM,iCAAiC;AAC/C,cAAQ,KAAK,CAAC;AACd;AAAA,IACF;AAAA,IACA,KAAK;AAAW,cAAQ,MAAM,OAAO,uBAAkB,GAAG,IAAI,MAAM,QAAQ;AAAA,IAC5E,KAAK;AAAQ,cAAQ,MAAM,OAAO,oBAAe,GAAG,IAAI,MAAM,QAAQ;AAAA,IACtE;AACE,cAAQ,MAAM,oBAAoB,GAAG,EAAE;AACvC,cAAQ,OAAO,MAAM,KAAK;AAC1B,cAAQ,KAAK,CAAC;AAAA,EAClB;AACF;AAEA,KAAK,EAAE,MAAM,CAAC,QAAQ;AACpB,UAAQ,MAAM,SAAU,IAAc,OAAO,EAAE;AAC/C,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":["vaultDir"]}
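The CLI source above dispatches every subcommand through a lazy dynamic import, and the new `team` branch fans out to `teamInit` / `teamUpgrade`. A minimal sketch of just that branch; `runTeamCommand` is a hypothetical wrapper, with vault resolution and the other subcommands omitted.

```ts
// Sketch of the new `myco team <init|upgrade>` branch from the CLI source above.
async function runTeamCommand(sub: string | undefined, vaultDir: string): Promise<void> {
  const team = await import('./cli/team.js'); // module path as shown in the embedded source
  if (sub === 'init') return team.teamInit(vaultDir);
  if (sub === 'upgrade') return team.teamUpgrade(vaultDir);
  console.error('Usage: myco team <init|upgrade>');
  process.exit(1);
}
```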
@@ -0,0 +1,15 @@
+import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
+import {
+DaemonClient,
+resolveCliEntryPath
+} from "./chunk-YZMNEIFI.js";
+import "./chunk-76ZO5RGT.js";
+import "./chunk-S6I62FAH.js";
+import "./chunk-G2LQBFE3.js";
+import "./chunk-LPUQPDC2.js";
+import "./chunk-PZUWP5VK.js";
+export {
+DaemonClient,
+resolveCliEntryPath
+};
+//# sourceMappingURL=client-B27SN5QG.js.map
@@ -5,7 +5,7 @@ import {
 import {
 loadConfig,
 updateConfig
-} from "./chunk-
+} from "./chunk-MAZOVVDU.js";
 import "./chunk-D7TYRPRM.js";
 import "./chunk-E4VLWIJC.js";
 import "./chunk-KH64DHOY.js";
@@ -85,4 +85,4 @@ function parseValue(raw) {
 export {
 run
 };
-//# sourceMappingURL=config-
+//# sourceMappingURL=config-G3CSGI7P.js.map
@@ -1,10 +1,10 @@
 import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
 import {
 checkLocalProvider
-} from "./chunk-
-import "./chunk-
-import "./chunk-
-import "./chunk-
+} from "./chunk-C3AEZ3BZ.js";
+import "./chunk-5SDH75YC.js";
+import "./chunk-76ZO5RGT.js";
+import "./chunk-S6I62FAH.js";
 import "./chunk-PZUWP5VK.js";
 
 // src/cli/detect-providers.ts
@@ -23,4 +23,4 @@ async function run(_args) {
 export {
 run
 };
-//# sourceMappingURL=detect-providers-
+//# sourceMappingURL=detect-providers-AZ6DEQU7.js.map