@goondocks/myco 0.19.6 → 0.20.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (185)
  1. package/dist/{agent-run-WK5NKBYA.js → agent-run-PQXC246Y.js} +8 -8
  2. package/dist/{agent-tasks-2E73GG3A.js → agent-tasks-323BZEBX.js} +8 -8
  3. package/dist/{chunk-VLGBWOBY.js → chunk-22JALAXN.js} +6 -6
  4. package/dist/chunk-22JALAXN.js.map +1 -0
  5. package/dist/{chunk-DURKJTVO.js → chunk-3WOS4TAR.js} +9 -1
  6. package/dist/chunk-3WOS4TAR.js.map +1 -0
  7. package/dist/{chunk-QH5HS54N.js → chunk-3WWJOTYG.js} +3 -3
  8. package/dist/{chunk-Q4QD6LJT.js → chunk-4M7EWPIA.js} +3 -3
  9. package/dist/{chunk-FGKCE5AE.js → chunk-4YFKBL3F.js} +2 -2
  10. package/dist/{chunk-KYH4V4ML.js → chunk-57O67XVF.js} +3 -3
  11. package/dist/{chunk-7ONVLO43.js → chunk-5XIVBO25.js} +2 -2
  12. package/dist/{chunk-W3JUH5S3.js → chunk-63ZGP4Q2.js} +4 -4
  13. package/dist/{chunk-ST2D3SGM.js → chunk-AIYFHQRX.js} +2 -2
  14. package/dist/{chunk-6ZDJXSEO.js → chunk-BPRIYNLE.js} +3 -3
  15. package/dist/chunk-CUDIZJY7.js +36 -0
  16. package/dist/chunk-CUDIZJY7.js.map +1 -0
  17. package/dist/{chunk-WKNAKQKA.js → chunk-DCSGJ7W4.js} +13 -19
  18. package/dist/chunk-DCSGJ7W4.js.map +1 -0
  19. package/dist/{chunk-Q6OEZM3S.js → chunk-EVDQKYCG.js} +237 -10
  20. package/dist/chunk-EVDQKYCG.js.map +1 -0
  21. package/dist/{chunk-P5VNHGVZ.js → chunk-FGY7J6EZ.js} +67 -16
  22. package/dist/chunk-FGY7J6EZ.js.map +1 -0
  23. package/dist/{chunk-I54KLC6H.js → chunk-FLLBJLHM.js} +3 -1
  24. package/dist/{chunk-I54KLC6H.js.map → chunk-FLLBJLHM.js.map} +1 -1
  25. package/dist/{chunk-PMT2LSTQ.js → chunk-FMRZ26U5.js} +2 -2
  26. package/dist/{chunk-3J6TUJSV.js → chunk-KHT24OWC.js} +3 -3
  27. package/dist/{chunk-TMNFCUAD.js → chunk-LZP4IJB3.js} +51 -23
  28. package/dist/chunk-LZP4IJB3.js.map +1 -0
  29. package/dist/{chunk-6DDRJQ4X.js → chunk-MYOZLMB2.js} +2 -2
  30. package/dist/{chunk-UVKQ62II.js → chunk-NGROSFOH.js} +24 -2
  31. package/dist/chunk-NGROSFOH.js.map +1 -0
  32. package/dist/{chunk-FKBPXCH3.js → chunk-QS5TWZBL.js} +4 -4
  33. package/dist/{chunk-NKQZ73LL.js → chunk-SRXTSI25.js} +109 -3
  34. package/dist/chunk-SRXTSI25.js.map +1 -0
  35. package/dist/{chunk-NCBLB2C6.js → chunk-UEWMSIL3.js} +7 -4
  36. package/dist/chunk-UEWMSIL3.js.map +1 -0
  37. package/dist/{chunk-GFR542SM.js → chunk-US4LNCAT.js} +5 -11
  38. package/dist/chunk-US4LNCAT.js.map +1 -0
  39. package/dist/{chunk-44PZCAYS.js → chunk-XL75KZGI.js} +23 -13
  40. package/dist/chunk-XL75KZGI.js.map +1 -0
  41. package/dist/chunk-ZXZPJJN3.js +54 -0
  42. package/dist/chunk-ZXZPJJN3.js.map +1 -0
  43. package/dist/{cli-LCTXK7N6.js → cli-UWBAOHLL.js} +43 -43
  44. package/dist/{client-S47ENM76.js → client-SLDDMSKN.js} +4 -4
  45. package/dist/{config-IO5WALOD.js → config-XPV5GDE4.js} +8 -16
  46. package/dist/config-XPV5GDE4.js.map +1 -0
  47. package/dist/{detect-BEOIHGBC.js → detect-PXNM6TA7.js} +2 -2
  48. package/dist/{detect-providers-2EY55EHK.js → detect-providers-5KOPZ7J2.js} +4 -4
  49. package/dist/{doctor-EE6GAC54.js → doctor-UUUOVDZS.js} +14 -14
  50. package/dist/doctor-UUUOVDZS.js.map +1 -0
  51. package/dist/{executor-NXNSUEMQ.js → executor-J4IBXSBY.js} +21 -22
  52. package/dist/executor-J4IBXSBY.js.map +1 -0
  53. package/dist/{init-IPL3XV6F.js → init-UDH3ZBDD.js} +17 -21
  54. package/dist/init-UDH3ZBDD.js.map +1 -0
  55. package/dist/{installer-WMTB4NCX.js → installer-I6KRGJOO.js} +4 -4
  56. package/dist/{llm-SWDDQQWY.js → llm-TH4NLIRM.js} +4 -4
  57. package/dist/{loader-V774GZU4.js → loader-H7OFASVC.js} +15 -3
  58. package/dist/{loader-AAZ6VUIA.js → loader-TSB5M7FD.js} +3 -3
  59. package/dist/{logs-KNKPQE5A.js → logs-7YVGGBIS.js} +2 -2
  60. package/dist/{main-RPJSS7PT.js → main-E7HU4QYR.js} +842 -249
  61. package/dist/main-E7HU4QYR.js.map +1 -0
  62. package/dist/{open-OYBKVBYX.js → open-4UGDPBKN.js} +8 -8
  63. package/dist/{post-compact-E2OVMNGQ.js → post-compact-BFU3WZSV.js} +9 -8
  64. package/dist/{post-compact-E2OVMNGQ.js.map → post-compact-BFU3WZSV.js.map} +1 -1
  65. package/dist/{post-tool-use-FGQE26GJ.js → post-tool-use-RYHXLCTC.js} +8 -7
  66. package/dist/{post-tool-use-FGQE26GJ.js.map → post-tool-use-RYHXLCTC.js.map} +1 -1
  67. package/dist/{post-tool-use-failure-3CITJYQK.js → post-tool-use-failure-DCVHK2P3.js} +9 -8
  68. package/dist/{post-tool-use-failure-3CITJYQK.js.map → post-tool-use-failure-DCVHK2P3.js.map} +1 -1
  69. package/dist/{pre-compact-GYMHCXII.js → pre-compact-EZZCJ6AG.js} +9 -8
  70. package/dist/{pre-compact-GYMHCXII.js.map → pre-compact-EZZCJ6AG.js.map} +1 -1
  71. package/dist/{provider-check-WCM3SDTM.js → provider-check-43LAMSMH.js} +4 -4
  72. package/dist/{registry-OCM4WAPJ.js → registry-MGJSJBAS.js} +4 -4
  73. package/dist/{remove-72ER3TG5.js → remove-GBBQG3SJ.js} +10 -10
  74. package/dist/{restart-EQHEJCGT.js → restart-RMIGKMA6.js} +9 -9
  75. package/dist/{search-JOBYIW43.js → search-2A5AJVNG.js} +9 -9
  76. package/dist/{server-PZCWYWZL.js → server-TSYZMFFK.js} +40 -27
  77. package/dist/{server-PZCWYWZL.js.map → server-TSYZMFFK.js.map} +1 -1
  78. package/dist/{session-APO4A2C7.js → session-NKREOTEO.js} +9 -10
  79. package/dist/{session-APO4A2C7.js.map → session-NKREOTEO.js.map} +1 -1
  80. package/dist/{session-end-4V4VHAOQ.js → session-end-DQELLTGJ.js} +8 -7
  81. package/dist/{session-end-4V4VHAOQ.js.map → session-end-DQELLTGJ.js.map} +1 -1
  82. package/dist/{session-start-K6ESRZU7.js → session-start-5SUTR5GP.js} +17 -17
  83. package/dist/session-start-5SUTR5GP.js.map +1 -0
  84. package/dist/{setup-llm-QUWOSB7A.js → setup-llm-SLX5764H.js} +10 -9
  85. package/dist/{setup-llm-QUWOSB7A.js.map → setup-llm-SLX5764H.js.map} +1 -1
  86. package/dist/src/cli.js +1 -1
  87. package/dist/src/daemon/main.js +1 -1
  88. package/dist/src/hooks/post-tool-use.js +1 -1
  89. package/dist/src/hooks/session-end.js +1 -1
  90. package/dist/src/hooks/session-start.js +1 -1
  91. package/dist/src/hooks/stop.js +1 -1
  92. package/dist/src/hooks/user-prompt-submit.js +1 -1
  93. package/dist/src/mcp/server.js +1 -1
  94. package/dist/src/symbionts/manifests/codex.yaml +28 -0
  95. package/dist/{stats-TYOZAOP2.js → stats-ARBLNEE3.js} +9 -9
  96. package/dist/{stop-2COOWEYG.js → stop-5AJXBKEE.js} +8 -7
  97. package/dist/{stop-2COOWEYG.js.map → stop-5AJXBKEE.js.map} +1 -1
  98. package/dist/{stop-failure-UQ33GZLE.js → stop-failure-XGKTZPLR.js} +9 -8
  99. package/dist/{stop-failure-UQ33GZLE.js.map → stop-failure-XGKTZPLR.js.map} +1 -1
  100. package/dist/{subagent-start-YENEY6VF.js → subagent-start-2JLIPGJN.js} +9 -8
  101. package/dist/{subagent-start-YENEY6VF.js.map → subagent-start-2JLIPGJN.js.map} +1 -1
  102. package/dist/{subagent-stop-W2757YDB.js → subagent-stop-FV4EOKWZ.js} +9 -8
  103. package/dist/{subagent-stop-W2757YDB.js.map → subagent-stop-FV4EOKWZ.js.map} +1 -1
  104. package/dist/{task-completed-KU6GWMWV.js → task-completed-XJKT366I.js} +9 -8
  105. package/dist/{task-completed-KU6GWMWV.js.map → task-completed-XJKT366I.js.map} +1 -1
  106. package/dist/{team-SNLC6FZM.js → team-3JKF7VAD.js} +5 -5
  107. package/dist/ui/assets/index-C2JuNtRB.css +1 -0
  108. package/dist/ui/assets/index-JLVaQKV2.js +832 -0
  109. package/dist/ui/favicon-dusk.svg +11 -0
  110. package/dist/ui/favicon-moss.svg +11 -0
  111. package/dist/ui/favicon-plum.svg +11 -0
  112. package/dist/ui/favicon-sage.svg +11 -0
  113. package/dist/ui/favicon-slate.svg +11 -0
  114. package/dist/ui/favicon-terracotta.svg +11 -0
  115. package/dist/ui/index.html +3 -3
  116. package/dist/{update-5VYNQZJ4.js → update-XVSAMN4O.js} +10 -10
  117. package/dist/{user-prompt-submit-KATLHAKA.js → user-prompt-submit-TTBTHBBH.js} +9 -8
  118. package/dist/{user-prompt-submit-KATLHAKA.js.map → user-prompt-submit-TTBTHBBH.js.map} +1 -1
  119. package/dist/{verify-BGJVB3K2.js → verify-LMHNEYIP.js} +7 -7
  120. package/dist/verify-LMHNEYIP.js.map +1 -0
  121. package/dist/{version-MKNN5GYM.js → version-HRVSEMUR.js} +2 -2
  122. package/package.json +1 -1
  123. package/skills/myco/SKILL.md +78 -43
  124. package/skills/myco/references/vault-status.md +1 -1
  125. package/dist/chunk-44PZCAYS.js.map +0 -1
  126. package/dist/chunk-5ZT2Q6P5.js +0 -25
  127. package/dist/chunk-5ZT2Q6P5.js.map +0 -1
  128. package/dist/chunk-AULBWINA.js +0 -227
  129. package/dist/chunk-AULBWINA.js.map +0 -1
  130. package/dist/chunk-DURKJTVO.js.map +0 -1
  131. package/dist/chunk-GFR542SM.js.map +0 -1
  132. package/dist/chunk-NCBLB2C6.js.map +0 -1
  133. package/dist/chunk-NKQZ73LL.js.map +0 -1
  134. package/dist/chunk-P5VNHGVZ.js.map +0 -1
  135. package/dist/chunk-Q6OEZM3S.js.map +0 -1
  136. package/dist/chunk-TMNFCUAD.js.map +0 -1
  137. package/dist/chunk-UVKQ62II.js.map +0 -1
  138. package/dist/chunk-VLGBWOBY.js.map +0 -1
  139. package/dist/chunk-VQF5E4ZX.js +0 -91
  140. package/dist/chunk-VQF5E4ZX.js.map +0 -1
  141. package/dist/chunk-WKNAKQKA.js.map +0 -1
  142. package/dist/config-IO5WALOD.js.map +0 -1
  143. package/dist/doctor-EE6GAC54.js.map +0 -1
  144. package/dist/executor-NXNSUEMQ.js.map +0 -1
  145. package/dist/init-IPL3XV6F.js.map +0 -1
  146. package/dist/main-RPJSS7PT.js.map +0 -1
  147. package/dist/resolution-events-PYLSI6QT.js +0 -15
  148. package/dist/session-start-K6ESRZU7.js.map +0 -1
  149. package/dist/ui/assets/index-816yFmz_.js +0 -842
  150. package/dist/ui/assets/index-Dj6vQpFd.css +0 -1
  151. package/dist/verify-BGJVB3K2.js.map +0 -1
  152. package/dist/version-MKNN5GYM.js.map +0 -1
  153. /package/dist/{agent-run-WK5NKBYA.js.map → agent-run-PQXC246Y.js.map} +0 -0
  154. /package/dist/{agent-tasks-2E73GG3A.js.map → agent-tasks-323BZEBX.js.map} +0 -0
  155. /package/dist/{chunk-QH5HS54N.js.map → chunk-3WWJOTYG.js.map} +0 -0
  156. /package/dist/{chunk-Q4QD6LJT.js.map → chunk-4M7EWPIA.js.map} +0 -0
  157. /package/dist/{chunk-FGKCE5AE.js.map → chunk-4YFKBL3F.js.map} +0 -0
  158. /package/dist/{chunk-KYH4V4ML.js.map → chunk-57O67XVF.js.map} +0 -0
  159. /package/dist/{chunk-7ONVLO43.js.map → chunk-5XIVBO25.js.map} +0 -0
  160. /package/dist/{chunk-W3JUH5S3.js.map → chunk-63ZGP4Q2.js.map} +0 -0
  161. /package/dist/{chunk-ST2D3SGM.js.map → chunk-AIYFHQRX.js.map} +0 -0
  162. /package/dist/{chunk-6ZDJXSEO.js.map → chunk-BPRIYNLE.js.map} +0 -0
  163. /package/dist/{chunk-PMT2LSTQ.js.map → chunk-FMRZ26U5.js.map} +0 -0
  164. /package/dist/{chunk-3J6TUJSV.js.map → chunk-KHT24OWC.js.map} +0 -0
  165. /package/dist/{chunk-6DDRJQ4X.js.map → chunk-MYOZLMB2.js.map} +0 -0
  166. /package/dist/{chunk-FKBPXCH3.js.map → chunk-QS5TWZBL.js.map} +0 -0
  167. /package/dist/{cli-LCTXK7N6.js.map → cli-UWBAOHLL.js.map} +0 -0
  168. /package/dist/{client-S47ENM76.js.map → client-SLDDMSKN.js.map} +0 -0
  169. /package/dist/{detect-BEOIHGBC.js.map → detect-PXNM6TA7.js.map} +0 -0
  170. /package/dist/{detect-providers-2EY55EHK.js.map → detect-providers-5KOPZ7J2.js.map} +0 -0
  171. /package/dist/{installer-WMTB4NCX.js.map → installer-I6KRGJOO.js.map} +0 -0
  172. /package/dist/{llm-SWDDQQWY.js.map → llm-TH4NLIRM.js.map} +0 -0
  173. /package/dist/{loader-AAZ6VUIA.js.map → loader-H7OFASVC.js.map} +0 -0
  174. /package/dist/{loader-V774GZU4.js.map → loader-TSB5M7FD.js.map} +0 -0
  175. /package/dist/{logs-KNKPQE5A.js.map → logs-7YVGGBIS.js.map} +0 -0
  176. /package/dist/{open-OYBKVBYX.js.map → open-4UGDPBKN.js.map} +0 -0
  177. /package/dist/{provider-check-WCM3SDTM.js.map → provider-check-43LAMSMH.js.map} +0 -0
  178. /package/dist/{registry-OCM4WAPJ.js.map → registry-MGJSJBAS.js.map} +0 -0
  179. /package/dist/{remove-72ER3TG5.js.map → remove-GBBQG3SJ.js.map} +0 -0
  180. /package/dist/{restart-EQHEJCGT.js.map → restart-RMIGKMA6.js.map} +0 -0
  181. /package/dist/{search-JOBYIW43.js.map → search-2A5AJVNG.js.map} +0 -0
  182. /package/dist/{stats-TYOZAOP2.js.map → stats-ARBLNEE3.js.map} +0 -0
  183. /package/dist/{resolution-events-PYLSI6QT.js.map → team-3JKF7VAD.js.map} +0 -0
  184. /package/dist/{update-5VYNQZJ4.js.map → update-XVSAMN4O.js.map} +0 -0
  185. /package/dist/{team-SNLC6FZM.js.map → version-HRVSEMUR.js.map} +0 -0
@@ -1,227 +0,0 @@
- import { createRequire as __cr } from 'node:module'; const require = __cr(import.meta.url);
- import {
- getDatabase
- } from "./chunk-MYX5NCRH.js";
- import {
- DEFAULT_MACHINE_ID
- } from "./chunk-I54KLC6H.js";
-
- // src/daemon/team-context.ts
- var teamSyncEnabled = false;
- var teamMachineId = DEFAULT_MACHINE_ID;
- function initTeamContext(enabled, machineId) {
- teamSyncEnabled = enabled;
- teamMachineId = machineId;
- }
- function isTeamSyncEnabled() {
- return teamSyncEnabled;
- }
- function getTeamMachineId() {
- return teamMachineId;
- }
-
- // src/db/queries/team-outbox.ts
- var BURST_BATCH_SIZE = 200;
- var SENT_PRUNE_AGE_SECONDS = 86400;
- var MAX_OUTBOX_RETRIES = 10;
- var MS_PER_SECOND = 1e3;
- var OUTBOX_COLUMNS = [
- "id",
- "table_name",
- "row_id",
- "operation",
- "payload",
- "machine_id",
- "created_at",
- "sent_at",
- "retry_count",
- "last_attempt_at"
- ];
- var SELECT_COLUMNS = OUTBOX_COLUMNS.join(", ");
- function toOutboxRow(row) {
- return {
- id: row.id,
- table_name: row.table_name,
- row_id: row.row_id,
- operation: row.operation,
- payload: row.payload,
- machine_id: row.machine_id,
- created_at: row.created_at,
- sent_at: row.sent_at ?? null,
- retry_count: row.retry_count ?? 0,
- last_attempt_at: row.last_attempt_at ?? null
- };
- }
- function syncRow(tableName, row) {
- if (!isTeamSyncEnabled()) return;
- enqueueOutbox({
- table_name: tableName,
- row_id: String(row.id),
- payload: JSON.stringify(row),
- machine_id: getTeamMachineId(),
- created_at: row.created_at ?? Math.floor(Date.now() / 1e3)
- });
- }
- function enqueueOutbox(data) {
- const db = getDatabase();
- const info = db.prepare(
- `INSERT INTO team_outbox (
- table_name, row_id, operation, payload, machine_id, created_at
- ) VALUES (?, ?, ?, ?, ?, ?)`
- ).run(
- data.table_name,
- data.row_id,
- data.operation ?? "upsert",
- data.payload,
- data.machine_id,
- data.created_at
- );
- const id = Number(info.lastInsertRowid);
- return toOutboxRow(
- db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id)
- );
- }
- function listPending(limit) {
- const db = getDatabase();
- const rows = db.prepare(
- `SELECT ${SELECT_COLUMNS}
- FROM team_outbox
- WHERE sent_at IS NULL AND retry_count < ?
- ORDER BY created_at ASC
- LIMIT ?`
- ).all(MAX_OUTBOX_RETRIES, limit ?? BURST_BATCH_SIZE);
- return rows.map(toOutboxRow);
- }
- function markSent(ids, sentAt) {
- if (ids.length === 0) return;
- const db = getDatabase();
- const placeholders = ids.map(() => "?").join(", ");
- db.prepare(
- `UPDATE team_outbox
- SET sent_at = ?
- WHERE id IN (${placeholders})`
- ).run(sentAt, ...ids);
- }
- function incrementRetryCount(ids, attemptAt) {
- if (ids.length === 0) return [];
- const db = getDatabase();
- const placeholders = ids.map(() => "?").join(", ");
- db.prepare(
- `UPDATE team_outbox
- SET retry_count = retry_count + 1, last_attempt_at = ?
- WHERE id IN (${placeholders})`
- ).run(attemptAt, ...ids);
- const deadLettered = db.prepare(
- `SELECT id FROM team_outbox
- WHERE id IN (${placeholders}) AND retry_count >= ?`
- ).all(...ids, MAX_OUTBOX_RETRIES);
- return deadLettered.map((r) => r.id);
- }
- function retryDeadLettered() {
- const db = getDatabase();
- const info = db.prepare(
- `UPDATE team_outbox
- SET retry_count = 0, last_attempt_at = NULL
- WHERE sent_at IS NULL AND retry_count >= ?`
- ).run(MAX_OUTBOX_RETRIES);
- return info.changes;
- }
- function countDeadLettered() {
- const db = getDatabase();
- const row = db.prepare(
- `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count >= ?`
- ).get(MAX_OUTBOX_RETRIES);
- return row.count;
- }
- function pruneOld() {
- const db = getDatabase();
- const cutoff = Math.floor(Date.now() / MS_PER_SECOND) - SENT_PRUNE_AGE_SECONDS;
- const info = db.prepare(
- `DELETE FROM team_outbox
- WHERE sent_at IS NOT NULL AND sent_at < ?`
- ).run(cutoff);
- return info.changes;
- }
- function countPending() {
- const db = getDatabase();
- const row = db.prepare(
- `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count < ?`
- ).get(MAX_OUTBOX_RETRIES);
- return row.count;
- }
- var BACKFILL_TABLES = [
- "sessions",
- "prompt_batches",
- "spores",
- "entities",
- "graph_edges",
- "resolution_events",
- "plans",
- "artifacts",
- "digest_extracts",
- "skill_candidates",
- "skill_records"
- ];
- var BACKFILL_TABLE_SET = new Set(BACKFILL_TABLES);
- function markSourceRowsSynced(records, syncedAt) {
- const db = getDatabase();
- const byTable = /* @__PURE__ */ new Map();
- for (const rec of records) {
- if (!BACKFILL_TABLE_SET.has(rec.table_name)) continue;
- const ids = byTable.get(rec.table_name) ?? [];
- ids.push(rec.row_id);
- byTable.set(rec.table_name, ids);
- }
- for (const [table, ids] of byTable) {
- const placeholders = ids.map(() => "?").join(", ");
- db.prepare(
- `UPDATE ${table} SET synced_at = ? WHERE id IN (${placeholders}) AND synced_at IS NULL`
- ).run(syncedAt, ...ids);
- }
- }
- function backfillUnsynced(machineId) {
- const db = getDatabase();
- let total = 0;
- const now = Math.floor(Date.now() / MS_PER_SECOND);
- for (const table of BACKFILL_TABLES) {
- const rows = db.prepare(
- `SELECT * FROM ${table}
- WHERE synced_at IS NULL
- AND NOT EXISTS (
- SELECT 1 FROM team_outbox
- WHERE team_outbox.table_name = ? AND team_outbox.row_id = CAST(${table}.id AS TEXT)
- )`
- ).all(table);
- if (rows.length === 0) continue;
- const insertBatch = db.transaction((batchRows) => {
- const stmt = db.prepare(
- `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)
- VALUES (?, ?, 'upsert', ?, ?, ?)`
- );
- for (const row of batchRows) {
- stmt.run(table, String(row.id), JSON.stringify(row), machineId, now);
- }
- });
- insertBatch(rows);
- total += rows.length;
- }
- return total;
- }
-
- export {
- initTeamContext,
- isTeamSyncEnabled,
- getTeamMachineId,
- syncRow,
- enqueueOutbox,
- listPending,
- markSent,
- incrementRetryCount,
- retryDeadLettered,
- countDeadLettered,
- pruneOld,
- countPending,
- markSourceRowsSynced,
- backfillUnsynced
- };
- //# sourceMappingURL=chunk-AULBWINA.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/daemon/team-context.ts","../src/db/queries/team-outbox.ts"],"sourcesContent":["/**\n * Module-level state for team sync.\n *\n * Initialized once by the daemon on startup. Query modules import\n * `isTeamSyncEnabled()` and `getTeamMachineId()` to decide whether\n * to enqueue outbox records on write.\n */\n\nimport { SYNC_PROTOCOL_VERSION, DEFAULT_MACHINE_ID } from '@myco/constants.js';\n\n// ---------------------------------------------------------------------------\n// Module state\n// ---------------------------------------------------------------------------\n\nlet teamSyncEnabled = false;\nlet teamMachineId = DEFAULT_MACHINE_ID;\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Initialize team context. Called once on daemon startup.\n */\nexport function initTeamContext(enabled: boolean, machineId: string): void {\n teamSyncEnabled = enabled;\n teamMachineId = machineId;\n}\n\n/**\n * Whether team sync is currently enabled.\n *\n * Query modules check this before enqueuing outbox records.\n */\nexport function isTeamSyncEnabled(): boolean {\n return teamSyncEnabled;\n}\n\n/**\n * The machine ID for this instance.\n */\nexport function getTeamMachineId(): string {\n return teamMachineId;\n}\n\n/**\n * The sync protocol version in use.\n */\nexport function getTeamSyncProtocolVersion(): number {\n return SYNC_PROTOCOL_VERSION;\n}\n\n/**\n * Reset team context (for testing).\n */\nexport function resetTeamContext(): void {\n teamSyncEnabled = false;\n teamMachineId = DEFAULT_MACHINE_ID;\n}\n","/**\n * Team outbox CRUD query helpers.\n *\n * The outbox pattern: write paths enqueue records here when team sync is enabled.\n * The sync client flushes pending records in batches to the Cloudflare Worker.\n *\n * All functions obtain the SQLite instance internally via `getDatabase()`.\n * Queries use positional `?` placeholders throughout (better-sqlite3).\n */\n\nimport { getDatabase } from '@myco/db/client.js';\nimport { isTeamSyncEnabled, getTeamMachineId } from '@myco/daemon/team-context.js';\n\n// ---------------------------------------------------------------------------\n// Constants\n// ---------------------------------------------------------------------------\n\n/** Max records returned per listPending call. */\nconst BURST_BATCH_SIZE = 200;\n\n/** Age in seconds after which sent records are pruned (24 hours). */\nconst SENT_PRUNE_AGE_SECONDS = 86_400;\n\n/** Max retry attempts before a record is dead-lettered. */\nexport const MAX_OUTBOX_RETRIES = 10;\n\n/** Milliseconds-per-second multiplier for epoch math. */\nconst MS_PER_SECOND = 1000;\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\n/** Fields required when enqueuing an outbox record. */\nexport interface OutboxInsert {\n table_name: string;\n row_id: string;\n operation?: string;\n payload: string;\n machine_id: string;\n created_at: number;\n}\n\n/** Row shape returned from outbox queries. 
*/\nexport interface OutboxRow {\n id: number;\n table_name: string;\n row_id: string;\n operation: string;\n payload: string;\n machine_id: string;\n created_at: number;\n sent_at: number | null;\n retry_count: number;\n last_attempt_at: number | null;\n}\n\n// ---------------------------------------------------------------------------\n// Column list\n// ---------------------------------------------------------------------------\n\nconst OUTBOX_COLUMNS = [\n 'id',\n 'table_name',\n 'row_id',\n 'operation',\n 'payload',\n 'machine_id',\n 'created_at',\n 'sent_at',\n 'retry_count',\n 'last_attempt_at',\n] as const;\n\nconst SELECT_COLUMNS = OUTBOX_COLUMNS.join(', ');\n\n// ---------------------------------------------------------------------------\n// Helpers\n// ---------------------------------------------------------------------------\n\n/** Normalize a SQLite result row into a typed OutboxRow. */\nfunction toOutboxRow(row: Record<string, unknown>): OutboxRow {\n return {\n id: row.id as number,\n table_name: row.table_name as string,\n row_id: row.row_id as string,\n operation: row.operation as string,\n payload: row.payload as string,\n machine_id: row.machine_id as string,\n created_at: row.created_at as number,\n sent_at: (row.sent_at as number) ?? null,\n retry_count: (row.retry_count as number) ?? 0,\n last_attempt_at: (row.last_attempt_at as number) ?? null,\n };\n}\n\n// ---------------------------------------------------------------------------\n// Convenience helper — used by query modules\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a row for team sync if sync is enabled.\n *\n * Centralizes the if-enabled / enqueue / serialize pattern that every\n * write-path query module previously duplicated inline.\n */\nexport function syncRow(tableName: string, row: { id: string | number; created_at?: number }): void {\n if (!isTeamSyncEnabled()) return;\n enqueueOutbox({\n table_name: tableName,\n row_id: String(row.id),\n payload: JSON.stringify(row),\n machine_id: getTeamMachineId(),\n created_at: row.created_at ?? Math.floor(Date.now() / 1000),\n });\n}\n\n// ---------------------------------------------------------------------------\n// Public API\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue a record into the team outbox for later sync.\n *\n * Inserted with `sent_at = NULL` (pending).\n */\nexport function enqueueOutbox(data: OutboxInsert): OutboxRow {\n const db = getDatabase();\n\n const info = db.prepare(\n `INSERT INTO team_outbox (\n table_name, row_id, operation, payload, machine_id, created_at\n ) VALUES (?, ?, ?, ?, ?, ?)`,\n ).run(\n data.table_name,\n data.row_id,\n data.operation ?? 'upsert',\n data.payload,\n data.machine_id,\n data.created_at,\n );\n\n const id = Number(info.lastInsertRowid);\n\n return toOutboxRow(\n db.prepare(`SELECT ${SELECT_COLUMNS} FROM team_outbox WHERE id = ?`).get(id) as Record<string, unknown>,\n );\n}\n\n/**\n * List pending outbox records (oldest-first).\n *\n * Uses burst sizing: fetches BURST_BATCH_SIZE rows and returns them all.\n * If fewer than BURST_THRESHOLD rows come back, callers get a normal-size\n * batch; if more, the full burst. 
This avoids a separate COUNT query.\n */\nexport function listPending(limit?: number): OutboxRow[] {\n const db = getDatabase();\n\n const rows = db.prepare(\n `SELECT ${SELECT_COLUMNS}\n FROM team_outbox\n WHERE sent_at IS NULL AND retry_count < ?\n ORDER BY created_at ASC\n LIMIT ?`,\n ).all(MAX_OUTBOX_RETRIES, limit ?? BURST_BATCH_SIZE) as Record<string, unknown>[];\n\n return rows.map(toOutboxRow);\n}\n\n/**\n * Mark outbox records as sent by setting sent_at.\n */\nexport function markSent(ids: number[], sentAt: number): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = ?\n WHERE id IN (${placeholders})`,\n ).run(sentAt, ...ids);\n}\n\n/**\n * Reset sent_at to NULL for records that need to be retried.\n *\n * This allows the sync client to re-enqueue specific records for retry.\n */\nexport function markForRetry(ids: number[]): void {\n if (ids.length === 0) return;\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET sent_at = NULL\n WHERE id IN (${placeholders})`,\n ).run(...ids);\n}\n\n/**\n * Increment retry_count and set last_attempt_at for failed outbox records.\n *\n * @returns IDs of records that have now reached MAX_OUTBOX_RETRIES (newly dead-lettered).\n */\nexport function incrementRetryCount(ids: number[], attemptAt: number): number[] {\n if (ids.length === 0) return [];\n\n const db = getDatabase();\n const placeholders = ids.map(() => '?').join(', ');\n\n db.prepare(\n `UPDATE team_outbox\n SET retry_count = retry_count + 1, last_attempt_at = ?\n WHERE id IN (${placeholders})`,\n ).run(attemptAt, ...ids);\n\n // Return IDs that just hit the threshold\n const deadLettered = db.prepare(\n `SELECT id FROM team_outbox\n WHERE id IN (${placeholders}) AND retry_count >= ?`,\n ).all(...ids, MAX_OUTBOX_RETRIES) as Array<{ id: number }>;\n\n return deadLettered.map((r) => r.id);\n}\n\n/**\n * Reset all dead-lettered records back to pending for retry.\n *\n * @returns the number of records reset.\n */\nexport function retryDeadLettered(): number {\n const db = getDatabase();\n\n const info = db.prepare(\n `UPDATE team_outbox\n SET retry_count = 0, last_attempt_at = NULL\n WHERE sent_at IS NULL AND retry_count >= ?`,\n ).run(MAX_OUTBOX_RETRIES);\n\n return info.changes;\n}\n\n/**\n * Count dead-lettered outbox records (exceeded max retries, never sent).\n */\nexport function countDeadLettered(): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count >= ?`,\n ).get(MAX_OUTBOX_RETRIES) as { count: number };\n\n return row.count;\n}\n\n/**\n * Prune old outbox records.\n *\n * Removes sent records older than 24 hours.\n *\n * @returns the number of records deleted.\n */\nexport function pruneOld(): number {\n const db = getDatabase();\n const cutoff = Math.floor(Date.now() / MS_PER_SECOND) - SENT_PRUNE_AGE_SECONDS;\n\n const info = db.prepare(\n `DELETE FROM team_outbox\n WHERE sent_at IS NOT NULL AND sent_at < ?`,\n ).run(cutoff);\n\n return info.changes;\n}\n\n/**\n * Count pending (unsent) outbox records.\n */\nexport function countPending(): number {\n const db = getDatabase();\n\n const row = db.prepare(\n `SELECT COUNT(*) as count FROM team_outbox WHERE sent_at IS NULL AND retry_count < ?`,\n ).get(MAX_OUTBOX_RETRIES) as { count: number };\n\n return row.count;\n}\n\n// 
---------------------------------------------------------------------------\n// Source-row sync bookkeeping\n// ---------------------------------------------------------------------------\n\n/** Tables eligible for backfill/sync (must have id, machine_id, synced_at columns). */\nconst BACKFILL_TABLES = [\n 'sessions',\n 'prompt_batches',\n 'spores',\n 'entities',\n 'graph_edges',\n 'resolution_events',\n 'plans',\n 'artifacts',\n 'digest_extracts',\n 'skill_candidates',\n 'skill_records',\n] as const;\n// entity_mentions excluded — no `id` column (composite key entity_id+note_id+note_type)\n// skill_usage excluded — no `synced_at` column (syncs via syncRow on insert)\n\nconst BACKFILL_TABLE_SET = new Set<string>(BACKFILL_TABLES);\n\n/**\n * Mark source rows as synced after successful outbox flush.\n *\n * Groups outbox records by table_name, then sets `synced_at` on the\n * corresponding source rows. This closes the re-enqueue loop: once\n * synced_at is non-NULL, `backfillUnsynced` skips the row even after\n * the outbox entry is pruned.\n */\nexport function markSourceRowsSynced(records: OutboxRow[], syncedAt: number): void {\n const db = getDatabase();\n\n // Group row_ids by table\n const byTable = new Map<string, string[]>();\n for (const rec of records) {\n if (!BACKFILL_TABLE_SET.has(rec.table_name)) continue;\n const ids = byTable.get(rec.table_name) ?? [];\n ids.push(rec.row_id);\n byTable.set(rec.table_name, ids);\n }\n\n for (const [table, ids] of byTable) {\n const placeholders = ids.map(() => '?').join(', ');\n db.prepare(\n `UPDATE ${table} SET synced_at = ? WHERE id IN (${placeholders}) AND synced_at IS NULL`,\n ).run(syncedAt, ...ids);\n }\n}\n\n// ---------------------------------------------------------------------------\n// Backfill\n// ---------------------------------------------------------------------------\n\n/**\n * Enqueue all unsynced records across all synced tables into the outbox.\n *\n * Scans each table for rows where `synced_at IS NULL`, serializes the full\n * row as JSON, and inserts into the outbox. Idempotent — re-running only\n * picks up rows not yet in the outbox (checked via existing outbox entries).\n *\n * @returns the total number of records enqueued.\n */\nexport function backfillUnsynced(machineId: string): number {\n const db = getDatabase();\n let total = 0;\n\n const now = Math.floor(Date.now() / MS_PER_SECOND);\n\n // Process one table at a time in separate transactions to avoid long locks\n for (const table of BACKFILL_TABLES) {\n const rows = db.prepare(\n `SELECT * FROM ${table}\n WHERE synced_at IS NULL\n AND NOT EXISTS (\n SELECT 1 FROM team_outbox\n WHERE team_outbox.table_name = ? 
AND team_outbox.row_id = CAST(${table}.id AS TEXT)\n )`,\n ).all(table) as Record<string, unknown>[];\n\n if (rows.length === 0) continue;\n\n const insertBatch = db.transaction((batchRows: Record<string, unknown>[]) => {\n const stmt = db.prepare(\n `INSERT INTO team_outbox (table_name, row_id, operation, payload, machine_id, created_at)\n VALUES (?, ?, 'upsert', ?, ?, ?)`,\n );\n for (const row of batchRows) {\n stmt.run(table, String(row.id), JSON.stringify(row), machineId, now);\n }\n });\n\n insertBatch(rows);\n total += rows.length;\n }\n\n return total;\n}\n\n"],"mappings":";;;;;;;;;AAcA,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AASb,SAAS,gBAAgB,SAAkB,WAAyB;AACzE,oBAAkB;AAClB,kBAAgB;AAClB;AAOO,SAAS,oBAA6B;AAC3C,SAAO;AACT;AAKO,SAAS,mBAA2B;AACzC,SAAO;AACT;;;ACzBA,IAAM,mBAAmB;AAGzB,IAAM,yBAAyB;AAGxB,IAAM,qBAAqB;AAGlC,IAAM,gBAAgB;AAkCtB,IAAM,iBAAiB;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAEA,IAAM,iBAAiB,eAAe,KAAK,IAAI;AAO/C,SAAS,YAAY,KAAyC;AAC5D,SAAO;AAAA,IACL,IAAI,IAAI;AAAA,IACR,YAAY,IAAI;AAAA,IAChB,QAAQ,IAAI;AAAA,IACZ,WAAW,IAAI;AAAA,IACf,SAAS,IAAI;AAAA,IACb,YAAY,IAAI;AAAA,IAChB,YAAY,IAAI;AAAA,IAChB,SAAU,IAAI,WAAsB;AAAA,IACpC,aAAc,IAAI,eAA0B;AAAA,IAC5C,iBAAkB,IAAI,mBAA8B;AAAA,EACtD;AACF;AAYO,SAAS,QAAQ,WAAmB,KAAyD;AAClG,MAAI,CAAC,kBAAkB,EAAG;AAC1B,gBAAc;AAAA,IACZ,YAAY;AAAA,IACZ,QAAQ,OAAO,IAAI,EAAE;AAAA,IACrB,SAAS,KAAK,UAAU,GAAG;AAAA,IAC3B,YAAY,iBAAiB;AAAA,IAC7B,YAAY,IAAI,cAAc,KAAK,MAAM,KAAK,IAAI,IAAI,GAAI;AAAA,EAC5D,CAAC;AACH;AAWO,SAAS,cAAc,MAA+B;AAC3D,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,aAAa;AAAA,IAClB,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,EACP;AAEA,QAAM,KAAK,OAAO,KAAK,eAAe;AAEtC,SAAO;AAAA,IACL,GAAG,QAAQ,UAAU,cAAc,gCAAgC,EAAE,IAAI,EAAE;AAAA,EAC7E;AACF;AASO,SAAS,YAAY,OAA6B;AACvD,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd,UAAU,cAAc;AAAA;AAAA;AAAA;AAAA;AAAA,EAK1B,EAAE,IAAI,oBAAoB,SAAS,gBAAgB;AAEnD,SAAO,KAAK,IAAI,WAAW;AAC7B;AAKO,SAAS,SAAS,KAAe,QAAsB;AAC5D,MAAI,IAAI,WAAW,EAAG;AAEtB,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,QAAQ,GAAG,GAAG;AACtB;AAyBO,SAAS,oBAAoB,KAAe,WAA6B;AAC9E,MAAI,IAAI,WAAW,EAAG,QAAO,CAAC;AAE9B,QAAM,KAAK,YAAY;AACvB,QAAM,eAAe,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AAEjD,KAAG;AAAA,IACD;AAAA;AAAA,oBAEgB,YAAY;AAAA,EAC9B,EAAE,IAAI,WAAW,GAAG,GAAG;AAGvB,QAAM,eAAe,GAAG;AAAA,IACtB;AAAA,oBACgB,YAAY;AAAA,EAC9B,EAAE,IAAI,GAAG,KAAK,kBAAkB;AAEhC,SAAO,aAAa,IAAI,CAAC,MAAM,EAAE,EAAE;AACrC;AAOO,SAAS,oBAA4B;AAC1C,QAAM,KAAK,YAAY;AAEvB,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA;AAAA,EAGF,EAAE,IAAI,kBAAkB;AAExB,SAAO,KAAK;AACd;AAKO,SAAS,oBAA4B;AAC1C,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,kBAAkB;AAExB,SAAO,IAAI;AACb;AASO,SAAS,WAAmB;AACjC,QAAM,KAAK,YAAY;AACvB,QAAM,SAAS,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa,IAAI;AAExD,QAAM,OAAO,GAAG;AAAA,IACd;AAAA;AAAA,EAEF,EAAE,IAAI,MAAM;AAEZ,SAAO,KAAK;AACd;AAKO,SAAS,eAAuB;AACrC,QAAM,KAAK,YAAY;AAEvB,QAAM,MAAM,GAAG;AAAA,IACb;AAAA,EACF,EAAE,IAAI,kBAAkB;AAExB,SAAO,IAAI;AACb;AAOA,IAAM,kBAAkB;AAAA,EACtB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF;AAIA,IAAM,qBAAqB,IAAI,IAAY,eAAe;AAUnD,SAAS,qBAAqB,SAAsB,UAAwB;AACjF,QAAM,KAAK,YAAY;AAGvB,QAAM,UAAU,oBAAI,IAAsB;AAC1C,aAAW,OAAO,SAAS;AACzB,QAAI,CAAC,mBAAmB,IAAI,IAAI,UAAU,EAAG;AAC7C,UAAM,MAAM,QAAQ,IAAI,IAAI,UAAU,KAAK,CAAC;AAC5C,QAAI,KAAK,IAAI,MAAM;AACnB,YAAQ,IAAI,IAAI,YAAY,GAAG;AAAA,EACjC;AAEA,aAAW,CAAC,OAAO,GAAG,KAAK,SAAS;AAClC,UAAM,eAA
e,IAAI,IAAI,MAAM,GAAG,EAAE,KAAK,IAAI;AACjD,OAAG;AAAA,MACD,UAAU,KAAK,mCAAmC,YAAY;AAAA,IAChE,EAAE,IAAI,UAAU,GAAG,GAAG;AAAA,EACxB;AACF;AAeO,SAAS,iBAAiB,WAA2B;AAC1D,QAAM,KAAK,YAAY;AACvB,MAAI,QAAQ;AAEZ,QAAM,MAAM,KAAK,MAAM,KAAK,IAAI,IAAI,aAAa;AAGjD,aAAW,SAAS,iBAAiB;AACnC,UAAM,OAAO,GAAG;AAAA,MACd,iBAAiB,KAAK;AAAA;AAAA;AAAA;AAAA,0EAI8C,KAAK;AAAA;AAAA,IAE3E,EAAE,IAAI,KAAK;AAEX,QAAI,KAAK,WAAW,EAAG;AAEvB,UAAM,cAAc,GAAG,YAAY,CAAC,cAAyC;AAC3E,YAAM,OAAO,GAAG;AAAA,QACd;AAAA;AAAA,MAEF;AACA,iBAAW,OAAO,WAAW;AAC3B,aAAK,IAAI,OAAO,OAAO,IAAI,EAAE,GAAG,KAAK,UAAU,GAAG,GAAG,WAAW,GAAG;AAAA,MACrE;AAAA,IACF,CAAC;AAED,gBAAY,IAAI;AAChB,aAAS,KAAK;AAAA,EAChB;AAEA,SAAO;AACT;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/daemon/logger.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport { kindToComponent } from '@myco/constants/log-kinds.js';\n\nexport interface LogEntry {\n timestamp: string;\n level: string;\n kind: string;\n component: string;\n message: string;\n [key: string]: unknown;\n}\n\nexport type LogLevel = 'debug' | 'info' | 'warn' | 'error';\n\nexport type LogPersistFn = (entry: LogEntry) => void;\n\n/**\n * Structural logger interface consumed by manager classes.\n * `DaemonLogger` is the concrete implementation, but managers accept any\n * object matching this shape so tests can substitute mocks.\n */\nexport interface Logger {\n debug(cat: string, msg: string, data?: Record<string, unknown>): void;\n info(cat: string, msg: string, data?: Record<string, unknown>): void;\n warn(cat: string, msg: string, data?: Record<string, unknown>): void;\n error(cat: string, msg: string, data?: Record<string, unknown>): void;\n}\n\nexport const LEVEL_ORDER: Record<LogLevel, number> = {\n debug: 0, info: 1, warn: 2, error: 3,\n};\n\ninterface LoggerOptions {\n level?: LogLevel;\n maxSize?: number;\n maxFiles?: number;\n}\n\nexport class DaemonLogger {\n private logPath: string;\n private fd: number | null = null;\n private currentSize = 0;\n private level: LogLevel;\n private maxSize: number;\n private maxFiles: number;\n private logDir: string;\n private persistFn: LogPersistFn | null = null;\n\n constructor(logDir: string, options: LoggerOptions = {}) {\n this.logDir = logDir;\n this.logPath = path.join(logDir, 'daemon.log');\n this.level = options.level ?? 'info';\n this.maxSize = options.maxSize ?? 5_242_880;\n this.maxFiles = options.maxFiles ?? 3;\n\n fs.mkdirSync(logDir, { recursive: true });\n this.fd = fs.openSync(this.logPath, 'a');\n try {\n this.currentSize = fs.fstatSync(this.fd).size;\n } catch {\n this.currentSize = 0;\n }\n }\n\n setPersistFn(fn: LogPersistFn): void {\n this.persistFn = fn;\n }\n\n debug(kind: string, message: string, data?: Record<string, unknown>): void {\n this.write('debug', kind, message, data);\n }\n\n info(kind: string, message: string, data?: Record<string, unknown>): void {\n this.write('info', kind, message, data);\n }\n\n warn(kind: string, message: string, data?: Record<string, unknown>): void {\n this.write('warn', kind, message, data);\n }\n\n error(kind: string, message: string, data?: Record<string, unknown>): void {\n this.write('error', kind, message, data);\n }\n\n /** Dispatch a log entry by dynamic level string. 
*/\n log(level: string, kind: string, message: string, data?: Record<string, unknown>): void {\n if (level in LEVEL_ORDER) {\n this.write(level as LogLevel, kind, message, data);\n }\n }\n\n close(): void {\n if (this.fd !== null) {\n fs.closeSync(this.fd);\n this.fd = null;\n }\n }\n\n private write(level: LogLevel, kind: string, message: string, data?: Record<string, unknown>): void {\n if (LEVEL_ORDER[level] < LEVEL_ORDER[this.level]) return;\n\n const entry: LogEntry = {\n timestamp: new Date().toISOString(),\n level,\n kind,\n component: kindToComponent(kind),\n message,\n ...data,\n };\n\n if (this.persistFn !== null) {\n try {\n this.persistFn(entry);\n } catch {\n // File write is the safety net — persist failures are non-fatal\n }\n }\n\n const line = JSON.stringify(entry) + '\\n';\n const bytes = Buffer.byteLength(line);\n\n if (this.currentSize + bytes > this.maxSize) {\n this.rotate();\n }\n\n if (this.fd !== null) {\n fs.writeSync(this.fd, line);\n this.currentSize += bytes;\n }\n }\n\n private rotate(): void {\n this.close();\n\n for (let i = this.maxFiles - 1; i >= 1; i--) {\n const from = path.join(this.logDir, `daemon.${i}.log`);\n const to = path.join(this.logDir, `daemon.${i + 1}.log`);\n if (fs.existsSync(from)) {\n if (i + 1 > this.maxFiles) {\n fs.unlinkSync(from);\n } else {\n fs.renameSync(from, to);\n }\n }\n }\n\n if (fs.existsSync(this.logPath)) {\n fs.renameSync(this.logPath, path.join(this.logDir, 'daemon.1.log'));\n }\n\n this.fd = fs.openSync(this.logPath, 'a');\n this.currentSize = 0;\n }\n}\n"],"mappings":";;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AA4BV,IAAM,cAAwC;AAAA,EACnD,OAAO;AAAA,EAAG,MAAM;AAAA,EAAG,MAAM;AAAA,EAAG,OAAO;AACrC;AAQO,IAAM,eAAN,MAAmB;AAAA,EAChB;AAAA,EACA,KAAoB;AAAA,EACpB,cAAc;AAAA,EACd;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA,YAAiC;AAAA,EAEzC,YAAY,QAAgB,UAAyB,CAAC,GAAG;AACvD,SAAK,SAAS;AACd,SAAK,UAAU,KAAK,KAAK,QAAQ,YAAY;AAC7C,SAAK,QAAQ,QAAQ,SAAS;AAC9B,SAAK,UAAU,QAAQ,WAAW;AAClC,SAAK,WAAW,QAAQ,YAAY;AAEpC,OAAG,UAAU,QAAQ,EAAE,WAAW,KAAK,CAAC;AACxC,SAAK,KAAK,GAAG,SAAS,KAAK,SAAS,GAAG;AACvC,QAAI;AACF,WAAK,cAAc,GAAG,UAAU,KAAK,EAAE,EAAE;AAAA,IAC3C,QAAQ;AACN,WAAK,cAAc;AAAA,IACrB;AAAA,EACF;AAAA,EAEA,aAAa,IAAwB;AACnC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,MAAc,SAAiB,MAAsC;AACzE,SAAK,MAAM,SAAS,MAAM,SAAS,IAAI;AAAA,EACzC;AAAA,EAEA,KAAK,MAAc,SAAiB,MAAsC;AACxE,SAAK,MAAM,QAAQ,MAAM,SAAS,IAAI;AAAA,EACxC;AAAA,EAEA,KAAK,MAAc,SAAiB,MAAsC;AACxE,SAAK,MAAM,QAAQ,MAAM,SAAS,IAAI;AAAA,EACxC;AAAA,EAEA,MAAM,MAAc,SAAiB,MAAsC;AACzE,SAAK,MAAM,SAAS,MAAM,SAAS,IAAI;AAAA,EACzC;AAAA;AAAA,EAGA,IAAI,OAAe,MAAc,SAAiB,MAAsC;AACtF,QAAI,SAAS,aAAa;AACxB,WAAK,MAAM,OAAmB,MAAM,SAAS,IAAI;AAAA,IACnD;AAAA,EACF;AAAA,EAEA,QAAc;AACZ,QAAI,KAAK,OAAO,MAAM;AACpB,SAAG,UAAU,KAAK,EAAE;AACpB,WAAK,KAAK;AAAA,IACZ;AAAA,EACF;AAAA,EAEQ,MAAM,OAAiB,MAAc,SAAiB,MAAsC;AAClG,QAAI,YAAY,KAAK,IAAI,YAAY,KAAK,KAAK,EAAG;AAElD,UAAM,QAAkB;AAAA,MACtB,YAAW,oBAAI,KAAK,GAAE,YAAY;AAAA,MAClC;AAAA,MACA;AAAA,MACA,WAAW,gBAAgB,IAAI;AAAA,MAC/B;AAAA,MACA,GAAG;AAAA,IACL;AAEA,QAAI,KAAK,cAAc,MAAM;AAC3B,UAAI;AACF,aAAK,UAAU,KAAK;AAAA,MACtB,QAAQ;AAAA,MAER;AAAA,IACF;AAEA,UAAM,OAAO,KAAK,UAAU,KAAK,IAAI;AACrC,UAAM,QAAQ,OAAO,WAAW,IAAI;AAEpC,QAAI,KAAK,cAAc,QAAQ,KAAK,SAAS;AAC3C,WAAK,OAAO;AAAA,IACd;AAEA,QAAI,KAAK,OAAO,MAAM;AACpB,SAAG,UAAU,KAAK,IAAI,IAAI;AAC1B,WAAK,eAAe;AAAA,IACtB;AAAA,EACF;AAAA,EAEQ,SAAe;AACrB,SAAK,MAAM;AAEX,aAAS,IAAI,KAAK,WAAW,GAAG,KAAK,GAAG,KAAK;AAC3C,YAAM,OAAO,KAAK,KAAK,KAAK,QAAQ,UAAU,CAAC,MAAM;AACrD,YAAM,KAAK,KAAK,KAAK,KAAK,QAAQ,UAAU,IAAI,CAAC,MAAM;AACvD,UAAI,GAAG,WAAW,IAAI,GAAG;AACvB,YAAI,IAAI,IAAI,KAAK,UAAU;AACzB,aAAG,WAAW,IAAI;AAAA,QACpB
,OAAO;AACL,aAAG,WAAW,MAAM,EAAE;AAAA,QACxB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,GAAG,WAAW,KAAK,OAAO,GAAG;AAC/B,SAAG,WAAW,KAAK,SAAS,KAAK,KAAK,KAAK,QAAQ,cAAc,CAAC;AAAA,IACpE;AAEA,SAAK,KAAK,GAAG,SAAS,KAAK,SAAS,GAAG;AACvC,SAAK,cAAc;AAAA,EACrB;AACF;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/config/updates.ts"],"sourcesContent":["import type { MycoConfig, EmbeddingProviderConfig, ContextConfig, TaskProviderOverride, PhaseOverride } from './schema.js';\n\n/**\n * Set a value at a dot-separated path, returning a new config object.\n * Creates intermediate objects along the path as needed.\n */\nexport function withValue(config: MycoConfig, dotPath: string, value: unknown): MycoConfig {\n const clone = structuredClone(config) as MycoConfig & Record<string, unknown>;\n const segments = dotPath.split('.');\n let current: Record<string, unknown> = clone;\n\n for (let i = 0; i < segments.length - 1; i++) {\n const segment = segments[i];\n if (current[segment] === undefined || current[segment] === null || typeof current[segment] !== 'object') {\n current[segment] = {};\n }\n current = current[segment] as Record<string, unknown>;\n }\n\n current[segments[segments.length - 1]] = value;\n\n return clone as MycoConfig;\n}\n\n/** Provider override shape used in task config updates. Null means delete. */\ninterface ProviderInput {\n type: 'anthropic' | 'ollama' | 'lmstudio';\n model?: string;\n base_url?: string;\n context_length?: number;\n}\n\n/** Phase override input. Null fields mean delete. */\ninterface PhaseInput {\n provider?: ProviderInput | null;\n model?: string | null;\n maxTurns?: number | null;\n}\n\n/** Input shape for task config updates. Null values mean \"delete this field\". */\nexport interface TaskConfigUpdate {\n provider?: ProviderInput | null;\n model?: string | null;\n maxTurns?: number | null;\n timeoutSeconds?: number | null;\n phases?: Record<string, PhaseInput | null> | null;\n schedule?: { enabled?: boolean; intervalSeconds?: number; runIn?: ('active' | 'idle' | 'sleep')[] } | null;\n}\n\n/**\n * Apply partial task config updates, returning a new config object.\n * Null values delete fields. Empty task entries and phase maps are cleaned up.\n */\nexport function withTaskConfig(\n config: MycoConfig,\n taskId: string,\n update: TaskConfigUpdate,\n): MycoConfig {\n const tasks = { ...(config.agent.tasks ?? {}) };\n const entry: TaskProviderOverride = { ...(tasks[taskId] ?? {}) };\n\n // Apply top-level fields\n if ('provider' in update) {\n if (update.provider === null) {\n delete entry.provider;\n } else if (update.provider !== undefined) {\n entry.provider = { ...update.provider };\n }\n }\n\n if ('model' in update) {\n if (update.model === null) delete entry.model;\n else if (update.model !== undefined) entry.model = update.model;\n }\n\n if ('maxTurns' in update) {\n if (update.maxTurns === null) delete entry.maxTurns;\n else if (update.maxTurns !== undefined) entry.maxTurns = update.maxTurns;\n }\n\n if ('timeoutSeconds' in update) {\n if (update.timeoutSeconds === null) delete entry.timeoutSeconds;\n else if (update.timeoutSeconds !== undefined) entry.timeoutSeconds = update.timeoutSeconds;\n }\n\n // Handle schedule\n if ('schedule' in update) {\n if (update.schedule === null) {\n delete entry.schedule;\n } else if (update.schedule !== undefined) {\n entry.schedule = { ...entry.schedule, ...update.schedule };\n }\n }\n\n // Apply phase overrides\n if ('phases' in update) {\n if (update.phases === null) {\n delete entry.phases;\n } else if (update.phases !== undefined) {\n const phases: Record<string, PhaseOverride> = { ...(entry.phases ?? 
{}) };\n\n for (const [phaseName, phaseValue] of Object.entries(update.phases)) {\n if (phaseValue === null) {\n delete phases[phaseName];\n } else {\n const pe: PhaseOverride = { ...(phases[phaseName] ?? {}) };\n if ('provider' in phaseValue) {\n if (phaseValue.provider === null) delete pe.provider;\n else if (phaseValue.provider !== undefined) pe.provider = { ...phaseValue.provider };\n }\n if ('model' in phaseValue) {\n if (phaseValue.model === null) delete pe.model;\n else if (phaseValue.model !== undefined) pe.model = phaseValue.model;\n }\n if ('maxTurns' in phaseValue) {\n if (phaseValue.maxTurns === null) delete pe.maxTurns;\n else if (phaseValue.maxTurns !== undefined) pe.maxTurns = phaseValue.maxTurns;\n }\n phases[phaseName] = pe;\n }\n }\n\n // Clean up empty phases map\n if (Object.keys(phases).length === 0) {\n delete entry.phases;\n } else {\n entry.phases = phases;\n }\n }\n }\n\n // Clean up empty task entry\n if (Object.keys(entry).length === 0) {\n delete tasks[taskId];\n } else {\n tasks[taskId] = entry;\n }\n\n return {\n ...config,\n agent: {\n ...config.agent,\n tasks: Object.keys(tasks).length > 0 ? tasks : undefined,\n },\n };\n}\n\n/**\n * Merge partial embedding updates into config, returning a new config object.\n */\nexport function withEmbedding(\n config: MycoConfig,\n updates: Partial<EmbeddingProviderConfig>,\n): MycoConfig {\n return {\n ...config,\n embedding: { ...config.embedding, ...updates },\n };\n}\n\n/**\n * Merge partial context injection updates into config, returning a new config object.\n */\nexport function withContext(\n config: MycoConfig,\n updates: Partial<ContextConfig>,\n): MycoConfig {\n return {\n ...config,\n context: { ...config.context, ...updates },\n };\n}\n"],"mappings":";;;AAMO,SAAS,UAAU,QAAoB,SAAiB,OAA4B;AACzF,QAAM,QAAQ,gBAAgB,MAAM;AACpC,QAAM,WAAW,QAAQ,MAAM,GAAG;AAClC,MAAI,UAAmC;AAEvC,WAAS,IAAI,GAAG,IAAI,SAAS,SAAS,GAAG,KAAK;AAC5C,UAAM,UAAU,SAAS,CAAC;AAC1B,QAAI,QAAQ,OAAO,MAAM,UAAa,QAAQ,OAAO,MAAM,QAAQ,OAAO,QAAQ,OAAO,MAAM,UAAU;AACvG,cAAQ,OAAO,IAAI,CAAC;AAAA,IACtB;AACA,cAAU,QAAQ,OAAO;AAAA,EAC3B;AAEA,UAAQ,SAAS,SAAS,SAAS,CAAC,CAAC,IAAI;AAEzC,SAAO;AACT;AA+BO,SAAS,eACd,QACA,QACA,QACY;AACZ,QAAM,QAAQ,EAAE,GAAI,OAAO,MAAM,SAAS,CAAC,EAAG;AAC9C,QAAM,QAA8B,EAAE,GAAI,MAAM,MAAM,KAAK,CAAC,EAAG;AAG/D,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,MAAM;AAC5B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,aAAa,QAAW;AACxC,YAAM,WAAW,EAAE,GAAG,OAAO,SAAS;AAAA,IACxC;AAAA,EACF;AAEA,MAAI,WAAW,QAAQ;AACrB,QAAI,OAAO,UAAU,KAAM,QAAO,MAAM;AAAA,aAC/B,OAAO,UAAU,OAAW,OAAM,QAAQ,OAAO;AAAA,EAC5D;AAEA,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,KAAM,QAAO,MAAM;AAAA,aAClC,OAAO,aAAa,OAAW,OAAM,WAAW,OAAO;AAAA,EAClE;AAEA,MAAI,oBAAoB,QAAQ;AAC9B,QAAI,OAAO,mBAAmB,KAAM,QAAO,MAAM;AAAA,aACxC,OAAO,mBAAmB,OAAW,OAAM,iBAAiB,OAAO;AAAA,EAC9E;AAGA,MAAI,cAAc,QAAQ;AACxB,QAAI,OAAO,aAAa,MAAM;AAC5B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,aAAa,QAAW;AACxC,YAAM,WAAW,EAAE,GAAG,MAAM,UAAU,GAAG,OAAO,SAAS;AAAA,IAC3D;AAAA,EACF;AAGA,MAAI,YAAY,QAAQ;AACtB,QAAI,OAAO,WAAW,MAAM;AAC1B,aAAO,MAAM;AAAA,IACf,WAAW,OAAO,WAAW,QAAW;AACtC,YAAM,SAAwC,EAAE,GAAI,MAAM,UAAU,CAAC,EAAG;AAExE,iBAAW,CAAC,WAAW,UAAU,KAAK,OAAO,QAAQ,OAAO,MAAM,GAAG;AACnE,YAAI,eAAe,MAAM;AACvB,iBAAO,OAAO,SAAS;AAAA,QACzB,OAAO;AACL,gBAAM,KAAoB,EAAE,GAAI,OAAO,SAAS,KAAK,CAAC,EAAG;AACzD,cAAI,cAAc,YAAY;AAC5B,gBAAI,WAAW,aAAa,KAAM,QAAO,GAAG;AAAA,qBACnC,WAAW,aAAa,OAAW,IAAG,WAAW,EAAE,GAAG,WAAW,SAAS;AAAA,UACrF;AACA,cAAI,WAAW,YAAY;AACzB,gBAAI,WAAW,UAAU,KAAM,QAAO,GAAG;AAAA,qBAChC,WAAW,UAAU,OAAW,IAAG,QAAQ,WAAW;AAAA,UACjE;AACA,cAAI,cAAc,YAAY;AAC5B,gBAAI,WAAW,aAAa,KAAM,QAAO,GAAG;AAAA,qBACnC,WAAW,aAAa,OAAW,IAAG
,WAAW,WAAW;AAAA,UACvE;AACA,iBAAO,SAAS,IAAI;AAAA,QACtB;AAAA,MACF;AAGA,UAAI,OAAO,KAAK,MAAM,EAAE,WAAW,GAAG;AACpC,eAAO,MAAM;AAAA,MACf,OAAO;AACL,cAAM,SAAS;AAAA,MACjB;AAAA,IACF;AAAA,EACF;AAGA,MAAI,OAAO,KAAK,KAAK,EAAE,WAAW,GAAG;AACnC,WAAO,MAAM,MAAM;AAAA,EACrB,OAAO;AACL,UAAM,MAAM,IAAI;AAAA,EAClB;AAEA,SAAO;AAAA,IACL,GAAG;AAAA,IACH,OAAO;AAAA,MACL,GAAG,OAAO;AAAA,MACV,OAAO,OAAO,KAAK,KAAK,EAAE,SAAS,IAAI,QAAQ;AAAA,IACjD;AAAA,EACF;AACF;AAKO,SAAS,cACd,QACA,SACY;AACZ,SAAO;AAAA,IACL,GAAG;AAAA,IACH,WAAW,EAAE,GAAG,OAAO,WAAW,GAAG,QAAQ;AAAA,EAC/C;AACF;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/cli/shared.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport os from 'node:os';\nimport { OllamaBackend } from '../intelligence/ollama.js';\nimport { LmStudioBackend } from '../intelligence/lm-studio.js';\n\nimport { DaemonClient } from '../hooks/client.js';\nimport { initDatabase, closeDatabase, vaultDbPath } from '../db/client.js';\nimport { SymbiontInstaller } from '../symbionts/installer.js';\nimport type { SymbiontManifest } from '../symbionts/manifest-schema.js';\n\nexport { parseStringFlag, parseIntFlag } from '../logs/format.js';\n\n/**\n * Initialize the singleton database for direct CLI reads.\n * Used by CLI commands that only need reads (stats, search, session).\n * Does NOT require the daemon to be running — WAL mode allows concurrent reads.\n *\n * @returns a cleanup function that closes the database.\n */\nexport function initVaultDb(vaultDir: string): () => void {\n initDatabase(vaultDbPath(vaultDir));\n return closeDatabase;\n}\n\n/** Connect to the daemon, ensuring it's running. Exits on failure. */\nexport async function connectToDaemon(vaultDir: string): Promise<DaemonClient> {\n const client = new DaemonClient(vaultDir);\n const healthy = await client.ensureRunning();\n if (!healthy) {\n console.error('Failed to connect to daemon');\n process.exit(1);\n }\n return client;\n}\n\n/** Load .env from cwd (not script location — that's the plugin install dir). */\nexport function loadEnv(): void {\n const envPath = path.resolve(process.cwd(), '.env');\n if (!fs.existsSync(envPath)) return;\n for (const line of fs.readFileSync(envPath, 'utf-8').split('\\n')) {\n const match = line.match(/^\\s*([^#=]+?)\\s*=\\s*(.*?)\\s*$/);\n if (match && !process.env[match[1]]) {\n process.env[match[1]] = match[2];\n }\n }\n}\n\nexport function isProcessAlive(pid: number): boolean {\n try { process.kill(pid, 0); return true; } catch { return false; }\n}\n\n// --- Provider defaults (sourced from backend classes) ---\nexport const PROVIDER_DEFAULTS: Record<string, { base_url: string }> = {\n ollama: { base_url: OllamaBackend.DEFAULT_BASE_URL },\n 'lm-studio': { base_url: LmStudioBackend.DEFAULT_BASE_URL },\n};\n\n\nexport const VAULT_GITIGNORE = `# SQLite database\nmyco.db*\nvectors.db*\n\n# Daemon state — per-machine, ephemeral\ndaemon.json\nbuffer/\nlogs/\n\n# Secrets — API keys for cloud providers\nsecrets.env\n\n# Machine ID\nmachine_id\n\n# Update tracking — per-machine state\nlast-update-version\nrestart-reason.json\n\n# Binary attachments — screenshots captured from transcripts\nattachments/\n\n# Team sync admin state + worker deployment\nteam/\n\n# Staged files generated by the myco agent\nstaging/\n\n# Runtime command alias — per-contributor override for which myco binary\n# the hook guard invokes. Default (file absent) is \\`myco\\`; \\`make dev-link\\`\n# writes \\`myco-dev\\`; users can hand-edit for PATH conflicts or pinning.\n# Never committed — different contributors use different aliases.\nruntime.command\n`;\n\n/** Collapse an absolute home-dir path to its `~/` form for portable config storage. 
*/\nexport function collapseHomePath(absPath: string): string {\n const home = os.homedir();\n if (absPath.startsWith(home + path.sep) || absPath === home) {\n return '~' + absPath.slice(home.length);\n }\n return absPath;\n}\n\n/**\n * Run the SymbiontInstaller for each symbiont manifest and log results.\n * Shared between myco init and myco update.\n */\nexport function registerSymbionts(\n manifests: SymbiontManifest[],\n projectRoot: string,\n packageRoot: string,\n verb: 'Registered' | 'Updated',\n): number {\n let count = 0;\n for (const manifest of manifests) {\n try {\n const installer = new SymbiontInstaller(manifest, projectRoot, packageRoot);\n const result = installer.install();\n\n const installed = [\n result.hooks && 'hooks',\n result.mcp && 'MCP server',\n result.skills && 'skills',\n result.settings && 'settings',\n result.instructions && 'instructions',\n result.pluginPackage && 'plugin deps',\n ].filter(Boolean);\n\n if (installed.length > 0) {\n console.log(` \\u2713 ${verb} ${manifest.displayName}: ${installed.join(', ')}`);\n count++;\n } else {\n console.log(` \\u2013 ${manifest.displayName}: no registration targets configured`);\n }\n } catch (err) {\n console.error(` \\u2717 Failed to register ${manifest.displayName}: ${(err as Error).message}`);\n }\n }\n return count;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA,OAAO,QAAQ;AACf,OAAO,UAAU;AACjB,OAAO,QAAQ;AAkBR,SAAS,YAAY,UAA8B;AACxD,eAAa,YAAY,QAAQ,CAAC;AAClC,SAAO;AACT;AAGA,eAAsB,gBAAgB,UAAyC;AAC7E,QAAM,SAAS,IAAI,aAAa,QAAQ;AACxC,QAAM,UAAU,MAAM,OAAO,cAAc;AAC3C,MAAI,CAAC,SAAS;AACZ,YAAQ,MAAM,6BAA6B;AAC3C,YAAQ,KAAK,CAAC;AAAA,EAChB;AACA,SAAO;AACT;AAGO,SAAS,UAAgB;AAC9B,QAAM,UAAU,KAAK,QAAQ,QAAQ,IAAI,GAAG,MAAM;AAClD,MAAI,CAAC,GAAG,WAAW,OAAO,EAAG;AAC7B,aAAW,QAAQ,GAAG,aAAa,SAAS,OAAO,EAAE,MAAM,IAAI,GAAG;AAChE,UAAM,QAAQ,KAAK,MAAM,+BAA+B;AACxD,QAAI,SAAS,CAAC,QAAQ,IAAI,MAAM,CAAC,CAAC,GAAG;AACnC,cAAQ,IAAI,MAAM,CAAC,CAAC,IAAI,MAAM,CAAC;AAAA,IACjC;AAAA,EACF;AACF;AAEO,SAAS,eAAe,KAAsB;AACnD,MAAI;AAAE,YAAQ,KAAK,KAAK,CAAC;AAAG,WAAO;AAAA,EAAM,QAAQ;AAAE,WAAO;AAAA,EAAO;AACnE;AAGO,IAAM,oBAA0D;AAAA,EACrE,QAAQ,EAAE,UAAU,cAAc,iBAAiB;AAAA,EACnD,aAAa,EAAE,UAAU,gBAAgB,iBAAiB;AAC5D;AAGO,IAAM,kBAAkB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAgDxB,SAAS,kBACd,WACA,aACA,aACA,MACQ;AACR,MAAI,QAAQ;AACZ,aAAW,YAAY,WAAW;AAChC,QAAI;AACF,YAAM,YAAY,IAAI,kBAAkB,UAAU,aAAa,WAAW;AAC1E,YAAM,SAAS,UAAU,QAAQ;AAEjC,YAAM,YAAY;AAAA,QAChB,OAAO,SAAS;AAAA,QAChB,OAAO,OAAO;AAAA,QACd,OAAO,UAAU;AAAA,QACjB,OAAO,YAAY;AAAA,QACnB,OAAO,gBAAgB;AAAA,QACvB,OAAO,iBAAiB;AAAA,MAC1B,EAAE,OAAO,OAAO;AAEhB,UAAI,UAAU,SAAS,GAAG;AACxB,gBAAQ,IAAI,YAAY,IAAI,IAAI,SAAS,WAAW,KAAK,UAAU,KAAK,IAAI,CAAC,EAAE;AAC/E;AAAA,MACF,OAAO;AACL,gBAAQ,IAAI,YAAY,SAAS,WAAW,sCAAsC;AAAA,MACpF;AAAA,IACF,SAAS,KAAK;AACZ,cAAQ,MAAM,+BAA+B,SAAS,WAAW,KAAM,IAAc,OAAO,EAAE;AAAA,IAChG;AAAA,EACF;AACA,SAAO;AACT;","names":[]}
@@ -1 +0,0 @@
- {"version":3,"sources":["../src/config/loader.ts","../src/config/schema.ts","../src/config/migrations.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport YAML from 'yaml';\nimport { MycoConfigSchema, type MycoConfig, type BackupConfig, type TeamConfig } from './schema.js';\nimport { runMigrations, CURRENT_MIGRATION_VERSION } from './migrations.js';\n\nexport const CONFIG_FILENAME = 'myco.yaml';\n\nexport function loadConfig(vaultDir: string): MycoConfig {\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n\n if (!fs.existsSync(configPath)) {\n throw new Error(`myco.yaml not found in ${vaultDir}`);\n }\n\n const raw = fs.readFileSync(configPath, 'utf-8');\n const parsed = YAML.parse(raw) as Record<string, unknown>;\n\n // Detect v1 config and guide migration\n if (parsed.version === 1 || (parsed.intelligence as Record<string, unknown>)?.backend) {\n throw new Error(\n 'Myco config uses v1 format. Run /myco:setup-llm to reconfigure for v2.',\n );\n }\n\n // --- v2 → v3 migration ---\n let v2Migrated = false;\n if (parsed.version === 2) {\n // Extract intelligence.embedding to top-level embedding\n const intel = parsed.intelligence as Record<string, unknown> | undefined;\n const embeddingConfig = intel?.embedding as Record<string, unknown> | undefined;\n if (embeddingConfig && !parsed.embedding) {\n // Map v2 'lm-studio' to v3 'openai-compatible' for embedding provider\n if (embeddingConfig.provider === 'lm-studio') {\n embeddingConfig.provider = 'openai-compatible';\n }\n parsed.embedding = embeddingConfig;\n }\n\n // Keep daemon.port and daemon.log_level, drop grace_period and max_log_size\n const daemon = parsed.daemon as Record<string, unknown> | undefined;\n if (daemon) {\n const { port, log_level } = daemon;\n parsed.daemon = { port: port ?? null, log_level: log_level ?? 'info' };\n }\n\n // Keep capture basics, drop token-related fields; migrate artifact_watch → plan_dirs\n const capture = parsed.capture as Record<string, unknown> | undefined;\n if (capture) {\n const { transcript_paths, artifact_watch, plan_dirs, artifact_extensions, buffer_max_events } = capture;\n parsed.capture = {\n transcript_paths,\n plan_dirs: plan_dirs ?? artifact_watch,\n artifact_extensions,\n buffer_max_events,\n };\n }\n\n // Drop removed top-level sections\n delete parsed.intelligence;\n delete parsed.context;\n delete parsed.team;\n delete parsed.digest;\n delete parsed.pipeline;\n\n // Set version to 3\n parsed.version = 3;\n v2Migrated = true;\n\n process.stderr.write('[myco migration] Migrated config from v2 to v3\\n');\n }\n\n // Run numbered migrations (for v3+ forward migrations)\n const migrationsRan = runMigrations(parsed, vaultDir, (msg) => {\n process.stderr.write(`[myco migration] ${msg}\\n`);\n });\n\n // Parse with Zod to fill in defaults for new config sections\n const config = MycoConfigSchema.parse(parsed);\n\n // Write back if v2→v3 migration ran, numbered migrations ran, or new defaults were added\n const needsWrite = v2Migrated\n || migrationsRan\n || (parsed.config_version as number ?? 
0) < CURRENT_MIGRATION_VERSION\n || parsed.version !== config.version;\n\n if (needsWrite) {\n const fullConfig = JSON.parse(JSON.stringify(config)) as Record<string, unknown>;\n fs.writeFileSync(configPath, YAML.stringify(fullConfig), 'utf-8');\n }\n\n return config;\n}\n\nexport function saveConfig(vaultDir: string, config: MycoConfig): void {\n // Validate before writing — OAK lesson: validate on write, not just read\n const validated = MycoConfigSchema.parse(config);\n\n const configPath = path.join(vaultDir, CONFIG_FILENAME);\n fs.mkdirSync(vaultDir, { recursive: true });\n fs.writeFileSync(configPath, YAML.stringify(validated), 'utf-8');\n}\n\nexport function updateConfig(\n vaultDir: string,\n fn: (config: MycoConfig) => MycoConfig,\n): MycoConfig {\n const current = loadConfig(vaultDir);\n const updated = fn(current);\n saveConfig(vaultDir, updated);\n return updated;\n}\n\nexport function updateBackupConfig(\n vaultDir: string,\n backup: Partial<BackupConfig>,\n): MycoConfig {\n return updateConfig(vaultDir, (config) => ({\n ...config,\n backup: { ...config.backup, ...backup },\n }));\n}\n\n/**\n * Extract the set of enabled symbiont names from config.\n * Returns null when the `symbionts` section is absent (pre-existing installs),\n * signalling callers to fall back to their own heuristic.\n */\nexport function getEnabledSymbiontNames(config: MycoConfig): Set<string> | null {\n if (!config.symbionts) return null;\n return new Set(\n Object.entries(config.symbionts)\n .filter(([, entry]) => entry.enabled)\n .map(([name]) => name),\n );\n}\n\nexport function updateTeamConfig(\n vaultDir: string,\n team: Partial<TeamConfig>,\n): MycoConfig {\n return updateConfig(vaultDir, (config) => ({\n ...config,\n team: { ...config.team, ...team },\n }));\n}\n","import { z } from 'zod';\nimport { SCHEDULABLE_POWER_STATES } from '@myco/constants.js';\n\nconst EmbeddingProviderSchema = z.object({\n provider: z.enum(['ollama', 'openai-compatible', 'openrouter', 'openai']).default('ollama'),\n model: z.string().default('bge-m3'),\n base_url: z.string().url().optional(),\n});\n\nconst DaemonSchema = z.object({\n port: z.number().int().min(1024).max(65535).nullable().default(null),\n log_level: z.enum(['debug', 'info', 'warn', 'error']).default('info'),\n log_retention_days: z.number().int().min(1).max(365).default(30),\n /**\n * Time without new prompts before an active session is auto-completed (ms).\n * Intelligence tasks (full-intelligence, skill-survey, etc.) only process\n * settled sessions, so this threshold directly controls how fresh their\n * inputs are. Defaults to 1 hour.\n */\n stale_session_threshold_ms: z.number().int().min(60_000).default(60 * 60 * 1000),\n});\n\nconst CaptureSchema = z.object({\n transcript_paths: z.array(z.string()).default([]),\n plan_dirs: z.array(z.string()).default([]),\n ignore_plan_dirs_in_git: z.boolean().default(false),\n artifact_extensions: z.array(z.string()).default(['.md']),\n buffer_max_events: z.number().int().positive().default(500),\n});\n\n/** Provider config shape used in both task-level and phase-level overrides. */\nconst ProviderOverrideSchema = z.object({\n type: z.enum(['anthropic', 'ollama', 'lmstudio']),\n base_url: z.string().optional(),\n model: z.string().optional(),\n /** Context window size for local models (Ollama num_ctx, LM Studio context_length). */\n context_length: z.number().int().positive().optional(),\n});\n\n/** Per-phase overrides within a task — keyed by phase name. 
*/\nconst PhaseOverrideSchema = z.object({\n provider: ProviderOverrideSchema.optional(),\n model: z.string().optional(),\n maxTurns: z.number().int().positive().optional(),\n});\n\n/** Per-task schedule override — partial, merges with YAML defaults. */\nconst ScheduleOverrideSchema = z.object({\n enabled: z.boolean().optional(),\n intervalSeconds: z.number().int().positive().optional(),\n runIn: z.array(z.enum([...SCHEDULABLE_POWER_STATES])).optional(),\n preCondition: z.enum(['has-unprocessed-batches', 'has-active-skills', 'has-approved-candidates']).optional(),\n}).optional();\n\n/** Per-task config override — stored in myco.yaml under agent.tasks. */\nconst TaskProviderOverrideSchema = z.object({\n provider: ProviderOverrideSchema.optional(),\n model: z.string().optional(),\n maxTurns: z.number().int().positive().optional(),\n timeoutSeconds: z.number().int().positive().optional(),\n phases: z.record(z.string(), PhaseOverrideSchema).optional(),\n schedule: ScheduleOverrideSchema,\n /** Task-specific params — keys and types vary per task. */\n params: z.record(z.string(), z.union([z.string(), z.number(), z.boolean()])).optional(),\n});\n\nconst ContextSchema = z.object({\n /** Which digest tier to inject at session start. */\n digest_tier: z.number().int().default(5000),\n /** Enable semantic spore search on each user prompt. */\n prompt_search: z.boolean().default(true),\n /** Max spores to inject per prompt (0-10). */\n prompt_max_spores: z.number().int().min(0).max(10).default(3),\n});\n\nconst AgentSchema = z.object({\n /** Number of batches between event-driven summary triggers (0 to disable). */\n summary_batch_interval: z.number().int().min(0).default(5),\n /** Global toggle for PowerManager-scheduled agent tasks. */\n scheduled_tasks_enabled: z.boolean().default(true),\n /** Global toggle for event-driven agent tasks (title-summary). */\n event_tasks_enabled: z.boolean().default(true),\n /** Global default provider — applies to all tasks unless overridden per-task. */\n provider: ProviderOverrideSchema.optional(),\n /** Global default model — applies to all tasks unless overridden per-task. */\n model: z.string().optional(),\n /** Per-task overrides keyed by task name. */\n tasks: z.record(z.string(), TaskProviderOverrideSchema).optional(),\n});\n\nconst BackupSchema = z.object({\n /** Override directory for backup files. Supports ~ for home directory. When unset, defaults to .myco/backups. */\n dir: z.string().optional(),\n});\n\nconst MaintenanceSchema = z.object({\n /** Automatically run PRAGMA optimize on a schedule. */\n auto_optimize: z.boolean().default(true),\n /** How often to run auto-optimize, in hours (1–720). */\n auto_optimize_interval_hours: z.number().int().min(1).max(720).default(24),\n});\n\nconst TeamSchema = z.object({\n /** Whether team sync is enabled. */\n enabled: z.boolean().default(false),\n /** Cloudflare Worker URL for team sync. */\n worker_url: z.string().url().optional(),\n /** Team identifier for sync grouping. */\n team_id: z.string().optional(),\n /** Sync interval in minutes. */\n interval_minutes: z.number().int().min(1).max(1440).default(15),\n});\n\nconst SkillsSchema = z.object({\n /** Auto-generate candidates above this confidence score. */\n confidence_threshold: z.number().min(0).max(1).default(0.7),\n /** Flag unused skills after this many days. */\n usage_stale_days: z.number().int().positive().default(30),\n});\n\nconst NotificationsSchema = z.object({\n /** Master switch — disables all notifications when false. 
*/\n enabled: z.boolean().default(true),\n /** Allow browser system notifications (Notification API). */\n system_notifications: z.boolean().default(false),\n /** Default display mode for new notification types. */\n default_mode: z.enum(['banner', 'summary']).default('summary'),\n /** Per-domain settings. Keys are domain names from the registry. */\n domains: z.record(z.string(), z.object({\n enabled: z.boolean().default(true),\n /** Override display mode for this domain. Omit to use global default_mode. */\n mode: z.enum(['banner', 'summary']).optional(),\n })).default({}),\n});\n\nconst SymbiontEntrySchema = z.object({\n enabled: z.boolean().default(true),\n});\n\nexport const MycoConfigSchema = z.preprocess(\n (raw: unknown) => {\n if (raw && typeof raw === 'object' && 'curation' in raw && !('agent' in raw)) {\n const { curation, ...rest } = raw as Record<string, unknown>;\n return { ...rest, agent: curation };\n }\n return raw;\n },\n z.object({\n version: z.literal(3),\n config_version: z.number().int().nonnegative().default(0),\n embedding: EmbeddingProviderSchema.default(() => EmbeddingProviderSchema.parse({})),\n daemon: DaemonSchema.default(() => DaemonSchema.parse({})),\n capture: CaptureSchema.default(() => CaptureSchema.parse({})),\n agent: AgentSchema.default(() => AgentSchema.parse({})),\n context: ContextSchema.default(() => ContextSchema.parse({})),\n backup: BackupSchema.default(() => BackupSchema.parse({})),\n maintenance: MaintenanceSchema.default(() => MaintenanceSchema.parse({})),\n team: TeamSchema.default(() => TeamSchema.parse({})),\n skills: SkillsSchema.default(() => SkillsSchema.parse({})),\n notifications: NotificationsSchema.default(() => NotificationsSchema.parse({})),\n symbionts: z.record(z.string(), SymbiontEntrySchema).optional(),\n }),\n);\n\nexport type MycoConfig = z.output<typeof MycoConfigSchema>;\nexport type EmbeddingProviderConfig = z.infer<typeof EmbeddingProviderSchema>;\nexport type TaskProviderOverride = z.infer<typeof TaskProviderOverrideSchema>;\nexport type PhaseOverride = z.infer<typeof PhaseOverrideSchema>;\nexport type ScheduleOverride = z.infer<typeof ScheduleOverrideSchema>;\nexport type ContextConfig = z.infer<typeof ContextSchema>;\nexport type BackupConfig = z.infer<typeof BackupSchema>;\nexport type TeamConfig = z.infer<typeof TeamSchema>;\nexport type SkillsConfig = z.infer<typeof SkillsSchema>;\nexport type NotificationsConfig = z.infer<typeof NotificationsSchema>;\nexport type SymbiontEntry = z.infer<typeof SymbiontEntrySchema>;\n","/**\n * Config and vault migrations — run once per version, tracked by config_version.\n *\n * Each migration has a version number, a name, and a function that receives\n * the raw parsed YAML doc and the vault directory. Migrations run in order\n * and are skipped if config_version is already past them.\n *\n * To add a new migration:\n * 1. Add an entry to MIGRATIONS with the next version number\n * 2. Write the migrate function — it receives the mutable doc and vaultDir\n * 3. 
The framework handles version tracking and writing the config back\n */\n\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nexport interface Migration {\n version: number;\n name: string;\n migrate: (doc: Record<string, unknown>, vaultDir: string) => void;\n}\n\n/** Regex matching both quoted and unquoted YAML: type: memory, type: \"memory\", type: 'memory' */\nconst MEMORY_TYPE_PATTERN = /type:\\s*[\"']?memory[\"']?/g;\n\nexport const MIGRATIONS: Migration[] = [\n {\n version: 1,\n name: 'rename-memories-to-spores',\n migrate: (doc, vaultDir) => {\n // Config: rename context.layers.memories → context.layers.spores\n const context = doc.context as Record<string, unknown> | undefined;\n const layers = context?.layers as Record<string, unknown> | undefined;\n if (layers && 'memories' in layers && !('spores' in layers)) {\n layers.spores = layers.memories;\n delete layers.memories;\n }\n\n // Vault: rename memories/ directory → spores/\n const memoriesDir = path.join(vaultDir, 'memories');\n const sporesDir = path.join(vaultDir, 'spores');\n\n if (!fs.existsSync(memoriesDir)) return;\n\n if (fs.existsSync(sporesDir)) {\n // Both exist (interrupted migration) — merge remaining files\n const moveRemaining = (srcDir: string, destDir: string): void => {\n for (const entry of fs.readdirSync(srcDir, { withFileTypes: true })) {\n const srcPath = path.join(srcDir, entry.name);\n const destPath = path.join(destDir, entry.name);\n if (entry.isDirectory()) {\n if (!fs.existsSync(destPath)) fs.mkdirSync(destPath, { recursive: true });\n moveRemaining(srcPath, destPath);\n } else if (!fs.existsSync(destPath)) {\n fs.renameSync(srcPath, destPath);\n }\n }\n };\n moveRemaining(memoriesDir, sporesDir);\n fs.rmSync(memoriesDir, { recursive: true, force: true });\n } else {\n fs.renameSync(memoriesDir, sporesDir);\n }\n\n // Update frontmatter type: memory → type: spore (handles quoted and unquoted)\n const walkUpdate = (dir: string): void => {\n for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) { walkUpdate(fullPath); continue; }\n if (!entry.name.endsWith('.md')) continue;\n const content = fs.readFileSync(fullPath, 'utf-8');\n MEMORY_TYPE_PATTERN.lastIndex = 0;\n if (MEMORY_TYPE_PATTERN.test(content)) {\n MEMORY_TYPE_PATTERN.lastIndex = 0;\n fs.writeFileSync(fullPath, content.replace(MEMORY_TYPE_PATTERN, 'type: spore'));\n }\n }\n };\n walkUpdate(sporesDir);\n\n // Legacy: update wikilink references in Markdown files (pre-SQLite migration): [[memories/...]] → [[spores/...]]\n const walkLinks = (dir: string): void => {\n for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {\n const fullPath = path.join(dir, entry.name);\n if (entry.isDirectory()) { walkLinks(fullPath); continue; }\n if (!entry.name.endsWith('.md')) continue;\n const content = fs.readFileSync(fullPath, 'utf-8');\n if (content.includes('memories/')) {\n fs.writeFileSync(fullPath, content.replace(/memories\\//g, 'spores/'));\n }\n }\n };\n walkLinks(vaultDir);\n },\n },\n {\n version: 2,\n name: 'consolidation-boolean-to-object',\n migrate: (doc) => {\n const digest = doc.digest as Record<string, unknown> | undefined;\n if (!digest) return;\n\n const consolidation = digest.consolidation;\n if (typeof consolidation === 'boolean') {\n digest.consolidation = { enabled: consolidation, max_tokens: 2048 };\n }\n },\n },\n {\n version: 3,\n name: 'schedule-to-task-level',\n migrate(doc: Record<string, unknown>, _vaultDir: string): void 
{\n const agent = (doc.agent ?? {}) as Record<string, unknown>;\n const skills = (doc.skills ?? {}) as Record<string, unknown>;\n const tasks = ((agent.tasks ?? {}) as Record<string, Record<string, unknown>>);\n\n /** Default interval for full-intelligence task (5 minutes). */\n const DEFAULT_INTELLIGENCE_INTERVAL_SECONDS = 300;\n\n const VALID_SCHEDULE_STATES = ['active', 'idle', 'sleep'] as const;\n\n // Migrate agent.auto_run + interval_seconds → full-intelligence schedule\n if ('auto_run' in agent || 'interval_seconds' in agent) {\n const fiTask = tasks['full-intelligence'] ?? {};\n fiTask.schedule = {\n enabled: agent.auto_run ?? true,\n intervalSeconds: agent.interval_seconds ?? DEFAULT_INTELLIGENCE_INTERVAL_SECONDS,\n };\n tasks['full-intelligence'] = fiTask;\n delete agent.auto_run;\n delete agent.interval_seconds;\n }\n\n // Migrate skills.auto_survey → skill-survey schedule\n if ('auto_survey' in skills) {\n const ssTask = tasks['skill-survey'] ?? {};\n ssTask.schedule = {\n enabled: skills.auto_survey ?? false,\n };\n tasks['skill-survey'] = ssTask;\n delete skills.auto_survey;\n }\n\n // Migrate skills.auto_evolve + evolve_cadence → skill-evolve schedule\n if ('auto_evolve' in skills || 'evolve_cadence' in skills) {\n const seTask = tasks['skill-evolve'] ?? {};\n const schedule: Record<string, unknown> = {\n enabled: skills.auto_evolve ?? false,\n };\n if ('evolve_cadence' in skills) {\n const cadence = String(skills.evolve_cadence);\n schedule.runIn = VALID_SCHEDULE_STATES.includes(cadence as typeof VALID_SCHEDULE_STATES[number])\n ? [cadence]\n : ['idle']; // fallback to safe default\n }\n seTask.schedule = schedule;\n tasks['skill-evolve'] = seTask;\n delete skills.auto_evolve;\n delete skills.evolve_cadence;\n }\n\n // Write back tasks if any were created\n if (Object.keys(tasks).length > 0) {\n agent.tasks = tasks;\n }\n doc.agent = agent;\n doc.skills = skills;\n },\n },\n {\n version: 4,\n name: 'rename-cloud-provider-to-anthropic',\n migrate(doc: Record<string, unknown>, _vaultDir: string): void {\n // Rename `provider.type: cloud` -> `provider.type: anthropic` everywhere\n // it appears in the agent config: global default, per-task, per-phase.\n const renameProvider = (provider: unknown): void => {\n if (\n provider &&\n typeof provider === 'object' &&\n (provider as Record<string, unknown>).type === 'cloud'\n ) {\n (provider as Record<string, unknown>).type = 'anthropic';\n }\n };\n\n const agent = doc.agent as Record<string, unknown> | undefined;\n if (!agent) return;\n\n // Global default provider\n renameProvider(agent.provider);\n\n // Per-task overrides\n const tasks = agent.tasks as Record<string, Record<string, unknown>> | undefined;\n if (tasks) {\n for (const taskConfig of Object.values(tasks)) {\n renameProvider(taskConfig.provider);\n\n // Per-phase overrides within a task\n const phases = taskConfig.phases as Record<string, Record<string, unknown>> | undefined;\n if (phases) {\n for (const phaseConfig of Object.values(phases)) {\n renameProvider(phaseConfig.provider);\n }\n }\n }\n }\n },\n },\n];\n\n/** Current migration version — the highest version in MIGRATIONS. */\nexport const CURRENT_MIGRATION_VERSION = MIGRATIONS[MIGRATIONS.length - 1]?.version ?? 
0;\n\n/**\n * Run all pending migrations on the raw config doc.\n * Returns true if any migrations ran (caller should reindex).\n */\nexport function runMigrations(\n doc: Record<string, unknown>,\n vaultDir: string,\n log?: (message: string) => void,\n): boolean {\n const currentVersion = (doc.config_version as number) ?? 0;\n let ran = false;\n\n for (const migration of MIGRATIONS) {\n if (migration.version <= currentVersion) continue;\n\n migration.migrate(doc, vaultDir);\n doc.config_version = migration.version;\n ran = true;\n }\n\n if (ran) {\n const from = currentVersion;\n const to = (doc.config_version as number) ?? 0;\n log?.(`Migrated config from v${from} to v${to}`);\n }\n\n return ran;\n}\n"],"mappings":";;;;;;;;;;;;;;;AAEA,kBAAiB;AAFjB,OAAOA,SAAQ;AACf,OAAOC,WAAU;;;ACEjB,IAAM,0BAA0B,iBAAE,OAAO;AAAA,EACvC,UAAU,iBAAE,KAAK,CAAC,UAAU,qBAAqB,cAAc,QAAQ,CAAC,EAAE,QAAQ,QAAQ;AAAA,EAC1F,OAAO,iBAAE,OAAO,EAAE,QAAQ,QAAQ;AAAA,EAClC,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AACtC,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA,EAC5B,MAAM,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,IAAI,EAAE,IAAI,KAAK,EAAE,SAAS,EAAE,QAAQ,IAAI;AAAA,EACnE,WAAW,iBAAE,KAAK,CAAC,SAAS,QAAQ,QAAQ,OAAO,CAAC,EAAE,QAAQ,MAAM;AAAA,EACpE,oBAAoB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG,EAAE,QAAQ,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAO/D,4BAA4B,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,GAAM,EAAE,QAAQ,KAAK,KAAK,GAAI;AACjF,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA,EAC7B,kBAAkB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EAChD,WAAW,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,CAAC;AAAA,EACzC,yBAAyB,iBAAE,QAAQ,EAAE,QAAQ,KAAK;AAAA,EAClD,qBAAqB,iBAAE,MAAM,iBAAE,OAAO,CAAC,EAAE,QAAQ,CAAC,KAAK,CAAC;AAAA,EACxD,mBAAmB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,GAAG;AAC5D,CAAC;AAGD,IAAM,yBAAyB,iBAAE,OAAO;AAAA,EACtC,MAAM,iBAAE,KAAK,CAAC,aAAa,UAAU,UAAU,CAAC;AAAA,EAChD,UAAU,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC9B,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AACvD,CAAC;AAGD,IAAM,sBAAsB,iBAAE,OAAO;AAAA,EACnC,UAAU,uBAAuB,SAAS;AAAA,EAC1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AACjD,CAAC;AAGD,IAAM,yBAAyB,iBAAE,OAAO;AAAA,EACtC,SAAS,iBAAE,QAAQ,EAAE,SAAS;AAAA,EAC9B,iBAAiB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EACtD,OAAO,iBAAE,MAAM,iBAAE,KAAK,CAAC,GAAG,wBAAwB,CAAC,CAAC,EAAE,SAAS;AAAA,EAC/D,cAAc,iBAAE,KAAK,CAAC,2BAA2B,qBAAqB,yBAAyB,CAAC,EAAE,SAAS;AAC7G,CAAC,EAAE,SAAS;AAGZ,IAAM,6BAA6B,iBAAE,OAAO;AAAA,EAC1C,UAAU,uBAAuB,SAAS;AAAA,EAC1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA,EAC3B,UAAU,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EAC/C,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,SAAS;AAAA,EACrD,QAAQ,iBAAE,OAAO,iBAAE,OAAO,GAAG,mBAAmB,EAAE,SAAS;AAAA,EAC3D,UAAU;AAAA;AAAA,EAEV,QAAQ,iBAAE,OAAO,iBAAE,OAAO,GAAG,iBAAE,MAAM,CAAC,iBAAE,OAAO,GAAG,iBAAE,OAAO,GAAG,iBAAE,QAAQ,CAAC,CAAC,CAAC,EAAE,SAAS;AACxF,CAAC;AAED,IAAM,gBAAgB,iBAAE,OAAO;AAAA;AAAA,EAE7B,aAAa,iBAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,GAAI;AAAA;AAAA,EAE1C,eAAe,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAEvC,mBAAmB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,EAAE,EAAE,QAAQ,CAAC;AAC9D,CAAC;AAED,IAAM,cAAc,iBAAE,OAAO;AAAA;AAAA,EAE3B,wBAAwB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,QAAQ,CAAC;AAAA;AAAA,EAEzD,yBAAyB,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAEjD,qBAAqB,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAE7C,UAAU,uBAAuB,SAAS;AAAA;AAAA,EAE1C,OAAO,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE3B,OAAO,iBAAE,OAAO,iBAAE,OAAO,GAAG,0BAA0B,EAAE,SAAS;AACnE,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA;AAAA,EAE5B,KAAK,iBAAE,OAAO,EAAE,SAAS;AAC3B,CAAC;AAED,IAAM,oBAAoB,iBAAE,OAAO;AAAA;AAAA,EAEjC,eAAe,iBAAE,QAAQ
,EAAE,QAAQ,IAAI;AAAA;AAAA,EAEvC,8BAA8B,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,GAAG,EAAE,QAAQ,EAAE;AAC3E,CAAC;AAED,IAAM,aAAa,iBAAE,OAAO;AAAA;AAAA,EAE1B,SAAS,iBAAE,QAAQ,EAAE,QAAQ,KAAK;AAAA;AAAA,EAElC,YAAY,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS;AAAA;AAAA,EAEtC,SAAS,iBAAE,OAAO,EAAE,SAAS;AAAA;AAAA,EAE7B,kBAAkB,iBAAE,OAAO,EAAE,IAAI,EAAE,IAAI,CAAC,EAAE,IAAI,IAAI,EAAE,QAAQ,EAAE;AAChE,CAAC;AAED,IAAM,eAAe,iBAAE,OAAO;AAAA;AAAA,EAE5B,sBAAsB,iBAAE,OAAO,EAAE,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE,QAAQ,GAAG;AAAA;AAAA,EAE1D,kBAAkB,iBAAE,OAAO,EAAE,IAAI,EAAE,SAAS,EAAE,QAAQ,EAAE;AAC1D,CAAC;AAED,IAAM,sBAAsB,iBAAE,OAAO;AAAA;AAAA,EAEnC,SAAS,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,EAEjC,sBAAsB,iBAAE,QAAQ,EAAE,QAAQ,KAAK;AAAA;AAAA,EAE/C,cAAc,iBAAE,KAAK,CAAC,UAAU,SAAS,CAAC,EAAE,QAAQ,SAAS;AAAA;AAAA,EAE7D,SAAS,iBAAE,OAAO,iBAAE,OAAO,GAAG,iBAAE,OAAO;AAAA,IACrC,SAAS,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AAAA;AAAA,IAEjC,MAAM,iBAAE,KAAK,CAAC,UAAU,SAAS,CAAC,EAAE,SAAS;AAAA,EAC/C,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC;AAChB,CAAC;AAED,IAAM,sBAAsB,iBAAE,OAAO;AAAA,EACnC,SAAS,iBAAE,QAAQ,EAAE,QAAQ,IAAI;AACnC,CAAC;AAEM,IAAM,mBAAmB,iBAAE;AAAA,EAChC,CAAC,QAAiB;AAChB,QAAI,OAAO,OAAO,QAAQ,YAAY,cAAc,OAAO,EAAE,WAAW,MAAM;AAC5E,YAAM,EAAE,UAAU,GAAG,KAAK,IAAI;AAC9B,aAAO,EAAE,GAAG,MAAM,OAAO,SAAS;AAAA,IACpC;AACA,WAAO;AAAA,EACT;AAAA,EACA,iBAAE,OAAO;AAAA,IACP,SAAS,iBAAE,QAAQ,CAAC;AAAA,IACpB,gBAAgB,iBAAE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,QAAQ,CAAC;AAAA,IACxD,WAAW,wBAAwB,QAAQ,MAAM,wBAAwB,MAAM,CAAC,CAAC,CAAC;AAAA,IAClF,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,SAAS,cAAc,QAAQ,MAAM,cAAc,MAAM,CAAC,CAAC,CAAC;AAAA,IAC5D,OAAO,YAAY,QAAQ,MAAM,YAAY,MAAM,CAAC,CAAC,CAAC;AAAA,IACtD,SAAS,cAAc,QAAQ,MAAM,cAAc,MAAM,CAAC,CAAC,CAAC;AAAA,IAC5D,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,aAAa,kBAAkB,QAAQ,MAAM,kBAAkB,MAAM,CAAC,CAAC,CAAC;AAAA,IACxE,MAAM,WAAW,QAAQ,MAAM,WAAW,MAAM,CAAC,CAAC,CAAC;AAAA,IACnD,QAAQ,aAAa,QAAQ,MAAM,aAAa,MAAM,CAAC,CAAC,CAAC;AAAA,IACzD,eAAe,oBAAoB,QAAQ,MAAM,oBAAoB,MAAM,CAAC,CAAC,CAAC;AAAA,IAC9E,WAAW,iBAAE,OAAO,iBAAE,OAAO,GAAG,mBAAmB,EAAE,SAAS;AAAA,EAChE,CAAC;AACH;;;ACrJA,OAAO,QAAQ;AACf,OAAO,UAAU;AASjB,IAAM,sBAAsB;AAErB,IAAM,aAA0B;AAAA,EACrC;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,SAAS,CAAC,KAAK,aAAa;AAE1B,YAAM,UAAU,IAAI;AACpB,YAAM,SAAS,SAAS;AACxB,UAAI,UAAU,cAAc,UAAU,EAAE,YAAY,SAAS;AAC3D,eAAO,SAAS,OAAO;AACvB,eAAO,OAAO;AAAA,MAChB;AAGA,YAAM,cAAc,KAAK,KAAK,UAAU,UAAU;AAClD,YAAM,YAAY,KAAK,KAAK,UAAU,QAAQ;AAE9C,UAAI,CAAC,GAAG,WAAW,WAAW,EAAG;AAEjC,UAAI,GAAG,WAAW,SAAS,GAAG;AAE5B,cAAM,gBAAgB,CAAC,QAAgB,YAA0B;AAC/D,qBAAW,SAAS,GAAG,YAAY,QAAQ,EAAE,eAAe,KAAK,CAAC,GAAG;AACnE,kBAAM,UAAU,KAAK,KAAK,QAAQ,MAAM,IAAI;AAC5C,kBAAM,WAAW,KAAK,KAAK,SAAS,MAAM,IAAI;AAC9C,gBAAI,MAAM,YAAY,GAAG;AACvB,kBAAI,CAAC,GAAG,WAAW,QAAQ,EAAG,IAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AACxE,4BAAc,SAAS,QAAQ;AAAA,YACjC,WAAW,CAAC,GAAG,WAAW,QAAQ,GAAG;AACnC,iBAAG,WAAW,SAAS,QAAQ;AAAA,YACjC;AAAA,UACF;AAAA,QACF;AACA,sBAAc,aAAa,SAAS;AACpC,WAAG,OAAO,aAAa,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,MACzD,OAAO;AACL,WAAG,WAAW,aAAa,SAAS;AAAA,MACtC;AAGA,YAAM,aAAa,CAAC,QAAsB;AACxC,mBAAW,SAAS,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC,GAAG;AAChE,gBAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAI,MAAM,YAAY,GAAG;AAAE,uBAAW,QAAQ;AAAG;AAAA,UAAU;AAC3D,cAAI,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACjC,gBAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,8BAAoB,YAAY;AAChC,cAAI,oBAAoB,KAAK,OAAO,GAAG;AACrC,gCAAoB,YAAY;AAChC,eAAG,cAAc,UAAU,QAAQ,QAAQ,qBAAqB,aAAa,CAAC;AAAA,UAChF;AAAA,QACF;AAAA,MACF;AACA,iBAAW,SAAS;AAGpB,YAAM,YAAY,CAAC,QAAsB;AACvC,mBAAW,SAAS,GAAG,YAAY,KAAK,EAAE,eAAe,KAAK,CAAC,GAAG;AAChE,gBAAM,WAAW,KAAK,KAAK,KAAK,MAAM,IAAI;AAC1C,cAAI,MAAM,YAAY,GAAG;AAAE,sBAAU,QAAQ;AAAG;AA
AA,UAAU;AAC1D,cAAI,CAAC,MAAM,KAAK,SAAS,KAAK,EAAG;AACjC,gBAAM,UAAU,GAAG,aAAa,UAAU,OAAO;AACjD,cAAI,QAAQ,SAAS,WAAW,GAAG;AACjC,eAAG,cAAc,UAAU,QAAQ,QAAQ,eAAe,SAAS,CAAC;AAAA,UACtE;AAAA,QACF;AAAA,MACF;AACA,gBAAU,QAAQ;AAAA,IACpB;AAAA,EACF;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,SAAS,CAAC,QAAQ;AAChB,YAAM,SAAS,IAAI;AACnB,UAAI,CAAC,OAAQ;AAEb,YAAM,gBAAgB,OAAO;AAC7B,UAAI,OAAO,kBAAkB,WAAW;AACtC,eAAO,gBAAgB,EAAE,SAAS,eAAe,YAAY,KAAK;AAAA,MACpE;AAAA,IACF;AAAA,EACF;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ,KAA8B,WAAyB;AAC7D,YAAM,QAAS,IAAI,SAAS,CAAC;AAC7B,YAAM,SAAU,IAAI,UAAU,CAAC;AAC/B,YAAM,QAAU,MAAM,SAAS,CAAC;AAGhC,YAAM,wCAAwC;AAE9C,YAAM,wBAAwB,CAAC,UAAU,QAAQ,OAAO;AAGxD,UAAI,cAAc,SAAS,sBAAsB,OAAO;AACtD,cAAM,SAAS,MAAM,mBAAmB,KAAK,CAAC;AAC9C,eAAO,WAAW;AAAA,UAChB,SAAS,MAAM,YAAY;AAAA,UAC3B,iBAAiB,MAAM,oBAAoB;AAAA,QAC7C;AACA,cAAM,mBAAmB,IAAI;AAC7B,eAAO,MAAM;AACb,eAAO,MAAM;AAAA,MACf;AAGA,UAAI,iBAAiB,QAAQ;AAC3B,cAAM,SAAS,MAAM,cAAc,KAAK,CAAC;AACzC,eAAO,WAAW;AAAA,UAChB,SAAS,OAAO,eAAe;AAAA,QACjC;AACA,cAAM,cAAc,IAAI;AACxB,eAAO,OAAO;AAAA,MAChB;AAGA,UAAI,iBAAiB,UAAU,oBAAoB,QAAQ;AACzD,cAAM,SAAS,MAAM,cAAc,KAAK,CAAC;AACzC,cAAM,WAAoC;AAAA,UACxC,SAAS,OAAO,eAAe;AAAA,QACjC;AACA,YAAI,oBAAoB,QAAQ;AAC9B,gBAAM,UAAU,OAAO,OAAO,cAAc;AAC5C,mBAAS,QAAQ,sBAAsB,SAAS,OAA+C,IAC3F,CAAC,OAAO,IACR,CAAC,MAAM;AAAA,QACb;AACA,eAAO,WAAW;AAClB,cAAM,cAAc,IAAI;AACxB,eAAO,OAAO;AACd,eAAO,OAAO;AAAA,MAChB;AAGA,UAAI,OAAO,KAAK,KAAK,EAAE,SAAS,GAAG;AACjC,cAAM,QAAQ;AAAA,MAChB;AACA,UAAI,QAAQ;AACZ,UAAI,SAAS;AAAA,IACf;AAAA,EACF;AAAA,EACA;AAAA,IACE,SAAS;AAAA,IACT,MAAM;AAAA,IACN,QAAQ,KAA8B,WAAyB;AAG7D,YAAM,iBAAiB,CAAC,aAA4B;AAClD,YACE,YACA,OAAO,aAAa,YACnB,SAAqC,SAAS,SAC/C;AACA,UAAC,SAAqC,OAAO;AAAA,QAC/C;AAAA,MACF;AAEA,YAAM,QAAQ,IAAI;AAClB,UAAI,CAAC,MAAO;AAGZ,qBAAe,MAAM,QAAQ;AAG7B,YAAM,QAAQ,MAAM;AACpB,UAAI,OAAO;AACT,mBAAW,cAAc,OAAO,OAAO,KAAK,GAAG;AAC7C,yBAAe,WAAW,QAAQ;AAGlC,gBAAM,SAAS,WAAW;AAC1B,cAAI,QAAQ;AACV,uBAAW,eAAe,OAAO,OAAO,MAAM,GAAG;AAC/C,6BAAe,YAAY,QAAQ;AAAA,YACrC;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAAA,EACF;AACF;AAGO,IAAM,4BAA4B,WAAW,WAAW,SAAS,CAAC,GAAG,WAAW;AAMhF,SAAS,cACd,KACA,UACA,KACS;AACT,QAAM,iBAAkB,IAAI,kBAA6B;AACzD,MAAI,MAAM;AAEV,aAAW,aAAa,YAAY;AAClC,QAAI,UAAU,WAAW,eAAgB;AAEzC,cAAU,QAAQ,KAAK,QAAQ;AAC/B,QAAI,iBAAiB,UAAU;AAC/B,UAAM;AAAA,EACR;AAEA,MAAI,KAAK;AACP,UAAM,OAAO;AACb,UAAM,KAAM,IAAI,kBAA6B;AAC7C,UAAM,yBAAyB,IAAI,QAAQ,EAAE,EAAE;AAAA,EACjD;AAEA,SAAO;AACT;;;AF1OO,IAAM,kBAAkB;AAExB,SAAS,WAAW,UAA8B;AACvD,QAAM,aAAaC,MAAK,KAAK,UAAU,eAAe;AAEtD,MAAI,CAACC,IAAG,WAAW,UAAU,GAAG;AAC9B,UAAM,IAAI,MAAM,0BAA0B,QAAQ,EAAE;AAAA,EACtD;AAEA,QAAM,MAAMA,IAAG,aAAa,YAAY,OAAO;AAC/C,QAAM,SAAS,YAAAC,QAAK,MAAM,GAAG;AAG7B,MAAI,OAAO,YAAY,KAAM,OAAO,cAA0C,SAAS;AACrF,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAGA,MAAI,aAAa;AACjB,MAAI,OAAO,YAAY,GAAG;AAExB,UAAM,QAAQ,OAAO;AACrB,UAAM,kBAAkB,OAAO;AAC/B,QAAI,mBAAmB,CAAC,OAAO,WAAW;AAExC,UAAI,gBAAgB,aAAa,aAAa;AAC5C,wBAAgB,WAAW;AAAA,MAC7B;AACA,aAAO,YAAY;AAAA,IACrB;AAGA,UAAM,SAAS,OAAO;AACtB,QAAI,QAAQ;AACV,YAAM,EAAE,MAAM,UAAU,IAAI;AAC5B,aAAO,SAAS,EAAE,MAAM,QAAQ,MAAM,WAAW,aAAa,OAAO;AAAA,IACvE;AAGA,UAAM,UAAU,OAAO;AACvB,QAAI,SAAS;AACX,YAAM,EAAE,kBAAkB,gBAAgB,WAAW,qBAAqB,kBAAkB,IAAI;AAChG,aAAO,UAAU;AAAA,QACf;AAAA,QACA,WAAW,aAAa;AAAA,QACxB;AAAA,QACA;AAAA,MACF;AAAA,IACF;AAGA,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AACd,WAAO,OAAO;AAGd,WAAO,UAAU;AACjB,iBAAa;AAEb,YAAQ,OAAO,MAAM,kDAAkD;AAAA,EACzE;AAGA,QAAM,gBAAgB,cAAc,QAAQ,UAAU,CAAC,QAAQ;AAC7D,YAAQ,OAAO,MAAM,oBAAoB,GAAG;AAAA,CAAI;AAAA,EAClD,CAAC;AAGD,QAAM,SAAS,iBAAiB,MAAM,MAAM;AAG5C,QAAM,aAAa,cACd,kBACC,OAAO,kBAA4B,KAAK,6BACzC,OAAO,YAAY,OAAO;AAE/B,MAAI,YAAY;
AACd,UAAM,aAAa,KAAK,MAAM,KAAK,UAAU,MAAM,CAAC;AACpD,IAAAD,IAAG,cAAc,YAAY,YAAAC,QAAK,UAAU,UAAU,GAAG,OAAO;AAAA,EAClE;AAEA,SAAO;AACT;AAEO,SAAS,WAAW,UAAkB,QAA0B;AAErE,QAAM,YAAY,iBAAiB,MAAM,MAAM;AAE/C,QAAM,aAAaF,MAAK,KAAK,UAAU,eAAe;AACtD,EAAAC,IAAG,UAAU,UAAU,EAAE,WAAW,KAAK,CAAC;AAC1C,EAAAA,IAAG,cAAc,YAAY,YAAAC,QAAK,UAAU,SAAS,GAAG,OAAO;AACjE;AAEO,SAAS,aACd,UACA,IACY;AACZ,QAAM,UAAU,WAAW,QAAQ;AACnC,QAAM,UAAU,GAAG,OAAO;AAC1B,aAAW,UAAU,OAAO;AAC5B,SAAO;AACT;AAEO,SAAS,mBACd,UACA,QACY;AACZ,SAAO,aAAa,UAAU,CAAC,YAAY;AAAA,IACzC,GAAG;AAAA,IACH,QAAQ,EAAE,GAAG,OAAO,QAAQ,GAAG,OAAO;AAAA,EACxC,EAAE;AACJ;AAOO,SAAS,wBAAwB,QAAwC;AAC9E,MAAI,CAAC,OAAO,UAAW,QAAO;AAC9B,SAAO,IAAI;AAAA,IACT,OAAO,QAAQ,OAAO,SAAS,EAC5B,OAAO,CAAC,CAAC,EAAE,KAAK,MAAM,MAAM,OAAO,EACnC,IAAI,CAAC,CAAC,IAAI,MAAM,IAAI;AAAA,EACzB;AACF;AAEO,SAAS,iBACd,UACA,MACY;AACZ,SAAO,aAAa,UAAU,CAAC,YAAY;AAAA,IACzC,GAAG;AAAA,IACH,MAAM,EAAE,GAAG,OAAO,MAAM,GAAG,KAAK;AAAA,EAClC,EAAE;AACJ;","names":["fs","path","path","fs","YAML"]}