panopticon-cli 0.4.4 → 0.4.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +84 -2695
- package/dist/{agents-B5NRTVHK.js → agents-54LDKMHR.js} +8 -3
- package/dist/chunk-44EOY2ZL.js +58 -0
- package/dist/chunk-44EOY2ZL.js.map +1 -0
- package/dist/chunk-BWGFN44T.js +224 -0
- package/dist/chunk-BWGFN44T.js.map +1 -0
- package/dist/chunk-F7NQZD6H.js +49 -0
- package/dist/chunk-F7NQZD6H.js.map +1 -0
- package/dist/chunk-HCTJFIJJ.js +159 -0
- package/dist/chunk-HCTJFIJJ.js.map +1 -0
- package/dist/chunk-JM6V62LT.js +650 -0
- package/dist/chunk-JM6V62LT.js.map +1 -0
- package/dist/chunk-K45YD6A3.js +254 -0
- package/dist/chunk-K45YD6A3.js.map +1 -0
- package/dist/chunk-KGPRXDMX.js +137 -0
- package/dist/chunk-KGPRXDMX.js.map +1 -0
- package/dist/chunk-KQAEUOML.js +278 -0
- package/dist/chunk-KQAEUOML.js.map +1 -0
- package/dist/chunk-NYVQC3D7.js +90 -0
- package/dist/chunk-NYVQC3D7.js.map +1 -0
- package/dist/chunk-PUR532O7.js +1556 -0
- package/dist/chunk-PUR532O7.js.map +1 -0
- package/dist/chunk-VTDDVLCK.js +1977 -0
- package/dist/chunk-VTDDVLCK.js.map +1 -0
- package/dist/chunk-Z24TY3XN.js +916 -0
- package/dist/chunk-Z24TY3XN.js.map +1 -0
- package/dist/chunk-ZHC57RCV.js +44 -0
- package/dist/chunk-ZHC57RCV.js.map +1 -0
- package/dist/{chunk-ITI4IC5A.js → chunk-ZZ3477GY.js} +69 -100
- package/dist/chunk-ZZ3477GY.js.map +1 -0
- package/dist/cli/index.js +4664 -2912
- package/dist/cli/index.js.map +1 -1
- package/dist/dashboard/public/assets/index-CRqsEkmn.css +32 -0
- package/dist/dashboard/public/assets/index-DPSUbu4A.js +645 -0
- package/dist/dashboard/public/index.html +15 -3
- package/dist/dashboard/server.js +45663 -17860
- package/dist/dns-L3L2BB27.js +30 -0
- package/dist/dns-L3L2BB27.js.map +1 -0
- package/dist/index.d.ts +63 -3
- package/dist/index.js +42 -18
- package/dist/index.js.map +1 -1
- package/dist/projects-ESIB34QQ.js +43 -0
- package/dist/projects-ESIB34QQ.js.map +1 -0
- package/dist/remote-agents-Z3R2A5BN.js +25 -0
- package/dist/remote-agents-Z3R2A5BN.js.map +1 -0
- package/dist/remote-workspace-HI4VML6H.js +179 -0
- package/dist/remote-workspace-HI4VML6H.js.map +1 -0
- package/dist/specialist-context-SNCJ7O7G.js +256 -0
- package/dist/specialist-context-SNCJ7O7G.js.map +1 -0
- package/dist/specialist-logs-A7ODEK2T.js +43 -0
- package/dist/specialist-logs-A7ODEK2T.js.map +1 -0
- package/dist/specialists-C7XLNSXQ.js +121 -0
- package/dist/specialists-C7XLNSXQ.js.map +1 -0
- package/dist/traefik-WI3KSRGG.js +12 -0
- package/dist/traefik-WI3KSRGG.js.map +1 -0
- package/package.json +1 -1
- package/templates/traefik/docker-compose.yml +1 -1
- package/templates/traefik/dynamic/panopticon.yml.template +41 -0
- package/templates/traefik/traefik.yml +8 -0
- package/dist/chunk-7HHDVXBM.js +0 -349
- package/dist/chunk-7HHDVXBM.js.map +0 -1
- package/dist/chunk-H45CLB7E.js +0 -2044
- package/dist/chunk-H45CLB7E.js.map +0 -1
- package/dist/chunk-ITI4IC5A.js.map +0 -1
- package/dist/dashboard/public/assets/index-BDd8hGYb.css +0 -32
- package/dist/dashboard/public/assets/index-sFwLPko-.js +0 -556
- package/templates/traefik/dynamic/panopticon.yml +0 -51
- /package/dist/{agents-B5NRTVHK.js.map → agents-54LDKMHR.js.map} +0 -0
|
@@ -0,0 +1,1977 @@
|
|
|
1
|
+
import {
|
|
2
|
+
checkHook,
|
|
3
|
+
getModelId,
|
|
4
|
+
init_hooks,
|
|
5
|
+
init_tmux,
|
|
6
|
+
init_work_type_router,
|
|
7
|
+
popFromHook,
|
|
8
|
+
pushToHook,
|
|
9
|
+
sendKeys
|
|
10
|
+
} from "./chunk-PUR532O7.js";
|
|
11
|
+
import {
|
|
12
|
+
init_projects,
|
|
13
|
+
projects_exports
|
|
14
|
+
} from "./chunk-K45YD6A3.js";
|
|
15
|
+
import {
|
|
16
|
+
COSTS_DIR,
|
|
17
|
+
PANOPTICON_HOME,
|
|
18
|
+
init_paths
|
|
19
|
+
} from "./chunk-KGPRXDMX.js";
|
|
20
|
+
import {
|
|
21
|
+
__esm,
|
|
22
|
+
__export,
|
|
23
|
+
__toCommonJS,
|
|
24
|
+
init_esm_shims
|
|
25
|
+
} from "./chunk-ZHC57RCV.js";
|
|
26
|
+
|
|
27
|
+
// src/lib/cost.ts
|
|
28
|
+
import { join } from "path";
|
|
29
|
+
/**
 * Compute the USD cost of one usage record under a pricing entry.
 * Long-context surcharge: Sonnet 4 / 4.5 requests whose combined input
 * (fresh + cache read + cache write) exceeds 200k tokens pay 2x input
 * and 1.5x output rates. Result is rounded to 6 decimal places.
 */
function calculateCost(usage, pricing) {
  const cachedRead = usage.cacheReadTokens || 0;
  const cachedWrite = usage.cacheWriteTokens || 0;
  const combinedInput = usage.inputTokens + cachedRead + cachedWrite;

  const isSonnet = pricing.model === "claude-sonnet-4" || pricing.model === "claude-sonnet-4.5";
  const longContext = isSonnet && combinedInput > 2e5;
  const inScale = longContext ? 2 : 1;
  const outScale = longContext ? 1.5 : 1;

  // Accumulate in the same order as the token categories below so the
  // floating-point result is stable: input, output, cache read, cache write.
  let total = 0;
  total += usage.inputTokens / 1e3 * pricing.inputPer1k * inScale;
  total += usage.outputTokens / 1e3 * pricing.outputPer1k * outScale;
  if (usage.cacheReadTokens && pricing.cacheReadPer1k) {
    total += usage.cacheReadTokens / 1e3 * pricing.cacheReadPer1k;
  }
  if (usage.cacheWriteTokens) {
    // Cache writes are billed by TTL; default to the 5-minute rate.
    const ttl = usage.cacheTTL || "5m";
    const writeRate = ttl === "1h" ? pricing.cacheWrite1hPer1k : pricing.cacheWrite5mPer1k;
    if (writeRate) {
      total += usage.cacheWriteTokens / 1e3 * writeRate;
    }
  }
  return Math.round(total * 1e6) / 1e6;
}
|
|
52
|
+
/**
 * Look up a pricing entry for (provider, model).
 * Tries an exact model match first, then a prefix match (so dated ids like
 * "gpt-4o-2024-05-13" resolve to "gpt-4o"). Returns null when nothing matches.
 */
function getPricing(provider, model) {
  const exact = DEFAULT_PRICING.find(
    (entry) => entry.provider === provider && entry.model === model
  );
  if (exact) {
    return exact;
  }
  const byPrefix = DEFAULT_PRICING.find(
    (entry) => entry.provider === provider && model.startsWith(entry.model)
  );
  return byPrefix || null;
}
|
|
63
|
+
// Pricing table and budgets file path for src/lib/cost.ts.
// Both are assigned lazily by init_cost() (esbuild __esm lazy-module wrapper).
var DEFAULT_PRICING, BUDGETS_FILE;
var init_cost = __esm({
  "src/lib/cost.ts"() {
    "use strict";
    init_esm_shims();
    init_paths();
    // All prices are USD per 1k tokens. Cache-write prices differ by TTL
    // (5-minute vs 1-hour); entries without cache fields have no cache billing.
    DEFAULT_PRICING = [
      // Anthropic - 4.6 series
      { provider: "anthropic", model: "claude-opus-4.6", inputPer1k: 5e-3, outputPer1k: 0.025, cacheReadPer1k: 5e-4, cacheWrite5mPer1k: 625e-5, cacheWrite1hPer1k: 0.01, currency: "USD" },
      { provider: "anthropic", model: "claude-sonnet-4.5", inputPer1k: 3e-3, outputPer1k: 0.015, cacheReadPer1k: 3e-4, cacheWrite5mPer1k: 375e-5, cacheWrite1hPer1k: 6e-3, currency: "USD" },
      { provider: "anthropic", model: "claude-haiku-4.5", inputPer1k: 1e-3, outputPer1k: 5e-3, cacheReadPer1k: 1e-4, cacheWrite5mPer1k: 125e-5, cacheWrite1hPer1k: 2e-3, currency: "USD" },
      // Anthropic - 4.x series
      { provider: "anthropic", model: "claude-opus-4-1", inputPer1k: 0.015, outputPer1k: 0.075, cacheReadPer1k: 15e-4, cacheWrite5mPer1k: 0.01875, cacheWrite1hPer1k: 0.03, currency: "USD" },
      { provider: "anthropic", model: "claude-opus-4", inputPer1k: 0.015, outputPer1k: 0.075, cacheReadPer1k: 15e-4, cacheWrite5mPer1k: 0.01875, cacheWrite1hPer1k: 0.03, currency: "USD" },
      { provider: "anthropic", model: "claude-sonnet-4", inputPer1k: 3e-3, outputPer1k: 0.015, cacheReadPer1k: 3e-4, cacheWrite5mPer1k: 375e-5, cacheWrite1hPer1k: 6e-3, currency: "USD" },
      // Anthropic - Legacy
      { provider: "anthropic", model: "claude-haiku-3", inputPer1k: 25e-5, outputPer1k: 125e-5, cacheReadPer1k: 3e-5, cacheWrite5mPer1k: 3e-4, cacheWrite1hPer1k: 5e-4, currency: "USD" },
      // OpenAI
      { provider: "openai", model: "gpt-4-turbo", inputPer1k: 0.01, outputPer1k: 0.03, currency: "USD" },
      { provider: "openai", model: "gpt-4o", inputPer1k: 5e-3, outputPer1k: 0.015, currency: "USD" },
      { provider: "openai", model: "gpt-4o-mini", inputPer1k: 15e-5, outputPer1k: 6e-4, currency: "USD" },
      // Google
      { provider: "google", model: "gemini-1.5-pro", inputPer1k: 125e-5, outputPer1k: 5e-3, currency: "USD" },
      { provider: "google", model: "gemini-1.5-flash", inputPer1k: 75e-6, outputPer1k: 3e-4, currency: "USD" },
      // Moonshot AI (Kimi)
      { provider: "custom", model: "kimi-for-coding", inputPer1k: 6e-4, outputPer1k: 2e-3, cacheReadPer1k: 6e-5, cacheWrite5mPer1k: 75e-5, currency: "USD" },
      { provider: "custom", model: "kimi-k2.5", inputPer1k: 6e-4, outputPer1k: 2e-3, cacheReadPer1k: 6e-5, cacheWrite5mPer1k: 75e-5, currency: "USD" }
    ];
    // Budgets are persisted under the costs directory (see init_paths).
    BUDGETS_FILE = join(COSTS_DIR, "budgets.json");
  }
});
|
|
94
|
+
|
|
95
|
+
// src/lib/cost-parsers/jsonl-parser.ts
|
|
96
|
+
import { existsSync, readFileSync, readdirSync, statSync } from "fs";
|
|
97
|
+
import { join as join2, basename } from "path";
|
|
98
|
+
import { homedir } from "os";
|
|
99
|
+
/**
 * List all project directories under ~/.claude/projects.
 * Entries that cannot be stat'ed (broken links, races) are skipped silently.
 */
function getProjectDirs() {
  if (!existsSync(CLAUDE_PROJECTS_DIR)) {
    return [];
  }
  const dirs = [];
  for (const entry of readdirSync(CLAUDE_PROJECTS_DIR)) {
    const fullPath = join2(CLAUDE_PROJECTS_DIR, entry);
    try {
      if (statSync(fullPath).isDirectory()) {
        dirs.push(fullPath);
      }
    } catch {
      // Unreadable entry — ignore it.
    }
  }
  return dirs;
}
|
|
111
|
+
/**
 * List the .jsonl session transcripts in one project directory,
 * newest first by mtime. Returns [] when the directory does not exist.
 */
function getSessionFiles(projectDir) {
  if (!existsSync(projectDir)) {
    return [];
  }
  const transcripts = readdirSync(projectDir)
    .filter((entry) => entry.endsWith(".jsonl"))
    .map((entry) => join2(projectDir, entry));
  // Sort descending by modification time; a file that vanishes mid-sort
  // simply keeps its relative position (comparator returns 0).
  return transcripts.sort((first, second) => {
    try {
      return statSync(second).mtime.getTime() - statSync(first).mtime.getTime();
    } catch {
      return 0;
    }
  });
}
|
|
123
|
+
/**
 * Collect every session transcript across all project directories,
 * sorted newest-first by mtime.
 */
function getAllSessionFiles() {
  const collected = getProjectDirs().flatMap((projectDir) => getSessionFiles(projectDir));
  return collected.sort((first, second) => {
    try {
      return statSync(second).mtime.getTime() - statSync(first).mtime.getTime();
    } catch {
      // File disappeared between listing and stat — treat as equal.
      return 0;
    }
  });
}
|
|
136
|
+
/**
 * Map a raw model id (often dated, e.g. "claude-sonnet-4-5-20250929") to a
 * canonical {provider, model} pair used by the pricing table.
 * Claude ids are matched family-by-family (opus, sonnet, haiku); a later
 * family match overrides an earlier one, mirroring the original precedence.
 * Non-Claude ids pass through for gpt/gemini; anything else falls back to
 * claude-sonnet-4.
 */
function normalizeModelName(model) {
  if (model.includes("claude")) {
    // Each family is a first-match-wins list of [substrings, canonical id].
    const families = [
      [
        [["opus-4-6", "opus-4.6"], "claude-opus-4.6"],
        [["opus-4-1", "opus-4.1"], "claude-opus-4-1"],
        [["opus-4", "opus"], "claude-opus-4"]
      ],
      [
        [["sonnet-4-5", "sonnet-4.5"], "claude-sonnet-4.5"],
        [["sonnet-4", "sonnet"], "claude-sonnet-4"]
      ],
      [
        [["haiku-4-5", "haiku-4.5"], "claude-haiku-4.5"],
        [["haiku-3"], "claude-haiku-3"],
        [["haiku"], "claude-haiku-4.5"]
      ]
    ];
    let canonical = model;
    for (const rules of families) {
      for (const [needles, target] of rules) {
        if (needles.some((needle) => model.includes(needle))) {
          canonical = target;
          break; // first match within a family wins
        }
      }
    }
    return { provider: "anthropic", model: canonical };
  }
  if (model.includes("gpt")) {
    return { provider: "openai", model };
  }
  if (model.includes("gemini")) {
    return { provider: "google", model };
  }
  // Unknown vendor — assume the default Anthropic model for pricing purposes.
  return { provider: "anthropic", model: "claude-sonnet-4" };
}
|
|
168
|
+
// Parse one Claude Code .jsonl session transcript into an aggregate cost record.
// Returns null when the file is missing or contains no token usage at all.
// Usage keys (input_tokens, cache_read_input_tokens, ...) follow the
// Anthropic message format as read below.
function parseClaudeSession(sessionFile) {
  if (!existsSync(sessionFile)) {
    return null;
  }
  const content = readFileSync(sessionFile, "utf-8");
  // One JSON message per non-blank line.
  const lines = content.split("\n").filter((line) => line.trim());
  let sessionId = "";
  let startTime = "";
  let endTime = "";
  // First model id seen in the transcript; used for legacy whole-session pricing.
  let primaryModel = "";
  let messageCount = 0;
  const totalUsage = {
    inputTokens: 0,
    outputTokens: 0,
    cacheReadTokens: 0,
    cacheWriteTokens: 0
  };
  // Per-raw-model-id accumulation for the accurate (v2) costing path.
  const modelBreakdown = {};
  let totalCostV2 = 0;
  for (const line of lines) {
    try {
      const msg = JSON.parse(line);
      if (msg.sessionId && !sessionId) {
        sessionId = msg.sessionId;
      }
      if (msg.timestamp) {
        // ISO-8601 timestamps compare correctly as strings.
        if (!startTime || msg.timestamp < startTime) {
          startTime = msg.timestamp;
        }
        if (!endTime || msg.timestamp > endTime) {
          endTime = msg.timestamp;
        }
      }
      // Usage/model may live on the nested message or at the top level.
      const usage = msg.message?.usage || msg.usage;
      const modelId = msg.message?.model || msg.model;
      if (usage) {
        totalUsage.inputTokens += usage.input_tokens || 0;
        totalUsage.outputTokens += usage.output_tokens || 0;
        totalUsage.cacheReadTokens = (totalUsage.cacheReadTokens || 0) + (usage.cache_read_input_tokens || 0);
        totalUsage.cacheWriteTokens = (totalUsage.cacheWriteTokens || 0) + (usage.cache_creation_input_tokens || 0);
        messageCount++;
        if (modelId) {
          // Price each message at its own model's rate (v2 costing).
          const { provider: provider2, model: normalizedModel } = normalizeModelName(modelId);
          const pricing2 = getPricing(provider2, normalizedModel);
          if (pricing2) {
            const msgUsage = {
              inputTokens: usage.input_tokens || 0,
              outputTokens: usage.output_tokens || 0,
              cacheReadTokens: usage.cache_read_input_tokens || 0,
              cacheWriteTokens: usage.cache_creation_input_tokens || 0
            };
            const msgCost = calculateCost(msgUsage, pricing2);
            totalCostV2 += msgCost;
            if (!modelBreakdown[modelId]) {
              modelBreakdown[modelId] = {
                cost: 0,
                inputTokens: 0,
                outputTokens: 0,
                messageCount: 0
              };
            }
            modelBreakdown[modelId].cost += msgCost;
            modelBreakdown[modelId].inputTokens += msgUsage.inputTokens;
            modelBreakdown[modelId].outputTokens += msgUsage.outputTokens;
            modelBreakdown[modelId].messageCount++;
          }
        }
      }
      if (modelId && !primaryModel) {
        primaryModel = modelId;
      }
    } catch {
      // Malformed JSONL line — skip it; partial transcripts are common.
    }
  }
  // A transcript with zero tokens carries no billable activity.
  if (totalUsage.inputTokens === 0 && totalUsage.outputTokens === 0) {
    return null;
  }
  if (!sessionId) {
    // Fall back to the file name (without .jsonl) as the session id.
    sessionId = basename(sessionFile, ".jsonl");
  }
  if (!primaryModel) {
    primaryModel = "claude-sonnet-4";
  }
  const normalizedModels = Object.keys(modelBreakdown).map((id) => normalizeModelName(id).model);
  // Multiple models render as "a → b"; otherwise the single/primary model name.
  const modelDisplay = normalizedModels.length > 0 ? normalizedModels.length > 1 ? normalizedModels.join(" \u2192 ") : normalizedModels[0] : normalizeModelName(primaryModel).model;
  // Legacy costing: the whole session priced at the primary model's rate.
  const { provider, model } = normalizeModelName(primaryModel);
  const pricing = getPricing(provider, model);
  const cost = pricing ? calculateCost(totalUsage, pricing) : 0;
  return {
    sessionId,
    sessionFile,
    startTime: startTime || (/* @__PURE__ */ new Date()).toISOString(),
    endTime: endTime || (/* @__PURE__ */ new Date()).toISOString(),
    model: modelDisplay,
    usage: totalUsage,
    cost,
    // DEPRECATED: First-model pricing
    cost_v2: totalCostV2 > 0 ? totalCostV2 : void 0,
    // NEW: Accurate per-message pricing
    messageCount,
    modelBreakdown: Object.keys(modelBreakdown).length > 0 ? modelBreakdown : void 0
    // NEW: Cost breakdown by model
  };
}
|
|
272
|
+
// Root of Claude Code's per-project transcript storage; assigned lazily by
// init_jsonl_parser() (esbuild __esm lazy-module wrapper).
var CLAUDE_PROJECTS_DIR;
var init_jsonl_parser = __esm({
  "src/lib/cost-parsers/jsonl-parser.ts"() {
    "use strict";
    init_esm_shims();
    init_cost();
    CLAUDE_PROJECTS_DIR = join2(homedir(), ".claude", "projects");
  }
});
|
|
281
|
+
|
|
282
|
+
// src/lib/cloister/specialist-handoff-logger.ts
|
|
283
|
+
import { existsSync as existsSync2, mkdirSync, appendFileSync, readFileSync as readFileSync2 } from "fs";
|
|
284
|
+
import { join as join3 } from "path";
|
|
285
|
+
/** Create $PANOPTICON_HOME/logs if it is not already present. */
function ensureLogDir() {
  const logsPath = join3(PANOPTICON_HOME, "logs");
  if (existsSync2(logsPath)) {
    return;
  }
  mkdirSync(logsPath, { recursive: true });
}
|
|
291
|
+
/** Append one handoff event as a JSONL line to the handoff log. */
function logSpecialistHandoff(event) {
  ensureLogDir();
  appendFileSync(SPECIALIST_HANDOFF_LOG_FILE, `${JSON.stringify(event)}\n`, "utf-8");
}
|
|
296
|
+
/**
 * Build a new handoff record in the "queued" state.
 * The id combines target specialist, issue, and a millisecond timestamp.
 */
function createSpecialistHandoff(fromSpecialist, toSpecialist, issueId, priority, context) {
  const now = new Date();
  return {
    id: [toSpecialist, issueId, now.getTime()].join("-"),
    timestamp: now.toISOString(),
    issueId,
    fromSpecialist,
    toSpecialist,
    status: "queued",
    priority,
    context
  };
}
|
|
308
|
+
// Path of the specialist-handoff JSONL log; assigned lazily by
// init_specialist_handoff_logger() (esbuild __esm lazy-module wrapper).
var SPECIALIST_HANDOFF_LOG_FILE;
var init_specialist_handoff_logger = __esm({
  "src/lib/cloister/specialist-handoff-logger.ts"() {
    "use strict";
    init_esm_shims();
    init_paths();
    SPECIALIST_HANDOFF_LOG_FILE = join3(PANOPTICON_HOME, "logs", "specialist-handoffs.jsonl");
  }
});
|
|
317
|
+
|
|
318
|
+
// src/lib/cloister/specialists.ts
|
|
319
|
+
// Bundler-generated export map for src/lib/cloister/specialists.ts.
// __export wires each named export to a lazy getter so the module body
// (specialists_exports) can be required as CommonJS. Do not edit by hand.
var specialists_exports = {};
__export(specialists_exports, {
  checkSpecialistQueue: () => checkSpecialistQueue,
  clearSessionId: () => clearSessionId,
  completeSpecialistTask: () => completeSpecialistTask,
  countContextTokens: () => countContextTokens,
  disableSpecialist: () => disableSpecialist,
  enableSpecialist: () => enableSpecialist,
  ensureProjectSpecialistDir: () => ensureProjectSpecialistDir,
  exitGracePeriod: () => exitGracePeriod,
  findSessionFile: () => findSessionFile,
  getAllProjectSpecialistStatuses: () => getAllProjectSpecialistStatuses,
  getAllSpecialistStatus: () => getAllSpecialistStatus,
  getAllSpecialists: () => getAllSpecialists,
  getEnabledSpecialists: () => getEnabledSpecialists,
  getFeedbackStats: () => getFeedbackStats,
  getGracePeriodState: () => getGracePeriodState,
  getNextSpecialistTask: () => getNextSpecialistTask,
  getPendingFeedback: () => getPendingFeedback,
  getProjectSpecialistDir: () => getProjectSpecialistDir,
  getProjectSpecialistMetadata: () => getProjectSpecialistMetadata,
  getSessionFilePath: () => getSessionFilePath,
  getSessionId: () => getSessionId,
  getSpecialistMetadata: () => getSpecialistMetadata,
  getSpecialistState: () => getSpecialistState,
  getSpecialistStatus: () => getSpecialistStatus,
  getTmuxSessionName: () => getTmuxSessionName,
  incrementProjectRunCount: () => incrementProjectRunCount,
  initSpecialistsDirectory: () => initSpecialistsDirectory,
  initializeEnabledSpecialists: () => initializeEnabledSpecialists,
  initializeSpecialist: () => initializeSpecialist,
  isEnabled: () => isEnabled,
  isInitialized: () => isInitialized,
  isRunning: () => isRunning,
  listProjectsWithSpecialists: () => listProjectsWithSpecialists,
  listSessionFiles: () => listSessionFiles,
  listSpecialistsForProject: () => listSpecialistsForProject,
  loadRegistry: () => loadRegistry,
  pauseGracePeriod: () => pauseGracePeriod,
  recordWake: () => recordWake,
  resumeGracePeriod: () => resumeGracePeriod,
  saveRegistry: () => saveRegistry,
  sendFeedbackToAgent: () => sendFeedbackToAgent,
  setCurrentRun: () => setCurrentRun,
  setSessionId: () => setSessionId,
  signalSpecialistCompletion: () => signalSpecialistCompletion,
  spawnEphemeralSpecialist: () => spawnEphemeralSpecialist,
  startGracePeriod: () => startGracePeriod,
  submitToSpecialistQueue: () => submitToSpecialistQueue,
  terminateSpecialist: () => terminateSpecialist,
  updateContextTokens: () => updateContextTokens,
  updateProjectSpecialistMetadata: () => updateProjectSpecialistMetadata,
  updateRunStatus: () => updateRunStatus,
  updateSpecialistMetadata: () => updateSpecialistMetadata,
  wakeSpecialist: () => wakeSpecialist,
  wakeSpecialistOrQueue: () => wakeSpecialistOrQueue,
  wakeSpecialistWithTask: () => wakeSpecialistWithTask
});
|
|
377
|
+
import { readFileSync as readFileSync3, writeFileSync, existsSync as existsSync3, mkdirSync as mkdirSync2, readdirSync as readdirSync2, unlinkSync, appendFileSync as appendFileSync2 } from "fs";
|
|
378
|
+
import { join as join4, basename as basename2 } from "path";
|
|
379
|
+
import { homedir as homedir2 } from "os";
|
|
380
|
+
import { exec } from "child_process";
|
|
381
|
+
import { promisify } from "util";
|
|
382
|
+
/**
 * Ensure the specialists directory and registry file exist.
 * Seeds a fresh v2 registry on first run; otherwise runs the v1→v2
 * migration check on the existing file.
 */
function initSpecialistsDirectory() {
  if (!existsSync3(SPECIALISTS_DIR)) {
    mkdirSync2(SPECIALISTS_DIR, { recursive: true });
  }
  if (existsSync3(REGISTRY_FILE)) {
    migrateRegistryIfNeeded();
    return;
  }
  // First run: write the default per-project registry.
  const seeded = {
    version: "2.0",
    // Updated for per-project structure
    defaults: {
      contextRuns: 5,
      digestModel: null,
      retention: {
        maxDays: 30,
        maxRuns: 50
      }
    },
    projects: {},
    // Keep legacy specialists for backward compatibility during transition
    specialists: DEFAULT_SPECIALISTS,
    lastUpdated: new Date().toISOString()
  };
  saveRegistry(seeded);
}
|
|
408
|
+
/**
 * One-way migration of a v1 registry file to the v2 per-project layout.
 * A registry that already has version "2.0" or a projects map is left alone.
 * Failures are logged, never thrown — the caller proceeds with whatever is on disk.
 */
function migrateRegistryIfNeeded() {
  try {
    const onDisk = JSON.parse(readFileSync3(REGISTRY_FILE, "utf-8"));
    if (onDisk.version === "2.0" || onDisk.projects) {
      return; // already migrated
    }
    console.log("[specialists] Migrating registry to per-project structure...");
    saveRegistry({
      version: "2.0",
      defaults: {
        contextRuns: 5,
        digestModel: null,
        retention: { maxDays: 30, maxRuns: 50 }
      },
      projects: {},
      // Carry the legacy flat specialist list over for backward compat.
      specialists: onDisk.specialists,
      lastUpdated: new Date().toISOString()
    });
    console.log("[specialists] Registry migration complete");
  } catch (error) {
    console.error("[specialists] Failed to migrate registry:", error);
  }
}
|
|
437
|
+
/**
 * Read and parse the specialist registry from disk, initializing the
 * directory first. On any read/parse error, logs and returns an in-memory
 * default registry (not persisted).
 */
function loadRegistry() {
  initSpecialistsDirectory();
  try {
    return JSON.parse(readFileSync3(REGISTRY_FILE, "utf-8"));
  } catch (error) {
    console.error("Failed to load specialist registry:", error);
    return {
      version: "1.0",
      defaults: {
        contextRuns: 5,
        digestModel: null,
        retention: { maxDays: 30, maxRuns: 50 }
      },
      projects: {},
      specialists: DEFAULT_SPECIALISTS,
      lastUpdated: new Date().toISOString()
    };
  }
}
|
|
457
|
+
/**
 * Persist the registry to disk, stamping lastUpdated.
 * Write failures are logged and rethrown so callers can react.
 */
function saveRegistry(registry) {
  if (!existsSync3(SPECIALISTS_DIR)) {
    mkdirSync2(SPECIALISTS_DIR, { recursive: true });
  }
  registry.lastUpdated = new Date().toISOString();
  try {
    writeFileSync(REGISTRY_FILE, JSON.stringify(registry, null, 2), "utf-8");
  } catch (error) {
    console.error("Failed to save specialist registry:", error);
    throw error;
  }
}
|
|
470
|
+
/** Absolute path of the .session file that stores a specialist's session id. */
function getSessionFilePath(name) {
  return join4(SPECIALISTS_DIR, name + ".session");
}
|
|
473
|
+
/**
 * Read a specialist's persisted session id.
 * Returns null when the file is absent or unreadable (the read error is logged).
 */
function getSessionId(name) {
  const file = getSessionFilePath(name);
  if (!existsSync3(file)) {
    return null;
  }
  try {
    return readFileSync3(file, "utf-8").trim();
  } catch (error) {
    console.error(`Failed to read session file for ${name}:`, error);
    return null;
  }
}
|
|
485
|
+
/**
 * Persist a specialist's session id (trimmed) to its .session file.
 * Write failures are logged and rethrown.
 */
function setSessionId(name, sessionId) {
  initSpecialistsDirectory();
  const file = getSessionFilePath(name);
  try {
    writeFileSync(file, sessionId.trim(), "utf-8");
  } catch (error) {
    console.error(`Failed to write session file for ${name}:`, error);
    throw error;
  }
}
|
|
495
|
+
/**
 * Delete a specialist's .session file.
 * Returns false when there was nothing to delete, true on success;
 * deletion errors are logged and rethrown.
 */
function clearSessionId(name) {
  const file = getSessionFilePath(name);
  if (!existsSync3(file)) {
    return false;
  }
  try {
    unlinkSync(file);
  } catch (error) {
    console.error(`Failed to delete session file for ${name}:`, error);
    throw error;
  }
  return true;
}
|
|
508
|
+
/** Find a specialist's registry entry by name, or null when absent. */
function getSpecialistMetadata(name) {
  const roster = loadRegistry().specialists ?? [];
  return roster.find((entry) => entry.name === name) || null;
}
|
|
512
|
+
/**
 * Shallow-merge updates into a specialist's registry entry and persist.
 * The name field is always re-pinned so an update can never rename an entry.
 * Throws when the specialist is not registered.
 */
function updateSpecialistMetadata(name, updates) {
  const registry = loadRegistry();
  const roster = registry.specialists ?? [];
  const idx = roster.findIndex((entry) => entry.name === name);
  if (idx === -1) {
    throw new Error(`Specialist ${name} not found in registry`);
  }
  roster[idx] = { ...roster[idx], ...updates, name };
  registry.specialists = roster;
  saveRegistry(registry);
}
|
|
528
|
+
/** All registered specialists (empty list when the registry has none). */
function getAllSpecialists() {
  const { specialists } = loadRegistry();
  return specialists ?? [];
}
|
|
532
|
+
/** True when a session id file exists (and is readable) for this specialist. */
function isInitialized(name) {
  const sessionId = getSessionId(name);
  return sessionId !== null;
}
|
|
535
|
+
/** Coarse lifecycle state derived purely from session-file presence. */
function getSpecialistState(name) {
  if (isInitialized(name)) {
    return "sleeping";
  }
  return "uninitialized";
}
|
|
538
|
+
/**
 * Derive the tmux session name for a specialist:
 * "specialist-<project>-<name>" when scoped to a project, else "specialist-<name>".
 */
function getTmuxSessionName(name, projectKey) {
  const parts = projectKey ? ["specialist", projectKey, name] : ["specialist", name];
  return parts.join("-");
}
|
|
544
|
+
/**
 * Stamp a specialist's lastWake time, optionally recording the session id
 * that woke it (omitted when no id is supplied).
 */
function recordWake(name, sessionId) {
  const stamp = new Date().toISOString();
  updateSpecialistMetadata(
    name,
    sessionId ? { lastWake: stamp, sessionId } : { lastWake: stamp }
  );
}
|
|
553
|
+
// Spawn a one-shot specialist agent for a task inside a detached tmux session.
// Flow: load context digest → create run log → build the task prompt → write a
// launcher script under ~/.panopticon/agents/<session>/ → start tmux → record
// runtime state. Returns { success, runId?, tmuxSession?, message, error? };
// failures are caught, the current run is cleared, and no exception escapes.
async function spawnEphemeralSpecialist(projectKey, specialistType, task) {
  ensureProjectSpecialistDir(projectKey, specialistType);
  // Lazy chunk imports keep the CLI startup path light.
  const { loadContextDigest } = await import("./specialist-context-SNCJ7O7G.js");
  const contextDigest = loadContextDigest(projectKey, specialistType);
  const { createRunLog: createRunLog2 } = await import("./specialist-logs-A7ODEK2T.js");
  const { runId, filePath: logFilePath } = createRunLog2(
    projectKey,
    specialistType,
    task.issueId,
    contextDigest || void 0
  );
  setCurrentRun(projectKey, specialistType, runId);
  incrementProjectRunCount(projectKey, specialistType);
  const taskPrompt = await buildTaskPrompt(projectKey, specialistType, task, contextDigest);
  const tmuxSession = getTmuxSessionName(specialistType, projectKey);
  const cwd = process.env.HOME || "/home/exedev";
  try {
    // Resolve the model via the work-type router; fall back to the default.
    let model = "claude-sonnet-4-5";
    try {
      const workTypeId = `specialist-${specialistType}`;
      model = getModelId(workTypeId);
    } catch (error) {
      console.warn(`Warning: Could not resolve model for ${specialistType}, using default`);
    }
    // merge-agent additionally bypasses the permission prompt mode.
    const permissionFlags = specialistType === "merge-agent" ? "--dangerously-skip-permissions --permission-mode bypassPermissions" : "--dangerously-skip-permissions";
    const agentDir = join4(homedir2(), ".panopticon", "agents", tmuxSession);
    await execAsync(`mkdir -p "${agentDir}"`, { encoding: "utf-8" });
    // The prompt is passed via a file to avoid shell-quoting issues.
    const promptFile = join4(agentDir, "task-prompt.md");
    writeFileSync(promptFile, taskPrompt);
    const launcherScript = join4(agentDir, "launcher.sh");
    // mode 493 === 0o755 (executable launcher).
    writeFileSync(launcherScript, `#!/bin/bash
cd "${cwd}"
prompt=$(cat "${promptFile}")

# Run Claude and tee output to log file
claude ${permissionFlags} --model ${model} "$prompt" 2>&1 | tee -a "${logFilePath}"

# Signal completion
echo ""
echo "## Specialist completed task"
`, { mode: 493 });
    await execAsync(
      `tmux new-session -d -s "${tmuxSession}" "bash '${launcherScript}'"`,
      { encoding: "utf-8" }
    );
    const { saveAgentRuntimeState } = await import("./agents-54LDKMHR.js");
    saveAgentRuntimeState(tmuxSession, {
      state: "active",
      lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
      currentIssue: task.issueId
    });
    console.log(`[specialist] Spawned ephemeral ${specialistType} for ${projectKey}/${task.issueId} (run: ${runId})`);
    return {
      success: true,
      runId,
      tmuxSession,
      message: `Spawned specialist ${specialistType} for ${task.issueId}`
    };
  } catch (error) {
    console.error(`[specialist] Failed to spawn ${specialistType}:`, error);
    // Roll back the in-progress run marker so the slot is reusable.
    setCurrentRun(projectKey, specialistType, null);
    return {
      success: false,
      message: `Failed to spawn specialist: ${error.message}`,
      error: error.message
    };
  }
}
|
|
621
|
+
// Build the markdown task prompt handed to an ephemeral specialist agent.
// Sections are appended in order: header, optional context digest from prior
// runs, optional per-project prompt override, task metadata, then the
// specialist-type-specific instruction list.
// NOTE(review): the template literals below contain literal newlines that are
// part of the emitted prompt — do not reformat them.
async function buildTaskPrompt(projectKey, specialistType, task, contextDigest) {
  // Per-project prompt override lives in the projects module (lazy-loaded).
  const { getSpecialistPromptOverride } = await import("./projects-ESIB34QQ.js");
  const customPrompt = getSpecialistPromptOverride(projectKey, specialistType);
  let prompt = `# ${specialistType} Task - ${task.issueId}

`;
  if (contextDigest) {
    prompt += `## Context from Recent Runs

${contextDigest}

`;
  }
  if (customPrompt) {
    prompt += `## Project-Specific Guidelines

${customPrompt}

`;
  }
  prompt += `## Current Task

`;
  prompt += `Issue: ${task.issueId}
`;
  // Optional task metadata lines — only emitted when present on the task.
  if (task.branch) prompt += `Branch: ${task.branch}
`;
  if (task.workspace) prompt += `Workspace: ${task.workspace}
`;
  if (task.prUrl) prompt += `PR URL: ${task.prUrl}
`;
  prompt += `
`;
  // Specialist-specific instructions; unknown types get no extra section.
  switch (specialistType) {
    case "review-agent":
      prompt += `Your task:
1. Review all changes in the branch
2. Check for code quality issues, security concerns, and best practices
3. Verify test FILES exist for new code (DO NOT run tests)
4. Provide specific, actionable feedback
5. Update status via API when done

IMPORTANT: DO NOT run tests. You are the REVIEW agent.

Update status via API:
- If issues found: POST to /api/workspaces/${task.issueId}/review-status with {"reviewStatus":"blocked","reviewNotes":"..."}
- If review passes: POST with {"reviewStatus":"passed"} then queue test-agent`;
      break;
    case "test-agent":
      prompt += `Your task:
1. Run the full test suite
2. Analyze any failures in detail
3. Identify root causes
4. Update status via API when done

Update status via API:
- If tests pass: POST to /api/workspaces/${task.issueId}/review-status with {"testStatus":"passed"}
- If tests fail: POST with {"testStatus":"failed","testNotes":"..."}`;
      break;
    case "merge-agent":
      prompt += `Your task:
1. Fetch the latest main branch
2. Attempt to merge ${task.branch} into main
3. Resolve conflicts intelligently if needed
4. Run tests to verify merge is clean
5. Complete merge if tests pass`;
      break;
  }
  prompt += `

When you complete your task, report your findings and status.`;
  return prompt;
}
|
|
694
|
+
// Start a grace period for a specialist: after `duration` ms the specialist is
// terminated unless the grace period was paused or exited first.
//
// Fixes vs. original:
// - Stale-timer guard: the original timer only checked `active && !paused`, so
//   a timer scheduled for an OLD grace period would terminate a NEWER or
//   resumed one prematurely. We now capture `startedAt` at scheduling time and
//   only fire if the state still belongs to this scheduling.
// - `terminateSpecialist` is async; its rejection was previously unhandled
//   (floating promise). We now attach a .catch.
function startGracePeriod(projectKey, specialistType, duration = 6e4) {
  const key = `${projectKey}-${specialistType}`;
  const startedAt = (/* @__PURE__ */ new Date()).toISOString();
  gracePeriodStates.set(key, {
    active: true,
    startedAt,
    duration,
    paused: false
  });
  console.log(`[specialist] Grace period started for ${projectKey}/${specialistType} (${duration}ms)`);
  setTimeout(() => {
    const state = gracePeriodStates.get(key);
    // Only fire for the grace period we scheduled (startedAt acts as a
    // generation token; pause/resume/exit all invalidate it).
    if (state && state.active && !state.paused && state.startedAt === startedAt) {
      terminateSpecialist(projectKey, specialistType).catch((err) => {
        console.error(`[specialist] Grace-period termination failed for ${projectKey}/${specialistType}:`, err);
      });
    }
  }, duration);
}
|
|
710
|
+
// Pause an active grace period, recording how much time is left so a later
// resume can continue the countdown. Returns false when there is nothing to
// pause (no state, or the grace period is not active).
function pauseGracePeriod(projectKey, specialistType) {
  const key = `${projectKey}-${specialistType}`;
  const state = gracePeriodStates.get(key);
  if (!state?.active) {
    return false;
  }
  // Time already consumed since the grace period (re)started.
  const elapsedMs = Date.now() - new Date(state.startedAt).getTime();
  state.paused = true;
  state.pausedAt = (/* @__PURE__ */ new Date()).toISOString();
  state.remainingTime = state.duration - elapsedMs;
  gracePeriodStates.set(key, state);
  console.log(`[specialist] Grace period paused for ${projectKey}/${specialistType}`);
  return true;
}
|
|
725
|
+
// Resume a paused grace period, rescheduling termination for the remaining
// time that was captured by pauseGracePeriod. Returns false when there is no
// active, paused grace period to resume.
//
// Fixes vs. original:
// - Stale-timer guard: the new timer captures the refreshed `startedAt` and
//   only fires if the state still belongs to this resume (otherwise a timer
//   from an earlier start/resume could terminate a newer grace period).
// - `terminateSpecialist` is async; its rejection was previously unhandled.
function resumeGracePeriod(projectKey, specialistType) {
  const key = `${projectKey}-${specialistType}`;
  const state = gracePeriodStates.get(key);
  if (!state || !state.active || !state.paused) {
    return false;
  }
  const resumedAt = (/* @__PURE__ */ new Date()).toISOString();
  state.paused = false;
  // startedAt doubles as a generation token for the timer below.
  state.startedAt = resumedAt;
  state.pausedAt = void 0;
  gracePeriodStates.set(key, state);
  console.log(`[specialist] Grace period resumed for ${projectKey}/${specialistType}`);
  // Clamp to 0 so a stale/negative remainder fires immediately rather than
  // being passed as a negative delay.
  const remaining = Math.max(0, state.remainingTime || 0);
  setTimeout(() => {
    const currentState = gracePeriodStates.get(key);
    if (currentState && currentState.active && !currentState.paused && currentState.startedAt === resumedAt) {
      terminateSpecialist(projectKey, specialistType).catch((err) => {
        console.error(`[specialist] Grace-period termination failed for ${projectKey}/${specialistType}:`, err);
      });
    }
  }, remaining);
  return true;
}
|
|
744
|
+
// Immediately end the grace period and terminate the specialist.
// Fix vs. original: terminateSpecialist is async and its promise was dropped
// (floating promise, unobserved rejection). We now return it so callers can
// await or handle failures; callers that ignore the return value behave as
// before.
function exitGracePeriod(projectKey, specialistType) {
  const key = `${projectKey}-${specialistType}`;
  gracePeriodStates.delete(key);
  return terminateSpecialist(projectKey, specialistType);
}
|
|
749
|
+
// Look up the grace-period state for a project/specialist pair; null when no
// grace period has been started (or it was already exited).
function getGracePeriodState(projectKey, specialistType) {
  const state = gracePeriodStates.get(`${projectKey}-${specialistType}`);
  return state ?? null;
}
|
|
753
|
+
// Mark a specialist's run as finished: persist the run status, finalize the
// run log if a run is in flight, then start the 60-second grace period after
// which the specialist's tmux session is torn down.
// NOTE(review): uses the bundler's synchronous CJS interop
// (init_specialist_logs + __toCommonJS) instead of a dynamic import —
// presumably because this function must stay synchronous; confirm before
// changing the interop form.
function signalSpecialistCompletion(projectKey, specialistType, result) {
  const metadata = getProjectSpecialistMetadata(projectKey, specialistType);
  // Record the outcome on the per-project registry entry.
  updateRunStatus(projectKey, specialistType, result.status);
  if (metadata.currentRun) {
    const { finalizeRunLog: finalizeRunLog2 } = (init_specialist_logs(), __toCommonJS(specialist_logs_exports));
    try {
      finalizeRunLog2(projectKey, specialistType, metadata.currentRun, {
        status: result.status,
        notes: result.notes
      });
    } catch (error) {
      // Log finalization is best-effort; completion proceeds regardless.
      console.error(`[specialist] Failed to finalize log:`, error);
    }
  }
  // 6e4 ms = 60 seconds before terminateSpecialist fires (unless paused/exited).
  startGracePeriod(projectKey, specialistType, 6e4);
  console.log(`[specialist] ${specialistType} completed for ${projectKey} (status: ${result.status})`);
}
|
|
770
|
+
// Tear down a specialist: kill its tmux session, finalize any in-flight run
// log, clear grace-period state, mark the agent suspended, then kick off
// digest generation and log cleanup. All steps are best-effort and ordered so
// that bookkeeping still runs even if the tmux kill fails.
async function terminateSpecialist(projectKey, specialistType) {
  const tmuxSession = getTmuxSessionName(specialistType, projectKey);
  const metadata = getProjectSpecialistMetadata(projectKey, specialistType);
  try {
    await execAsync(`tmux kill-session -t "${tmuxSession}"`);
    console.log(`[specialist] Terminated ${projectKey}/${specialistType}`);
  } catch (error) {
    // Session may already be gone; continue with cleanup either way.
    console.error(`[specialist] Failed to kill tmux session ${tmuxSession}:`, error);
  }
  if (metadata.currentRun) {
    const { finalizeRunLog: finalizeRunLog2 } = await import("./specialist-logs-A7ODEK2T.js");
    try {
      finalizeRunLog2(projectKey, specialistType, metadata.currentRun, {
        // Fall back to "incomplete" when no status was ever recorded.
        status: metadata.lastRunStatus || "incomplete",
        notes: "Specialist terminated"
      });
    } catch (error) {
      console.error(`[specialist] Failed to finalize log:`, error);
    }
    // Clear the active-run marker in the registry.
    setCurrentRun(projectKey, specialistType, null);
  }
  // Drop any pending grace period for this pair.
  const key = `${projectKey}-${specialistType}`;
  gracePeriodStates.delete(key);
  const { saveAgentRuntimeState } = await import("./agents-54LDKMHR.js");
  saveAgentRuntimeState(tmuxSession, {
    state: "suspended",
    lastActivity: (/* @__PURE__ */ new Date()).toISOString()
  });
  // Post-termination housekeeping: context digest + old-log cleanup.
  const { scheduleDigestGeneration } = await import("./specialist-context-SNCJ7O7G.js");
  scheduleDigestGeneration(projectKey, specialistType);
  scheduleLogCleanup(projectKey, specialistType);
}
|
|
802
|
+
// Schedule asynchronous cleanup of old run logs for a specialist, honoring the
// project's retention policy. Fire-and-forget: all failures are caught and
// logged, never thrown to the caller.
function scheduleLogCleanup(projectKey, specialistType) {
  (async () => {
    try {
      const { cleanupOldLogs: cleanupOldLogs2 } = await import("./specialist-logs-A7ODEK2T.js");
      const { getSpecialistRetention } = await import("./projects-ESIB34QQ.js");
      const retention = getSpecialistRetention(projectKey);
      const deleted = cleanupOldLogs2(projectKey, specialistType, {
        maxDays: retention.max_days,
        maxRuns: retention.max_runs
      });
      if (deleted > 0) {
        console.log(`[specialist] Cleaned up ${deleted} old logs for ${projectKey}/${specialistType}`);
      }
    } catch (error) {
      console.error(`[specialist] Log cleanup failed for ${projectKey}/${specialistType}:`, error);
    }
  })();
}
|
|
817
|
+
// Resolve the on-disk home of a project's specialist:
// <SPECIALISTS_DIR>/<projectKey>/<specialistType>
function getProjectSpecialistDir(projectKey, specialistType) {
  const dir = join4(SPECIALISTS_DIR, projectKey, specialistType);
  return dir;
}
|
|
820
|
+
// Ensure the runs/ and context/ subdirectories exist for a project specialist.
// Fix vs. original: the existsSync pre-checks were redundant (and a TOCTOU
// race) — mkdirSync with { recursive: true } is idempotent and does not throw
// when the directory already exists, so we simply create both unconditionally.
function ensureProjectSpecialistDir(projectKey, specialistType) {
  const specialistDir = getProjectSpecialistDir(projectKey, specialistType);
  mkdirSync2(join4(specialistDir, "runs"), { recursive: true });
  mkdirSync2(join4(specialistDir, "context"), { recursive: true });
}
|
|
831
|
+
// Fetch (creating and persisting on first access) the registry metadata entry
// for a project/specialist pair. The registry is only saved when a brand-new
// entry had to be created.
function getProjectSpecialistMetadata(projectKey, specialistType) {
  const registry = loadRegistry();
  const project = registry.projects[projectKey] || (registry.projects[projectKey] = {});
  let entry = project[specialistType];
  if (!entry) {
    entry = {
      runCount: 0,
      lastRunAt: null,
      lastRunStatus: null,
      currentRun: null
    };
    project[specialistType] = entry;
    saveRegistry(registry);
  }
  return entry;
}
|
|
847
|
+
// Merge a partial update into a project/specialist registry entry (creating a
// default entry when none exists) and persist the registry. Later keys in
// `updates` win over existing values.
function updateProjectSpecialistMetadata(projectKey, specialistType, updates) {
  const registry = loadRegistry();
  const project = registry.projects[projectKey] || (registry.projects[projectKey] = {});
  const current = project[specialistType] || {
    runCount: 0,
    lastRunAt: null,
    lastRunStatus: null,
    currentRun: null
  };
  project[specialistType] = { ...current, ...updates };
  saveRegistry(registry);
}
|
|
866
|
+
// Bump the run counter for a project specialist and stamp the run time.
function incrementProjectRunCount(projectKey, specialistType) {
  const { runCount } = getProjectSpecialistMetadata(projectKey, specialistType);
  updateProjectSpecialistMetadata(projectKey, specialistType, {
    runCount: runCount + 1,
    lastRunAt: (/* @__PURE__ */ new Date()).toISOString()
  });
}
|
|
873
|
+
// Record the active run id for a project specialist; passing null clears it.
function setCurrentRun(projectKey, specialistType, runId) {
  const patch = { currentRun: runId };
  updateProjectSpecialistMetadata(projectKey, specialistType, patch);
}
|
|
876
|
+
// Persist the most recent run outcome for a project specialist.
function updateRunStatus(projectKey, specialistType, status) {
  const patch = { lastRunStatus: status };
  updateProjectSpecialistMetadata(projectKey, specialistType, patch);
}
|
|
879
|
+
// List the keys of every project that has at least one specialist registered.
function listProjectsWithSpecialists() {
  const { projects } = loadRegistry();
  return Object.keys(projects);
}
|
|
883
|
+
// List the specialist types registered for one project; empty array when the
// project is unknown.
function listSpecialistsForProject(projectKey) {
  const project = loadRegistry().projects[projectKey];
  return project ? Object.keys(project) : [];
}
|
|
891
|
+
// Collect status for every project/specialist pair in the registry.
// Fix vs. original: the tmux liveness probes are independent but were awaited
// serially inside nested loops; they are now issued in parallel with
// Promise.all, preserving the original (registry-iteration) result order.
async function getAllProjectSpecialistStatuses() {
  const registry = loadRegistry();
  const entries = [];
  for (const [projectKey, specialists] of Object.entries(registry.projects)) {
    for (const [specialistType, metadata] of Object.entries(specialists)) {
      entries.push({ projectKey, specialistType, metadata });
    }
  }
  return Promise.all(
    entries.map(async ({ projectKey, specialistType, metadata }) => ({
      projectKey,
      specialistType,
      metadata,
      isRunning: await isRunning(specialistType, projectKey),
      tmuxSession: getTmuxSessionName(specialistType, projectKey)
    }))
  );
}
|
|
909
|
+
// Persist the latest measured context size on the specialist's record.
function updateContextTokens(name, tokens) {
  const patch = { contextTokens: tokens };
  updateSpecialistMetadata(name, patch);
}
|
|
912
|
+
// List specialist names that have a persisted ".session" file in
// SPECIALISTS_DIR. Returns an empty array (after logging) on any FS error.
function listSessionFiles() {
  initSpecialistsDirectory();
  try {
    return readdirSync2(SPECIALISTS_DIR)
      .filter((file) => file.endsWith(".session"))
      .map((file) => file.replace(".session", ""));
  } catch (error) {
    console.error("Failed to list session files:", error);
    return [];
  }
}
|
|
923
|
+
// Flip a specialist's enabled flag on.
function enableSpecialist(name) {
  const patch = { enabled: true };
  updateSpecialistMetadata(name, patch);
}
|
|
926
|
+
// Flip a specialist's enabled flag off.
function disableSpecialist(name) {
  const patch = { enabled: false };
  updateSpecialistMetadata(name, patch);
}
|
|
929
|
+
// Whether a specialist is enabled; unknown specialists count as disabled.
function isEnabled(name) {
  return getSpecialistMetadata(name)?.enabled ?? false;
}
|
|
933
|
+
// All registered specialists whose enabled flag is set.
function getEnabledSpecialists() {
  const all = getAllSpecialists();
  return all.filter(({ enabled }) => enabled);
}
|
|
936
|
+
// Locate the .jsonl session file whose basename matches the given session id.
// Best-effort: any listing error yields null rather than throwing.
function findSessionFile(sessionId) {
  try {
    const match = getAllSessionFiles().find(
      (file) => basename2(file, ".jsonl") === sessionId
    );
    if (match) {
      return match;
    }
  } catch {
    // Deliberately swallowed: fall through to null on any listing failure.
  }
  return null;
}
|
|
949
|
+
// Total token usage (input + output + cache read/write) for a specialist's
// persisted Claude session, or null when no session/usage data is available.
function countContextTokens(name) {
  const sessionId = getSessionId(name);
  if (!sessionId) return null;
  const sessionFile = findSessionFile(sessionId);
  if (!sessionFile) return null;
  const sessionUsage = parseClaudeSession(sessionFile);
  if (!sessionUsage) return null;
  const { inputTokens, outputTokens, cacheReadTokens, cacheWriteTokens } = sessionUsage.usage;
  // Cache counters may be absent on older sessions; treat missing as zero.
  return inputTokens + outputTokens + (cacheReadTokens || 0) + (cacheWriteTokens || 0);
}
|
|
964
|
+
// Whether the specialist's tmux session currently exists. `tmux has-session`
// exits non-zero when the session is missing, which execAsync surfaces as a
// rejection — hence the try/catch mapping to true/false.
// Fix vs. original: the session name was interpolated unquoted, unlike every
// other tmux invocation in this file; a name containing spaces or shell
// metacharacters would be word-split by the shell. Now quoted.
async function isRunning(name, projectKey) {
  const tmuxSession = getTmuxSessionName(name, projectKey);
  try {
    await execAsync(`tmux has-session -t "${tmuxSession}"`);
    return true;
  } catch {
    return false;
  }
}
|
|
973
|
+
// Assemble the full status view of a specialist: registry metadata, persisted
// session id, measured context tokens, tmux liveness, and a derived
// state ("active" | "sleeping" | "uninitialized").
// Fix vs. original: the state derivation contained duplicate branches —
// "idle" and "suspended" both mapped to "sleeping" in separate cases, and the
// fallback if/else-if arms for `running && sessionId` vs `sessionId` were
// identical. Collapsed without any behavior change; also reuses the computed
// tmuxSession instead of recomputing it in the return.
async function getSpecialistStatus(name, projectKey) {
  // Fall back to a minimal disabled record for unknown specialists.
  const metadata = getSpecialistMetadata(name) || {
    name,
    displayName: name,
    description: "",
    enabled: false,
    autoWake: false
  };
  const sessionId = getSessionId(name);
  const running = await isRunning(name, projectKey);
  const contextTokens = countContextTokens(name);
  const { getAgentRuntimeState } = await import("./agents-54LDKMHR.js");
  const tmuxSession = getTmuxSessionName(name, projectKey);
  const runtimeState = getAgentRuntimeState(tmuxSession);
  let state;
  if (runtimeState) {
    switch (runtimeState.state) {
      case "active":
        state = "active";
        break;
      case "idle":
      case "suspended":
        state = "sleeping";
        break;
      case "uninitialized":
      default:
        state = "uninitialized";
        break;
    }
  } else {
    // No runtime record: a persisted session implies a sleeping specialist.
    state = sessionId ? "sleeping" : "uninitialized";
  }
  return {
    ...metadata,
    sessionId: sessionId || void 0,
    contextTokens: contextTokens || void 0,
    state,
    isRunning: running,
    tmuxSession,
    currentIssue: runtimeState?.currentIssue
  };
}
|
|
1023
|
+
// Gather status for every registered specialist concurrently.
async function getAllSpecialistStatus() {
  const pending = getAllSpecialists().map(({ name }) => getSpecialistStatus(name));
  return Promise.all(pending);
}
|
|
1027
|
+
// First-time bootstrap of a long-lived specialist: writes an identity prompt
// and a bash launcher under ~/.panopticon/agents/<session>/, then starts a
// detached tmux session running Claude with that prompt. Refuses to run if
// the specialist is already running or already has a persisted session.
async function initializeSpecialist(name) {
  if (await isRunning(name)) {
    return {
      success: false,
      message: `Specialist ${name} is already running`,
      error: "already_running"
    };
  }
  if (getSessionId(name)) {
    return {
      success: false,
      message: `Specialist ${name} is already initialized. Use wake to start it.`,
      error: "already_initialized"
    };
  }
  const tmuxSession = getTmuxSessionName(name);
  // NOTE(review): hard-coded fallback home path — verify this is intentional.
  const cwd = process.env.HOME || "/home/eltmon";
  let model = "claude-sonnet-4-5";
  try {
    // Model is resolved per work type; failures fall back to the default above.
    const workTypeId = `specialist-${name}`;
    model = getModelId(workTypeId);
  } catch (error) {
    console.warn(`Warning: Could not resolve model for ${name}, using default model`);
  }
  const identityPrompt = `You are the ${name} specialist agent for Panopticon.
Your role: ${name === "merge-agent" ? "Resolve merge conflicts and ensure clean integrations" : name === "review-agent" ? "Review code changes and provide quality feedback" : name === "test-agent" ? "Execute and analyze test results" : "Assist with development tasks"}

You will be woken up when your services are needed. For now, acknowledge your initialization and wait.
Say: "I am the ${name} specialist, ready and waiting for tasks."`;
  try {
    const agentDir = join4(homedir2(), ".panopticon", "agents", tmuxSession);
    await execAsync(`mkdir -p "${agentDir}"`, { encoding: "utf-8" });
    const promptFile = join4(agentDir, "identity-prompt.md");
    const launcherScript = join4(agentDir, "launcher.sh");
    writeFileSync(promptFile, identityPrompt);
    // The launcher reads the prompt from disk to avoid shell-quoting issues.
    writeFileSync(launcherScript, `#!/bin/bash
cd "${cwd}"
prompt=$(cat "${promptFile}")
exec claude --dangerously-skip-permissions --model ${model} "$prompt"
`, { mode: 493 }); // 493 === 0o755 (rwxr-xr-x)
    await execAsync(
      `tmux new-session -d -s "${tmuxSession}" "bash '${launcherScript}'"`,
      { encoding: "utf-8" }
    );
    recordWake(name);
    return {
      success: true,
      message: `Specialist ${name} initialized and started`,
      tmuxSession
    };
  } catch (error) {
    const errorMessage = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      message: `Failed to initialize specialist ${name}: ${errorMessage}`,
      error: errorMessage
    };
  }
}
|
|
1086
|
+
// Initialize every enabled specialist that does not yet have a persisted
// session, staggering startups by one second. Returns one result per enabled
// specialist (already-initialized ones report success without action).
async function initializeEnabledSpecialists() {
  const enabled = getEnabledSpecialists();
  const results = [];
  for (const specialist of enabled) {
    const { name } = specialist;
    const sessionId = getSessionId(name);
    if (sessionId) {
      // Already has a persisted session; nothing to start.
      results.push({
        name,
        success: true,
        message: `Already initialized with session ${sessionId.substring(0, 8)}...`
      });
      continue;
    }
    console.log(` \u2192 Auto-initializing specialist: ${name}`);
    const result = await initializeSpecialist(name);
    results.push({
      name,
      success: result.success,
      message: result.message
    });
    // Brief stagger between startups (skipped after the final specialist).
    if (results.length < enabled.length) {
      await new Promise((resolve) => setTimeout(resolve, 1e3));
    }
  }
  return results;
}
|
|
1112
|
+
// Put a specialist's tmux session back into a known-clean state before a new
// task: interrupt the current activity (Ctrl-C), return to the home directory,
// then clear the input line (Ctrl-U). Failures are logged, not thrown.
async function resetSpecialist(name) {
  const tmuxSession = getTmuxSessionName(name);
  const pause = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
  try {
    await execAsync(`tmux send-keys -t "${tmuxSession}" C-c`, { encoding: "utf-8" });
    await pause(200);
    sendKeys(tmuxSession, "cd ~");
    await pause(200);
    await execAsync(`tmux send-keys -t "${tmuxSession}" C-u`, { encoding: "utf-8" });
    await pause(100);
  } catch (error) {
    console.error(`[specialist] Failed to reset ${name}:`, error);
  }
}
|
|
1125
|
+
// Deliver a task prompt to a specialist's tmux session, starting the session
// first if needed (optionally resuming a persisted Claude session). Large or
// multi-line prompts are written to a task file and referenced indirectly,
// since tmux send-keys handles long/multi-line input poorly.
async function wakeSpecialist(name, taskPrompt, options = {}) {
  const { waitForReady = true, startIfNotRunning = true, issueId } = options;
  const tmuxSession = getTmuxSessionName(name);
  const sessionId = getSessionId(name);
  const wasAlreadyRunning = await isRunning(name);
  if (!wasAlreadyRunning) {
    if (!startIfNotRunning) {
      return {
        success: false,
        message: `Specialist ${name} is not running`,
        wasAlreadyRunning: false,
        error: "not_running"
      };
    }
    // NOTE(review): hard-coded fallback home path — verify this is intentional.
    const cwd = process.env.HOME || "/home/eltmon";
    try {
      // merge-agent gets the stronger model and broader permissions.
      const modelFlag = name === "merge-agent" ? "--model opus" : "";
      const permissionFlags = name === "merge-agent" ? "--dangerously-skip-permissions --permission-mode bypassPermissions" : "--dangerously-skip-permissions";
      // Resume the persisted Claude session when one exists.
      const claudeCmd = sessionId ? `claude --resume "${sessionId}" ${modelFlag} ${permissionFlags}` : `claude ${modelFlag} ${permissionFlags}`;
      await execAsync(
        `tmux new-session -d -s "${tmuxSession}" -c "${cwd}" "${claudeCmd}"`,
        { encoding: "utf-8" }
      );
      if (waitForReady) {
        // Fixed 3s startup allowance before sending input.
        await new Promise((resolve) => setTimeout(resolve, 3e3));
      }
    } catch (error) {
      const msg = error instanceof Error ? error.message : String(error);
      return {
        success: false,
        message: `Failed to start specialist ${name}: ${msg}`,
        wasAlreadyRunning: false,
        error: msg
      };
    }
  }
  // Clear any in-progress input/activity before delivering the new task.
  await resetSpecialist(name);
  try {
    const isLargePrompt = taskPrompt.length > 500 || taskPrompt.includes("\n");
    if (isLargePrompt) {
      if (!existsSync3(TASKS_DIR)) {
        mkdirSync2(TASKS_DIR, { recursive: true });
      }
      // Spill the prompt to a file and send only a short pointer message.
      const taskFile = join4(TASKS_DIR, `${name}-${Date.now()}.md`);
      writeFileSync(taskFile, taskPrompt, "utf-8");
      const shortMessage = `Read and execute the task in: ${taskFile}`;
      sendKeys(tmuxSession, shortMessage);
    } else {
      sendKeys(tmuxSession, taskPrompt);
    }
    recordWake(name, sessionId || void 0);
    const { saveAgentRuntimeState } = await import("./agents-54LDKMHR.js");
    saveAgentRuntimeState(tmuxSession, {
      state: "active",
      lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
      currentIssue: issueId
    });
    return {
      success: true,
      message: wasAlreadyRunning ? `Sent task to running specialist ${name}` : `Started specialist ${name} and sent task`,
      tmuxSession,
      wasAlreadyRunning
    };
  } catch (error) {
    const msg = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      message: `Failed to send task to specialist ${name}: ${msg}`,
      tmuxSession,
      wasAlreadyRunning,
      error: msg
    };
  }
}
|
|
1199
|
+
// Build the per-specialist task prompt for a workspace task and deliver it via
// wakeSpecialist. The three known specialist types get detailed, hand-tuned
// instructions; unknown types get a generic one-liner.
// NOTE(review): the template literals below ARE the agent instructions —
// every character (including escaped backticks and \uXXXX escapes) is
// behavior. Do not reflow or "clean up" their contents.
async function wakeSpecialistWithTask(name, task) {
  let prompt;
  switch (name) {
    // merge-agent: merge the feature branch into main, resolving conflicts.
    case "merge-agent":
      prompt = `New merge task for ${task.issueId}:

Branch: ${task.branch || "unknown"}
Workspace: ${task.workspace || "unknown"}
${task.prUrl ? `PR URL: ${task.prUrl}` : ""}

Your task:
1. Fetch the latest main branch
2. Attempt to merge ${task.branch} into main
3. If conflicts arise, resolve them intelligently based on context
4. Run the test suite to verify the merge is clean
5. If tests pass, complete the merge and push
6. If tests fail, analyze the failures and either fix them or report back

When done, provide feedback on:
- Any conflicts encountered and how you resolved them
- Test results
- Any patterns you notice that future agents should be aware of

Use the send-feedback-to-agent skill to report findings back to the issue agent.`;
      break;
    // review-agent: code review only — explicitly forbidden from running tests.
    case "review-agent":
      prompt = `New review task for ${task.issueId}:

Branch: ${task.branch || "unknown"}
Workspace: ${task.workspace || "unknown"}
${task.prUrl ? `PR URL: ${task.prUrl}` : ""}

Your task:
1. Review all changes in the branch compared to main
2. Check for code quality issues, security concerns, and best practices
3. Verify test FILES exist for new code (DO NOT run tests - test-agent does that)
4. Provide specific, actionable feedback

IMPORTANT: DO NOT run tests (npm test). You are the REVIEW agent - you only review code.
The TEST agent will run tests in the next step.

## How to Review Changes

**Step 1:** Get the list of changed files:
\`\`\`bash
cd ${task.workspace || "unknown"} && git diff --name-only main...HEAD
\`\`\`

**Step 2:** Read the CURRENT version of each changed file using the Read tool.
Review the actual file contents \u2014 do NOT rely solely on diff output.

**Step 3:** If you need to see what specifically changed, use:
\`\`\`bash
cd ${task.workspace || "unknown"} && git diff main...HEAD -- <file>
\`\`\`

## Avoiding False Positives

**CRITICAL:** When reviewing diffs, understand that:
- Lines starting with \`+\` are ADDITIONS (new code)
- Lines starting with \`-\` are DELETIONS (removed code)
- Lines without prefix are CONTEXT (unchanged surrounding code)
- The SAME content may appear in both \`-\` and \`+\` sections when code is moved or reformatted \u2014 this is NOT duplication
- A section shown in diff context does NOT mean it appears twice in the actual file
- **Always read the actual file** to verify before claiming duplicate or redundant content

Do NOT flag:
- Code that appears in both removed and added hunks (it was moved, not duplicated)
- Diff context lines as "duplicate sections" \u2014 they exist once in the real file
- Reformatted/restructured code as "duplicated"

## REQUIRED: Update Status via API

You MUST execute these curl commands and verify they succeed. Do NOT just describe them - actually RUN them with Bash.

If issues found:
\`\`\`bash
# EXECUTE THIS - verify you see JSON response with reviewStatus
curl -s -X POST http://localhost:3011/api/workspaces/${task.issueId}/review-status -H "Content-Type: application/json" -d '{"reviewStatus":"blocked","reviewNotes":"[describe issues]"}' | jq .
\`\`\`
Then use send-feedback-to-agent skill to notify issue agent.

If review passes:
\`\`\`bash
# EXECUTE THIS FIRST - verify you see JSON response with reviewStatus:"passed"
curl -s -X POST http://localhost:3011/api/workspaces/${task.issueId}/review-status -H "Content-Type: application/json" -d '{"reviewStatus":"passed"}' | jq .

# THEN EXECUTE THIS - verify you see JSON response with queued task
curl -s -X POST http://localhost:3011/api/specialists/test-agent/queue -H "Content-Type: application/json" -d '{"issueId":"${task.issueId}","workspace":"${task.workspace}","branch":"${task.branch}"}' | jq .
\`\`\`

\u26A0\uFE0F VERIFICATION: After running each curl, confirm you see valid JSON output. If you get an error, report it.`;
      break;
    // test-agent: run the suite on feature AND main, block only on NEW failures.
    case "test-agent":
      prompt = `New test task for ${task.issueId}:

Branch: ${task.branch || "unknown"}
Workspace: ${task.workspace || "unknown"}

Your task:
1. Run the full test suite on the feature branch
2. Run the same test suite on the main branch (baseline)
3. Compare results: identify which failures are NEW vs pre-existing
4. Only fail the feature branch for NEW regressions
5. Update status via API when done

## CRITICAL: Bash Timeout for Test Commands

**ALWAYS use timeout: 300000 (5 minutes) when running test commands.**
Test suites commonly take 2-5 minutes. The default bash timeout is only 2 minutes and WILL cause premature failures.
Do NOT run test commands in background mode \u2014 run them directly with a 5-minute timeout.

Example:
\`\`\`bash
cd ${task.workspace || "unknown"} && npm test 2>&1 | tail -30
# Use timeout: 300000 for this command
\`\`\`

## CRITICAL: Baseline Comparison

**You MUST compare test results against the main branch baseline.**

Pre-existing failures that also occur on main branch should NOT block the feature branch.

Steps:
1. Run \`npm test\` (or detected command) on the feature branch - record results (timeout: 300000)
2. Run tests on main branch baseline (timeout: 300000): \`cd ${task.context?.workspace ? task.context.workspace.replace(/workspaces\/feature-[^/]+/, "") : "unknown"} && npm test 2>&1 | tail -30\`
3. Compare: any test that fails on BOTH branches is pre-existing
4. Only NEW failures (pass on main, fail on feature) should block

**Pass criteria:** The feature branch introduces ZERO new test failures compared to main.
**Fail criteria:** The feature branch introduces one or more NEW test failures not present on main.

Report pre-existing failures as informational notes, but do NOT block the feature for them.

## REQUIRED: Update Status via API

You MUST execute the appropriate curl command and verify it succeeds. Do NOT just describe it - actually RUN it with Bash.

If NO new regressions (tests PASS):
\`\`\`bash
# EXECUTE THIS - verify you see JSON response with testStatus:"passed"
curl -s -X POST http://localhost:3011/api/workspaces/${task.issueId}/review-status -H "Content-Type: application/json" -d '{"testStatus":"passed","testNotes":"[summary including pre-existing failures if any]"}' | jq .
\`\`\`

If NEW regressions found (tests FAIL):
\`\`\`bash
# EXECUTE THIS - verify you see JSON response with testStatus:"failed"
curl -s -X POST http://localhost:3011/api/workspaces/${task.issueId}/review-status -H "Content-Type: application/json" -d '{"testStatus":"failed","testNotes":"[describe NEW failures only]"}' | jq .
\`\`\`
Then use send-feedback-to-agent skill to notify issue agent of NEW failures only.

\u26A0\uFE0F VERIFICATION: After running curl, confirm you see valid JSON output with the updated status. If you get an error or empty response, the update FAILED - report this.

IMPORTANT: Do NOT hand off to merge-agent. Human clicks Merge button when ready.`;
      break;
    default:
      prompt = `Task for ${task.issueId}: Please process this task and report findings.`;
  }
  return wakeSpecialist(name, prompt, { issueId: task.issueId });
}
|
|
1360
|
+
// Wake a specialist with a task, or queue the task if the specialist is
// already busy with other work.
//
// Flow:
//   1. If the specialist's tmux session is running AND its runtime state is
//      neither "idle" nor "suspended", the task is pushed onto the
//      specialist's hook queue instead of interrupting the current run.
//   2. Otherwise the specialist is marked "active" FIRST (before waking) so
//      concurrent callers observe it as busy and queue instead of racing.
//   3. If the wake fails (either a failed result or a throw), the runtime
//      state is rolled back to "idle" so the specialist can be woken later.
//
// @param name    Specialist name (e.g. "review-agent").
// @param task    Task descriptor: { issueId, workspace, branch, prUrl, context }.
// @param options { priority = "normal", source = "handoff" }.
// @returns {Promise<{success, queued, message, error?}>}
async function wakeSpecialistOrQueue(name, task, options = {}) {
  const { priority = "normal", source = "handoff" } = options;
  const running = await isRunning(name);
  // Dynamic import avoids a static circular dependency with the agents chunk.
  const { getAgentRuntimeState } = await import("./agents-54LDKMHR.js");
  const tmuxSession = getTmuxSessionName(name);
  const runtimeState = getAgentRuntimeState(tmuxSession);
  // "idle"/"suspended" sessions can accept a wake even though tmux is up.
  const idle = runtimeState?.state === "idle" || runtimeState?.state === "suspended";
  if (running && !idle) {
    // Busy: enqueue instead of interrupting the in-flight run.
    try {
      submitToSpecialistQueue(name, {
        priority,
        source,
        issueId: task.issueId,
        workspace: task.workspace,
        branch: task.branch,
        prUrl: task.prUrl,
        context: task.context
      });
      console.log(`[specialist] ${name} busy, queued task for ${task.issueId} (priority: ${priority})`);
      return {
        success: true,
        queued: true,
        message: `Specialist ${name} is busy. Task queued with ${priority} priority.`
      };
    } catch (error) {
      const msg = error instanceof Error ? error.message : String(error);
      return {
        success: false,
        queued: false,
        message: `Failed to queue task for ${name}: ${msg}`,
        error: msg
      };
    }
  }
  // Mark active BEFORE waking so a concurrent caller takes the queue path
  // above rather than double-waking the same specialist.
  const { saveAgentRuntimeState } = await import("./agents-54LDKMHR.js");
  saveAgentRuntimeState(tmuxSession, {
    state: "active",
    lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
    currentIssue: task.issueId
  });
  console.log(`[specialist] ${name} marked active (preventing concurrent wakes)`);
  try {
    const wakeResult = await wakeSpecialistWithTask(name, task);
    if (!wakeResult.success) {
      // Wake reported failure: release the "active" reservation.
      saveAgentRuntimeState(tmuxSession, {
        state: "idle",
        lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
        currentIssue: void 0
      });
    }
    return {
      success: wakeResult.success,
      queued: false,
      message: wakeResult.message,
      error: wakeResult.error
    };
  } catch (error) {
    // Wake threw: roll state back so the specialist is not stuck "active".
    saveAgentRuntimeState(tmuxSession, {
      state: "idle",
      lastActivity: (/* @__PURE__ */ new Date()).toISOString(),
      currentIssue: void 0
    });
    const msg = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      queued: false,
      message: `Failed to wake specialist ${name}: ${msg}`,
      error: msg
    };
  }
}
|
|
1431
|
+
/**
 * Push a task onto a specialist's hook queue and record the handoff event
 * for auditing.
 *
 * @param {string} specialistName - Queue owner (e.g. "review-agent").
 * @param {object} task - { priority, source, issueId, workspace, branch,
 *   prUrl, context, filesChanged }.
 * @returns The queue item created by pushToHook.
 */
function submitToSpecialistQueue(specialistName, task) {
  const { priority, source, issueId, workspace, branch, prUrl, context, filesChanged } = task;
  // Fold routing fields into the payload context so the specialist sees them.
  const queueEntry = {
    type: "task",
    priority,
    source,
    payload: {
      issueId,
      context: {
        ...context,
        prUrl,
        workspace,
        branch,
        filesChanged
      }
    }
  };
  const queuedItem = pushToHook(specialistName, queueEntry);
  // Record the handoff (originating agent -> this specialist).
  const handoffEvent = createSpecialistHandoff(source, specialistName, issueId, priority, {
    workspace,
    branch,
    prUrl,
    source
  });
  logSpecialistHandoff(handoffEvent);
  return queuedItem;
}
|
|
1465
|
+
/**
 * Peek at a specialist's pending hook-queue contents.
 * Thin delegate to the generic checkHook helper.
 */
function checkSpecialistQueue(specialistName) {
  const result = checkHook(specialistName);
  return result;
}
|
|
1468
|
+
/**
 * Mark a queued specialist task as completed by removing it from the hook
 * queue. Thin delegate to the generic popFromHook helper.
 */
function completeSpecialistTask(specialistName, itemId) {
  const removed = popFromHook(specialistName, itemId);
  return removed;
}
|
|
1471
|
+
/**
 * Return the next pending task for a specialist, or null when its queue
 * is empty.
 */
function getNextSpecialistTask(specialistName) {
  const { items: pending } = checkSpecialistQueue(specialistName);
  if (pending.length === 0) {
    return null;
  }
  return pending[0];
}
|
|
1475
|
+
/**
 * Record specialist feedback and forward it to the owning issue agent.
 *
 * Appends the enriched feedback record to the JSONL feedback log (best
 * effort - a logging failure does not abort delivery), then messages the
 * tmux session `agent-<issueId>` with the formatted feedback.
 *
 * @param {object} feedback - Must include fromSpecialist and toIssueId;
 *   the display fields (summary, details, actionItems, ...) are read by
 *   formatFeedbackForAgent.
 * @returns {Promise<boolean>} true when the message reached the agent
 *   session, false otherwise (e.g. session gone - never throws).
 */
async function sendFeedbackToAgent(feedback) {
  const { fromSpecialist, toIssueId } = feedback;
  if (!existsSync3(FEEDBACK_DIR)) {
    mkdirSync2(FEEDBACK_DIR, { recursive: true });
  }
  const fullFeedback = {
    ...feedback,
    // slice(2, 11) takes 9 random base-36 chars (substr is deprecated).
    id: `feedback-${Date.now()}-${Math.random().toString(36).slice(2, 11)}`,
    timestamp: (/* @__PURE__ */ new Date()).toISOString()
  };
  // Best-effort durable log; delivery below proceeds even if this fails.
  try {
    const line = JSON.stringify(fullFeedback) + "\n";
    appendFileSync2(FEEDBACK_LOG, line, "utf-8");
  } catch (error) {
    console.error(`[specialist] Failed to log feedback:`, error);
  }
  const agentSession = `agent-${toIssueId.toLowerCase()}`;
  try {
    // Dynamic import avoids a static circular dependency with the agents chunk.
    const { messageAgent } = await import("./agents-54LDKMHR.js");
    const feedbackMessage = formatFeedbackForAgent(fullFeedback);
    await messageAgent(agentSession, feedbackMessage);
    console.log(`[specialist] Sent feedback from ${fromSpecialist} to ${agentSession}`);
    return true;
  } catch (err) {
    // The agent session may not exist (issue closed); report, don't throw.
    console.log(`[specialist] Could not send feedback to ${agentSession}:`, err);
    return false;
  }
}
|
|
1503
|
+
/**
 * Render a feedback record as a Markdown message for the issue agent.
 * Pure function: identical input always yields the identical string.
 *
 * Layout: emoji header line, Summary, Details, then optional numbered
 * Action Items and bulleted Patterns / Suggestions sections.
 */
function formatFeedbackForAgent(feedback) {
  const { fromSpecialist, feedbackType, category, summary, details, actionItems, patterns, suggestions } = feedback;
  // Marker emoji per feedback type (undefined for unknown types).
  const typeEmoji = {
    success: "\u2705",
    failure: "\u274C",
    warning: "\u26A0\uFE0F",
    insight: "\u{1F4A1}"
  }[feedbackType];
  const parts = [
    `\n${typeEmoji} **Feedback from ${fromSpecialist}** (${category})\n\n`,
    `**Summary:** ${summary}\n\n`,
    `**Details:**\n${details}\n`
  ];
  if (actionItems?.length) {
    parts.push(`\n**Action Items:**\n`);
    actionItems.forEach((item, i) => parts.push(`${i + 1}. ${item}\n`));
  }
  if (patterns?.length) {
    parts.push(`\n**Patterns Noticed:**\n`);
    patterns.forEach((pattern) => parts.push(`- ${pattern}\n`));
  }
  if (suggestions?.length) {
    parts.push(`\n**Suggestions:**\n`);
    suggestions.forEach((suggestion) => parts.push(`- ${suggestion}\n`));
  }
  return parts.join("");
}
|
|
1550
|
+
/**
 * Read all feedback entries addressed to the given issue, matched
 * case-insensitively against each record's toIssueId.
 * Returns [] when the log is missing, unreadable, or corrupt.
 */
function getPendingFeedback(issueId) {
  if (!existsSync3(FEEDBACK_LOG)) {
    return [];
  }
  try {
    const wanted = issueId.toLowerCase();
    const raw = readFileSync3(FEEDBACK_LOG, "utf-8");
    return raw
      .trim()
      .split("\n")
      .filter((entry) => entry.length > 0)
      .map((entry) => JSON.parse(entry))
      .filter((record) => record.toIssueId.toLowerCase() === wanted);
  } catch (error) {
    console.error(`[specialist] Failed to read feedback log:`, error);
    return [];
  }
}
|
|
1564
|
+
/**
 * Aggregate feedback counts by specialist and by feedback type.
 * Known specialists are pre-seeded at 0 so they always appear.
 * On a read/parse error, returns whatever was tallied so far.
 */
function getFeedbackStats() {
  const stats = {
    bySpecialist: {
      "merge-agent": 0,
      "review-agent": 0,
      "test-agent": 0
    },
    byType: {},
    total: 0
  };
  if (!existsSync3(FEEDBACK_LOG)) {
    return stats;
  }
  try {
    const raw = readFileSync3(FEEDBACK_LOG, "utf-8");
    const entries = raw.trim().split("\n").filter((entry) => entry.length > 0);
    for (const entry of entries) {
      const { fromSpecialist, feedbackType } = JSON.parse(entry);
      stats.bySpecialist[fromSpecialist] = (stats.bySpecialist[fromSpecialist] || 0) + 1;
      stats.byType[feedbackType] = (stats.byType[feedbackType] || 0) + 1;
      stats.total += 1;
    }
  } catch (error) {
    console.error(`[specialist] Failed to read feedback stats:`, error);
  }
  return stats;
}
|
|
1591
|
+
// Module-scope bindings for src/lib/cloister/specialists.ts, hoisted by the
// bundler; assigned lazily by init_specialists() below.
var execAsync, SPECIALISTS_DIR, REGISTRY_FILE, TASKS_DIR, DEFAULT_SPECIALISTS, gracePeriodStates, FEEDBACK_DIR, FEEDBACK_LOG;
// Bundler-generated lazy initializer: runs once on first init_specialists()
// call, after initializing the modules this one depends on.
var init_specialists = __esm({
  "src/lib/cloister/specialists.ts"() {
    init_esm_shims();
    init_paths();
    init_jsonl_parser();
    init_specialist_handoff_logger();
    init_work_type_router();
    init_tmux();
    init_hooks();
    execAsync = promisify(exec);
    // On-disk layout under PANOPTICON_HOME.
    SPECIALISTS_DIR = join4(PANOPTICON_HOME, "specialists");
    REGISTRY_FILE = join4(SPECIALISTS_DIR, "registry.json");
    TASKS_DIR = join4(SPECIALISTS_DIR, "tasks");
    // Built-in specialist roster; all enabled and auto-wakeable by default.
    DEFAULT_SPECIALISTS = [
      {
        name: "merge-agent",
        displayName: "Merge Agent",
        description: "PR merging and conflict resolution",
        enabled: true,
        autoWake: true
      },
      {
        name: "review-agent",
        displayName: "Review Agent",
        description: "Code review and quality checks",
        enabled: true,
        autoWake: true
      },
      {
        name: "test-agent",
        displayName: "Test Agent",
        description: "Test execution and analysis",
        enabled: true,
        autoWake: true
      }
    ];
    // In-memory grace-period tracking (not persisted).
    gracePeriodStates = /* @__PURE__ */ new Map();
    FEEDBACK_DIR = join4(PANOPTICON_HOME, "specialists", "feedback");
    FEEDBACK_LOG = join4(FEEDBACK_DIR, "feedback.jsonl");
  }
});
|
|
1633
|
+
|
|
1634
|
+
// src/lib/cloister/specialist-logs.ts
// Bundler-generated export registry for the specialist-logs module:
// __export wires lazy getters so importers see live bindings.
var specialist_logs_exports = {};
__export(specialist_logs_exports, {
  MAX_LOG_SIZE: () => MAX_LOG_SIZE,
  appendToRunLog: () => appendToRunLog,
  checkLogSizeLimit: () => checkLogSizeLimit,
  cleanupAllLogs: () => cleanupAllLogs,
  cleanupOldLogs: () => cleanupOldLogs,
  createRunLog: () => createRunLog,
  finalizeRunLog: () => finalizeRunLog,
  generateRunId: () => generateRunId,
  getRecentRunLogs: () => getRecentRunLogs,
  getRunLog: () => getRunLog,
  getRunLogPath: () => getRunLogPath,
  getRunLogSize: () => getRunLogSize,
  getRunsDirectory: () => getRunsDirectory,
  isRunLogActive: () => isRunLogActive,
  listRunLogs: () => listRunLogs,
  parseLogMetadata: () => parseLogMetadata
});
|
|
1654
|
+
import { existsSync as existsSync4, mkdirSync as mkdirSync3, writeFileSync as writeFileSync2, appendFileSync as appendFileSync3, readFileSync as readFileSync4, readdirSync as readdirSync3, statSync as statSync2, unlinkSync as unlinkSync2 } from "fs";
|
|
1655
|
+
import { join as join5, basename as basename3 } from "path";
|
|
1656
|
+
/**
 * Path of the run-log directory for a project/specialist pair:
 * <SPECIALISTS_DIR2>/<projectKey>/<specialistType>/runs
 */
function getRunsDirectory(projectKey, specialistType) {
  const segments = [SPECIALISTS_DIR2, projectKey, specialistType, "runs"];
  return join5(...segments);
}
|
|
1659
|
+
/**
 * Build a sortable, filesystem-safe run id: "YYYY-MM-DDTHH-MM-SS-<issueId>".
 * Colons and dots in the ISO timestamp become dashes; the first 19
 * characters keep precision down to seconds.
 */
function generateRunId(issueId) {
  const iso = (/* @__PURE__ */ new Date()).toISOString();
  const stamp = iso.replace(/[:.]/g, "-").slice(0, 19);
  return `${stamp}-${issueId}`;
}
|
|
1663
|
+
/** Absolute path of a single run's log file: <runsDir>/<runId>.log */
function getRunLogPath(projectKey, specialistType, runId) {
  return join5(getRunsDirectory(projectKey, specialistType), `${runId}.log`);
}
|
|
1667
|
+
/** Create the runs directory (including parents) if it does not exist yet. */
function ensureRunsDirectory(projectKey, specialistType) {
  const dir = getRunsDirectory(projectKey, specialistType);
  if (existsSync4(dir)) {
    return;
  }
  mkdirSync3(dir, { recursive: true });
}
|
|
1673
|
+
/**
 * Create a fresh run log containing a metadata header, a context-seed
 * section, and an empty transcript section. The header fields are later
 * re-parsed by parseLogMetadata / finalizeRunLog.
 *
 * @returns {{ runId: string, filePath: string }}
 */
function createRunLog(projectKey, specialistType, issueId, contextSeed) {
  ensureRunsDirectory(projectKey, specialistType);
  const runId = generateRunId(issueId);
  const filePath = getRunLogPath(projectKey, specialistType, runId);
  const startedAt = (/* @__PURE__ */ new Date()).toISOString();
  const headerLines = [
    `# ${specialistType} Run - ${issueId}`,
    `Project: ${projectKey}`,
    `Started: ${startedAt}`,
    `Issue: ${issueId}`,
    `Run ID: ${runId}`,
    ``,
    `## Context Seed`,
    contextSeed ? contextSeed : "[No context digest available]",
    ``,
    `## Session Transcript`,
    ``
  ];
  writeFileSync2(filePath, headerLines.join("\n"), "utf-8");
  return { runId, filePath };
}
|
|
1692
|
+
/**
 * Append raw content to an existing run log.
 * @throws {Error} when the log file does not exist.
 */
function appendToRunLog(projectKey, specialistType, runId, content) {
  const logPath = getRunLogPath(projectKey, specialistType, runId);
  if (!existsSync4(logPath)) {
    throw new Error(`Run log not found: ${logPath}`);
  }
  appendFileSync3(logPath, content, "utf-8");
}
|
|
1699
|
+
/**
 * Append the "## Result" section to a run log, closing out the run.
 * Duration is derived from the header's "Started:" line (falling back to
 * "now", i.e. ~0s, when the header line is missing).
 *
 * @param {{status: string, notes?: string}} result
 * @throws {Error} when the log file does not exist.
 */
function finalizeRunLog(projectKey, specialistType, runId, result) {
  const logPath = getRunLogPath(projectKey, specialistType, runId);
  if (!existsSync4(logPath)) {
    throw new Error(`Run log not found: ${logPath}`);
  }
  const content = readFileSync4(logPath, "utf-8");
  const startMatch = content.match(/^Started: (.+)$/m);
  const startedAt = startMatch ? new Date(startMatch[1]) : /* @__PURE__ */ new Date();
  const finishedAt = /* @__PURE__ */ new Date();
  const durationSeconds = Math.floor((finishedAt.getTime() - startedAt.getTime()) / 1e3);
  const durationStr = `${Math.floor(durationSeconds / 60)}m ${durationSeconds % 60}s`;
  // A missing notes field leaves an intentionally blank line here.
  const notesLine = result.notes ? `Notes: ${result.notes}` : "";
  const resultSection = `\n\n## Result\nStatus: ${result.status}\n${notesLine}\nDuration: ${durationStr}\nFinished: ${finishedAt.toISOString()}\n`;
  appendFileSync3(logPath, resultSection, "utf-8");
}
|
|
1723
|
+
/** Read a run log's full text, or null when the file is missing/unreadable. */
function getRunLog(projectKey, specialistType, runId) {
  const logPath = getRunLogPath(projectKey, specialistType, runId);
  if (!existsSync4(logPath)) {
    return null;
  }
  try {
    return readFileSync4(logPath, "utf-8");
  } catch (error) {
    console.error(`Failed to read run log ${runId}:`, error);
    return null;
  }
}
|
|
1735
|
+
/**
 * Extract the "Field: value" metadata lines from a run log's text.
 * Only fields actually present in the log appear as keys on the result.
 * "Duration: Nm Ns" is converted to milliseconds.
 */
function parseLogMetadata(logContent) {
  const metadata = {};
  // result key -> label used in the log header/result sections
  const fields = [
    ["project", "Project"],
    ["startedAt", "Started"],
    ["issueId", "Issue"],
    ["runId", "Run ID"],
    ["status", "Status"],
    ["notes", "Notes"],
    ["finishedAt", "Finished"]
  ];
  for (const [key, label] of fields) {
    const match = logContent.match(new RegExp(`^${label}: (.+)$`, "m"));
    if (match) {
      metadata[key] = match[1].trim();
    }
  }
  const durationMatch = logContent.match(/^Duration: (.+)$/m);
  if (durationMatch) {
    const text = durationMatch[1].trim();
    // Missing components default to 0 (e.g. "45s" alone).
    const minutes = Number.parseInt(text.match(/(\d+)m/)?.[1] ?? "0", 10);
    const seconds = Number.parseInt(text.match(/(\d+)s/)?.[1] ?? "0", 10);
    metadata.duration = (minutes * 60 + seconds) * 1e3;
  }
  return metadata;
}
|
|
1762
|
+
// List run-log summaries for a project/specialist pair, newest first.
// Reads every .log file in the runs directory, parses its metadata header,
// and sorts by file creation time (descending). Supports offset/limit
// pagination via options. Any filesystem error yields [].
// NOTE: each log is read in full to parse metadata, so cost grows with
// log sizes.
function listRunLogs(projectKey, specialistType, options = {}) {
  const runsDir = getRunsDirectory(projectKey, specialistType);
  if (!existsSync4(runsDir)) {
    return [];
  }
  try {
    const files = readdirSync3(runsDir).filter((f) => f.endsWith(".log")).map((f) => {
      const filePath = join5(runsDir, f);
      const stats = statSync2(filePath);
      const runId = basename3(f, ".log");
      const content = readFileSync4(filePath, "utf-8");
      const metadata = parseLogMetadata(content);
      return {
        runId,
        filePath,
        metadata: {
          runId,
          project: projectKey,
          specialistType,
          issueId: metadata.issueId || "unknown",
          // Fall back to the file's birth time when the header lacks "Started:".
          startedAt: metadata.startedAt || stats.birthtime.toISOString(),
          finishedAt: metadata.finishedAt,
          status: metadata.status,
          duration: metadata.duration,
          notes: metadata.notes
        },
        fileSize: stats.size,
        createdAt: stats.birthtime
      };
    });
    // Newest first.
    files.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
    const { limit, offset = 0 } = options;
    if (limit !== void 0) {
      return files.slice(offset, offset + limit);
    }
    return files.slice(offset);
  } catch (error) {
    console.error(`Failed to list run logs for ${projectKey}/${specialistType}:`, error);
    return [];
  }
}
|
|
1803
|
+
/** Convenience wrapper: the `count` most recent run logs (newest first). */
function getRecentRunLogs(projectKey, specialistType, count) {
  const pagination = { limit: count };
  return listRunLogs(projectKey, specialistType, pagination);
}
|
|
1806
|
+
// Delete old run logs according to a retention policy.
// A log is deleted only when BOTH conditions hold:
//   - it is beyond the newest `maxRuns` logs (listRunLogs sorts newest
//     first, so index < maxRuns means "recent enough to keep"), AND
//   - it is older than `maxDays`.
// Individual delete failures are logged and skipped.
// Returns the number of logs actually deleted.
function cleanupOldLogs(projectKey, specialistType, retention) {
  const { maxDays, maxRuns } = retention;
  const allLogs = listRunLogs(projectKey, specialistType);
  if (allLogs.length === 0) {
    return 0;
  }
  const now = /* @__PURE__ */ new Date();
  const cutoffDate = new Date(now.getTime() - maxDays * 24 * 60 * 60 * 1e3);
  let deletedCount = 0;
  allLogs.forEach((log, index) => {
    // Always keep the newest maxRuns logs regardless of age.
    if (index < maxRuns) {
      return;
    }
    // Keep anything newer than the age cutoff.
    if (log.createdAt >= cutoffDate) {
      return;
    }
    try {
      unlinkSync2(log.filePath);
      deletedCount++;
      console.log(`[specialist-logs] Deleted old log: ${log.runId}`);
    } catch (error) {
      console.error(`[specialist-logs] Failed to delete ${log.runId}:`, error);
    }
  });
  return deletedCount;
}
|
|
1832
|
+
/**
 * A run is "active" while its log exists but contains no "## Result"
 * section (finalizeRunLog appends that section when the run completes).
 */
function isRunLogActive(projectKey, specialistType, runId) {
  const content = getRunLog(projectKey, specialistType, runId);
  return content ? !content.includes("## Result") : false;
}
|
|
1839
|
+
/** Size of a run log in bytes, or null when missing or unstatable. */
function getRunLogSize(projectKey, specialistType, runId) {
  const logPath = getRunLogPath(projectKey, specialistType, runId);
  if (!existsSync4(logPath)) {
    return null;
  }
  try {
    return statSync2(logPath).size;
  } catch (error) {
    // Race: file removed between the existence check and the stat call.
    return null;
  }
}
|
|
1851
|
+
/**
 * Report whether a run log has reached MAX_LOG_SIZE.
 * Returns { exceeded: true, size, limit } when at or over the cap;
 * otherwise null (also null when the log cannot be measured).
 */
function checkLogSizeLimit(projectKey, specialistType, runId) {
  const size = getRunLogSize(projectKey, specialistType, runId);
  if (size === null || size < MAX_LOG_SIZE) {
    return null;
  }
  return {
    exceeded: true,
    size,
    limit: MAX_LOG_SIZE
  };
}
|
|
1865
|
+
// Run retention cleanup across every project that has specialists.
// Uses the bundler's lazy-init helpers (init_specialists / init_projects +
// __toCommonJS) to pull in sibling modules without a static circular import.
// Returns { totalDeleted, byProject: { [projectKey]: { [specialistType]: n } } };
// byProject only records non-zero deletion counts.
function cleanupAllLogs() {
  const { listProjectsWithSpecialists: listProjectsWithSpecialists2 } = (init_specialists(), __toCommonJS(specialists_exports));
  const { getSpecialistRetention } = (init_projects(), __toCommonJS(projects_exports));
  const results = {
    totalDeleted: 0,
    byProject: {}
  };
  const projects = listProjectsWithSpecialists2();
  for (const projectKey of projects) {
    results.byProject[projectKey] = {};
    // Retention policy is configured per project.
    const retention = getSpecialistRetention(projectKey);
    const specialistTypes = ["review-agent", "test-agent", "merge-agent"];
    for (const specialistType of specialistTypes) {
      const deleted = cleanupOldLogs(projectKey, specialistType, retention);
      if (deleted > 0) {
        results.byProject[projectKey][specialistType] = deleted;
        results.totalDeleted += deleted;
      }
    }
  }
  console.log(`[specialist-logs] Cleanup complete: deleted ${results.totalDeleted} old logs`);
  return results;
}
|
|
1888
|
+
// Module-scope bindings for specialist-logs.ts; assigned lazily by
// init_specialist_logs() below.
var SPECIALISTS_DIR2, MAX_LOG_SIZE;
// Bundler-generated lazy initializer (runs once on first call).
var init_specialist_logs = __esm({
  "src/lib/cloister/specialist-logs.ts"() {
    init_esm_shims();
    init_paths();
    SPECIALISTS_DIR2 = join5(PANOPTICON_HOME, "specialists");
    // Hard cap on a single run log: 10 MiB.
    MAX_LOG_SIZE = 10 * 1024 * 1024;
  }
});
|
|
1897
|
+
|
|
1898
|
+
export {
|
|
1899
|
+
getProjectDirs,
|
|
1900
|
+
getSessionFiles,
|
|
1901
|
+
parseClaudeSession,
|
|
1902
|
+
init_jsonl_parser,
|
|
1903
|
+
getRunsDirectory,
|
|
1904
|
+
generateRunId,
|
|
1905
|
+
getRunLogPath,
|
|
1906
|
+
createRunLog,
|
|
1907
|
+
appendToRunLog,
|
|
1908
|
+
finalizeRunLog,
|
|
1909
|
+
getRunLog,
|
|
1910
|
+
parseLogMetadata,
|
|
1911
|
+
listRunLogs,
|
|
1912
|
+
getRecentRunLogs,
|
|
1913
|
+
cleanupOldLogs,
|
|
1914
|
+
isRunLogActive,
|
|
1915
|
+
getRunLogSize,
|
|
1916
|
+
MAX_LOG_SIZE,
|
|
1917
|
+
checkLogSizeLimit,
|
|
1918
|
+
cleanupAllLogs,
|
|
1919
|
+
init_specialist_logs,
|
|
1920
|
+
initSpecialistsDirectory,
|
|
1921
|
+
loadRegistry,
|
|
1922
|
+
saveRegistry,
|
|
1923
|
+
getSessionFilePath,
|
|
1924
|
+
getSessionId,
|
|
1925
|
+
setSessionId,
|
|
1926
|
+
clearSessionId,
|
|
1927
|
+
getSpecialistMetadata,
|
|
1928
|
+
updateSpecialistMetadata,
|
|
1929
|
+
getAllSpecialists,
|
|
1930
|
+
isInitialized,
|
|
1931
|
+
getSpecialistState,
|
|
1932
|
+
getTmuxSessionName,
|
|
1933
|
+
recordWake,
|
|
1934
|
+
spawnEphemeralSpecialist,
|
|
1935
|
+
startGracePeriod,
|
|
1936
|
+
pauseGracePeriod,
|
|
1937
|
+
resumeGracePeriod,
|
|
1938
|
+
exitGracePeriod,
|
|
1939
|
+
getGracePeriodState,
|
|
1940
|
+
signalSpecialistCompletion,
|
|
1941
|
+
terminateSpecialist,
|
|
1942
|
+
getProjectSpecialistDir,
|
|
1943
|
+
ensureProjectSpecialistDir,
|
|
1944
|
+
getProjectSpecialistMetadata,
|
|
1945
|
+
updateProjectSpecialistMetadata,
|
|
1946
|
+
incrementProjectRunCount,
|
|
1947
|
+
setCurrentRun,
|
|
1948
|
+
updateRunStatus,
|
|
1949
|
+
listProjectsWithSpecialists,
|
|
1950
|
+
listSpecialistsForProject,
|
|
1951
|
+
getAllProjectSpecialistStatuses,
|
|
1952
|
+
updateContextTokens,
|
|
1953
|
+
listSessionFiles,
|
|
1954
|
+
enableSpecialist,
|
|
1955
|
+
disableSpecialist,
|
|
1956
|
+
isEnabled,
|
|
1957
|
+
getEnabledSpecialists,
|
|
1958
|
+
findSessionFile,
|
|
1959
|
+
countContextTokens,
|
|
1960
|
+
isRunning,
|
|
1961
|
+
getSpecialistStatus,
|
|
1962
|
+
getAllSpecialistStatus,
|
|
1963
|
+
initializeSpecialist,
|
|
1964
|
+
initializeEnabledSpecialists,
|
|
1965
|
+
wakeSpecialist,
|
|
1966
|
+
wakeSpecialistWithTask,
|
|
1967
|
+
wakeSpecialistOrQueue,
|
|
1968
|
+
submitToSpecialistQueue,
|
|
1969
|
+
checkSpecialistQueue,
|
|
1970
|
+
completeSpecialistTask,
|
|
1971
|
+
getNextSpecialistTask,
|
|
1972
|
+
sendFeedbackToAgent,
|
|
1973
|
+
getPendingFeedback,
|
|
1974
|
+
getFeedbackStats,
|
|
1975
|
+
init_specialists
|
|
1976
|
+
};
|
|
1977
|
+
//# sourceMappingURL=chunk-VTDDVLCK.js.map
|