@mclawnet/agent 0.6.21 → 0.6.23
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/cli.js +63 -0
- package/dist/__tests__/checkpoint.test.d.ts +2 -0
- package/dist/__tests__/checkpoint.test.d.ts.map +1 -0
- package/dist/__tests__/fs-handler-decode.test.d.ts +2 -0
- package/dist/__tests__/fs-handler-decode.test.d.ts.map +1 -0
- package/dist/__tests__/idle-sweeper.test.d.ts +2 -0
- package/dist/__tests__/idle-sweeper.test.d.ts.map +1 -0
- package/dist/__tests__/mcp-config.test.d.ts +2 -0
- package/dist/__tests__/mcp-config.test.d.ts.map +1 -0
- package/dist/__tests__/schedule-runtime-spawn.test.d.ts +2 -0
- package/dist/__tests__/schedule-runtime-spawn.test.d.ts.map +1 -0
- package/dist/__tests__/schedule-runtime.test.d.ts +2 -0
- package/dist/__tests__/schedule-runtime.test.d.ts.map +1 -0
- package/dist/__tests__/session-limit.test.d.ts +2 -0
- package/dist/__tests__/session-limit.test.d.ts.map +1 -0
- package/dist/__tests__/swarm-cli-client.test.d.ts +2 -0
- package/dist/__tests__/swarm-cli-client.test.d.ts.map +1 -0
- package/dist/__tests__/swarm-control-dispatch.test.d.ts +2 -0
- package/dist/__tests__/swarm-control-dispatch.test.d.ts.map +1 -0
- package/dist/__tests__/swarm-session-bridge.test.d.ts +2 -0
- package/dist/__tests__/swarm-session-bridge.test.d.ts.map +1 -0
- package/dist/backend-adapter.d.ts +43 -0
- package/dist/backend-adapter.d.ts.map +1 -1
- package/dist/checkpoint.d.ts +67 -0
- package/dist/checkpoint.d.ts.map +1 -0
- package/dist/{chunk-RIK7IXSW.js → chunk-WJWCYGLQ.js} +1130 -147
- package/dist/chunk-WJWCYGLQ.js.map +1 -0
- package/dist/errors.d.ts +40 -0
- package/dist/errors.d.ts.map +1 -0
- package/dist/fs-handler.d.ts.map +1 -1
- package/dist/hub-connection.d.ts +13 -0
- package/dist/hub-connection.d.ts.map +1 -1
- package/dist/index.js +1 -1
- package/dist/schedule-runtime.d.ts +125 -0
- package/dist/schedule-runtime.d.ts.map +1 -0
- package/dist/session-manager.d.ts +102 -0
- package/dist/session-manager.d.ts.map +1 -1
- package/dist/skill-loader.d.ts +20 -0
- package/dist/skill-loader.d.ts.map +1 -1
- package/dist/start.d.ts +2 -0
- package/dist/start.d.ts.map +1 -1
- package/dist/start.js +1 -1
- package/dist/swarm-cli-client.d.ts +24 -0
- package/dist/swarm-cli-client.d.ts.map +1 -0
- package/dist/swarm-cli-client.js +83 -0
- package/dist/swarm-cli-client.js.map +1 -0
- package/dist/swarm-control-dispatch.d.ts +47 -0
- package/dist/swarm-control-dispatch.d.ts.map +1 -0
- package/dist/swarm-session-bridge.d.ts +22 -0
- package/dist/swarm-session-bridge.d.ts.map +1 -0
- package/package.json +7 -5
- package/dist/chunk-RIK7IXSW.js.map +0 -1
@@ -56,7 +56,7 @@ function extractWorkDirFromSessionFile(filePath) {
   }
   return null;
 }
-function getWorkDirFromProjectFolder(folderPath, folderName) {
+function getWorkDirFromProjectFolder(folderPath) {
   try {
     const files = readdirSync(folderPath);
     for (const file of files) {
@@ -67,13 +67,7 @@ function getWorkDirFromProjectFolder(folderPath, folderName) {
     }
   } catch {
   }
-
-    return folderName.replace(/^([A-Za-z])--/, "$1:/").replace(/-/g, "/");
-  }
-  if (folderName.startsWith("-")) {
-    return "/" + folderName.substring(1).replace(/-/g, "/");
-  }
-  return folderName.replace(/-/g, "/");
+  return null;
 }
 async function handleListFolders() {
   const projectsDir = getClaudeProjectsDir();
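For context on this hunk: the removed fallback decoded an encoded project-folder name back into a path by string substitution; 0.6.23 drops the guess entirely and returns `null`, so callers skip folders that can't be resolved. A sketch of what the deleted heuristic did, reconstructed from the removed lines (the guard on the first branch was not captured in this render and is an assumption):

```js
// Reconstruction of the removed 0.6.21 fallback, for reference only.
// It rebuilt a path from an encoded folder name, e.g.:
//   "C--Users-me-proj" -> "C:/Users/me/proj"   (drive-letter form)
//   "-home-me-proj"    -> "/home/me/proj"      (leading-dash form)
// The substitution is lossy: a real "-" in a directory name also became "/",
// which is presumably why 0.6.23 returns null instead of guessing.
function legacyDecode(folderName) {
  if (/^[A-Za-z]--/.test(folderName)) { // assumed guard; the actual line was not captured
    return folderName.replace(/^([A-Za-z])--/, "$1:/").replace(/-/g, "/");
  }
  if (folderName.startsWith("-")) {
    return "/" + folderName.substring(1).replace(/-/g, "/");
  }
  return folderName.replace(/-/g, "/");
}
```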
@@ -90,7 +84,8 @@ async function handleListFolders() {
     }
     if (!entryStat.isDirectory()) continue;
     if (entry.includes("--crew-roles-")) continue;
-    const originalPath = getWorkDirFromProjectFolder(entryPath, entry);
+    const originalPath = getWorkDirFromProjectFolder(entryPath);
+    if (originalPath === null) continue;
     let sessionCount = 0;
     let lastModified = entryStat.mtime.getTime();
     try {
@@ -251,9 +246,64 @@ async function handleLoadSessionHistory(workDir, claudeSessionId, opts) {
   };
 }
 
+// src/swarm-control-dispatch.ts
+import { randomUUID } from "crypto";
+import { createLogger } from "@mclawnet/logger";
+import { InboxStore } from "@mclawnet/swarm";
+var log = createLogger({ module: "agent/swarm-control" });
+async function handleSwarmControl(coord, msg, opts) {
+  if (!msg || msg.type !== "swarm_spawn" && msg.type !== "swarm_resume") {
+    return { ok: false, error: "unknown control type" };
+  }
+  if (msg.type === "swarm_resume") {
+    try {
+      await coord.recover(msg.swarmId);
+      log.info({ swarmId: msg.swarmId }, "swarm_resume: recovered");
+      return { ok: true, swarmId: msg.swarmId };
+    } catch (err) {
+      const error = err instanceof Error ? err.message : String(err);
+      log.error({ err, swarmId: msg.swarmId }, "swarm_resume failed");
+      return { ok: false, error };
+    }
+  }
+  const workDir = msg.workDir ?? opts.defaultWorkDir;
+  const swarmId = msg.swarmId ?? randomUUID();
+  try {
+    await coord.create(swarmId, { workDir, templateName: msg.teamName });
+  } catch (err) {
+    const error = err instanceof Error ? err.message : String(err);
+    log.error({ err, teamName: msg.teamName }, "swarm_spawn: create failed");
+    return { ok: false, error };
+  }
+  if (msg.msg && msg.msg.length > 0) {
+    const swarm = coord.getSwarm?.(swarmId);
+    const queen = coord.findQueenInstance?.(swarmId);
+    if (swarm && swarm.workDir && queen) {
+      try {
+        const store = new InboxStore(swarm.workDir, swarmId, opts.home);
+        await store.append(queen.instanceId, {
+          id: randomUUID(),
+          from: "user",
+          type: "user",
+          data: msg.msg,
+          timestamp: Date.now(),
+          delivered: false
+        });
+        await coord.inboxRelay.deliver(swarmId, queen.instanceId);
+      } catch (err) {
+        log.warn({ err, swarmId }, "swarm_spawn: inbox seed failed (non-fatal)");
+      }
+    } else {
+      log.warn({ swarmId, hasSwarm: !!swarm, hasQueen: !!queen }, "swarm_spawn: queen instance not found, skipping inbox seed");
+    }
+  }
+  log.info({ swarmId, teamName: msg.teamName }, "swarm_spawn: created");
+  return { ok: true, swarmId };
+}
+
 // src/hub-connection.ts
-import { createLogger, previewFields } from "@mclawnet/logger";
-var log = createLogger({ module: "agent" });
+import { createLogger as createLogger2, previewFields } from "@mclawnet/logger";
+var log2 = createLogger2({ module: "agent" });
 var HubConnection = class {
   ws = null;
   heartbeatTimer = null;
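For reference, the two control messages `handleSwarmControl` accepts, sketched from the branch logic above (field values are illustrative; `coord` and `opts` stand in for the real coordinator and options object):

```js
// Illustrative payloads only — shapes taken from the handler above.
const spawnMsg = {
  type: "swarm_spawn",
  teamName: "review-crew",      // template name passed through to coord.create()
  workDir: "/tmp/project",      // optional; falls back to opts.defaultWorkDir
  msg: "Start with the README"  // optional; seeded into the queen's inbox
};
const resumeMsg = { type: "swarm_resume", swarmId: "swarm-123" };

// Both paths resolve rather than throw:
//   await handleSwarmControl(coord, spawnMsg, {
//     defaultWorkDir: process.cwd(),
//     home: process.env.CLAWNET_HOME
//   }); // -> { ok: true, swarmId } or { ok: false, error }
```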
@@ -276,6 +326,8 @@ var HubConnection = class {
   sessionManager = null;
   /** Swarm coordinator — set after construction via setSwarmCoordinator() */
   swarmCoordinator = null;
+  /** Schedule runtime — set after construction via setScheduleRuntime() */
+  scheduleRuntime = null;
   /** Namespace-based generic message handlers */
   namespaceHandlers = /* @__PURE__ */ new Map();
   onMessage;
@@ -302,6 +354,9 @@ var HubConnection = class {
   setSwarmCoordinator(coordinator) {
     this.swarmCoordinator = coordinator;
   }
+  setScheduleRuntime(runtime) {
+    this.scheduleRuntime = runtime;
+  }
   registerNamespace(namespace, handler) {
     this.namespaceHandlers.set(namespace, handler);
   }
|
|
|
341
396
|
return;
|
|
342
397
|
}
|
|
343
398
|
if (this.authState === "pending" && data.type === "auth_required") {
|
|
344
|
-
|
|
399
|
+
log2.info("hub requires auth, sending credentials");
|
|
345
400
|
this.authState = "authenticating";
|
|
346
401
|
this.sendRaw({
|
|
347
402
|
type: "auth",
|
|
@@ -353,7 +408,7 @@ var HubConnection = class {
         return;
       }
       if (this.authState === "authenticating" && data.type === "registered") {
-        log.info({ agentId: data.agentId }, "registered with hub");
+        log2.info({ agentId: data.agentId }, "registered with hub");
         this.authState = "authenticated";
         this.agentId = data.agentId ?? null;
         this.startHeartbeat();
@@ -368,18 +423,18 @@ var HubConnection = class {
       }
     });
     this.ws.on("close", (code, reason) => {
-      log.warn({ code, reason: reason.toString() }, "disconnected from hub");
+      log2.warn({ code, reason: reason.toString() }, "disconnected from hub");
       this.stopHeartbeat();
       this.authState = "pending";
       this.onDisconnect?.(code, reason.toString());
       if (code === WS_CLOSE_INVALID_TOKEN) {
-        log.error("auth failed \u2014 not reconnecting, check your token");
+        log2.error("auth failed \u2014 not reconnecting, check your token");
         return;
       }
       this.scheduleReconnect();
     });
     this.ws.on("error", (err) => {
-      log.error({ err }, "ws connection error");
+      log2.error({ err }, "ws connection error");
       this.onError?.(err);
     });
   }
@@ -396,12 +451,17 @@ var HubConnection = class {
   }
   // ── Session message handling ─────────────────────────────────────
   handleSessionMessage(msg) {
+    const msgType = msg.type;
+    if (typeof msgType === "string" && msgType.startsWith("schedule:") && this.scheduleRuntime) {
+      this.scheduleRuntime.handleHubMessage(msg);
+      return true;
+    }
     if (msg.type === "fs.list_dir") {
-      log.info({ path: msg.path }, "fs.list_dir");
+      log2.info({ path: msg.path }, "fs.list_dir");
       handleListDir(msg.path).then((result) => {
         this.send({ type: "fs.list_dir_result", requestId: msg.requestId, ...result });
       }).catch((err) => {
-        log.error({ err, path: msg.path }, "fs.list_dir failed");
+        log2.error({ err, path: msg.path }, "fs.list_dir failed");
         this.send({
           type: "fs.list_dir_result",
           requestId: msg.requestId,
|
|
|
412
472
|
return true;
|
|
413
473
|
}
|
|
414
474
|
if (msg.type === "list_folders") {
|
|
415
|
-
|
|
475
|
+
log2.info("list_folders");
|
|
416
476
|
handleListFolders().then((result) => {
|
|
417
477
|
this.send({ type: "folders_list_result", requestId: msg.requestId, ...result });
|
|
418
478
|
}).catch((err) => {
|
|
419
|
-
|
|
479
|
+
log2.error({ err }, "list_folders failed");
|
|
420
480
|
this.send({ type: "folders_list_result", requestId: msg.requestId, folders: [] });
|
|
421
481
|
});
|
|
422
482
|
return true;
|
|
423
483
|
}
|
|
424
484
|
if (msg.type === "list_history_sessions") {
|
|
425
|
-
|
|
485
|
+
log2.info({ workDir: msg.workDir }, "list_history_sessions");
|
|
426
486
|
handleListHistorySessions(msg.workDir).then((result) => {
|
|
427
487
|
this.send({ type: "history_sessions_result", requestId: msg.requestId, ...result });
|
|
428
488
|
}).catch((err) => {
|
|
429
|
-
|
|
489
|
+
log2.error({ err, workDir: msg.workDir }, "list_history_sessions failed");
|
|
430
490
|
this.send({
|
|
431
491
|
type: "history_sessions_result",
|
|
432
492
|
requestId: msg.requestId,
|
|
@@ -437,20 +497,20 @@ var HubConnection = class {
       return true;
     }
     if (msg.type === "load_session_history") {
-      log.info(
+      log2.info(
         { workDir: msg.workDir, claudeSessionId: msg.claudeSessionId, before: msg.before, limit: msg.limit },
         "load_session_history"
       );
       handleLoadSessionHistory(msg.workDir, msg.claudeSessionId, { before: msg.before, limit: msg.limit }).then((result) => {
         this.send({ type: "session_history_result", requestId: msg.requestId, ...result });
       }).catch((err) => {
-        log.error({ err, workDir: msg.workDir }, "load_session_history failed");
+        log2.error({ err, workDir: msg.workDir }, "load_session_history failed");
         this.send({ type: "session_history_result", requestId: msg.requestId, messages: [], oldestSeq: 0, hasMore: false });
       });
       return true;
     }
     if (msg.type === "list_roles") {
-      log.info("list_roles");
+      log2.info("list_roles");
       const roleNames = listRoles();
       const roles = roleNames.map((name) => {
         try {
@@ -476,7 +536,7 @@ var HubConnection = class {
       return true;
     }
     if (msg.type === "list_templates") {
-      log.info("list_templates");
+      log2.info("list_templates");
       try {
         const names = listTemplates();
         const templates = names.map((name) => {
@@ -495,7 +555,7 @@ var HubConnection = class {
           templates
         });
       } catch (err) {
-        log.error({ err }, "list_templates failed");
+        log2.error({ err }, "list_templates failed");
         this.send({
           type: "templates_list_result",
           sessionId: msg.sessionId,
@@ -507,9 +567,9 @@ var HubConnection = class {
     if (msg.type === "generic.request") {
       const handler = this.namespaceHandlers.get(msg.namespace);
       if (handler) {
-        log.info({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId }, "generic.request received");
+        log2.info({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId }, "generic.request received");
         handler(msg).then((result) => {
-          log.info({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId }, "generic.request handled OK");
+          log2.info({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId }, "generic.request handled OK");
           this.send({
             type: "generic.response",
             namespace: msg.namespace,
@@ -518,7 +578,7 @@ var HubConnection = class {
             requestId: msg.requestId
           });
         }).catch((err) => {
-          log.error({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId, err }, "generic.request handler error");
+          log2.error({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId, err }, "generic.request handler error");
           this.send({
             type: "generic.response",
             namespace: msg.namespace,
@@ -530,7 +590,7 @@ var HubConnection = class {
         });
         return true;
       }
-      log.warn({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId }, "generic.request unknown namespace");
+      log2.warn({ namespace: msg.namespace, action: msg.action, requestId: msg.requestId }, "generic.request unknown namespace");
       this.send({
         type: "generic.response",
         namespace: msg.namespace,
@@ -541,10 +601,22 @@ var HubConnection = class {
       });
       return true;
     }
+    if ((msg.type === "swarm_spawn" || msg.type === "swarm_resume") && this.swarmCoordinator) {
+      const coord = this.swarmCoordinator;
+      handleSwarmControl(coord, msg, {
+        defaultWorkDir: process.cwd(),
+        home: process.env.CLAWNET_HOME
+      }).then((result) => {
+        log2.info({ type: msg.type, ok: result.ok, swarmId: result.swarmId, error: result.error }, "swarm control handled");
+      }).catch((err) => {
+        log2.error({ err, type: msg.type }, "swarm control crashed");
+      });
+      return true;
+    }
     if (msg.type === "swarm.execute" && this.swarmCoordinator) {
       const { sessionId, content, workDir, targetInstance, crewConfig } = msg;
       if (this.swarmCoordinator.hasSwarm(sessionId)) {
-        log.info({ sessionId, targetInstance }, "swarm.execute: forwarding to existing swarm");
+        log2.info({ sessionId, targetInstance }, "swarm.execute: forwarding to existing swarm");
         this.swarmCoordinator.handleUserMessage(sessionId, content, targetInstance).catch((err) => {
           this.send({
             type: "session.error",
@@ -557,7 +629,7 @@ var HubConnection = class {
         this.finishedSwarms.delete(sessionId);
         const templateName = crewConfig.templateName;
         const roles = crewConfig.roles;
-        log.info({ sessionId, templateName, rolesCount: roles?.length }, "swarm.execute: continuing finished swarm");
+        log2.info({ sessionId, templateName, rolesCount: roles?.length }, "swarm.execute: continuing finished swarm");
         this.swarmCoordinator.create(sessionId, { workDir, templateName, roles, task: content, isContinuation: true }).catch((err) => {
           this.send({
             type: "session.error",
@@ -566,7 +638,7 @@ var HubConnection = class {
           });
         });
       } else {
-        log.info({ sessionId }, "swarm.execute ignored: swarm finished, no config to recreate");
+        log2.info({ sessionId }, "swarm.execute ignored: swarm finished, no config to recreate");
         this.send({
           type: "session.error",
           sessionId,
@@ -576,7 +648,7 @@ var HubConnection = class {
       } else if (crewConfig?.templateName || crewConfig?.roles) {
         const templateName = crewConfig.templateName;
         const roles = crewConfig.roles;
-        log.info({ sessionId, templateName, rolesCount: roles?.length }, "swarm.execute: creating new swarm");
+        log2.info({ sessionId, templateName, rolesCount: roles?.length }, "swarm.execute: creating new swarm");
         this.swarmCoordinator.create(sessionId, { workDir, templateName, roles, task: content }).catch((err) => {
           this.send({
             type: "session.error",
@@ -585,7 +657,7 @@ var HubConnection = class {
           });
         });
       } else {
-        log.info({ sessionId }, "swarm.execute ignored: swarm not found, no config");
+        log2.info({ sessionId }, "swarm.execute ignored: swarm not found, no config");
         this.send({
           type: "session.error",
           sessionId,
@@ -597,7 +669,7 @@ var HubConnection = class {
     if (!this.sessionManager) return false;
     if (msg.type === "abort_execution") {
       const { sessionId } = msg;
-      log.info({ sessionId }, "abort_execution");
+      log2.info({ sessionId }, "abort_execution");
       if (this.sessionManager?.hasSession(sessionId)) {
         this.sessionManager.abortSession(sessionId).then(() => {
           this.send({ type: "execution_aborted", sessionId });
@@ -612,10 +684,20 @@ var HubConnection = class {
     if (msg.type === "claude.execute") {
       const { sessionId, content, workDir, claudeSessionId, useBrainCore } = msg;
       const sm = this.sessionManager;
-      const spawnAndSend = (resumeId, label) => {
-        log.info({ sessionId, claudeSessionId: resumeId, workDir, label }, "claude.execute: spawning");
-        log.debug({ sessionId, ...previewFields(content) }, "claude.execute: input");
-        sm.createSession({ sessionId, workDir, resumeId, useBrainCore, roleId: "role-__assistant__" }).then(() => {
+      const spawnAndSend = (resumeId, label, maxOutputTokens) => {
+        log2.info(
+          { sessionId, claudeSessionId: resumeId, workDir, label, maxOutputTokens },
+          "claude.execute: spawning"
+        );
+        log2.debug({ sessionId, ...previewFields(content) }, "claude.execute: input");
+        sm.createSession({
+          sessionId,
+          workDir,
+          resumeId,
+          useBrainCore,
+          roleId: "role-__assistant__",
+          maxOutputTokens
+        }).then(() => {
           sm.sendInput(sessionId, content);
         }).catch((err) => {
           this.send({
@@ -626,29 +708,30 @@ var HubConnection = class {
           });
         });
       };
       if (sm.isHealthy(sessionId)) {
-        log.info({ sessionId }, "claude.execute: reusing healthy session");
-        log.debug({ sessionId, ...previewFields(content) }, "claude.execute: input");
+        log2.info({ sessionId }, "claude.execute: reusing healthy session");
+        log2.debug({ sessionId, ...previewFields(content) }, "claude.execute: input");
         sm.sendInput(sessionId, content);
       } else if (sm.hasSession(sessionId) && claudeSessionId) {
-        log.warn({ sessionId, claudeSessionId }, "claude.execute: session unhealthy, recreating with --resume");
+        const recommendedMax = sm.getRecommendedMaxOutputTokens(sessionId);
+        log2.warn({ sessionId, claudeSessionId }, "claude.execute: session unhealthy, recreating with --resume");
         this.send({
           type: "session.died",
           sessionId,
           reason: "unhealthy_before_input"
         });
         sm.abortSession(sessionId).catch((err) => {
-
-        }).then(() => spawnAndSend(claudeSessionId, "unhealthy_fallback_resume"));
+          log2.warn({ sessionId, err: err instanceof Error ? err.message : String(err) }, "abortSession failed during unhealthy fallback, proceeding to respawn");
+        }).then(() => spawnAndSend(claudeSessionId, "unhealthy_fallback_resume", recommendedMax));
       } else if (claudeSessionId) {
-        spawnAndSend(claudeSessionId, "fresh_resume");
+        spawnAndSend(claudeSessionId, "fresh_resume", sm.getRecommendedMaxOutputTokens(sessionId));
       } else {
-        spawnAndSend(void 0, "brand_new");
+        spawnAndSend(void 0, "brand_new", void 0);
       }
       return true;
     }
     if (msg.type === "session.force_restart") {
       const { sessionId, reason } = msg;
-      log.warn({ sessionId, reason }, "session.force_restart received");
+      log2.warn({ sessionId, reason }, "session.force_restart received");
       const sm = this.sessionManager;
       const reply = {
         type: "session.died",
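The `claude.execute` handler now picks one of four spawn paths, and notably reads the recommended output-token value before aborting an unhealthy session (abort discards the state the recommendation comes from). A simplified sketch of the branch order, with names mirroring the diff:

```js
// Simplified restatement of the branch logic above; `sm` is the session manager.
function routeExecute(sm, sessionId, claudeSessionId, content, spawnAndSend) {
  if (sm.isHealthy(sessionId)) {
    sm.sendInput(sessionId, content);                    // reuse the live process
  } else if (sm.hasSession(sessionId) && claudeSessionId) {
    const max = sm.getRecommendedMaxOutputTokens(sessionId); // read before abort
    sm.abortSession(sessionId)
      .catch(() => {})                                   // abort failure is non-fatal
      .then(() => spawnAndSend(claudeSessionId, "unhealthy_fallback_resume", max));
  } else if (claudeSessionId) {
    spawnAndSend(claudeSessionId, "fresh_resume", sm.getRecommendedMaxOutputTokens(sessionId));
  } else {
    spawnAndSend(undefined, "brand_new", undefined);     // no prior Claude session to resume
  }
}
```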
@@ -657,7 +740,7 @@ var HubConnection = class {
       };
       if (sm.hasSession(sessionId)) {
         sm.abortSession(sessionId).catch((err) => {
-          log.warn({ sessionId, err: err instanceof Error ? err.message : String(err) }, "abortSession failed during force_restart");
+          log2.warn({ sessionId, err: err instanceof Error ? err.message : String(err) }, "abortSession failed during force_restart");
         }).finally(() => {
           this.send(reply);
         });
@@ -667,7 +750,7 @@ var HubConnection = class {
       return true;
     }
     if (msg.type === "session.create") {
-      log.info({ sessionId: msg.sessionId, roleId: "role-__assistant__" }, "session.create with memory injection");
+      log2.info({ sessionId: msg.sessionId, roleId: "role-__assistant__" }, "session.create with memory injection");
       this.sessionManager.createSession({
         sessionId: msg.sessionId,
         workDir: msg.workDir,
@@ -689,13 +772,13 @@ var HubConnection = class {
       return true;
     }
     if (msg.type === "session.close") {
-      log.info({ sessionId: msg.sessionId }, "session.close");
+      log2.info({ sessionId: msg.sessionId }, "session.close");
       this.sessionManager.closeSession(msg.sessionId).catch(() => {
       });
       return true;
     }
     if (msg.type === "claude.input") {
-      log.debug(
+      log2.debug(
         { sessionId: msg.sessionId, ...previewFields(msg.content) },
         "claude.input"
       );
@@ -711,27 +794,27 @@ var HubConnection = class {
       const allIds = listRecoverableSwarmIds();
       const snapshots = listRecoverableSwarms();
       const recoverableIds = new Set(snapshots.map((s) => s.id));
-      for (const 
-        if (!recoverableIds.has(
-          deleteSwarmSnapshot(
-
+      for (const { workDir, swarmId } of allIds) {
+        if (!recoverableIds.has(swarmId)) {
+          deleteSwarmSnapshot(workDir, swarmId);
+          log2.info({ swarmId }, "cleaned up non-recoverable swarm snapshot");
         }
       }
       for (const snap of snapshots) {
-        log.info({ swarmId: snap.id }, "recovering swarm");
+        log2.info({ swarmId: snap.id }, "recovering swarm");
         recoverSwarm(this.swarmCoordinator, snap).catch((err) => {
-          log.error({ err, swarmId: snap.id }, "failed to recover swarm");
+          log2.error({ err, swarmId: snap.id }, "failed to recover swarm");
         });
       }
     } catch (err) {
-      log.error({ err }, "swarm recovery failed");
+      log2.error({ err }, "swarm recovery failed");
     }
   }
   reportLiveSessions() {
     if (!this.sessionManager) return;
     const ids = this.sessionManager.getActiveSessionIds();
     if (ids.length === 0) return;
-    log.info({ count: ids.length }, "reporting live sessions to hub after reconnect");
+    log2.info({ count: ids.length }, "reporting live sessions to hub after reconnect");
     for (const sessionId of ids) {
       this.send({
         type: "session.state",
@@ -759,7 +842,7 @@ var HubConnection = class {
   }
   scheduleReconnect() {
     if (this.destroyed) return;
-    log.warn({ delayMs: this.reconnectDelay }, "reconnecting to hub...");
+    log2.warn({ delayMs: this.reconnectDelay }, "reconnecting to hub...");
     this.reconnectTimer = setTimeout(() => {
       this.connect();
     }, this.reconnectDelay);
@@ -786,9 +869,421 @@ var HubConnection = class {
   }
 };
 
+// src/schedule-runtime.ts
+import { spawn, execSync } from "child_process";
+import {
+  JsonScheduleRepository,
+  SchedulerService,
+  createOneShotExecutor,
+  createSwarmExecutor,
+  MAX_STDOUT_BYTES,
+  MAX_STDERR_BYTES
+} from "@mclawnet/scheduler";
+import { createLogger as createLogger3 } from "@mclawnet/logger";
+var log3 = createLogger3({ module: "agent/schedule-runtime" });
+var SPAWN_STDOUT_CAP_BYTES = 1024 * 1024;
+var SPAWN_STDERR_CAP_BYTES = 256 * 1024;
+var ScheduleRuntime = class {
+  hub;
+  repo;
+  scheduler;
+  started = false;
+  // Tracks fire-and-forget handler promises so stop() can drain them before
+  // detaching listeners and stopping the scheduler. Without this, an in-flight
+  // handler could resolve after stop() returns and emit a phantom send().
+  inflight = /* @__PURE__ */ new Set();
+  // Bound handlers retained so stop() can detach them cleanly.
+  onChange;
+  onRun;
+  constructor(opts) {
+    this.hub = opts.hub;
+    this.repo = opts.repo ?? new JsonScheduleRepository();
+    const oneshotExec = opts.executors?.oneshot ?? createOneShotExecutor(makeRealOneShotSpawn());
+    const swarmExec = opts.executors?.swarm ?? createSwarmExecutor(
+      makeRealSwarmStarter({
+        sessionManager: opts.sessionManager,
+        swarmCoordinator: opts.swarmCoordinator
+      })
+    );
+    this.scheduler = opts.scheduler ?? new SchedulerService(this.repo, {
+      oneshot: oneshotExec,
+      swarm: swarmExec
+    });
+    this.onChange = (s) => {
+      this.safeSend({ type: "schedule:changed", schedule: s });
+    };
+    this.onRun = (r) => {
+      this.safeSend({ type: "schedule:run", run: r });
+    };
+  }
+  async start() {
+    if (this.started) return;
+    this.started = true;
+    this.scheduler.events.on("change", this.onChange);
+    this.scheduler.events.on("run", this.onRun);
+    await this.scheduler.start();
+  }
+  async stop() {
+    if (!this.started) return;
+    this.started = false;
+    const pending = Array.from(this.inflight);
+    if (pending.length > 0) {
+      await Promise.allSettled(pending);
+    }
+    this.scheduler.events.off("change", this.onChange);
+    this.scheduler.events.off("run", this.onRun);
+    await this.scheduler.stop();
+  }
+  /**
+   * Wraps `hub.send` with a started-flag guard so handlers that race against
+   * `stop()` can't emit phantom messages after the runtime has shut down.
+   */
+  safeSend(msg) {
+    if (!this.started) return;
+    this.hub.send(msg);
+  }
+  /**
+   * Records `p` in the in-flight set so `stop()` can wait for it, and removes
+   * it on settle. Returns the same promise so callers can `void track(...)`.
+   */
+  track(p) {
+    this.inflight.add(p);
+    return p.finally(() => {
+      this.inflight.delete(p);
+    });
+  }
+  /**
+   * Entry point for any `schedule:*` message arriving from the Hub. Always
+   * returns synchronously; per-branch async work runs on its own microtask
+   * chain and reports results back to the hub.
+   */
+  handleHubMessage(msg) {
+    if (!isMsg(msg)) return;
+    const requestId = typeof msg.requestId === "string" ? msg.requestId : void 0;
+    const m = msg;
+    switch (msg.type) {
+      case "schedule:create":
+        void this.track(this.handleCreate(m, requestId));
+        return;
+      case "schedule:update":
+        void this.track(this.handleUpdate(m, requestId));
+        return;
+      case "schedule:delete":
+        void this.track(this.handleDelete(m, requestId));
+        return;
+      case "schedule:pause":
+        void this.track(this.handlePauseResume(m, requestId, "paused"));
+        return;
+      case "schedule:resume":
+        void this.track(this.handlePauseResume(m, requestId, "active"));
+        return;
+      case "schedule:trigger":
+        void this.track(this.handleTrigger(m, requestId));
+        return;
+      case "schedule:list":
+        void this.track(this.handleList(m, requestId));
+        return;
+      case "schedule:runs":
+        void this.track(this.handleRuns(m, requestId));
+        return;
+      case "schedule:log":
+        void this.track(this.handleLog(m, requestId));
+        return;
+      default:
+        log3.warn({ type: msg.type }, "schedule-runtime: unknown message type");
+        if (requestId !== void 0) {
+          this.safeSend({
+            type: "schedule:error",
+            requestId,
+            error: {
+              code: "INTERNAL",
+              message: `unknown schedule message type: ${msg.type}`
+            }
+          });
+        }
+    }
+  }
+  // ── Per-branch handlers ─────────────────────────────────────────────
+  async handleCreate(msg, requestId) {
+    try {
+      const created = await this.repo.create(
+        msg.input
+      );
+      this.scheduler.register(created);
+      this.safeSend({
+        type: "schedule:create:result",
+        requestId,
+        schedule: created
+      });
+      this.safeSend({ type: "schedule:changed", schedule: created });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handleUpdate(msg, requestId) {
+    try {
+      const id = String(msg.id);
+      const updated = await this.repo.update(
+        id,
+        msg.patch
+      );
+      if (updated.status === "active") {
+        this.scheduler.register(updated);
+      } else {
+        this.scheduler.unregister(id);
+      }
+      this.safeSend({
+        type: "schedule:update:result",
+        requestId,
+        schedule: updated
+      });
+      this.safeSend({ type: "schedule:changed", schedule: updated });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handleDelete(msg, requestId) {
+    const id = String(msg.id);
+    try {
+      this.scheduler.unregister(id);
+      await this.repo.delete(id);
+      this.safeSend({ type: "schedule:delete:result", requestId, id });
+      this.safeSend({ type: "schedule:removed", id });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handlePauseResume(msg, requestId, nextStatus) {
+    const id = String(msg.id);
+    try {
+      const updated = await this.repo.update(id, { status: nextStatus });
+      if (nextStatus === "active") {
+        this.scheduler.register(updated);
+      } else {
+        this.scheduler.unregister(id);
+      }
+      const resultType = nextStatus === "paused" ? "schedule:pause:result" : "schedule:resume:result";
+      this.safeSend({ type: resultType, requestId, schedule: updated });
+      this.safeSend({ type: "schedule:changed", schedule: updated });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handleTrigger(msg, requestId) {
+    const id = String(msg.id);
+    try {
+      await this.scheduler.triggerNow(id);
+      const snap = await this.repo.getById(id);
+      this.safeSend({
+        type: "schedule:trigger:result",
+        requestId,
+        schedule: snap
+      });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handleList(msg, requestId) {
+    try {
+      const all = await this.repo.listAll();
+      const list = typeof msg.encodedCwd === "string" ? all.filter((s) => s.encodedCwd === msg.encodedCwd) : all;
+      this.safeSend({
+        type: "schedule:list:result",
+        requestId,
+        schedules: list
+      });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handleRuns(msg, requestId) {
+    const id = String(msg.id);
+    try {
+      const runs = await this.repo.listRuns(id);
+      this.safeSend({ type: "schedule:runs:result", requestId, runs });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  async handleLog(msg, requestId) {
+    const id = String(msg.id);
+    const runId = String(msg.runId);
+    try {
+      const { stdout, stderr } = await this.repo.readRunLogs(id, runId);
+      this.safeSend({
+        type: "schedule:log:result",
+        requestId,
+        runId,
+        stdout,
+        stderr
+      });
+    } catch (err) {
+      this.sendError(requestId, err);
+    }
+  }
+  sendError(requestId, err) {
+    const message = err instanceof Error ? err.message : String(err);
+    log3.warn({ err: message, requestId }, "schedule-runtime: branch failed");
+    this.safeSend({
+      type: "schedule:error",
+      requestId,
+      error: { code: "INTERNAL", message }
+    });
+  }
+};
+function isMsg(x) {
+  return typeof x === "object" && x !== null && typeof x.type === "string";
+}
+function resolveClaudeBin() {
+  if (process.env.CLAUDE_BIN) return process.env.CLAUDE_BIN;
+  try {
+    const which = process.platform === "win32" ? "where" : "which";
+    const out = execSync(`${which} claude`, { stdio: ["ignore", "pipe", "ignore"] }).toString().split(/\r?\n/).map((l) => l.trim()).filter(Boolean);
+    if (out[0]) return out[0];
+  } catch {
+  }
+  return "claude";
+}
+var BoundedBuffer = class {
+  constructor(cap) {
+    this.cap = cap;
+    this.headCap = Math.floor(cap / 2);
+    this.tailCap = cap - this.headCap;
+  }
+  head = Buffer.alloc(0);
+  tailRing = [];
+  tailBytes = 0;
+  totalBytes = 0;
+  headCap;
+  tailCap;
+  push(chunk) {
+    if (chunk.length === 0) return;
+    const buf = Buffer.from(chunk, "utf8");
+    this.totalBytes += buf.byteLength;
+    if (this.head.byteLength < this.headCap) {
+      const need = this.headCap - this.head.byteLength;
+      if (buf.byteLength <= need) {
+        this.head = Buffer.concat([this.head, buf]);
+        return;
+      }
+      let split = need;
+      while (split > 0 && (buf[split] & 192) === 128) split -= 1;
+      this.head = Buffer.concat([this.head, buf.subarray(0, split)]);
+      this.appendToTail(buf.subarray(split));
+      return;
+    }
+    this.appendToTail(buf);
+  }
+  appendToTail(buf) {
+    if (buf.byteLength === 0) return;
+    this.tailRing.push(buf);
+    this.tailBytes += buf.byteLength;
+    while (this.tailBytes > this.tailCap && this.tailRing.length > 0) {
+      const front = this.tailRing[0];
+      const overflow = this.tailBytes - this.tailCap;
+      if (front.byteLength <= overflow) {
+        this.tailRing.shift();
+        this.tailBytes -= front.byteLength;
+      } else {
+        let start = overflow;
+        while (start < front.byteLength && (front[start] & 192) === 128) {
+          start += 1;
+        }
+        this.tailRing[0] = front.subarray(start);
+        this.tailBytes -= start;
+      }
+    }
+  }
+  toString() {
+    const tail = this.tailRing.length > 0 ? Buffer.concat(this.tailRing) : Buffer.alloc(0);
+    if (this.totalBytes <= this.cap) {
+      return Buffer.concat([this.head, tail]).toString("utf8");
+    }
+    const dropped = this.totalBytes - this.head.byteLength - tail.byteLength;
+    const marker = `
+...[truncated ${dropped} bytes]...
+`;
+    return this.head.toString("utf8") + marker + tail.toString("utf8");
+  }
+};
+function makeRealOneShotSpawn() {
+  return async (args, opts) => {
+    const bin = resolveClaudeBin();
+    const proc = spawn(bin, args, {
+      cwd: opts.cwd,
+      env: opts.env,
+      stdio: ["ignore", "pipe", "pipe"]
+    });
+    const stdoutBuf = new BoundedBuffer(SPAWN_STDOUT_CAP_BYTES);
+    const stderrBuf = new BoundedBuffer(SPAWN_STDERR_CAP_BYTES);
+    let stdoutLineResidual = "";
+    let lastAssistantText;
+    const tryExtractAssistant = (line) => {
+      const trimmed = line.trim();
+      if (!trimmed) return;
+      try {
+        const obj = JSON.parse(trimmed);
+        if (obj.type === "assistant" && obj.message?.content) {
+          for (const piece of obj.message.content) {
+            if (piece.type === "text" && typeof piece.text === "string") {
+              lastAssistantText = piece.text;
+            }
+          }
+        }
+      } catch {
+      }
+    };
+    proc.stdout?.setEncoding("utf8");
+    proc.stderr?.setEncoding("utf8");
+    proc.stdout?.on("data", (chunk) => {
+      stdoutBuf.push(chunk);
+      const combined = stdoutLineResidual + chunk;
+      const lines = combined.split("\n");
+      stdoutLineResidual = lines.pop() ?? "";
+      for (const line of lines) tryExtractAssistant(line);
+    });
+    proc.stderr?.on("data", (chunk) => {
+      stderrBuf.push(chunk);
+    });
+    const onAbort = () => {
+      try {
+        proc.kill("SIGTERM");
+      } catch {
+      }
+    };
+    if (opts.signal.aborted) onAbort();
+    else opts.signal.addEventListener("abort", onAbort, { once: true });
+    return new Promise((resolve, reject) => {
+      proc.once("error", (err) => {
+        opts.signal.removeEventListener("abort", onAbort);
+        reject(err);
+      });
+      proc.once("close", (code) => {
+        opts.signal.removeEventListener("abort", onAbort);
+        if (stdoutLineResidual.length > 0) {
+          tryExtractAssistant(stdoutLineResidual);
+          stdoutLineResidual = "";
+        }
+        resolve({
+          exitCode: code ?? -1,
+          stdout: stdoutBuf.toString(),
+          stderr: stderrBuf.toString(),
+          lastAssistantText
+        });
+      });
+    });
+  };
+}
+function makeRealSwarmStarter(_deps) {
+  return async () => {
+    throw new Error("swarm scheduler executor not yet wired");
+  };
+}
+
 // src/session-manager.ts
-import { createLogger as createLogger3, previewFields as previewFields2 } from "@mclawnet/logger";
+import { createLogger as createLogger5, previewFields as previewFields2 } from "@mclawnet/logger";
 import { buildMemorySection } from "@mclawnet/memory";
+import { MAX_TOKENS_LADDER, clampLadderIndex } from "@mclawnet/shared";
 
 // src/skill-loader.ts
 import { existsSync as existsSync2, mkdirSync, readdirSync as readdirSync2, readFileSync as readFileSync2, writeFileSync } from "fs";
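Two patterns from the new schedule-runtime chunk are worth seeing in isolation. First, the in-flight tracking that `stop()` drains, reduced to its core (a sketch, not the shipped class):

```js
// Track fire-and-forget promises so shutdown can wait for them.
class InflightTracker {
  inflight = new Set();
  track(p) {
    this.inflight.add(p);
    return p.finally(() => this.inflight.delete(p)); // remove on settle
  }
  async drain() {
    await Promise.allSettled(Array.from(this.inflight)); // never rejects
  }
}
// Usage mirrors the runtime: void tracker.track(handler(msg)); await tracker.drain() in stop().
```

Second, what `BoundedBuffer`'s head/tail split actually does. With the class from the chunk above in scope, a deliberately tiny 16-byte cap makes the behavior visible: half the budget keeps the start of the stream, the rest is a ring holding the most recent bytes, and `toString()` splices a marker in between.

```js
const buf = new BoundedBuffer(16);   // headCap = 8, tailCap = 8
buf.push("AAAAAAAAAA");              // 10 bytes: 8 fill the head, 2 spill to the tail ring
buf.push("BBBBBBBBBB");              // ring evicts the oldest tail bytes beyond 8
console.log(buf.toString());
// "AAAAAAAA\n...[truncated 4 bytes]...\nBBBBBBBB"
// (totalBytes 20 - head 8 - tail 8 = 4 dropped)
```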
@@ -797,9 +1292,9 @@ import { join as join2, dirname } from "path";
 import { homedir as homedir2 } from "os";
 import { createRequire } from "module";
 import { fileURLToPath } from "url";
-import { createLogger as createLogger2 } from "@mclawnet/logger";
+import { createLogger as createLogger4 } from "@mclawnet/logger";
 import { ManifestManager, mergeSkillSections } from "@mclawnet/skill-manager";
-var log2 = createLogger2({ module: "agent/skill-loader" });
+var log4 = createLogger4({ module: "agent/skill-loader" });
 var CLAWNET_DIR = join2(homedir2(), ".clawnet");
 var SKILLS_DIR = join2(CLAWNET_DIR, ".claude", "skills");
 var MCP_CONFIG_PATH = join2(CLAWNET_DIR, "mcp.json");
@@ -824,7 +1319,7 @@ function syncBuiltinSkills(rootDir, srcDir) {
   const skillsDir = join2(rootDir, ".claude", "skills");
   if (!existsSync2(skillsDir)) mkdirSync(skillsDir, { recursive: true });
   if (!existsSync2(srcDir)) {
-    log2.debug({ srcDir }, "no built-in skills directory found, skipping");
+    log4.debug({ srcDir }, "no built-in skills directory found, skipping");
     return;
   }
   const manifest = new ManifestManager(rootDir);
@@ -832,7 +1327,7 @@ function syncBuiltinSkills(rootDir, srcDir) {
   try {
     entries = readdirSync2(srcDir, { withFileTypes: true }).filter((d) => d.isDirectory()).map((d) => d.name);
   } catch {
-    log2.warn({ srcDir }, "failed to read built-in skills directory");
+    log4.warn({ srcDir }, "failed to read built-in skills directory");
     return;
   }
   for (const skillName of entries) {
@@ -845,7 +1340,7 @@ function syncBuiltinSkills(rootDir, srcDir) {
     try {
       officialContent = readFileSync2(srcSkillMd, "utf-8");
     } catch (err) {
-      log2.warn({ skill: skillName, err }, "failed to read built-in skill");
+      log4.warn({ skill: skillName, err }, "failed to read built-in skill");
       continue;
     }
     const officialHash = sha256(officialContent);
@@ -856,9 +1351,9 @@ function syncBuiltinSkills(rootDir, srcDir) {
         writeFileSync(destSkillMd, officialContent);
         writeFileSync(baseSnapshotPath, officialContent);
         manifest.register(skillName, officialHash, officialVersion);
-        log2.info({ skill: skillName, version: officialVersion }, "installed built-in skill");
+        log4.info({ skill: skillName, version: officialVersion }, "installed built-in skill");
       } catch (err) {
-        log2.warn({ skill: skillName, err }, "failed to install built-in skill");
+        log4.warn({ skill: skillName, err }, "failed to install built-in skill");
       }
       continue;
     }
@@ -871,27 +1366,27 @@ function syncBuiltinSkills(rootDir, srcDir) {
        try {
          writeFileSync(baseSnapshotPath, userContent);
        } catch (err) {
-          log2.debug({ skill: skillName, err }, "failed to write lazy .base.md");
+          log4.debug({ skill: skillName, err }, "failed to write lazy .base.md");
        }
      }
    }
    const action = manifest.determineSyncAction(skillName, officialHash, officialVersion);
    switch (action) {
      case "skip":
-        log2.debug({ skill: skillName }, "skill already up to date");
+        log4.debug({ skill: skillName }, "skill already up to date");
        break;
      case "direct-overwrite":
        try {
          writeFileSync(destSkillMd, officialContent);
          writeFileSync(baseSnapshotPath, officialContent);
          manifest.markSynced(skillName, officialHash, officialHash, officialVersion);
-          log2.info({ skill: skillName, version: officialVersion }, "upgraded built-in skill");
+          log4.info({ skill: skillName, version: officialVersion }, "upgraded built-in skill");
        } catch (err) {
-          log2.warn({ skill: skillName, err }, "failed to overwrite skill");
+          log4.warn({ skill: skillName, err }, "failed to overwrite skill");
        }
        break;
      case "keep-user":
-        log2.debug({ skill: skillName }, "keeping user-modified skill");
+        log4.debug({ skill: skillName }, "keeping user-modified skill");
        break;
      case "needs-merge": {
        if (!existsSync2(baseSnapshotPath)) {
@@ -906,12 +1401,12 @@ function syncBuiltinSkills(rootDir, srcDir) {
              }
            ])
          );
-          log2.warn(
+          log4.warn(
            { skill: skillName },
            "missing .base.md snapshot \u2014 skipping auto-merge, wrote .conflict advisory"
          );
        } catch (err) {
-          log2.warn({ skill: skillName, err }, "failed to write .conflict advisory");
+          log4.warn({ skill: skillName, err }, "failed to write .conflict advisory");
        }
        break;
      }
@@ -923,9 +1418,9 @@ function syncBuiltinSkills(rootDir, srcDir) {
            writeFileSync(baseSnapshotPath, officialContent);
            const newCurrentHash = sha256(merged.content);
            manifest.markSynced(skillName, officialHash, newCurrentHash, officialVersion);
-            log2.info({ skill: skillName }, "auto-merged skill upgrade");
+            log4.info({ skill: skillName }, "auto-merged skill upgrade");
          } catch (err) {
-            log2.warn({ skill: skillName, err }, "failed to write merged skill");
+            log4.warn({ skill: skillName, err }, "failed to write merged skill");
          }
        } else {
          try {
@@ -933,12 +1428,12 @@ function syncBuiltinSkills(rootDir, srcDir) {
              destSkillMd + ".conflict",
              renderConflictFile(skillName, officialContent, merged.conflicts ?? [])
            );
-            log2.warn(
+            log4.warn(
              { skill: skillName, conflicts: merged.conflicts?.length },
              "skill merge conflict \u2014 user version kept, .conflict file written"
            );
          } catch (err) {
-            log2.warn({ skill: skillName, err }, "failed to write .conflict file");
+            log4.warn({ skill: skillName, err }, "failed to write .conflict file");
          }
        }
        break;
@@ -979,7 +1474,7 @@ function renderConflictFile(skillName, officialContent, conflicts) {
 function ensureSkillsDir() {
   if (!existsSync2(SKILLS_DIR)) {
     mkdirSync(SKILLS_DIR, { recursive: true });
-    log2.info({ dir: SKILLS_DIR }, "created skills directory");
+    log4.info({ dir: SKILLS_DIR }, "created skills directory");
   }
 }
 var cachedSkills = [];
@@ -989,7 +1484,7 @@ function scanSkills() {
   try {
     dirs = readdirSync2(SKILLS_DIR, { withFileTypes: true }).filter((d) => d.isDirectory()).map((d) => d.name);
   } catch {
-    log2.warn("failed to read skills directory for scanning");
+    log4.warn("failed to read skills directory for scanning");
     return [];
   }
   const skills = [];
@@ -1006,11 +1501,11 @@ function scanSkills() {
        });
      }
    } catch (err) {
-      log2.debug({ skill: dirName, err }, "failed to parse SKILL.md frontmatter");
+      log4.debug({ skill: dirName, err }, "failed to parse SKILL.md frontmatter");
    }
  }
  cachedSkills = skills;
-  log2.info({ count: skills.length }, "scanned skills");
+  log4.info({ count: skills.length }, "scanned skills");
  return skills;
 }
 function getSkillList() {
@@ -1031,7 +1526,7 @@ ${lines.join("\n")}
 \u4F7F\u7528 skill_pending_list \u67E5\u770B\u8BE6\u60C5\uFF0Cskill_pending_approve / skill_pending_reject \u5904\u7406\u3002`;
    return { count: pending.length, text };
  } catch (err) {
-    log2.debug({ err }, "failed to read pending-evolutions.json");
+    log4.debug({ err }, "failed to read pending-evolutions.json");
    return null;
  }
 }
@@ -1048,54 +1543,179 @@ function parseFrontmatter(content) {
|
|
|
1048
1543
|
};
|
|
1049
1544
|
}
|
|
1050
1545
|
function ensureMcpConfig() {
|
|
1051
|
-
if (existsSync2(MCP_CONFIG_PATH)) {
|
|
1052
|
-
log2.debug("mcp.json already exists, skipping");
|
|
1053
|
-
return;
|
|
1054
|
-
}
|
|
1055
1546
|
let mcpServerPath;
|
|
1056
1547
|
try {
|
|
1057
1548
|
const req = createRequire(import.meta.url);
|
|
1058
1549
|
const mcpPkgDir = dirname(req.resolve("@mclawnet/mcp-server/package.json"));
|
|
1059
1550
|
mcpServerPath = join2(mcpPkgDir, "dist", "server.js");
|
|
1060
1551
|
} catch {
|
|
1061
|
-
|
|
1552
|
+
log4.warn("could not resolve @mclawnet/mcp-server package path, skipping mcp.json generation");
|
|
1062
1553
|
return;
|
|
1063
1554
|
}
|
|
1064
|
-
const
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1555
|
+
const desired = { command: "node", args: [mcpServerPath] };
|
|
1556
|
+
let config = {};
|
|
1557
|
+
let fileExists = false;
|
|
1558
|
+
if (existsSync2(MCP_CONFIG_PATH)) {
|
|
1559
|
+
fileExists = true;
|
|
1560
|
+
try {
|
|
1561
|
+
config = JSON.parse(readFileSync2(MCP_CONFIG_PATH, "utf-8"));
|
|
1562
|
+
} catch (err) {
|
|
1563
|
+
log4.warn({ err, path: MCP_CONFIG_PATH }, "mcp.json malformed \u2014 leaving alone");
|
|
1564
|
+
return;
|
|
1070
1565
|
}
|
|
1071
|
-
|
|
1566
|
+
if (!config.mcpServers || typeof config.mcpServers !== "object") {
|
|
1567
|
+
config.mcpServers = {};
|
|
1568
|
+
}
|
|
1569
|
+
} else {
|
|
1570
|
+
config = { mcpServers: {} };
|
|
1571
|
+
}
|
|
1572
|
+
const servers = config.mcpServers;
|
|
1573
|
+
let changed = false;
|
|
1574
|
+
const existing = servers["clawnet-mcp"];
|
|
1575
|
+
const existingMatches = existing && existing.command === desired.command && Array.isArray(existing.args) && existing.args[0] === desired.args[0];
|
|
1576
|
+
if (!existingMatches) {
|
|
1577
|
+
servers["clawnet-mcp"] = desired;
|
|
1578
|
+
changed = true;
|
|
1579
|
+
}
|
|
1580
|
+
const legacy = servers["clawnet-memory"];
|
|
1581
|
+
if (legacy && legacy.command === "node" && Array.isArray(legacy.args) && typeof legacy.args[0] === "string" && /@mclawnet\/agent\/node_modules\/@mclawnet\/memory\/dist\/mcp\/server\.js$/.test(
|
|
1582
|
+
legacy.args[0]
|
|
1583
|
+
)) {
|
|
1584
|
+
delete servers["clawnet-memory"];
|
|
1585
|
+
changed = true;
|
|
1586
|
+
}
|
|
1587
|
+
if (!changed) {
|
|
1588
|
+
log4.debug("mcp.json already up-to-date \u2014 no write");
|
|
1589
|
+
return;
|
|
1590
|
+
}
|
|
1072
1591
|
try {
|
|
1073
1592
|
mkdirSync(CLAWNET_DIR, { recursive: true });
|
|
1074
1593
|
writeFileSync(MCP_CONFIG_PATH, JSON.stringify(config, null, 2) + "\n");
|
|
1075
|
-
|
|
1594
|
+
log4.info(
|
|
1595
|
+
{ path: MCP_CONFIG_PATH, fresh: !fileExists },
|
|
1596
|
+
fileExists ? "updated mcp.json (clawnet-mcp ensured)" : "generated default mcp.json"
|
|
1597
|
+
);
|
|
1598
|
+
} catch (err) {
|
|
1599
|
+
log4.warn({ err }, "failed to write mcp.json");
|
|
1600
|
+
}
|
|
1601
|
+
}
|
|
1602
|
+
|
|
1603
|
+
// src/errors.ts
|
|
1604
|
+
var SessionLimitReachedError = class extends Error {
|
|
1605
|
+
constructor(details) {
|
|
1606
|
+
super(buildMessage(details));
|
|
1607
|
+
this.details = details;
|
|
1608
|
+
this.name = "SessionLimitReachedError";
|
|
1609
|
+
}
|
|
1610
|
+
code = "SESSION_LIMIT_REACHED";
|
|
1611
|
+
};
|
|
1612
|
+
function buildMessage(d) {
|
|
1613
|
+
const kindParts = Object.entries(d.byKind).map(([k, n]) => `${k}=${n}`).join(", ");
|
|
1614
|
+
const oldest = d.oldestIdleSessionId ? `Oldest idle: ${d.oldestIdleSessionId} (${d.oldestIdleKind}, idle ${Math.round(
|
|
1615
|
+
(d.oldestIdleAgeMs ?? 0) / 1e3
|
|
1616
|
+
)}s).` : "No idle sessions to reclaim.";
|
|
1617
|
+
return `Session limit reached (${d.used}/${d.max}). ${kindParts}. ${oldest}`;
|
|
1618
|
+
}
|
|
1619
|
+
|
|
1620
|
+
// src/checkpoint.ts
|
|
1621
|
+
import { promises as fs } from "fs";
|
|
1622
|
+
async function atomicWriteJson(path, data) {
|
|
1623
|
+
const tmp = `${path}.tmp`;
|
|
1624
|
+
await fs.writeFile(tmp, JSON.stringify(data, null, 2));
|
|
1625
|
+
await fs.rename(tmp, path);
|
|
1626
|
+
}
|
|
1627
|
+
async function readCheckpoint(path) {
|
|
1628
|
+
let raw;
|
|
1629
|
+
try {
|
|
1630
|
+
raw = await fs.readFile(path, "utf8");
|
|
1631
|
+
} catch (err) {
|
|
1632
|
+
if (err && err.code === "ENOENT") return null;
|
|
1633
|
+
throw err;
|
|
1634
|
+
}
|
|
1635
|
+
return JSON.parse(raw);
|
|
1636
|
+
}
|
|
1637
|
+
function isPidAlive(pid) {
|
|
1638
|
+
   try {
+    process.kill(pid, 0);
+    return true;
   } catch (err) {
-
+    return err && err.code === "EPERM";
   }
 }
 
 // src/session-manager.ts
-var
-var DEFAULT_MAX_PROCESSES =
+var log5 = createLogger5({ module: "agent/session-manager" });
+var DEFAULT_MAX_PROCESSES = 30;
 var MAX_PROCESSES = Number(process.env.CLAWNET_MAX_PROCESSES) || DEFAULT_MAX_PROCESSES;
+var DEFAULT_CHAT_IDLE_TTL_MS = 60 * 60 * 1e3;
+var DEFAULT_IDLE_SWEEP_INTERVAL_MS = 5 * 60 * 1e3;
+var CHAT_IDLE_TTL_MS = (() => {
+  const raw = process.env.CLAWNET_CHAT_IDLE_TTL_MS;
+  if (raw === void 0) return DEFAULT_CHAT_IDLE_TTL_MS;
+  const parsed = Number(raw);
+  return Number.isFinite(parsed) ? parsed : DEFAULT_CHAT_IDLE_TTL_MS;
+})();
+var IDLE_SWEEP_INTERVAL_MS = (() => {
+  const raw = process.env.CLAWNET_IDLE_SWEEP_INTERVAL_MS;
+  if (raw === void 0) return DEFAULT_IDLE_SWEEP_INTERVAL_MS;
+  const parsed = Number(raw);
+  return Number.isFinite(parsed) && parsed > 0 ? parsed : DEFAULT_IDLE_SWEEP_INTERVAL_MS;
+})();
 var SessionManager = class {
   sessions = /* @__PURE__ */ new Map();
   conversationBuffer = /* @__PURE__ */ new Map();
+  // PR-A: parallel metadata map (kind, timestamps). See SessionMeta jsdoc.
+  sessionMeta = /* @__PURE__ */ new Map();
+  // PR-A: sessions currently inside a sendInput→turn_complete window. Idle
+  // sweeper must skip these even if lastActivityAt looks ancient — a
+  // long-running deep-research turn would otherwise be killed mid-flight.
+  activelyExecuting = /* @__PURE__ */ new Set();
   // Sessions whose abort is in flight. Treated as not-present by hasSession /
   // isHealthy so a racing claude.execute (e.g., right after force_restart) is
   // correctly routed to the "spawn new + --resume" branch instead of trying
   // to write to a process we are about to kill.
   aborting = /* @__PURE__ */ new Set();
+  idleSweepTimer = null;
+  // PR-A: effective sweeper config. Initialized from env at construct time
+  // and overridable per-instance via startIdleSweeper(overrides) — the
+  // override path is mostly for tests (drive sweeps with sub-second TTLs)
+  // and for ops scripts that want a one-off short cycle.
+  idleTtlMs = CHAT_IDLE_TTL_MS;
+  idleSweepIntervalMs = IDLE_SWEEP_INTERVAL_MS;
+  // PR-C: checkpoint state. `checkpointPath` is null when checkpoint is
+  // disabled (no path supplied → tests, or env override later). Debouncer
+  // coalesces structural events into ≤ 1 disk write per 5s; agentStartedAt
+  // is recorded once at construction so a restart can correlate logs.
+  checkpointPath;
+  checkpointDebounceMs;
+  checkpointDebouncer = null;
+  agentStartedAt = Date.now();
+  // PR-C fixup: one-shot guard. start.ts wires recoverFromCheckpoint() into
+  // hub `onConnect`, which fires on every reconnect. After the first call the
+  // checkpoint file holds *current-run* entries owned by us — re-classifying
+  // them would falsely flag every live session as "orphan" and spam logs. The
+  // flag is intentionally process-lifetime: a new agent process gets a fresh
+  // SessionManager and re-runs recovery exactly once.
+  hasRecovered = false;
+  /**
+   * Carry-over ladder indices from the previous run's checkpoint. Populated by
+   * `recoverFromCheckpoint`, drained one-shot by `createSession`. Without this
+   * a SIGTERM right after escalating to 16k would silently revert the next
+   * spawn back to 32k and re-hit the 168k wall on its first turn.
+   */
+  recoveredLadderIndex = /* @__PURE__ */ new Map();
   adapter;
   onOutput;
   onTurnComplete;
   onSessionError;
   onSessionStarted;
   onBeforeClose;
+  // PR-A: classifies a sessionId as 'chat' or 'swarm-role'. Injected by
+  // start.ts via SwarmCoordinator.isSwarmSession to keep SessionManager
+  // independent of the swarm package. Defaults to 'chat' if absent — safe
+  // because the worst-case (mis-classify a swarm-role as chat) is bounded by
+  // activelyExecuting protection during turn execution.
+  classify;
   constructor(options) {
     this.adapter = options.adapter;
     this.onOutput = options.onOutput;
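The two env-derived constants above are parsed once at module load, and their fallback rules differ on purpose: CHAT_IDLE_TTL_MS accepts any finite number, including 0 and negatives, because a non-positive TTL is the off switch the sweeper's jsdoc documents; IDLE_SWEEP_INTERVAL_MS additionally rejects non-positive values, since a zero interval would spin the timer. A minimal standalone sketch of the same parse-with-fallback pattern (the helper name readMsEnv is illustrative, not part of the package):

    // Sketch only: mirrors the two IIFEs in the diff above.
    // allowNonPositive distinguishes the TTL (0/negative means "sweeper off")
    // from the sweep interval (must be strictly positive).
    function readMsEnv(name: string, fallback: number, allowNonPositive: boolean): number {
      const raw = process.env[name];
      if (raw === undefined) return fallback;
      const parsed = Number(raw);
      if (!Number.isFinite(parsed)) return fallback;
      return allowNonPositive || parsed > 0 ? parsed : fallback;
    }

    const ttlMs = readMsEnv("CLAWNET_CHAT_IDLE_TTL_MS", 60 * 60 * 1000, true);
    const sweepEveryMs = readMsEnv("CLAWNET_IDLE_SWEEP_INTERVAL_MS", 5 * 60 * 1000, false);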
@@ -1103,13 +1723,16 @@ var SessionManager = class {
     this.onSessionError = options.onSessionError;
     this.onSessionStarted = options.onSessionStarted;
     this.onBeforeClose = options.onBeforeClose;
+    this.classify = options.classify ?? (() => "chat");
+    this.checkpointPath = options.checkpointPath ?? null;
+    this.checkpointDebounceMs = options.checkpointDebounceMs ?? 5e3;
   }
   async createSession(options) {
     if (this.sessions.has(options.sessionId)) {
       throw new Error(`Session ${options.sessionId} already exists`);
     }
     if (this.sessions.size >= MAX_PROCESSES) {
-      throw new
+      throw new SessionLimitReachedError(this.snapshotPoolForError());
     }
     if (options.roleId) {
       try {
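At saturation, createSession now fails fast with a typed SessionLimitReachedError (declared in the new dist/errors.d.ts) carrying the pool snapshot built by snapshotPoolForError further down in this chunk. Caller-side handling is not shown here; the sketch below assumes the snapshot is exposed as err.snapshot, and the field names come from snapshotPoolForError:

    // Sketch: `err.snapshot` is an assumed property name; the fields are
    // taken from snapshotPoolForError below.
    try {
      await sessionManager.createSession({ sessionId: "chat::abc123" });
    } catch (err) {
      if (err instanceof SessionLimitReachedError) {
        const s = err.snapshot; // { used, max, byKind, oldestIdleSessionId, oldestIdleAgeMs, oldestIdleKind }
        console.warn(
          `session pool full (${s.used}/${s.max}); ` +
          `oldest idle candidate: ${s.oldestIdleSessionId} idle ${s.oldestIdleAgeMs}ms`
        );
      } else {
        throw err;
      }
    }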
@@ -1120,9 +1743,9 @@ var SessionManager = class {
         options.systemPrompt = options.systemPrompt ? `${memorySection}${roleHint}
 
 ${options.systemPrompt}` : `${memorySection}${roleHint}`;
-
+        log5.debug({ roleId: options.roleId, sessionId: options.sessionId }, "memory prompt + roleId hint injected");
       } catch (err) {
-
+        log5.warn({ err, roleId: options.roleId }, "failed to build memory section, proceeding without");
       }
     }
     try {
@@ -1133,17 +1756,34 @@ ${options.systemPrompt}` : `${memorySection}${roleHint}`;
 [Skill \u8FDB\u5316\u63D0\u6848]
 ${notice.text}`;
         options.systemPrompt = options.systemPrompt ? `${options.systemPrompt}${noticeBlock}` : noticeBlock.trimStart();
-
+        log5.debug(
           { sessionId: options.sessionId, pending: notice.count },
           "pending skill evolution notice injected"
         );
       }
     } catch (err) {
-
+      log5.debug({ err }, "failed to inject pending notification");
     }
     try {
       const process2 = await this.adapter.spawn(options);
       this.sessions.set(options.sessionId, process2);
+      const now = Date.now();
+      const kind = this.classify(options.sessionId);
+      const agentInstanceId = kind === "swarm-role" ? options.sessionId.split("::")[1] : void 0;
+      this.sessionMeta.set(options.sessionId, {
+        kind,
+        workDir: options.workDir ?? process2.workDir,
+        agentInstanceId,
+        startedAt: now,
+        lastActivityAt: now,
+        turnsCompleted: 0,
+        // Restore ladder index if this sessionId was carried over from the
+        // previous run's checkpoint. drain-on-read so a future re-creation of
+        // a different session with the same id starts fresh.
+        currentLadderIndex: this.recoveredLadderIndex.get(options.sessionId)
+      });
+      this.recoveredLadderIndex.delete(options.sessionId);
+      this.scheduleCheckpoint();
       this.adapter.onOutput(process2, (data) => {
         const msg = data;
         if (msg?.type === "assistant" && msg?.message?.content) {
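The object literal written into sessionMeta above fixes the shape of a SessionMeta record. Reconstructed from usage in this chunk (the authoritative declaration lives in dist/session-manager.d.ts; optionality is inferred, not copied):

    // Reconstructed from usage in this diff, not from the published .d.ts.
    interface SessionMeta {
      kind: "chat" | "swarm-role";
      workDir?: string;            // options.workDir ?? process.workDir
      agentInstanceId?: string;    // swarm-role only: sessionId.split("::")[1]
      claudeSessionId?: string;    // filled in by the session_started handler
      startedAt: number;           // epoch ms at spawn
      lastActivityAt: number;      // bumped on sendInput and turn_complete
      turnsCompleted: number;
      currentLadderIndex?: number; // max_output_tokens escalation state
    }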
@@ -1157,15 +1797,54 @@ ${notice.text}`;
         this.onOutput(options.sessionId, data);
       });
       this.adapter.onTurnComplete?.(process2, (info) => {
+        const meta = this.sessionMeta.get(options.sessionId);
+        if (meta) {
+          meta.lastActivityAt = Date.now();
+          meta.turnsCompleted += 1;
+        }
+        this.activelyExecuting.delete(options.sessionId);
         this.onTurnComplete(options.sessionId, info);
       });
       this.adapter.onError?.(process2, (error) => {
+        this.activelyExecuting.delete(options.sessionId);
         this.onSessionError(options.sessionId, error.message);
       });
+      this.adapter.onTokenBudgetWarning?.(process2, (info) => {
+        const meta = this.sessionMeta.get(options.sessionId);
+        if (!meta) return;
+        const currentIndex = clampLadderIndex(meta.currentLadderIndex);
+        const atFloor = currentIndex >= MAX_TOKENS_LADDER.length - 1;
+        if (atFloor) {
+          log5.warn(
+            {
+              sessionId: options.sessionId,
+              used: info.used,
+              hardLimit: info.hardLimit,
+              maxOutputTokens: info.maxOutputTokens
+            },
+            "token_budget_warning at ladder floor (8k) \u2014 cannot escalate further"
+          );
+          return;
+        }
+        const nextIndex = currentIndex + 1;
+        meta.currentLadderIndex = nextIndex;
+        log5.warn(
+          {
+            sessionId: options.sessionId,
+            used: info.used,
+            hardLimit: info.hardLimit,
+            previousMaxOutputTokens: info.maxOutputTokens,
+            nextMaxOutputTokens: MAX_TOKENS_LADDER[nextIndex],
+            ladderIndex: nextIndex
+          },
+          "token_budget_warning: escalating max_output_tokens for next spawn"
+        );
+        this.scheduleCheckpoint();
+      });
       if (this.onSessionStarted) {
         this.adapter.onSessionStarted?.(process2, (info) => {
           if (this.aborting.has(options.sessionId)) {
-
+            log5.debug(
               { sessionId: options.sessionId },
               "suppressing late session_started \u2014 session is aborting"
             );
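MAX_TOKENS_LADDER and clampLadderIndex are referenced in the handler above but defined outside the lines shown. The concrete ladder values are not visible in this diff; [32000, 16000, 8000] below is inferred from the 32k default mentioned in getRecommendedMaxOutputTokens' jsdoc, the 16k step in the recoveredLadderIndex comment, and the "(8k)" floor in the log message, so treat both definitions as a sketch:

    // Inferred values; the real ladder is defined outside this diff hunk.
    const MAX_TOKENS_LADDER = [32_000, 16_000, 8_000];

    // Plausible clamp, matching how it is called with possibly-undefined indices.
    function clampLadderIndex(idx: number | undefined): number {
      if (typeof idx !== "number" || !Number.isInteger(idx) || idx < 0) return 0;
      return Math.min(idx, MAX_TOKENS_LADDER.length - 1);
    }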
@@ -1174,14 +1853,20 @@ ${notice.text}`;
           if (this.sessions.get(options.sessionId) !== process2) {
             return;
           }
+          const meta = this.sessionMeta.get(options.sessionId);
+          if (meta) meta.claudeSessionId = info.claudeSessionId;
+          this.scheduleCheckpoint();
           this.onSessionStarted(options.sessionId, info);
         });
       }
       this.adapter.onExit?.(process2, (code) => {
         if (this.sessions.get(options.sessionId) === process2) {
           this.sessions.delete(options.sessionId);
+          this.sessionMeta.delete(options.sessionId);
+          this.activelyExecuting.delete(options.sessionId);
           this.conversationBuffer.delete(options.sessionId);
-
+          this.scheduleCheckpoint();
+          log5.warn({ sessionId: options.sessionId, exitCode: code }, "backend process exited unexpectedly, evicted from session map");
           this.onSessionError(options.sessionId, `backend process exited (code=${code ?? "null"})`);
         }
       });
@@ -1198,7 +1883,10 @@ ${notice.text}`;
       this.onSessionError(sessionId, `No active session: ${sessionId}`);
       return;
     }
-
+    this.activelyExecuting.add(sessionId);
+    const meta = this.sessionMeta.get(sessionId);
+    if (meta) meta.lastActivityAt = Date.now();
+    log5.debug(
       { sessionId, ...previewFields2(input) },
       "sendInput \u2192 backend stdin"
     );
@@ -1210,6 +1898,9 @@ ${notice.text}`;
     this.aborting.add(sessionId);
     this.conversationBuffer.delete(sessionId);
     this.sessions.delete(sessionId);
+    this.sessionMeta.delete(sessionId);
+    this.activelyExecuting.delete(sessionId);
+    this.scheduleCheckpoint();
     try {
       await this.adapter.stop(process2);
     } finally {
@@ -1226,9 +1917,13 @@ ${notice.text}`;
     }
     this.conversationBuffer.delete(sessionId);
     this.sessions.delete(sessionId);
+    this.sessionMeta.delete(sessionId);
+    this.activelyExecuting.delete(sessionId);
+    this.scheduleCheckpoint();
     await this.adapter.stop(process2);
   }
   async closeAll() {
+    this.stopIdleSweeper();
     const promises = Array.from(this.sessions.entries()).map(
       async ([sessionId, process2]) => {
         const messages = this.conversationBuffer.get(sessionId);
@@ -1238,11 +1933,22 @@ ${notice.text}`;
         }
         this.conversationBuffer.delete(sessionId);
         this.sessions.delete(sessionId);
+        this.sessionMeta.delete(sessionId);
+        this.activelyExecuting.delete(sessionId);
         await this.adapter.stop(process2).catch(() => {
         });
       }
     );
     await Promise.all(promises);
+    if (this.checkpointPath) {
+      if (this.checkpointDebouncer) {
+        clearTimeout(this.checkpointDebouncer);
+        this.checkpointDebouncer = null;
+      }
+      await this.flushCheckpoint().catch(
+        (err) => log5.warn({ err }, "checkpoint: final flushCheckpoint failed")
+      );
+    }
   }
   hasSession(sessionId) {
     return this.sessions.has(sessionId) && !this.aborting.has(sessionId);
@@ -1264,16 +1970,224 @@ ${notice.text}`;
   getActiveSessionIds() {
     return Array.from(this.sessions.keys());
   }
+  /**
+   * Recommended `CLAUDE_CODE_MAX_OUTPUT_TOKENS` for the *next* spawn of this
+   * session. Returns `undefined` for unknown sessions OR sessions still at
+   * ladder index 0 — callers omit the env override in that case so the CLI
+   * picks its own default. Returning `undefined` (instead of `32_000`) keeps
+   * env composition free of redundant overrides for the common path.
+   *
+   * Used by hub-connection.spawnAndSend on every reuse / fresh-resume
+   * branch — without that wiring the ladder change in meta would be inert.
+   */
+  getRecommendedMaxOutputTokens(sessionId) {
+    const meta = this.sessionMeta.get(sessionId);
+    if (!meta) return void 0;
+    const idx = clampLadderIndex(meta.currentLadderIndex);
+    if (idx <= 0) return void 0;
+    return MAX_TOKENS_LADDER[idx];
+  }
+  // ─── PR-A: idle TTL sweeper ─────────────────────────────────────────
+  /**
+   * Start the periodic idle sweeper. Idempotent: a second call is a no-op.
+   * Disabled (no-op) when effective TTL is `<= 0` — set via env
+   * `CLAWNET_CHAT_IDLE_TTL_MS=0` or via the `ttlMs` override.
+   *
+   * The timer is `unref()`-ed so it never blocks process exit.
+   *
+   * @param overrides - test/ops hook to override env-derived defaults.
+   */
+  startIdleSweeper(overrides) {
+    if (overrides?.ttlMs !== void 0) this.idleTtlMs = overrides.ttlMs;
+    if (overrides?.intervalMs !== void 0)
+      this.idleSweepIntervalMs = overrides.intervalMs;
+    if (this.idleSweepTimer) return;
+    if (this.idleTtlMs <= 0) {
+      log5.info("idleSweeper disabled (ttlMs <= 0)");
+      return;
+    }
+    this.idleSweepTimer = setInterval(() => {
+      this.sweepIdleSessions().catch(
+        (err) => log5.warn({ err }, "idleSweeper: tick failed")
+      );
+    }, this.idleSweepIntervalMs);
+    this.idleSweepTimer.unref?.();
+    log5.info(
+      { ttlMs: this.idleTtlMs, intervalMs: this.idleSweepIntervalMs },
+      "idleSweeper started"
+    );
+  }
+  stopIdleSweeper() {
+    if (this.idleSweepTimer) {
+      clearInterval(this.idleSweepTimer);
+      this.idleSweepTimer = null;
+    }
+  }
+  /**
+   * One sweep tick. Exposed for tests (so they can drive sweeps deterministically
+   * without faking interval timers) and for ops scripts. Safe to call concurrently
+   * with normal traffic — the sweeper only ever calls closeSession, which itself
+   * is idempotent on missing IDs.
+   */
+  async sweepIdleSessions() {
+    const now = Date.now();
+    const ttl = this.idleTtlMs;
+    if (ttl <= 0) return;
+    const victims = [];
+    for (const [sid, meta] of this.sessionMeta) {
+      if (meta.kind !== "chat") continue;
+      if (this.activelyExecuting.has(sid)) continue;
+      if (this.aborting.has(sid)) continue;
+      if (now - meta.lastActivityAt < ttl) continue;
+      victims.push(sid);
+    }
+    for (const sid of victims) {
+      const idleMs = now - (this.sessionMeta.get(sid)?.lastActivityAt ?? now);
+      log5.info(
+        { sessionId: sid, idleMs, ttlMs: ttl },
+        "idleSweeper: closing idle chat session"
+      );
+      await this.closeSession(sid).catch(
+        (err) => log5.warn({ err, sessionId: sid }, "idleSweeper: closeSession failed, continuing")
+      );
+    }
+  }
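Because sweepIdleSessions is public, a test can drive the TTL path deterministically instead of faking interval timers. A sketch using the override path described in the jsdoc above, assuming a manager built over a mock adapter with one stale chat session:

    // Test-style sketch; `manager` is an assumed SessionManager instance.
    manager.startIdleSweeper({ ttlMs: 50, intervalMs: 10_000 }); // sub-second TTL
    await new Promise((resolve) => setTimeout(resolve, 60));     // let it go idle
    await manager.sweepIdleSessions();                           // one explicit tick
    // the stale chat session is now closed; swarm-role sessions are untouched
    manager.stopIdleSweeper();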
+  // ─── PR-B: fail-fast saturation snapshot ────────────────────────────
+  /**
+   * Build the SessionPoolSnapshot attached to a SessionLimitReachedError.
+   *
+   * "Idle" here means "not currently executing a turn". A session whose
+   * lastActivityAt is hours old but is mid-turn (long-running deep-research)
+   * is intentionally NOT a candidate — pointing the user at it would be
+   * misleading. The selection mirrors `sweepIdleSessions` filters minus the
+   * TTL gate (since at saturation we report the LRU regardless of TTL).
+   */
+  snapshotPoolForError() {
+    const byKind = { chat: 0, "swarm-role": 0 };
+    let oldestIdleSessionId = null;
+    let oldestIdleTs = Infinity;
+    let oldestIdleKind = null;
+    const now = Date.now();
+    for (const [sid, meta] of this.sessionMeta) {
+      byKind[meta.kind]++;
+      if (this.activelyExecuting.has(sid)) continue;
+      if (this.aborting.has(sid)) continue;
+      if (meta.lastActivityAt < oldestIdleTs) {
+        oldestIdleTs = meta.lastActivityAt;
+        oldestIdleSessionId = sid;
+        oldestIdleKind = meta.kind;
+      }
+    }
+    return {
+      used: this.sessions.size,
+      max: MAX_PROCESSES,
+      byKind,
+      oldestIdleSessionId,
+      oldestIdleAgeMs: oldestIdleSessionId ? now - oldestIdleTs : null,
+      oldestIdleKind
+    };
+  }
+  // ─── PR-C: logical-state checkpoint ─────────────────────────────────
+  /**
+   * Schedule a debounced flush of the in-memory pool to the checkpoint file.
+   *
+   * Burst protection: if a debounce timer is already pending, this is a no-op
+   * — the flush captures the pool state at firing time, so all events in the
+   * 5s window are observable in the eventual snapshot. This collapses the
+   * "create + onSessionStarted + close" 3-event sequence into one disk write.
+   */
+  scheduleCheckpoint() {
+    if (!this.checkpointPath) return;
+    if (this.checkpointDebouncer) return;
+    this.checkpointDebouncer = setTimeout(() => {
+      this.checkpointDebouncer = null;
+      this.flushCheckpoint().catch(
+        (err) => log5.warn({ err }, "checkpoint: flush failed")
+      );
+    }, this.checkpointDebounceMs);
+    this.checkpointDebouncer.unref?.();
+  }
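scheduleCheckpoint is a trailing-edge debounce with no re-arm: the first event starts the window, later events in the window are no-ops, and the flush reads whatever the pool looks like when the timer fires, so nothing in the window is lost. The same shape in isolation, as a generic sketch:

    // Generic version of the scheduleCheckpoint pattern: at most one flush per
    // window, and the flush observes all state mutated during the window.
    function makeDebouncedFlush(flush: () => Promise<void>, windowMs: number) {
      let timer: NodeJS.Timeout | null = null;
      return () => {
        if (timer) return;            // window already armed: coalesce
        timer = setTimeout(() => {
          timer = null;
          void flush();               // caller is responsible for error logging
        }, windowMs);
        timer.unref?.();              // like the original: never block process exit
      };
    }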
+  /**
+   * Flush the current pool to the checkpoint file via temp+rename. Public
+   * for tests and for the graceful-shutdown final-flush in `closeAll`.
+   *
+   * Sessions whose underlying BackendProcess has no `pid` (e.g., in-process
+   * mock backends) are skipped — without a pid the recovery probe can't
+   * distinguish dead from alive, and writing them in would just generate
+   * spurious session.died notifications on restart.
+   */
+  async flushCheckpoint() {
+    if (!this.checkpointPath) return;
+    const entries = [];
+    for (const [sid, meta] of this.sessionMeta) {
+      const proc = this.sessions.get(sid);
+      if (!proc) continue;
+      if (proc.pid === void 0) continue;
+      entries.push({
+        sessionId: sid,
+        kind: meta.kind,
+        workDir: meta.workDir,
+        claudeSessionId: meta.claudeSessionId,
+        agentInstanceId: meta.agentInstanceId,
+        startedAt: meta.startedAt,
+        lastActivityAt: meta.lastActivityAt,
+        pid: proc.pid,
+        currentLadderIndex: meta.currentLadderIndex
+      });
+    }
+    const data = {
+      version: 1,
+      agentStartedAt: this.agentStartedAt,
+      entries
+    };
+    await atomicWriteJson(this.checkpointPath, data);
+  }
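atomicWriteJson comes from the new checkpoint module and its body is not in this chunk; "temp+rename" in the jsdoc above names the standard technique. A plausible implementation, offered only as a sketch of what the helper likely does:

    import { writeFile, rename } from "node:fs/promises";

    // Sketch: write to a sibling temp file, then rename over the target.
    // Readers see either the old file or the complete new one, never a torn
    // write; rename() is atomic on POSIX within a single filesystem.
    async function atomicWriteJson(path: string, data: unknown): Promise<void> {
      const tmp = `${path}.${process.pid}.tmp`;
      await writeFile(tmp, JSON.stringify(data, null, 2), "utf8");
      await rename(tmp, path);
    }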
+  /**
+   * Read the previous-run checkpoint, classify each entry as `dead` or
+   * `orphan` via pid probe, then reset the file to the current (empty)
+   * truth. Caller (start.ts) is expected to forward `dead` entries to hub
+   * via `session.died` so db's processState clears.
+   *
+   * Returns `{dead: [], orphan: []}` on first boot (no file). Throws on
+   * file-system error or JSON parse failure — start.ts catches and logs so a
+   * corrupt checkpoint does not block agent startup.
+   */
+  async recoverFromCheckpoint() {
+    if (!this.checkpointPath) return { dead: [], orphan: [] };
+    if (this.hasRecovered) return { dead: [], orphan: [] };
+    this.hasRecovered = true;
+    const file = await readCheckpoint(this.checkpointPath);
+    if (!file) return { dead: [], orphan: [] };
+    const dead = [];
+    const orphan = [];
+    for (const entry of file.entries) {
+      if (typeof entry.currentLadderIndex === "number") {
+        this.recoveredLadderIndex.set(entry.sessionId, entry.currentLadderIndex);
+      }
+      if (isPidAlive(entry.pid)) {
+        orphan.push(entry);
+        log5.warn(
+          { sessionId: entry.sessionId, pid: entry.pid },
+          "recover: pid still alive but not owned by current agent (orphan)"
+        );
+      } else {
+        dead.push(entry);
+      }
+    }
+    await this.flushCheckpoint();
+    return { dead, orphan };
+  }
 };
 
 // src/start.ts
 import { SwarmCoordinator, initRoles } from "@mclawnet/swarm";
+import { TaskStore } from "@mclawnet/task";
 
 // src/brain-bridge.ts
 import { existsSync as existsSync3, readFileSync as readFileSync3, readdirSync as readdirSync3 } from "fs";
 import { join as join3 } from "path";
-import { createLogger as
-var
+import { createLogger as createLogger6 } from "@mclawnet/logger";
+var log6 = createLogger6({ module: "brain-bridge" });
 var BrainBridge = class {
   constructor(hub, options) {
     this.hub = hub;
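Putting flushCheckpoint and recoverFromCheckpoint together, the agent-sessions.json file holds one entry per live, pid-bearing session at the last flush. Its shape, reconstructed from the object literals above (not a published schema; dist/checkpoint.d.ts is authoritative):

    // Reconstructed from flushCheckpoint's object literal.
    interface CheckpointFile {
      version: 1;
      agentStartedAt: number;        // epoch ms of the run that wrote the file
      entries: Array<{
        sessionId: string;
        kind: "chat" | "swarm-role";
        workDir?: string;
        claudeSessionId?: string;
        agentInstanceId?: string;
        startedAt: number;
        lastActivityAt: number;
        pid: number;                 // probed with process.kill(pid, 0) on recovery
        currentLadderIndex?: number; // carried into the next spawn of the same id
      }>;
    }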
@@ -1281,7 +2195,7 @@ var BrainBridge = class {
     this.brainHome = options?.brainHomePath || process.env.BRAIN_HOME || join3(home, "BrainData");
     this.brainCorePath = options?.brainCorePath || join3(home, ".brain", "BrainCore");
     this.hub.registerNamespace("brain", (msg) => this.handleRequest(msg));
-
+    log6.info(
       { brainHome: this.brainHome, brainCorePath: this.brainCorePath },
       "BrainBridge initialized"
     );
@@ -1289,11 +2203,11 @@ var BrainBridge = class {
   brainHome;
   brainCorePath;
   async handleRequest(msg) {
-
+    log6.info({ action: msg.action, requestId: msg.requestId }, "brain request");
     switch (msg.action) {
       case "setup_status": {
         const status = this.checkSetup();
-
+        log6.info({ status }, "setup_status result");
         return { status };
       }
       case "get_briefing":
@@ -1301,7 +2215,7 @@ var BrainBridge = class {
       case "get_meeting_recap":
         return await this.getMeetingRecap(msg.params.recapPath);
       default:
-
+        log6.warn({ action: msg.action }, "unknown brain action");
         throw new Error(`Unknown brain action: ${msg.action}`);
     }
   }
@@ -1324,15 +2238,15 @@ var BrainBridge = class {
     const targetDate = date || (/* @__PURE__ */ new Date()).toISOString().slice(0, 10);
     const reportsDir = join3(this.brainHome, "reports", "daily");
     if (!existsSync3(reportsDir)) {
-
+      log6.info({ reportsDir }, "get_briefing: reports dir not found");
       return { briefing: null, actions: [], projects: [], meetings: [], feed: [] };
     }
     const files = readdirSync3(reportsDir).filter((f) => f.includes(targetDate) && f.endsWith(".md")).sort().reverse();
     if (files.length === 0) {
-
+      log6.info({ targetDate }, "get_briefing: no report for date");
       return { briefing: null, actions: [], projects: [], meetings: [], feed: [] };
     }
-
+    log6.info({ targetDate, file: files[0] }, "get_briefing: reading report");
     const content = readFileSync3(join3(reportsDir, files[0]), "utf-8");
     const tldrMatch = content.match(/## TL;DR\n([\s\S]*?)(?=\n---|\n## )/);
     const tldr = tldrMatch ? tldrMatch[1].trim() : content.slice(0, 200);
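The TL;DR extraction above is a lazy match terminated by a lookahead for the next horizontal rule or section heading. The regex is verbatim from the code; the sample report text is illustrative:

    const content = [
      "## TL;DR",
      "Ship the release today; two flaky tests remain.",
      "",
      "---",
      "## Actions",
    ].join("\n");

    const tldrMatch = content.match(/## TL;DR\n([\s\S]*?)(?=\n---|\n## )/);
    const tldr = tldrMatch ? tldrMatch[1].trim() : content.slice(0, 200);
    // tldr === "Ship the release today; two flaky tests remain."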
@@ -1340,7 +2254,7 @@ var BrainBridge = class {
     const projects = this.parseProjects(content);
     const meetings = this.parseMeetings(content);
     const feed = this.parseFeed(content);
-
+    log6.info(
       { actions: actions.length, projects: projects.length, meetings: meetings.length, feed: feed.length },
       "get_briefing: parsed"
     );
@@ -1534,7 +2448,7 @@ var BrainBridge = class {
     }
     const fullPath = join3(this.brainHome, recapPath);
     if (!existsSync3(fullPath)) {
-
+      log6.warn({ fullPath }, "meeting recap file not found");
       return { error: "Recap file not found" };
     }
     const content = readFileSync3(fullPath, "utf-8");
@@ -1572,8 +2486,8 @@ var BrainBridge = class {
 // src/fs-bridge.ts
 import { existsSync as existsSync4, readFileSync as readFileSync4, statSync as statSync2 } from "fs";
 import { extname, isAbsolute } from "path";
-import { createLogger as
-var
+import { createLogger as createLogger7 } from "@mclawnet/logger";
+var log7 = createLogger7({ module: "fs-bridge" });
 var MAX_TEXT_SIZE = 5 * 1024 * 1024;
 var MIME_MAP = {
   ".ts": "text/typescript",
@@ -1611,10 +2525,10 @@ var FsBridge = class {
   constructor(hub) {
     this.hub = hub;
     this.hub.registerNamespace("fs", (msg) => this.handleRequest(msg));
-
+    log7.info("FsBridge initialized");
   }
   async handleRequest(msg) {
-
+    log7.info({ action: msg.action, requestId: msg.requestId }, "fs request");
     switch (msg.action) {
       case "read":
         return this.readFile(msg.params);
@@ -1650,8 +2564,37 @@ var FsBridge = class {
   }
 };
 
+// src/swarm-session-bridge.ts
+import { createLogger as createLogger8 } from "@mclawnet/logger";
+var log8 = createLogger8({ module: "agent:swarm-bridge" });
+function createSwarmAwareSessionStartedHandler(deps) {
+  return (sessionId, info) => {
+    deps.hub.send({
+      type: "claude.session_started",
+      sessionId,
+      claudeSessionId: info.claudeSessionId
+    });
+    if (deps.swarmCoordinator.isSwarmSession(sessionId)) {
+      try {
+        const ok = deps.swarmCoordinator.setRoleClaudeSessionIdBySession(
+          sessionId,
+          info.claudeSessionId
+        );
+        if (!ok) {
+          log8.debug(
+            { sessionId },
+            "session_started for swarm-shaped sessionId, but no matching role (already destroyed?)"
+          );
+        }
+      } catch (err) {
+        log8.warn({ err, sessionId }, "failed to bridge session_started into swarm");
+      }
+    }
+  };
+}
+
 // src/start.ts
-import { createLogger as
+import { createLogger as createLogger9 } from "@mclawnet/logger";
 import {
   initDatabase,
   MemoryStore,
@@ -1665,15 +2608,15 @@ import {
   AccumulationScanner,
   triggerFromAccumulation
 } from "@mclawnet/skill-manager";
-var
+var log9 = createLogger9({ module: "agent" });
 async function startAgent(options) {
   const config = loadConfig(options.config);
   if (!config.token) {
-
+    log9.error("no token configured \u2014 set CLAWNET_TOKEN or use --token");
     process.exit(1);
   }
-
-
+  log9.info({ backend: options.adapter.type }, "starting agent");
+  log9.info({ hubUrl: config.hubUrl }, "connecting to hub");
   await initRoles();
   await initSkills();
   const hub = new HubConnection({
@@ -1681,18 +2624,34 @@ async function startAgent(options) {
     token: config.token,
     hostname: config.name,
     onConnect: (agentId) => {
-
+      log9.info({ agentId }, "connected to hub");
       const skills = getSkillList();
       if (skills.length > 0) {
         hub.sendSkillList(skills);
-
+        log9.info({ count: skills.length }, "pushed skill list to hub");
       }
+      void sessionManager.recoverFromCheckpoint().then((report) => {
+        for (const entry of report.dead) {
+          hub.send({
+            type: "session.died",
+            sessionId: entry.sessionId,
+            reason: "agent_restart",
+            claudeSessionId: entry.claudeSessionId
+          });
+        }
+        log9.info(
+          { deadCount: report.dead.length, orphanCount: report.orphan.length },
+          "checkpoint recover: cleanup complete"
+        );
+      }).catch((err) => {
+        log9.warn({ err }, "checkpoint recover: failed (degrading to no-op)");
+      });
     },
     onDisconnect: (code, reason) => {
-
+      log9.info({ code, reason }, "disconnected from hub");
     },
     onError: (err) => {
-
+      log9.error({ err }, "hub connection error");
     }
   });
   let swarmCoordinator;
@@ -1710,7 +2669,7 @@ async function startAgent(options) {
     skillStore = new SkillStore(clawnetDir);
     evolutionPipeline = new EvolutionPipeline(clawnetDir);
   } catch (err) {
-
+    log9.warn({ err }, "failed to init memory/skill infra; distillation disabled");
   }
   const onBeforeClose = async (sessionId, messages) => {
     if (messages.length === 0) return;
@@ -1723,7 +2682,7 @@ async function startAgent(options) {
         embeddingService
       );
     } catch (err) {
-
+      log9.warn({ err, sessionId }, "distillation failed (non-fatal)");
     }
     try {
       if (!skillStore || !evolutionPipeline) return;
@@ -1745,7 +2704,7 @@ async function startAgent(options) {
        await triggerFromAccumulation(signals, evolutionPipeline);
      }
    } catch (err) {
-
+      log9.warn({ err, sessionId }, "accumulation scan failed (non-fatal)");
    }
  };
  const sessionManager = new SessionManager({
@@ -1770,11 +2729,11 @@ async function startAgent(options) {
       });
     },
     onSessionStarted: (sessionId, info) => {
-
-
-
-        claudeSessionId: info.claudeSessionId
+      const handler = createSwarmAwareSessionStartedHandler({
+        hub,
+        swarmCoordinator
       });
+      handler(sessionId, info);
     },
     onSessionError: (sessionId, error) => {
       hub.send({
@@ -1783,22 +2742,46 @@ async function startAgent(options) {
         error
       });
     },
-    onBeforeClose
+    onBeforeClose,
+    // PR-A: classify session kind for the idle sweeper. SessionManager stays
+    // independent of the swarm package; we hand it a closure that defers to
+    // SwarmCoordinator (created below) at call time. Lazy-read is safe because
+    // createSession can't run until the hub connection is up, by which point
+    // swarmCoordinator is already assigned.
+    classify: (sessionId) => swarmCoordinator?.isSwarmSession(sessionId) ? "swarm-role" : "chat",
+    // PR-C: enable logical-state checkpoint. Lives next to memory.db under
+    // CLAWNET_DIR so backup/wipe affects both consistently. Per-call writes
+    // are debounced to 5s by SessionManager itself.
+    checkpointPath: join4(clawnetDir, "agent-sessions.json")
   });
-  swarmCoordinator = new SwarmCoordinator(sessionManager, hub)
+  swarmCoordinator = new SwarmCoordinator(sessionManager, hub, (workDir) => {
+    try {
+      const env = process.env.CLAWNET_DIR;
+      const home = env ? env.replace(/\/\.clawnet\/?$/, "") : homedir3();
+      return new TaskStore({ workDir, home });
+    } catch (err) {
+      log9.warn({ err, workDir }, "TaskStore factory failed");
+      return void 0;
+    }
+  }, process.env.CLAWNET_HOME ?? homedir3());
   hub.setSessionManager(sessionManager);
   hub.setSwarmCoordinator(swarmCoordinator);
+  const scheduleRuntime = new ScheduleRuntime({ hub, sessionManager, swarmCoordinator });
+  hub.setScheduleRuntime(scheduleRuntime);
+  await scheduleRuntime.start();
   const brainBridge = new BrainBridge(hub);
   const fsBridge = new FsBridge(hub);
+  sessionManager.startIdleSweeper();
   const shutdown = async () => {
-
+    log9.info("shutting down");
     await sessionManager.closeAll();
+    await scheduleRuntime.stop();
     hub.destroy();
     if (memoryDb) {
       try {
         memoryDb.close();
       } catch (err) {
-
+        log9.warn({ err }, "failed to close memory db");
       }
     }
     process.exit(0);
@@ -1806,7 +2789,7 @@ async function startAgent(options) {
   process.on("SIGINT", shutdown);
   process.on("SIGTERM", shutdown);
   hub.connect();
-  return { hub, sessionManager, swarmCoordinator, brainBridge, fsBridge };
+  return { hub, sessionManager, swarmCoordinator, scheduleRuntime, brainBridge, fsBridge };
 }
 
 export {
@@ -1816,4 +2799,4 @@ export {
   FsBridge,
   startAgent
 };
-//# sourceMappingURL=chunk-
+//# sourceMappingURL=chunk-WJWCYGLQ.js.map