opencode-swarm-plugin 0.32.0 → 0.34.0
This diff shows the contents of publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
- package/.hive/issues.jsonl +12 -0
- package/.hive/memories.jsonl +255 -1
- package/.turbo/turbo-build.log +9 -10
- package/.turbo/turbo-test.log +343 -337
- package/CHANGELOG.md +358 -0
- package/README.md +152 -179
- package/bin/swarm.test.ts +303 -1
- package/bin/swarm.ts +473 -16
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/index.d.ts +112 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +12380 -131
- package/dist/logger.d.ts +34 -0
- package/dist/logger.d.ts.map +1 -0
- package/dist/observability-tools.d.ts +116 -0
- package/dist/observability-tools.d.ts.map +1 -0
- package/dist/plugin.js +12254 -119
- package/dist/skills.d.ts.map +1 -1
- package/dist/swarm-orchestrate.d.ts +105 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +113 -2
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-research.d.ts +127 -0
- package/dist/swarm-research.d.ts.map +1 -0
- package/dist/swarm-review.d.ts.map +1 -1
- package/dist/swarm.d.ts +73 -1
- package/dist/swarm.d.ts.map +1 -1
- package/evals/compaction-resumption.eval.ts +289 -0
- package/evals/coordinator-behavior.eval.ts +307 -0
- package/evals/fixtures/compaction-cases.ts +350 -0
- package/evals/scorers/compaction-scorers.ts +305 -0
- package/evals/scorers/index.ts +12 -0
- package/examples/plugin-wrapper-template.ts +297 -8
- package/package.json +6 -2
- package/src/compaction-hook.test.ts +617 -1
- package/src/compaction-hook.ts +291 -18
- package/src/index.ts +54 -1
- package/src/logger.test.ts +189 -0
- package/src/logger.ts +135 -0
- package/src/observability-tools.test.ts +346 -0
- package/src/observability-tools.ts +594 -0
- package/src/skills.integration.test.ts +137 -1
- package/src/skills.test.ts +42 -1
- package/src/skills.ts +8 -4
- package/src/swarm-orchestrate.test.ts +123 -0
- package/src/swarm-orchestrate.ts +183 -0
- package/src/swarm-prompts.test.ts +553 -1
- package/src/swarm-prompts.ts +406 -4
- package/src/swarm-research.integration.test.ts +544 -0
- package/src/swarm-research.test.ts +698 -0
- package/src/swarm-research.ts +472 -0
- package/src/swarm-review.test.ts +177 -0
- package/src/swarm-review.ts +12 -47
- package/src/swarm.ts +6 -3
package/src/compaction-hook.ts
CHANGED
@@ -31,6 +31,28 @@
 
 import { getHiveAdapter, getHiveWorkingDirectory } from "./hive";
 import { checkSwarmHealth } from "swarm-mail";
+import { createChildLogger } from "./logger";
+
+let _logger: any | undefined;
+
+/**
+ * Get logger instance (lazy initialization for testability)
+ *
+ * Logs to: ~/.config/swarm-tools/logs/compaction.1log
+ *
+ * Log structure:
+ * - START: session_id, trigger
+ * - GATHER: source (swarm-mail|hive), duration_ms, stats/counts
+ * - DETECT: confidence, detected, reason_count, reasons
+ * - INJECT: confidence, context_length, context_type (full|fallback|none)
+ * - COMPLETE: duration_ms, success, detected, confidence, context_injected
+ */
+function getLog() {
+  if (!_logger) {
+    _logger = createChildLogger("compaction");
+  }
+  return _logger;
+}
 
 // ============================================================================
 // Compaction Context
@@ -145,6 +167,55 @@ Include this in your summary:
 "This is an active swarm. Check swarm_status and swarmmail_inbox immediately."
 `;
 
+// ============================================================================
+// Dynamic Context Building
+// ============================================================================
+
+/**
+ * Build dynamic swarm state section from detected state
+ *
+ * This injects SPECIFIC values instead of placeholders, making the context
+ * immediately actionable on resume.
+ */
+function buildDynamicSwarmState(state: SwarmState): string {
+  const parts: string[] = [];
+
+  parts.push("## 🐝 Current Swarm State\n");
+
+  if (state.epicId && state.epicTitle) {
+    parts.push(`**Epic:** ${state.epicId} - ${state.epicTitle}`);
+
+    const totalSubtasks = state.subtasks.closed + state.subtasks.in_progress +
+      state.subtasks.open + state.subtasks.blocked;
+
+    if (totalSubtasks > 0) {
+      parts.push(`**Subtasks:**`);
+      if (state.subtasks.closed > 0) parts.push(`  - ${state.subtasks.closed} closed`);
+      if (state.subtasks.in_progress > 0) parts.push(`  - ${state.subtasks.in_progress} in_progress`);
+      if (state.subtasks.open > 0) parts.push(`  - ${state.subtasks.open} open`);
+      if (state.subtasks.blocked > 0) parts.push(`  - ${state.subtasks.blocked} blocked`);
+    }
+  }
+
+  parts.push(`**Project:** ${state.projectPath}`);
+
+  if (state.epicId) {
+    parts.push(`\n## 🎯 YOU ARE THE COORDINATOR`);
+    parts.push(``);
+    parts.push(`**Primary role:** Orchestrate workers, review their output, unblock dependencies.`);
+    parts.push(`**Spawn workers** for implementation tasks - don't do them yourself.`);
+    parts.push(``);
+    parts.push(`**RESUME STEPS:**`);
+    parts.push(`1. Check swarm status: \`swarm_status(epic_id="${state.epicId}", project_key="${state.projectPath}")\``);
+    parts.push(`2. Check inbox for worker messages: \`swarmmail_inbox(limit=5)\``);
+    parts.push(`3. For in_progress subtasks: Review worker results with \`swarm_review\``);
+    parts.push(`4. For open subtasks: Spawn workers with \`swarm_spawn_subtask\``);
+    parts.push(`5. For blocked subtasks: Investigate and unblock`);
+  }
+
+  return parts.join("\n");
+}
+
 // ============================================================================
 // Swarm Detection
 // ============================================================================
@@ -156,6 +227,23 @@ interface SwarmDetection {
   detected: boolean;
   confidence: "high" | "medium" | "low" | "none";
   reasons: string[];
+  /** Specific swarm state data for context injection */
+  state?: SwarmState;
+}
+
+/**
+ * Specific swarm state captured during detection
+ */
+interface SwarmState {
+  epicId?: string;
+  epicTitle?: string;
+  projectPath: string;
+  subtasks: {
+    closed: number;
+    in_progress: number;
+    open: number;
+    blocked: number;
+  };
 }
 
 /**
@@ -173,13 +261,38 @@ async function detectSwarm(): Promise<SwarmDetection> {
   let highConfidence = false;
   let mediumConfidence = false;
   let lowConfidence = false;
+  let state: SwarmState | undefined;
 
   try {
     const projectKey = getHiveWorkingDirectory();
+
+    // Initialize state with project path
+    state = {
+      projectPath: projectKey,
+      subtasks: {
+        closed: 0,
+        in_progress: 0,
+        open: 0,
+        blocked: 0,
+      },
+    };
 
     // Check 1: Active reservations in swarm-mail (HIGH confidence)
+    const swarmMailStart = Date.now();
     try {
       const health = await checkSwarmHealth(projectKey);
+      const duration = Date.now() - swarmMailStart;
+
+      getLog().debug(
+        {
+          source: "swarm-mail",
+          duration_ms: duration,
+          healthy: health.healthy,
+          stats: health.stats,
+        },
+        "checked swarm-mail health",
+      );
+
       if (health.healthy && health.stats) {
         if (health.stats.reservations > 0) {
           highConfidence = true;
@@ -194,14 +307,24 @@ async function detectSwarm(): Promise<SwarmDetection> {
           reasons.push(`${health.stats.messages} swarm messages`);
         }
       }
-    } catch {
+    } catch (error) {
+      getLog().debug(
+        {
+          source: "swarm-mail",
+          duration_ms: Date.now() - swarmMailStart,
+          error: error instanceof Error ? error.message : String(error),
+        },
+        "swarm-mail check failed",
+      );
       // Swarm-mail not available, continue with other checks
     }
 
     // Check 2: Hive cells (various confidence levels)
+    const hiveStart = Date.now();
     try {
       const adapter = await getHiveAdapter(projectKey);
       const cells = await adapter.queryCells(projectKey, {});
+      const duration = Date.now() - hiveStart;
 
       if (Array.isArray(cells) && cells.length > 0) {
         // HIGH: Any in_progress cells
@@ -213,7 +336,7 @@ async function detectSwarm(): Promise<SwarmDetection> {
 
         // MEDIUM: Open subtasks (cells with parent_id)
         const subtasks = cells.filter(
-          (c) => c.status === "open" && c.parent_id
+          (c) => c.status === "open" && c.parent_id,
         );
         if (subtasks.length > 0) {
           mediumConfidence = true;
@@ -222,11 +345,37 @@ async function detectSwarm(): Promise<SwarmDetection> {
 
         // MEDIUM: Unclosed epics
         const openEpics = cells.filter(
-          (c) => c.type === "epic" && c.status !== "closed"
+          (c) => c.type === "epic" && c.status !== "closed",
         );
         if (openEpics.length > 0) {
           mediumConfidence = true;
           reasons.push(`${openEpics.length} unclosed epics`);
+
+          // Capture in_progress epic data for state
+          const inProgressEpic = openEpics.find((c) => c.status === "in_progress");
+          if (inProgressEpic && state) {
+            state.epicId = inProgressEpic.id;
+            state.epicTitle = inProgressEpic.title;
+
+            // Count subtasks for this epic
+            const epicSubtasks = cells.filter((c) => c.parent_id === inProgressEpic.id);
+            state.subtasks.closed = epicSubtasks.filter((c) => c.status === "closed").length;
+            state.subtasks.in_progress = epicSubtasks.filter((c) => c.status === "in_progress").length;
+            state.subtasks.open = epicSubtasks.filter((c) => c.status === "open").length;
+            state.subtasks.blocked = epicSubtasks.filter((c) => c.status === "blocked").length;
+
+            getLog().debug(
+              {
+                epic_id: state.epicId,
+                epic_title: state.epicTitle,
+                subtasks_closed: state.subtasks.closed,
+                subtasks_in_progress: state.subtasks.in_progress,
+                subtasks_open: state.subtasks.open,
+                subtasks_blocked: state.subtasks.blocked,
+              },
+              "captured epic state for context",
+            );
+          }
         }
 
         // MEDIUM: Recently updated cells (last hour)
@@ -242,14 +391,46 @@ async function detectSwarm(): Promise<SwarmDetection> {
           lowConfidence = true;
           reasons.push(`${cells.length} total cells in hive`);
         }
+
+        getLog().debug(
+          {
+            source: "hive",
+            duration_ms: duration,
+            total_cells: cells.length,
+            in_progress: inProgress.length,
+            open_subtasks: subtasks.length,
+            open_epics: openEpics.length,
+            recent_updates: recentCells.length,
+          },
+          "checked hive cells",
+        );
+      } else {
+        getLog().debug(
+          { source: "hive", duration_ms: duration, total_cells: 0 },
+          "hive empty",
+        );
       }
-    } catch {
+    } catch (error) {
+      getLog().debug(
+        {
+          source: "hive",
+          duration_ms: Date.now() - hiveStart,
+          error: error instanceof Error ? error.message : String(error),
+        },
+        "hive check failed",
+      );
       // Hive not available, continue
     }
-  } catch {
+  } catch (error) {
     // Project detection failed, use fallback
     lowConfidence = true;
     reasons.push("Could not detect project, using fallback");
+    getLog().debug(
+      {
+        error: error instanceof Error ? error.message : String(error),
+      },
+      "project detection failed",
+    );
   }
 
   // Determine overall confidence
@@ -264,11 +445,25 @@ async function detectSwarm(): Promise<SwarmDetection> {
     confidence = "none";
   }
 
-
+  const result = {
     detected: confidence !== "none",
     confidence,
     reasons,
+    state,
   };
+
+  getLog().debug(
+    {
+      detected: result.detected,
+      confidence: result.confidence,
+      reason_count: result.reasons.length,
+      reasons: result.reasons,
+      has_state: !!result.state,
+    },
+    "swarm detection complete",
+  );
+
+  return result;
 }
 
 // ============================================================================
@@ -298,20 +493,98 @@ async function detectSwarm(): Promise<SwarmDetection> {
  */
 export function createCompactionHook() {
   return async (
-
+    input: { sessionID: string },
     output: { context: string[] },
   ): Promise<void> => {
-    const
-
-
-
-
-
-
-
-
-
+    const startTime = Date.now();
+
+    getLog().info(
+      {
+        session_id: input.sessionID,
+        trigger: "session_compaction",
+      },
+      "compaction started",
+    );
+
+    try {
+      const detection = await detectSwarm();
+
+      if (
+        detection.confidence === "high" ||
+        detection.confidence === "medium"
+      ) {
+        // Definite or probable swarm - inject full context
+        const header = `[Swarm detected: ${detection.reasons.join(", ")}]\n\n`;
+
+        // Build dynamic state section if we have specific data
+        let dynamicState = "";
+        if (detection.state && detection.state.epicId) {
+          dynamicState = buildDynamicSwarmState(detection.state) + "\n\n";
+        }
+
+        const contextContent = header + dynamicState + SWARM_COMPACTION_CONTEXT;
+        output.context.push(contextContent);
+
+        getLog().info(
+          {
+            confidence: detection.confidence,
+            context_length: contextContent.length,
+            context_type: "full",
+            reasons: detection.reasons,
+            has_dynamic_state: !!dynamicState,
+            epic_id: detection.state?.epicId,
+          },
+          "injected swarm context",
+        );
+      } else if (detection.confidence === "low") {
+        // Possible swarm - inject fallback detection prompt
+        const header = `[Possible swarm: ${detection.reasons.join(", ")}]\n\n`;
+        const contextContent = header + SWARM_DETECTION_FALLBACK;
+        output.context.push(contextContent);
+
+        getLog().info(
+          {
+            confidence: detection.confidence,
+            context_length: contextContent.length,
+            context_type: "fallback",
+            reasons: detection.reasons,
+          },
+          "injected swarm context",
+        );
+      } else {
+        getLog().debug(
+          {
+            confidence: detection.confidence,
+            context_type: "none",
+          },
+          "no swarm detected, skipping injection",
+        );
+      }
+      // confidence === "none" - no injection, probably not a swarm
+
+      const duration = Date.now() - startTime;
+      getLog().info(
+        {
+          duration_ms: duration,
+          success: true,
+          detected: detection.detected,
+          confidence: detection.confidence,
+          context_injected: output.context.length > 0,
+        },
+        "compaction complete",
+      );
+    } catch (error) {
+      const duration = Date.now() - startTime;
+      getLog().error(
+        {
+          duration_ms: duration,
+          success: false,
+          error: error instanceof Error ? error.message : String(error),
+          stack: error instanceof Error ? error.stack : undefined,
+        },
+        "compaction failed",
+      );
+      // Don't throw - compaction hook failures shouldn't break the session
     }
-    // confidence === "none" - no injection, probably not a swarm
   };
 }
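For orientation, the reworked hook keeps the same call shape: `createCompactionHook()` returns an async handler that receives `{ sessionID }` and pushes context strings into `output.context`. A minimal sketch of driving it directly follows; it assumes the hook is reachable from the package root export (it lives in `src/compaction-hook.ts`), and the session id is a placeholder.

```typescript
// Minimal sketch, assuming createCompactionHook is exposed from the package
// root; "ses_example" is a placeholder session id.
import { createCompactionHook } from "opencode-swarm-plugin";

const hook = createCompactionHook();
const output: { context: string[] } = { context: [] };

// Top-level await: run with Bun or as an ES module.
await hook({ sessionID: "ses_example" }, output);

// High/medium-confidence detections push a swarm-state block (epic, subtask
// counts, resume steps) into output.context; confidence "none" leaves it empty.
console.log(output.context.length > 0 ? output.context.join("\n") : "no swarm detected");
```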
package/src/index.ts
CHANGED
@@ -47,6 +47,8 @@ import { repoCrawlTools } from "./repo-crawl";
 import { skillsTools, setSkillsProjectDirectory } from "./skills";
 import { mandateTools } from "./mandates";
 import { memoryTools } from "./memory-tools";
+import { observabilityTools } from "./observability-tools";
+import { researchTools } from "./swarm-research";
 import {
   guardrailOutput,
   DEFAULT_GUARDRAIL_CONFIG,
@@ -154,7 +156,7 @@ export const SwarmPlugin: Plugin = async (
   * - mandate:file, mandate:vote, mandate:query, etc.
   * - semantic-memory:store, semantic-memory:find, semantic-memory:get, etc.
   */
-
+  tool: {
     ...hiveTools,
     ...swarmMailTools,
     ...structuredTools,
@@ -165,6 +167,8 @@ export const SwarmPlugin: Plugin = async (
     ...skillsTools,
     ...mandateTools,
     ...memoryTools,
+    ...observabilityTools,
+    ...researchTools,
   },
 
   /**
@@ -679,3 +683,52 @@ export {
   type OperationResult,
 } from "./memory-tools";
 export type { Memory, SearchResult, SearchOptions } from "swarm-mail";
+
+/**
+ * Re-export logger infrastructure
+ *
+ * Includes:
+ * - getLogger - Gets or creates the main logger instance
+ * - createChildLogger - Creates a module-specific child logger with separate log file
+ * - logger - Default logger instance for immediate use
+ *
+ * Features:
+ * - Daily log rotation via pino-roll (numeric format: swarm.1log, swarm.2log, etc.)
+ * - 14-day retention
+ * - Module-specific child loggers
+ * - Pretty mode for development (SWARM_LOG_PRETTY=1)
+ * - Logs to ~/.config/swarm-tools/logs/
+ *
+ * @example
+ * ```typescript
+ * import { logger, createChildLogger } from "opencode-swarm-plugin";
+ *
+ * // Use default logger
+ * logger.info("Application started");
+ *
+ * // Create module-specific logger
+ * const compactionLog = createChildLogger("compaction");
+ * compactionLog.info("Compaction started");
+ * ```
+ */
+export { getLogger, createChildLogger, logger } from "./logger";
+
+/**
+ * Re-export swarm-research module
+ *
+ * Includes:
+ * - discoverDocTools - Discover available documentation tools
+ * - getInstalledVersions - Get installed package versions from lockfile
+ * - researchTools - Plugin tools for tool discovery and version detection
+ *
+ * Types:
+ * - DiscoveredTool - Tool discovery result interface
+ * - VersionInfo - Package version information
+ */
+export {
+  discoverDocTools,
+  getInstalledVersions,
+  researchTools,
+  type DiscoveredTool,
+  type VersionInfo,
+} from "./swarm-research";
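With `src/index.ts` now re-exporting the logger alongside the research helpers, consumers can reach both without importing internal paths. A hedged sketch of the logger side, based on the doc comment above and the tests below; the `"review"` child-logger name and the log fields are illustrative only.

```typescript
// Sketch assuming src/index.ts is the published package entry point.
import { logger, createChildLogger } from "opencode-swarm-plugin";

// Default logger writes to ~/.config/swarm-tools/logs/swarm.<n>log
logger.info({ plugin: "opencode-swarm-plugin" }, "plugin loaded");

// Each child logger gets its own rotated file, e.g. logs/review.<n>log;
// "review" and the epic_id value are illustrative.
const reviewLog = createChildLogger("review");
reviewLog.debug({ epic_id: "epic-123" }, "review pass started");
```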
package/src/logger.test.ts
ADDED
@@ -0,0 +1,189 @@
+import { describe, test, expect, beforeEach, afterEach } from "bun:test";
+import { mkdir, rm, readdir } from "node:fs/promises";
+import { existsSync } from "node:fs";
+import { join } from "node:path";
+import { homedir } from "node:os";
+
+describe("Logger Infrastructure", () => {
+  const testLogDir = join(homedir(), ".config", "swarm-tools", "logs-test");
+  let originalEnv: string | undefined;
+
+  beforeEach(async () => {
+    // Clean up test log directory
+    if (existsSync(testLogDir)) {
+      await rm(testLogDir, { recursive: true, force: true });
+    }
+    await mkdir(testLogDir, { recursive: true });
+    originalEnv = process.env.SWARM_LOG_PRETTY;
+
+    // Clear module cache to reset logger instances
+    delete require.cache[require.resolve("./logger")];
+  });
+
+  afterEach(async () => {
+    // Restore environment
+    if (originalEnv !== undefined) {
+      process.env.SWARM_LOG_PRETTY = originalEnv;
+    } else {
+      delete process.env.SWARM_LOG_PRETTY;
+    }
+
+    // Clean up test directory
+    if (existsSync(testLogDir)) {
+      await rm(testLogDir, { recursive: true, force: true });
+    }
+  });
+
+  describe("getLogger", () => {
+    test("returns a valid Pino logger instance", async () => {
+      const { getLogger } = await import("./logger");
+      const logger = getLogger(testLogDir);
+
+      expect(logger).toBeDefined();
+      expect(typeof logger.info).toBe("function");
+      expect(typeof logger.error).toBe("function");
+      expect(typeof logger.debug).toBe("function");
+      expect(typeof logger.warn).toBe("function");
+    });
+
+    test("creates log directory if it doesn't exist", async () => {
+      const newDir = join(testLogDir, "nested", "path");
+      const { getLogger } = await import("./logger");
+
+      getLogger(newDir);
+
+      expect(existsSync(newDir)).toBe(true);
+    });
+
+    test("creates log file with numeric rotation pattern", async () => {
+      const { getLogger } = await import("./logger");
+      const logger = getLogger(testLogDir);
+
+      // Write a log to force file creation
+      logger.info("test message");
+
+      // Wait for async file creation (pino-roll is async)
+      await new Promise((resolve) => setTimeout(resolve, 500));
+
+      const files = await readdir(testLogDir);
+      // pino-roll format: {filename}.{number}log (e.g., swarm.1log)
+      const logFile = files.find((f) => f.match(/^swarm\.\d+log$/));
+
+      expect(logFile).toBeDefined();
+    });
+
+    test("writes log entries to file", async () => {
+      const { getLogger } = await import("./logger");
+      const logger = getLogger(testLogDir);
+
+      logger.info("test log entry");
+      logger.error("test error entry");
+
+      // Wait for async file writes
+      await new Promise((resolve) => setTimeout(resolve, 500));
+
+      const files = await readdir(testLogDir);
+      expect(files.length).toBeGreaterThan(0);
+    });
+  });
+
+  describe("createChildLogger", () => {
+    test("creates child logger with module namespace", async () => {
+      const { getLogger, createChildLogger } = await import("./logger");
+      getLogger(testLogDir); // Initialize main logger
+
+      const childLogger = createChildLogger("compaction", testLogDir);
+
+      expect(childLogger).toBeDefined();
+      expect(typeof childLogger.info).toBe("function");
+    });
+
+    test("child logger writes to module-specific file", async () => {
+      const { getLogger, createChildLogger } = await import("./logger");
+      getLogger(testLogDir);
+
+      const childLogger = createChildLogger("compaction", testLogDir);
+      childLogger.info("compaction test message");
+
+      // Wait for async file writes
+      await new Promise((resolve) => setTimeout(resolve, 500));
+
+      const files = await readdir(testLogDir);
+      // pino-roll format: {module}.{number}log (e.g., compaction.1log)
+      const compactionLog = files.find((f) => f.match(/^compaction\.\d+log$/));
+
+      expect(compactionLog).toBeDefined();
+    });
+
+    test("multiple child loggers write to separate files", async () => {
+      const { getLogger, createChildLogger } = await import("./logger");
+      getLogger(testLogDir);
+
+      const compactionLogger = createChildLogger("compaction", testLogDir);
+      const cliLogger = createChildLogger("cli", testLogDir);
+
+      compactionLogger.info("compaction message");
+      cliLogger.info("cli message");
+
+      // Wait for async file writes
+      await new Promise((resolve) => setTimeout(resolve, 500));
+
+      const files = await readdir(testLogDir);
+      // pino-roll format: {module}.{number}log
+      const compactionLog = files.find((f) => f.match(/^compaction\.\d+log$/));
+      const cliLog = files.find((f) => f.match(/^cli\.\d+log$/));
+
+      expect(compactionLog).toBeDefined();
+      expect(cliLog).toBeDefined();
+    });
+  });
+
+  describe("Pretty mode", () => {
+    test("respects SWARM_LOG_PRETTY=1 environment variable", async () => {
+      process.env.SWARM_LOG_PRETTY = "1";
+
+      // Force reimport to pick up env var
+      delete require.cache[require.resolve("./logger")];
+      const { getLogger } = await import("./logger");
+
+      const logger = getLogger(testLogDir);
+
+      // If pretty mode is enabled, logger should have prettyPrint config
+      // We can't easily inspect Pino internals, but we can verify it doesn't throw
+      expect(logger).toBeDefined();
+      expect(typeof logger.info).toBe("function");
+
+      logger.info("pretty test message");
+    });
+
+    test("works without pretty mode by default", async () => {
+      delete process.env.SWARM_LOG_PRETTY;
+
+      // Force reimport
+      delete require.cache[require.resolve("./logger")];
+      const { getLogger } = await import("./logger");
+
+      const logger = getLogger(testLogDir);
+
+      expect(logger).toBeDefined();
+      logger.info("normal mode message");
+    });
+  });
+
+  describe("Log rotation", () => {
+    test("sets up daily rotation with 14-day retention", async () => {
+      const { getLogger } = await import("./logger");
+      const logger = getLogger(testLogDir);
+
+      // Write logs to trigger rotation setup
+      logger.info("rotation test");
+
+      // Wait for async file creation
+      await new Promise((resolve) => setTimeout(resolve, 500));
+
+      // Verify log file exists (rotation config is internal to pino-roll)
+      const files = await readdir(testLogDir);
+      expect(files.length).toBeGreaterThan(0);
+    });
+  });
+});
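`src/logger.ts` itself does not appear in this excerpt, only its tests. As rough orientation, a hypothetical implementation consistent with those tests might look like the sketch below (lazy singletons, one pino-roll file per module, `SWARM_LOG_PRETTY=1` pretty mode, daily rotation with 14-day retention); the pino-roll option names are assumptions, and the released module may differ.

```typescript
// Hypothetical sketch only - the real src/logger.ts is not shown in this diff.
import { mkdirSync } from "node:fs";
import { join } from "node:path";
import { homedir } from "node:os";
import pino, { type Logger } from "pino";

const DEFAULT_LOG_DIR = join(homedir(), ".config", "swarm-tools", "logs");
const instances = new Map<string, Logger>();

function buildLogger(name: string, dir: string): Logger {
  // Tests expect the directory to exist immediately after getLogger() returns.
  mkdirSync(dir, { recursive: true });

  const pretty = process.env.SWARM_LOG_PRETTY === "1";
  const transport = pino.transport({
    // pino-pretty for development, pino-roll for rotated files; the option
    // names below are assumptions based on pino-roll's documented API.
    target: pretty ? "pino-pretty" : "pino-roll",
    options: pretty
      ? {}
      : {
          file: join(dir, name), // pino-roll appends ".<n>" + extension -> swarm.1log
          extension: "log",
          frequency: "daily",
          mkdir: true,
          limit: { count: 14 }, // ~14-day retention
        },
  });

  return pino({ level: "debug" }, transport);
}

/** Get or lazily create the main "swarm" logger. */
export function getLogger(dir: string = DEFAULT_LOG_DIR): Logger {
  if (!instances.has("swarm")) instances.set("swarm", buildLogger("swarm", dir));
  return instances.get("swarm")!;
}

/** Get or lazily create a module-specific logger with its own log file. */
export function createChildLogger(module: string, dir: string = DEFAULT_LOG_DIR): Logger {
  if (!instances.has(module)) instances.set(module, buildLogger(module, dir));
  return instances.get(module)!;
}

/** Default logger instance for immediate use. */
export const logger = getLogger();
```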