opencode-autognosis 1.0.1 → 2.0.1

This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
@@ -0,0 +1,498 @@
+ import { tool } from "@opencode-ai/plugin";
+ import * as fs from "node:fs/promises";
+ import * as fsSync from "node:fs";
+ import * as path from "node:path";
+ import * as crypto from "node:crypto";
+ const PROJECT_ROOT = process.cwd();
+ const OPENCODE_DIR = path.join(PROJECT_ROOT, ".opencode");
+ const ACTIVESET_DIR = path.join(OPENCODE_DIR, "activesets");
+ // Internal logging
+ function log(message, data) {
+   console.error(`[ActiveSet] ${message}`, data || '');
+ }
+ // =============================================================================
+ // HELPERS
+ // =============================================================================
+ async function ensureActiveSetDir() {
+   await fs.mkdir(ACTIVESET_DIR, { recursive: true });
+ }
+ function generateActiveSetId() {
+   return `activeset-${Date.now()}-${crypto.randomBytes(4).toString("hex")}`;
+ }
+ function calculateContextUsage(chunks) {
+   // Simple calculation - in a real implementation this would consider actual chunk sizes
+   return chunks.length * 100; // Assume 100 tokens per chunk
+ }
+ async function loadActiveSet(setId) {
+   try {
+     const setPath = path.join(ACTIVESET_DIR, `${setId}.json`);
+     if (!fsSync.existsSync(setPath)) {
+       return null;
+     }
+     const content = await fs.readFile(setPath, 'utf-8');
+     return JSON.parse(content);
+   }
+   catch (error) {
+     return null;
+   }
+ }
+ async function saveActiveSet(activeSet) {
+   const setPath = path.join(ACTIVESET_DIR, `${activeSet.id}.json`);
+   await fs.writeFile(setPath, JSON.stringify(activeSet, null, 2));
+ }
+ async function loadWorkingMemory() {
+   const memoryPath = path.join(ACTIVESET_DIR, "working-memory.json");
+   try {
+     if (fsSync.existsSync(memoryPath)) {
+       const content = await fs.readFile(memoryPath, 'utf-8');
+       return JSON.parse(content);
+     }
+   }
+   catch (error) {
+     // Fall through to default
+   }
+   // Default working memory
+   return {
+     current_set: null,
+     history: [],
+     capacity: 10000, // 10k tokens capacity
+     usage: 0
+   };
+ }
+ async function saveWorkingMemory(memory) {
+   const memoryPath = path.join(ACTIVESET_DIR, "working-memory.json");
+   await fs.writeFile(memoryPath, JSON.stringify(memory, null, 2));
+ }
+ // =============================================================================
+ // ACTIVESET MANAGEMENT TOOLS
+ // =============================================================================
+ export function activeSetTools() {
+   return {
+     activeset_create: tool({
+       description: "Create a new ActiveSet for working memory management. Organizes chunk cards for focused context.",
+       args: {
+         name: tool.schema.string().describe("Human-readable name for the ActiveSet"),
+         chunk_ids: tool.schema.array(tool.schema.string()).optional().describe("Initial chunk card IDs to include"),
+         context_window: tool.schema.number().optional().default(4000).describe("Maximum context window size in tokens"),
+         priority: tool.schema.enum(["high", "medium", "low"]).optional().default("medium").describe("Priority level"),
+         description: tool.schema.string().optional().describe("Optional description of the ActiveSet purpose"),
+         tags: tool.schema.array(tool.schema.string()).optional().describe("Optional tags for categorization")
+       },
+       async execute({ name, chunk_ids, context_window, priority, description, tags }) {
+         log("Tool call: activeset_create", { name, chunk_ids, context_window, priority });
+         try {
+           await ensureActiveSetDir();
+           const activeSet = {
+             id: generateActiveSetId(),
+             name,
+             chunks: chunk_ids || [],
+             context_window,
+             priority,
+             created_at: new Date().toISOString(),
+             last_accessed: new Date().toISOString(),
+             metadata: {
+               description,
+               tags,
+               session_id: `session-${Date.now()}`,
+               agent_id: "agent-4"
+             }
+           };
+           // Save the ActiveSet
+           await saveActiveSet(activeSet);
+           // Update working memory
+           const memory = await loadWorkingMemory();
+           memory.history.push({
+             set_id: activeSet.id,
+             action: "created",
+             timestamp: new Date().toISOString()
+           });
+           memory.current_set = activeSet.id;
+           memory.usage = calculateContextUsage(activeSet.chunks);
+           await saveWorkingMemory(memory);
+           return JSON.stringify({
+             status: "SUCCESS",
+             activeset: activeSet,
+             working_memory: {
+               current_set: activeSet.id,
+               usage: memory.usage,
+               capacity: memory.capacity
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_load: tool({
+       description: "Load an existing ActiveSet into working memory. Makes it the current active context.",
+       args: {
+         set_id: tool.schema.string().describe("ActiveSet ID to load"),
+         reset_access_time: tool.schema.boolean().optional().default(true).describe("Update last_accessed timestamp")
+       },
+       async execute({ set_id, reset_access_time }) {
+         log("Tool call: activeset_load", { set_id, reset_access_time });
+         try {
+           const activeSet = await loadActiveSet(set_id);
+           if (!activeSet) {
+             return JSON.stringify({
+               status: "NOT_FOUND",
+               message: `ActiveSet not found: ${set_id}`
+             }, null, 2);
+           }
+           // Update access time if requested
+           if (reset_access_time) {
+             activeSet.last_accessed = new Date().toISOString();
+             await saveActiveSet(activeSet);
+           }
+           // Update working memory
+           const memory = await loadWorkingMemory();
+           memory.history.push({
+             set_id: activeSet.id,
+             action: "loaded",
+             timestamp: new Date().toISOString()
+           });
+           memory.current_set = activeSet.id;
+           memory.usage = calculateContextUsage(activeSet.chunks);
+           await saveWorkingMemory(memory);
+           return JSON.stringify({
+             status: "SUCCESS",
+             activeset: activeSet,
+             working_memory: {
+               current_set: activeSet.id,
+               usage: memory.usage,
+               capacity: memory.capacity
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_add_chunks: tool({
+       description: "Add chunk cards to the current ActiveSet. Manages context window capacity.",
+       args: {
+         chunk_ids: tool.schema.array(tool.schema.string()).describe("Chunk card IDs to add"),
+         set_id: tool.schema.string().optional().describe("Specific ActiveSet ID (uses current if not provided)"),
+         enforce_capacity: tool.schema.boolean().optional().default(true).describe("Enforce context window limits")
+       },
+       async execute({ chunk_ids, set_id, enforce_capacity }) {
+         log("Tool call: activeset_add_chunks", { chunk_ids, set_id, enforce_capacity });
+         try {
+           // Determine which ActiveSet to use
+           const memory = await loadWorkingMemory();
+           const targetSetId = set_id || memory.current_set;
+           if (!targetSetId) {
+             return JSON.stringify({
+               status: "ERROR",
+               message: "No ActiveSet specified and no current ActiveSet loaded"
+             }, null, 2);
+           }
+           const activeSet = await loadActiveSet(targetSetId);
+           if (!activeSet) {
+             return JSON.stringify({
+               status: "NOT_FOUND",
+               message: `ActiveSet not found: ${targetSetId}`
+             }, null, 2);
+           }
+           // Check capacity constraints
+           const newChunks = [...activeSet.chunks, ...chunk_ids];
+           const newUsage = calculateContextUsage(newChunks);
+           if (enforce_capacity && newUsage > activeSet.context_window) {
+             return JSON.stringify({
+               status: "CAPACITY_EXCEEDED",
+               message: `Adding chunks would exceed context window. Current: ${calculateContextUsage(activeSet.chunks)}, Proposed: ${newUsage}, Limit: ${activeSet.context_window}`,
+               current_usage: calculateContextUsage(activeSet.chunks),
+               proposed_usage: newUsage,
+               limit: activeSet.context_window
+             }, null, 2);
+           }
+           // Add chunks (removing duplicates)
+           const uniqueChunks = Array.from(new Set(newChunks));
+           activeSet.chunks = uniqueChunks;
+           activeSet.last_accessed = new Date().toISOString();
+           await saveActiveSet(activeSet);
+           // Update working memory if this is the current set
+           if (memory.current_set === activeSet.id) {
+             memory.usage = calculateContextUsage(activeSet.chunks);
+             memory.history.push({
+               set_id: activeSet.id,
+               action: "updated",
+               timestamp: new Date().toISOString()
+             });
+             await saveWorkingMemory(memory);
+           }
+           return JSON.stringify({
+             status: "SUCCESS",
+             activeset: activeSet,
+             added_chunks: chunk_ids,
+             total_chunks: activeSet.chunks.length,
+             usage: {
+               tokens: calculateContextUsage(activeSet.chunks),
+               window: activeSet.context_window,
+               utilization: `${Math.round((calculateContextUsage(activeSet.chunks) / activeSet.context_window) * 100)}%`
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_remove_chunks: tool({
+       description: "Remove chunk cards from the current ActiveSet.",
+       args: {
+         chunk_ids: tool.schema.array(tool.schema.string()).describe("Chunk card IDs to remove"),
+         set_id: tool.schema.string().optional().describe("Specific ActiveSet ID (uses current if not provided)")
+       },
+       async execute({ chunk_ids, set_id }) {
+         log("Tool call: activeset_remove_chunks", { chunk_ids, set_id });
+         try {
+           // Determine which ActiveSet to use
+           const memory = await loadWorkingMemory();
+           const targetSetId = set_id || memory.current_set;
+           if (!targetSetId) {
+             return JSON.stringify({
+               status: "ERROR",
+               message: "No ActiveSet specified and no current ActiveSet loaded"
+             }, null, 2);
+           }
+           const activeSet = await loadActiveSet(targetSetId);
+           if (!activeSet) {
+             return JSON.stringify({
+               status: "NOT_FOUND",
+               message: `ActiveSet not found: ${targetSetId}`
+             }, null, 2);
+           }
+           // Remove chunks
+           const originalLength = activeSet.chunks.length;
+           activeSet.chunks = activeSet.chunks.filter(chunkId => !chunk_ids.includes(chunkId));
+           activeSet.last_accessed = new Date().toISOString();
+           await saveActiveSet(activeSet);
+           // Update working memory if this is the current set
+           if (memory.current_set === activeSet.id) {
+             memory.usage = calculateContextUsage(activeSet.chunks);
+             memory.history.push({
+               set_id: activeSet.id,
+               action: "updated",
+               timestamp: new Date().toISOString()
+             });
+             await saveWorkingMemory(memory);
+           }
+           return JSON.stringify({
+             status: "SUCCESS",
+             activeset: activeSet,
+             removed_chunks: chunk_ids,
+             removed_count: originalLength - activeSet.chunks.length,
+             remaining_chunks: activeSet.chunks.length,
+             usage: {
+               tokens: calculateContextUsage(activeSet.chunks),
+               window: activeSet.context_window,
+               utilization: `${Math.round((calculateContextUsage(activeSet.chunks) / activeSet.context_window) * 100)}%`
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_get_current: tool({
+       description: "Get the currently loaded ActiveSet and working memory status.",
+       args: {
+         include_chunks: tool.schema.boolean().optional().default(true).describe("Include chunk details in response")
+       },
+       async execute({ include_chunks }) {
+         log("Tool call: activeset_get_current", { include_chunks });
+         try {
+           const memory = await loadWorkingMemory();
+           if (!memory.current_set) {
+             return JSON.stringify({
+               status: "NO_CURRENT_SET",
+               working_memory: memory,
+               message: "No ActiveSet currently loaded"
+             }, null, 2);
+           }
+           const activeSet = await loadActiveSet(memory.current_set);
+           if (!activeSet) {
+             return JSON.stringify({
+               status: "CURRENT_SET_NOT_FOUND",
+               working_memory: memory,
+               message: `Current ActiveSet not found: ${memory.current_set}`
+             }, null, 2);
+           }
+           const response = {
+             status: "SUCCESS",
+             activeset: activeSet,
+             working_memory: memory,
+             usage: {
+               tokens: calculateContextUsage(activeSet.chunks),
+               window: activeSet.context_window,
+               utilization: `${Math.round((calculateContextUsage(activeSet.chunks) / activeSet.context_window) * 100)}%`
+             }
+           };
+           return JSON.stringify(response, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_list: tool({
+       description: "List all available ActiveSets with optional filtering.",
+       args: {
+         priority_filter: tool.schema.enum(["high", "medium", "low"]).optional().describe("Filter by priority level"),
+         limit: tool.schema.number().optional().default(20).describe("Maximum number of sets to return"),
+         include_usage: tool.schema.boolean().optional().default(true).describe("Include usage statistics")
+       },
+       async execute({ priority_filter, limit, include_usage }) {
+         log("Tool call: activeset_list", { priority_filter, limit, include_usage });
+         try {
+           await ensureActiveSetDir();
+           const files = await fs.readdir(ACTIVESET_DIR);
+           const activeSets = [];
+           for (const file of files) {
+             if (file.endsWith('.json') && file !== 'working-memory.json') {
+               try {
+                 const setPath = path.join(ACTIVESET_DIR, file);
+                 const activeSet = JSON.parse(await fs.readFile(setPath, 'utf-8'));
+                 // Apply priority filter if specified
+                 if (priority_filter && activeSet.priority !== priority_filter) {
+                   continue;
+                 }
+                 const setInfo = {
+                   ...activeSet,
+                   usage: include_usage ? {
+                     tokens: calculateContextUsage(activeSet.chunks),
+                     window: activeSet.context_window,
+                     utilization: `${Math.round((calculateContextUsage(activeSet.chunks) / activeSet.context_window) * 100)}%`
+                   } : undefined
+                 };
+                 activeSets.push(setInfo);
+               }
+               catch (error) {
+                 // Skip corrupted files
+                 continue;
+               }
+             }
+           }
+           // Sort by last accessed (most recent first)
+           activeSets.sort((a, b) => new Date(b.last_accessed).getTime() - new Date(a.last_accessed).getTime());
+           // Apply limit
+           const limitedSets = activeSets.slice(0, limit);
+           return JSON.stringify({
+             status: "SUCCESS",
+             activesets: limitedSets,
+             total_count: activeSets.length,
+             returned_count: limitedSets.length,
+             filters: {
+               priority: priority_filter,
+               limit
+             }
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_delete: tool({
+       description: "Delete an ActiveSet. Cannot delete the currently loaded set.",
+       args: {
+         set_id: tool.schema.string().describe("ActiveSet ID to delete")
+       },
+       async execute({ set_id }) {
+         log("Tool call: activeset_delete", { set_id });
+         try {
+           // Check if it's the current set
+           const memory = await loadWorkingMemory();
+           if (memory.current_set === set_id) {
+             return JSON.stringify({
+               status: "ERROR",
+               message: "Cannot delete the currently loaded ActiveSet. Load a different set first."
+             }, null, 2);
+           }
+           const setPath = path.join(ACTIVESET_DIR, `${set_id}.json`);
+           if (!fsSync.existsSync(setPath)) {
+             return JSON.stringify({
+               status: "NOT_FOUND",
+               message: `ActiveSet not found: ${set_id}`
+             }, null, 2);
+           }
+           await fs.unlink(setPath);
+           return JSON.stringify({
+             status: "SUCCESS",
+             message: `ActiveSet deleted: ${set_id}`
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     }),
+     activeset_close: tool({
+       description: "Close the current ActiveSet (unload from working memory).",
+       args: {
+         clear_history: tool.schema.boolean().optional().default(false).describe("Clear working memory history")
+       },
+       async execute({ clear_history }) {
+         log("Tool call: activeset_close", { clear_history });
+         try {
+           const memory = await loadWorkingMemory();
+           if (!memory.current_set) {
+             return JSON.stringify({
+               status: "NO_CURRENT_SET",
+               message: "No ActiveSet currently loaded"
+             }, null, 2);
+           }
+           const closedSetId = memory.current_set;
+           // Update working memory
+           memory.history.push({
+             set_id: closedSetId,
+             action: "closed",
+             timestamp: new Date().toISOString()
+           });
+           memory.current_set = null;
+           memory.usage = 0;
+           if (clear_history) {
+             memory.history = [];
+           }
+           await saveWorkingMemory(memory);
+           return JSON.stringify({
+             status: "SUCCESS",
+             closed_set: closedSetId,
+             working_memory: memory
+           }, null, 2);
+         }
+         catch (error) {
+           return JSON.stringify({
+             status: "ERROR",
+             message: error instanceof Error ? error.message : `${error}`
+           }, null, 2);
+         }
+       }
+     })
+   };
+ }
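
For orientation, here is a minimal sketch (not shipped with the package) of how the tool map above could be exercised directly. It assumes a local import path of ./activesets.js and that calling a tool's execute() handler outside the opencode runtime is acceptable; because the schema layer is bypassed in that case, defaults such as context_window and enforce_capacity are passed explicitly. Each execute() call returns a JSON string.

    // Hypothetical local import path; the published file name may differ.
    import { activeSetTools } from "./activesets.js";

    const tools = activeSetTools();

    // Create a set and make it the current working-memory context.
    const created = JSON.parse(await tools.activeset_create.execute({
      name: "auth-refactor",
      chunk_ids: ["chunk-a", "chunk-b"],
      context_window: 4000,
      priority: "high",
      description: "Chunks for the session-handling refactor",
      tags: ["auth"]
    }));
    console.log(created.working_memory); // { current_set, usage, capacity }

    // Add another chunk to the same set, enforcing the context window.
    const added = JSON.parse(await tools.activeset_add_chunks.execute({
      chunk_ids: ["chunk-c"],
      set_id: created.activeset.id,
      enforce_capacity: true
    }));
    console.log(added.usage); // { tokens, window, utilization }

Both results land under .opencode/activesets/ in the project root, alongside working-memory.json.
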
@@ -0,0 +1,29 @@
1
+ import ts from "typescript";
2
+ export declare const CHUNK_DIR: string;
3
+ export interface ChunkCard {
4
+ id: string;
5
+ file_path: string;
6
+ chunk_type: "summary" | "api" | "invariant";
7
+ content: string;
8
+ metadata: {
9
+ created_at: string;
10
+ updated_at: string;
11
+ hash: string;
12
+ dependencies: string[];
13
+ symbols: string[];
14
+ complexity_score: number;
15
+ };
16
+ }
17
+ export declare function ensureChunkDir(): Promise<void>;
18
+ export declare function calculateHash(content: string): string;
19
+ export declare function calculateComplexity(content: string): number;
20
+ export declare function extractSymbols(content: string, filePath?: string): string[];
21
+ export declare function chunkCardsTools(): {
22
+ [key: string]: any;
23
+ };
24
+ export declare function generateSummaryChunk(content: string, filePath: string, ast: ts.SourceFile | null): Promise<string>;
25
+ export declare function generateApiChunk(content: string, filePath: string, ast: ts.SourceFile | null): Promise<string>;
26
+ export declare function generateInvariantChunk(content: string, filePath: string, ast: ts.SourceFile | null): Promise<string>;
27
+ export declare function extractDependencies(content: string, ast?: ts.SourceFile | null, filePath?: string): Promise<string[]>;
28
+ export declare function parseFileAST(filePath: string, content: string): ts.SourceFile | null;
29
+ export declare function extractSymbolsFromAST(sourceFile: ts.SourceFile | null, content: string): string[] | null;
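
To make the declaration file concrete, here is a minimal, illustrative ChunkCard value that satisfies the interface. The import path and every field value (id format, file path, hash, complexity score) are invented for the example, not prescribed by the package.

    import type { ChunkCard } from "./chunk-cards"; // hypothetical module path

    const card: ChunkCard = {
      id: "chunk-1700000000000-ab12cd34",            // illustrative id format
      file_path: "src/auth/session.ts",
      chunk_type: "api",
      content: "export function createSession(userId: string): Session",
      metadata: {
        created_at: new Date().toISOString(),
        updated_at: new Date().toISOString(),
        hash: "3f2c9a1e",                            // placeholder, not a real digest
        dependencies: ["src/auth/types.ts"],
        symbols: ["createSession"],
        complexity_score: 3
      }
    };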