byterover-cli 3.0.0 → 3.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agent/core/domain/tools/constants.d.ts +1 -0
- package/dist/agent/core/domain/tools/constants.js +1 -0
- package/dist/agent/core/interfaces/cipher-services.d.ts +8 -0
- package/dist/agent/core/interfaces/i-cipher-agent.d.ts +1 -0
- package/dist/agent/infra/agent/agent-error-codes.d.ts +0 -1
- package/dist/agent/infra/agent/agent-error-codes.js +0 -1
- package/dist/agent/infra/agent/agent-error.d.ts +0 -1
- package/dist/agent/infra/agent/agent-error.js +0 -1
- package/dist/agent/infra/agent/agent-state-manager.d.ts +1 -3
- package/dist/agent/infra/agent/agent-state-manager.js +1 -3
- package/dist/agent/infra/agent/base-agent.d.ts +1 -1
- package/dist/agent/infra/agent/base-agent.js +1 -1
- package/dist/agent/infra/agent/cipher-agent.d.ts +15 -1
- package/dist/agent/infra/agent/cipher-agent.js +188 -3
- package/dist/agent/infra/agent/index.d.ts +1 -1
- package/dist/agent/infra/agent/index.js +1 -1
- package/dist/agent/infra/agent/service-initializer.d.ts +3 -3
- package/dist/agent/infra/agent/service-initializer.js +14 -8
- package/dist/agent/infra/agent/types.d.ts +0 -1
- package/dist/agent/infra/file-system/file-system-service.js +6 -5
- package/dist/agent/infra/folder-pack/folder-pack-service.d.ts +1 -0
- package/dist/agent/infra/folder-pack/folder-pack-service.js +29 -15
- package/dist/agent/infra/llm/providers/openai.js +12 -0
- package/dist/agent/infra/llm/stream-to-text.d.ts +7 -0
- package/dist/agent/infra/llm/stream-to-text.js +14 -0
- package/dist/agent/infra/map/abstract-generator.d.ts +22 -0
- package/dist/agent/infra/map/abstract-generator.js +67 -0
- package/dist/agent/infra/map/abstract-queue.d.ts +67 -0
- package/dist/agent/infra/map/abstract-queue.js +218 -0
- package/dist/agent/infra/memory/memory-deduplicator.d.ts +44 -0
- package/dist/agent/infra/memory/memory-deduplicator.js +88 -0
- package/dist/agent/infra/memory/memory-manager.d.ts +1 -0
- package/dist/agent/infra/memory/memory-manager.js +6 -5
- package/dist/agent/infra/sandbox/curate-service.d.ts +4 -2
- package/dist/agent/infra/sandbox/curate-service.js +6 -7
- package/dist/agent/infra/sandbox/local-sandbox.d.ts +5 -0
- package/dist/agent/infra/sandbox/local-sandbox.js +57 -1
- package/dist/agent/infra/sandbox/tools-sdk.d.ts +3 -1
- package/dist/agent/infra/session/session-compressor.d.ts +43 -0
- package/dist/agent/infra/session/session-compressor.js +296 -0
- package/dist/agent/infra/session/session-manager.d.ts +7 -0
- package/dist/agent/infra/session/session-manager.js +9 -0
- package/dist/agent/infra/tools/implementations/curate-tool.d.ts +3 -2
- package/dist/agent/infra/tools/implementations/curate-tool.js +54 -27
- package/dist/agent/infra/tools/implementations/expand-knowledge-tool.d.ts +3 -3
- package/dist/agent/infra/tools/implementations/expand-knowledge-tool.js +34 -7
- package/dist/agent/infra/tools/implementations/ingest-resource-tool.d.ts +17 -0
- package/dist/agent/infra/tools/implementations/ingest-resource-tool.js +224 -0
- package/dist/agent/infra/tools/implementations/memory-symbol-tree.d.ts +8 -0
- package/dist/agent/infra/tools/implementations/search-knowledge-service.d.ts +1 -1
- package/dist/agent/infra/tools/implementations/search-knowledge-service.js +207 -34
- package/dist/agent/infra/tools/implementations/search-knowledge-tool.js +2 -2
- package/dist/agent/infra/tools/tool-provider.js +1 -0
- package/dist/agent/infra/tools/tool-registry.d.ts +3 -0
- package/dist/agent/infra/tools/tool-registry.js +15 -4
- package/dist/server/constants.d.ts +2 -0
- package/dist/server/constants.js +2 -0
- package/dist/server/core/domain/knowledge/memory-scoring.d.ts +3 -3
- package/dist/server/core/domain/knowledge/memory-scoring.js +5 -5
- package/dist/server/core/domain/knowledge/summary-types.d.ts +4 -0
- package/dist/server/core/domain/transport/schemas.d.ts +10 -10
- package/dist/server/infra/context-tree/derived-artifact.js +5 -1
- package/dist/server/infra/context-tree/file-context-tree-manifest-service.d.ts +2 -1
- package/dist/server/infra/context-tree/file-context-tree-manifest-service.js +43 -7
- package/dist/server/infra/context-tree/file-context-tree-summary-service.js +20 -2
- package/dist/server/infra/executor/curate-executor.js +2 -1
- package/dist/server/infra/executor/folder-pack-executor.js +72 -2
- package/dist/server/infra/executor/query-executor.js +11 -3
- package/dist/server/infra/transport/handlers/status-handler.js +10 -0
- package/dist/server/utils/curate-result-parser.d.ts +4 -4
- package/dist/shared/transport/types/dto.d.ts +7 -0
- package/oclif.manifest.json +1 -1
- package/package.json +10 -4
|
@@ -283,6 +283,15 @@ export class SessionManager {
|
|
|
283
283
|
getSession(id) {
|
|
284
284
|
return this.sessions.get(id);
|
|
285
285
|
}
|
|
286
|
+
/**
|
|
287
|
+
* Get the command type (agent name) registered for a session.
|
|
288
|
+
*
|
|
289
|
+
* @param id - Session ID
|
|
290
|
+
* @returns Command type string (e.g. 'curate', 'query') or undefined if not found
|
|
291
|
+
*/
|
|
292
|
+
getSessionCommandType(id) {
|
|
293
|
+
return this.sessionAgentNames.get(id);
|
|
294
|
+
}
|
|
286
295
|
/**
|
|
287
296
|
* Get the number of active sessions.
|
|
288
297
|
*
|
|
@@ -1,5 +1,6 @@
|
|
|
1
1
|
import { z } from 'zod';
|
|
2
2
|
import type { Tool, ToolExecutionContext } from '../../../core/domain/tools/types.js';
|
|
3
|
+
import type { AbstractGenerationQueue } from '../../map/abstract-queue.js';
|
|
3
4
|
/**
|
|
4
5
|
* Operation types for curating knowledge topics.
|
|
5
6
|
* Inspired by ACE Curator patterns.
|
|
@@ -573,6 +574,6 @@ export interface CurateOutput {
|
|
|
573
574
|
* Execute curate operations on knowledge topics.
|
|
574
575
|
* Exported for use by CurateService in sandbox.
|
|
575
576
|
*/
|
|
576
|
-
export declare function executeCurate(input: unknown, _context?: ToolExecutionContext): Promise<CurateOutput>;
|
|
577
|
-
export declare function createCurateTool(workingDirectory?: string): Tool;
|
|
577
|
+
export declare function executeCurate(input: unknown, _context?: ToolExecutionContext, abstractQueue?: AbstractGenerationQueue): Promise<CurateOutput>;
|
|
578
|
+
export declare function createCurateTool(workingDirectory?: string, abstractQueue?: AbstractGenerationQueue): Tool;
|
|
578
579
|
export {};
|
|
@@ -293,7 +293,7 @@ function generateSubtopicContextMarkdown(subtopicName, context) {
|
|
|
293
293
|
}
|
|
294
294
|
return sections.join('\n');
|
|
295
295
|
}
|
|
296
|
-
async function createDomainContextIfMissing(basePath, domain, domainContext) {
|
|
296
|
+
async function createDomainContextIfMissing(basePath, domain, domainContext, onAfterWrite) {
|
|
297
297
|
const normalizedDomain = toSnakeCase(domain);
|
|
298
298
|
const contextPath = join(basePath, normalizedDomain, 'context.md');
|
|
299
299
|
const exists = await DirectoryManager.fileExists(contextPath);
|
|
@@ -305,9 +305,10 @@ async function createDomainContextIfMissing(basePath, domain, domainContext) {
|
|
|
305
305
|
}
|
|
306
306
|
const content = generateDomainContextMarkdown(normalizedDomain, domainContext);
|
|
307
307
|
await DirectoryManager.writeFileAtomic(contextPath, content);
|
|
308
|
+
onAfterWrite?.(contextPath, content);
|
|
308
309
|
return { created: true, path: contextPath };
|
|
309
310
|
}
|
|
310
|
-
async function ensureTopicContextMd(basePath, domain, topic, topicContext) {
|
|
311
|
+
async function ensureTopicContextMd(basePath, domain, topic, topicContext, onAfterWrite) {
|
|
311
312
|
const normalizedDomain = toSnakeCase(domain);
|
|
312
313
|
const normalizedTopic = toSnakeCase(topic);
|
|
313
314
|
const topicPath = join(basePath, normalizedDomain, normalizedTopic);
|
|
@@ -327,6 +328,7 @@ async function ensureTopicContextMd(basePath, domain, topic, topicContext) {
|
|
|
327
328
|
}
|
|
328
329
|
const content = generateTopicContextMarkdown(normalizedTopic, topicContext);
|
|
329
330
|
await DirectoryManager.writeFileAtomic(contextPath, content);
|
|
331
|
+
onAfterWrite?.(contextPath, content);
|
|
330
332
|
return { created: true, path: contextPath };
|
|
331
333
|
}
|
|
332
334
|
/**
|
|
@@ -334,7 +336,7 @@ async function ensureTopicContextMd(basePath, domain, topic, topicContext) {
|
|
|
334
336
|
* Only creates context.md if LLM provides subtopicContext - no static templates.
|
|
335
337
|
*/
|
|
336
338
|
async function ensureSubtopicContextMd(options) {
|
|
337
|
-
const { basePath, domain, subtopic, subtopicContext, topic } = options;
|
|
339
|
+
const { basePath, domain, onAfterWrite, subtopic, subtopicContext, topic } = options;
|
|
338
340
|
const normalizedDomain = toSnakeCase(domain);
|
|
339
341
|
const normalizedTopic = toSnakeCase(topic);
|
|
340
342
|
const normalizedSubtopic = toSnakeCase(subtopic);
|
|
@@ -355,26 +357,43 @@ async function ensureSubtopicContextMd(options) {
|
|
|
355
357
|
}
|
|
356
358
|
const content = generateSubtopicContextMarkdown(normalizedSubtopic, subtopicContext);
|
|
357
359
|
await DirectoryManager.writeFileAtomic(contextPath, content);
|
|
360
|
+
onAfterWrite?.(contextPath, content);
|
|
358
361
|
return { created: true, path: contextPath };
|
|
359
362
|
}
|
|
360
363
|
/**
|
|
361
364
|
* Ensure context.md exists at all levels for a given path (topic and subtopic).
|
|
362
365
|
* This is called during ADD operations to create context.md files with LLM-provided content.
|
|
363
366
|
*/
|
|
364
|
-
async function ensureContextMd(basePath, parsed, topicContext, subtopicContext) {
|
|
367
|
+
async function ensureContextMd(basePath, parsed, topicContext, subtopicContext, onAfterWrite) {
|
|
365
368
|
// Ensure topic-level context.md exists
|
|
366
|
-
await ensureTopicContextMd(basePath, parsed.domain, parsed.topic, topicContext);
|
|
369
|
+
await ensureTopicContextMd(basePath, parsed.domain, parsed.topic, topicContext, onAfterWrite);
|
|
367
370
|
// If subtopic exists, ensure subtopic-level context.md exists
|
|
368
371
|
if (parsed.subtopic) {
|
|
369
372
|
await ensureSubtopicContextMd({
|
|
370
373
|
basePath,
|
|
371
374
|
domain: parsed.domain,
|
|
375
|
+
onAfterWrite,
|
|
372
376
|
subtopic: parsed.subtopic,
|
|
373
377
|
subtopicContext,
|
|
374
378
|
topic: parsed.topic,
|
|
375
379
|
});
|
|
376
380
|
}
|
|
377
381
|
}
|
|
382
|
+
async function deleteDerivedSiblings(contextPath) {
|
|
383
|
+
const siblingPaths = [
|
|
384
|
+
contextPath.replace(/\.md$/, '.abstract.md'),
|
|
385
|
+
contextPath.replace(/\.md$/, '.overview.md'),
|
|
386
|
+
];
|
|
387
|
+
/* eslint-disable no-await-in-loop */
|
|
388
|
+
for (const siblingPath of siblingPaths) {
|
|
389
|
+
if (siblingPath === contextPath)
|
|
390
|
+
continue;
|
|
391
|
+
if (await DirectoryManager.fileExists(siblingPath)) {
|
|
392
|
+
await DirectoryManager.deleteFile(siblingPath);
|
|
393
|
+
}
|
|
394
|
+
}
|
|
395
|
+
/* eslint-enable no-await-in-loop */
|
|
396
|
+
}
|
|
378
397
|
/**
|
|
379
398
|
* Parse a path into domain, topic, and optional subtopic.
|
|
380
399
|
*/
|
|
@@ -432,7 +451,7 @@ function buildFullPath(basePath, knowledgePath) {
|
|
|
432
451
|
/**
|
|
433
452
|
* Execute ADD operation - create new domain/topic/subtopic with {title}.md
|
|
434
453
|
*/
|
|
435
|
-
async function executeAdd(basePath, operation) {
|
|
454
|
+
async function executeAdd(basePath, operation, onAfterWrite) {
|
|
436
455
|
const { confidence, content, domainContext, impact, path, reason, subtopicContext, summary, title, topicContext } = operation;
|
|
437
456
|
const reviewMeta = deriveReviewMetadata('ADD', confidence, impact);
|
|
438
457
|
if (!title) {
|
|
@@ -478,7 +497,7 @@ async function executeAdd(basePath, operation) {
|
|
|
478
497
|
type: 'ADD',
|
|
479
498
|
};
|
|
480
499
|
}
|
|
481
|
-
await createDomainContextIfMissing(basePath, parsed.domain, domainContext);
|
|
500
|
+
await createDomainContextIfMissing(basePath, parsed.domain, domainContext, onAfterWrite);
|
|
482
501
|
const domainPath = join(basePath, toSnakeCase(parsed.domain));
|
|
483
502
|
const topicPath = join(domainPath, toSnakeCase(parsed.topic));
|
|
484
503
|
const finalPath = parsed.subtopic ? join(topicPath, toSnakeCase(parsed.subtopic)) : topicPath;
|
|
@@ -500,7 +519,8 @@ async function executeAdd(basePath, operation) {
|
|
|
500
519
|
const filename = `${toSnakeCase(title)}.md`;
|
|
501
520
|
const contextPath = join(finalPath, filename);
|
|
502
521
|
await DirectoryManager.writeFileAtomic(contextPath, contextContent);
|
|
503
|
-
|
|
522
|
+
onAfterWrite?.(contextPath, contextContent);
|
|
523
|
+
await ensureContextMd(basePath, parsed, topicContext, subtopicContext, onAfterWrite);
|
|
504
524
|
return {
|
|
505
525
|
...reviewMeta,
|
|
506
526
|
filePath: contextPath,
|
|
@@ -533,7 +553,7 @@ function maxImpact(a, b) {
|
|
|
533
553
|
/**
|
|
534
554
|
* Execute UPDATE operation - modify existing {title}.md
|
|
535
555
|
*/
|
|
536
|
-
async function executeUpdate(basePath, operation) {
|
|
556
|
+
async function executeUpdate(basePath, operation, onAfterWrite) {
|
|
537
557
|
const { confidence, content, domainContext, impact, path, reason, subtopicContext, summary, title, topicContext } = operation;
|
|
538
558
|
// Used for early-exit validation failures (before structural loss can be assessed)
|
|
539
559
|
const baseReviewMeta = deriveReviewMetadata('UPDATE', confidence, impact);
|
|
@@ -583,7 +603,7 @@ async function executeUpdate(basePath, operation) {
|
|
|
583
603
|
type: 'UPDATE',
|
|
584
604
|
};
|
|
585
605
|
}
|
|
586
|
-
await createDomainContextIfMissing(basePath, parsed.domain, domainContext);
|
|
606
|
+
await createDomainContextIfMissing(basePath, parsed.domain, domainContext, onAfterWrite);
|
|
587
607
|
// Read existing file to preserve scoring metadata and detect structural loss
|
|
588
608
|
const existingContent = await DirectoryManager.readFile(contextPath);
|
|
589
609
|
const existingScoring = existingContent ? parseFrontmatterScoring(existingContent) : undefined;
|
|
@@ -623,7 +643,8 @@ async function executeUpdate(basePath, operation) {
|
|
|
623
643
|
});
|
|
624
644
|
await backupBeforeWrite(contextPath, basePath);
|
|
625
645
|
await DirectoryManager.writeFileAtomic(contextPath, contextContent);
|
|
626
|
-
|
|
646
|
+
onAfterWrite?.(contextPath, contextContent);
|
|
647
|
+
await ensureContextMd(basePath, parsed, topicContext, subtopicContext, onAfterWrite);
|
|
627
648
|
return {
|
|
628
649
|
...reviewMeta,
|
|
629
650
|
filePath: contextPath,
|
|
@@ -651,7 +672,7 @@ async function executeUpdate(basePath, operation) {
|
|
|
651
672
|
* Execute UPSERT operation - automatically creates or updates based on file existence
|
|
652
673
|
* This is the recommended operation type as it eliminates the need for pre-checks.
|
|
653
674
|
*/
|
|
654
|
-
async function executeUpsert(basePath, operation) {
|
|
675
|
+
async function executeUpsert(basePath, operation, onAfterWrite) {
|
|
655
676
|
const { path, reason, title } = operation;
|
|
656
677
|
const reviewMeta = deriveReviewMetadata('UPSERT', operation.confidence, operation.impact);
|
|
657
678
|
if (!title) {
|
|
@@ -693,7 +714,7 @@ async function executeUpsert(basePath, operation) {
|
|
|
693
714
|
const exists = await DirectoryManager.fileExists(contextPath);
|
|
694
715
|
if (exists) {
|
|
695
716
|
// File exists - delegate to UPDATE logic
|
|
696
|
-
const result = await executeUpdate(basePath, { ...operation, type: 'UPDATE' });
|
|
717
|
+
const result = await executeUpdate(basePath, { ...operation, type: 'UPDATE' }, onAfterWrite);
|
|
697
718
|
// Return with UPSERT type but indicate it was an update
|
|
698
719
|
return {
|
|
699
720
|
...result,
|
|
@@ -702,7 +723,7 @@ async function executeUpsert(basePath, operation) {
|
|
|
702
723
|
};
|
|
703
724
|
}
|
|
704
725
|
// File doesn't exist - delegate to ADD logic
|
|
705
|
-
const result = await executeAdd(basePath, { ...operation, type: 'ADD' });
|
|
726
|
+
const result = await executeAdd(basePath, { ...operation, type: 'ADD' }, onAfterWrite);
|
|
706
727
|
// Return with UPSERT type but indicate it was an add
|
|
707
728
|
return {
|
|
708
729
|
...result,
|
|
@@ -724,7 +745,7 @@ async function executeUpsert(basePath, operation) {
|
|
|
724
745
|
/**
|
|
725
746
|
* Execute MERGE operation - combine source file into target file, delete source file
|
|
726
747
|
*/
|
|
727
|
-
async function executeMerge(basePath, operation) {
|
|
748
|
+
async function executeMerge(basePath, operation, onAfterWrite) {
|
|
728
749
|
const { confidence, domainContext, impact, mergeTarget, mergeTargetTitle, path, reason, subtopicContext, summary, title, topicContext, } = operation;
|
|
729
750
|
const reviewMeta = deriveReviewMetadata('MERGE', confidence, impact);
|
|
730
751
|
if (!title) {
|
|
@@ -798,8 +819,8 @@ async function executeMerge(basePath, operation) {
|
|
|
798
819
|
type: 'MERGE',
|
|
799
820
|
};
|
|
800
821
|
}
|
|
801
|
-
await createDomainContextIfMissing(basePath, sourceParsed.domain, domainContext);
|
|
802
|
-
await createDomainContextIfMissing(basePath, targetParsed.domain, domainContext);
|
|
822
|
+
await createDomainContextIfMissing(basePath, sourceParsed.domain, domainContext, onAfterWrite);
|
|
823
|
+
await createDomainContextIfMissing(basePath, targetParsed.domain, domainContext, onAfterWrite);
|
|
803
824
|
const sourceContent = await DirectoryManager.readFile(sourceContextPath);
|
|
804
825
|
const targetContent = await DirectoryManager.readFile(targetContextPath);
|
|
805
826
|
// Extract previous summary from target file (for review UI)
|
|
@@ -810,9 +831,11 @@ async function executeMerge(basePath, operation) {
|
|
|
810
831
|
await backupBeforeWrite(sourceContextPath, basePath);
|
|
811
832
|
const mergedContent = MarkdownWriter.mergeContexts(sourceContent, targetContent, reason, summary);
|
|
812
833
|
await DirectoryManager.writeFileAtomic(targetContextPath, mergedContent);
|
|
834
|
+
onAfterWrite?.(targetContextPath, mergedContent);
|
|
813
835
|
await DirectoryManager.deleteFile(sourceContextPath);
|
|
814
|
-
await
|
|
815
|
-
await ensureContextMd(basePath,
|
|
836
|
+
await deleteDerivedSiblings(sourceContextPath);
|
|
837
|
+
await ensureContextMd(basePath, sourceParsed, topicContext, subtopicContext, onAfterWrite);
|
|
838
|
+
await ensureContextMd(basePath, targetParsed, topicContext, subtopicContext, onAfterWrite);
|
|
816
839
|
return {
|
|
817
840
|
...reviewMeta,
|
|
818
841
|
additionalFilePaths: [sourceContextPath],
|
|
@@ -874,6 +897,7 @@ async function executeDelete(basePath, operation) {
|
|
|
874
897
|
}
|
|
875
898
|
await backupBeforeWrite(filePath, basePath);
|
|
876
899
|
await DirectoryManager.deleteFile(filePath);
|
|
900
|
+
await deleteDerivedSiblings(filePath);
|
|
877
901
|
return {
|
|
878
902
|
...reviewMeta,
|
|
879
903
|
filePath,
|
|
@@ -948,7 +972,7 @@ async function executeDelete(basePath, operation) {
|
|
|
948
972
|
* Execute curate operations on knowledge topics.
|
|
949
973
|
* Exported for use by CurateService in sandbox.
|
|
950
974
|
*/
|
|
951
|
-
export async function executeCurate(input, _context) {
|
|
975
|
+
export async function executeCurate(input, _context, abstractQueue) {
|
|
952
976
|
const parseResult = CurateInputSchema.safeParse(input);
|
|
953
977
|
if (!parseResult.success) {
|
|
954
978
|
return {
|
|
@@ -974,6 +998,9 @@ export async function executeCurate(input, _context) {
|
|
|
974
998
|
};
|
|
975
999
|
}
|
|
976
1000
|
const { basePath, operations } = parseResult.data;
|
|
1001
|
+
const onAfterWrite = abstractQueue
|
|
1002
|
+
? (contextPath, content) => { abstractQueue.enqueue({ contextPath, fullContent: content }); }
|
|
1003
|
+
: undefined;
|
|
977
1004
|
const applied = [];
|
|
978
1005
|
const summary = {
|
|
979
1006
|
added: 0,
|
|
@@ -987,7 +1014,7 @@ export async function executeCurate(input, _context) {
|
|
|
987
1014
|
let result;
|
|
988
1015
|
switch (operation.type) {
|
|
989
1016
|
case 'ADD': {
|
|
990
|
-
result = await executeAdd(basePath, operation);
|
|
1017
|
+
result = await executeAdd(basePath, operation, onAfterWrite);
|
|
991
1018
|
if (result.status === 'success')
|
|
992
1019
|
summary.added++;
|
|
993
1020
|
break;
|
|
@@ -999,19 +1026,19 @@ export async function executeCurate(input, _context) {
|
|
|
999
1026
|
break;
|
|
1000
1027
|
}
|
|
1001
1028
|
case 'MERGE': {
|
|
1002
|
-
result = await executeMerge(basePath, operation);
|
|
1029
|
+
result = await executeMerge(basePath, operation, onAfterWrite);
|
|
1003
1030
|
if (result.status === 'success')
|
|
1004
1031
|
summary.merged++;
|
|
1005
1032
|
break;
|
|
1006
1033
|
}
|
|
1007
1034
|
case 'UPDATE': {
|
|
1008
|
-
result = await executeUpdate(basePath, operation);
|
|
1035
|
+
result = await executeUpdate(basePath, operation, onAfterWrite);
|
|
1009
1036
|
if (result.status === 'success')
|
|
1010
1037
|
summary.updated++;
|
|
1011
1038
|
break;
|
|
1012
1039
|
}
|
|
1013
1040
|
case 'UPSERT': {
|
|
1014
|
-
result = await executeUpsert(basePath, operation);
|
|
1041
|
+
result = await executeUpsert(basePath, operation, onAfterWrite);
|
|
1015
1042
|
// UPSERT counts as either added or updated based on what happened
|
|
1016
1043
|
if (result.status === 'success') {
|
|
1017
1044
|
if (result.message?.includes('created new')) {
|
|
@@ -1046,7 +1073,7 @@ export async function executeCurate(input, _context) {
|
|
|
1046
1073
|
/* eslint-enable no-await-in-loop */
|
|
1047
1074
|
return { applied, summary };
|
|
1048
1075
|
}
|
|
1049
|
-
export function createCurateTool(workingDirectory) {
|
|
1076
|
+
export function createCurateTool(workingDirectory, abstractQueue) {
|
|
1050
1077
|
return {
|
|
1051
1078
|
description: `Curate knowledge topics with atomic operations. This tool manages the knowledge structure using four operation types and supports a two-part context model: Raw Concept + Narrative.
|
|
1052
1079
|
|
|
@@ -1246,10 +1273,10 @@ export function createCurateTool(workingDirectory) {
|
|
|
1246
1273
|
const parseResult = CurateInputSchema.safeParse(input);
|
|
1247
1274
|
if (parseResult.success) {
|
|
1248
1275
|
parseResult.data.basePath = resolve(workingDirectory, parseResult.data.basePath);
|
|
1249
|
-
return executeCurate(parseResult.data, context);
|
|
1276
|
+
return executeCurate(parseResult.data, context, abstractQueue);
|
|
1250
1277
|
}
|
|
1251
1278
|
}
|
|
1252
|
-
return executeCurate(input, context);
|
|
1279
|
+
return executeCurate(input, context, abstractQueue);
|
|
1253
1280
|
},
|
|
1254
1281
|
id: ToolName.CURATE,
|
|
1255
1282
|
inputSchema: CurateInputSchema,
|
|
@@ -8,9 +8,9 @@ export interface ExpandKnowledgeToolConfig {
|
|
|
8
8
|
/**
|
|
9
9
|
* Creates the expand knowledge tool.
|
|
10
10
|
*
|
|
11
|
-
*
|
|
12
|
-
*
|
|
13
|
-
*
|
|
11
|
+
* Two modes:
|
|
12
|
+
* - stubPath: retrieves full content from archived knowledge entries (archive drill-down)
|
|
13
|
+
* - overviewPath: retrieves L1 overview content from .overview.md sibling files
|
|
14
14
|
*
|
|
15
15
|
* @param config - Optional configuration
|
|
16
16
|
* @returns Configured expand knowledge tool
|
|
@@ -1,24 +1,36 @@
|
|
|
1
|
+
import { readFile } from 'node:fs/promises';
|
|
2
|
+
import { join } from 'node:path';
|
|
1
3
|
import { z } from 'zod';
|
|
4
|
+
import { BRV_DIR, CONTEXT_TREE_DIR } from '../../../../server/constants.js';
|
|
2
5
|
import { FileContextTreeArchiveService } from '../../../../server/infra/context-tree/file-context-tree-archive-service.js';
|
|
6
|
+
import { estimateTokens } from '../../../../server/infra/executor/pre-compaction/compaction-escalation.js';
|
|
3
7
|
import { ToolName } from '../../../core/domain/tools/constants.js';
|
|
4
8
|
/**
|
|
5
9
|
* Input schema for expand knowledge tool.
|
|
10
|
+
* Accepts either a stubPath (archive drill-down) or an overviewPath (L1 overview retrieval).
|
|
6
11
|
*/
|
|
7
12
|
const ExpandKnowledgeInputSchema = z
|
|
8
13
|
.object({
|
|
14
|
+
overviewPath: z
|
|
15
|
+
.string()
|
|
16
|
+
.min(1)
|
|
17
|
+
.describe('Path to the .overview.md file (relative to context tree). ' +
|
|
18
|
+
'This is the `overviewPath` field from search results.')
|
|
19
|
+
.optional(),
|
|
9
20
|
stubPath: z
|
|
10
21
|
.string()
|
|
11
22
|
.min(1)
|
|
12
23
|
.describe('Path to the .stub.md file in _archived/. ' +
|
|
13
|
-
'This is the `path` field from search results where symbolKind === "archive_stub".')
|
|
24
|
+
'This is the `path` field from search results where symbolKind === "archive_stub".')
|
|
25
|
+
.optional(),
|
|
14
26
|
})
|
|
15
|
-
.
|
|
27
|
+
.refine((data) => (data.stubPath !== undefined) !== (data.overviewPath !== undefined), { message: 'Exactly one of stubPath or overviewPath must be provided' });
|
|
16
28
|
/**
|
|
17
29
|
* Creates the expand knowledge tool.
|
|
18
30
|
*
|
|
19
|
-
*
|
|
20
|
-
*
|
|
21
|
-
*
|
|
31
|
+
* Two modes:
|
|
32
|
+
* - stubPath: retrieves full content from archived knowledge entries (archive drill-down)
|
|
33
|
+
* - overviewPath: retrieves L1 overview content from .overview.md sibling files
|
|
22
34
|
*
|
|
23
35
|
* @param config - Optional configuration
|
|
24
36
|
* @returns Configured expand knowledge tool
|
|
@@ -26,10 +38,25 @@ const ExpandKnowledgeInputSchema = z
|
|
|
26
38
|
export function createExpandKnowledgeTool(config = {}) {
|
|
27
39
|
const archiveService = new FileContextTreeArchiveService();
|
|
28
40
|
return {
|
|
29
|
-
description: 'Retrieve full content from archived knowledge entries. ' +
|
|
30
|
-
'Use when search results include an archive_stub that you need to drill into.'
|
|
41
|
+
description: 'Retrieve full content from archived knowledge entries or L1 overview files. ' +
|
|
42
|
+
'Use stubPath when search results include an archive_stub that you need to drill into. ' +
|
|
43
|
+
'Use overviewPath to retrieve the structured overview for a context entry.',
|
|
31
44
|
async execute(input, _context) {
|
|
32
45
|
const parsed = ExpandKnowledgeInputSchema.parse(input);
|
|
46
|
+
if (parsed.overviewPath) {
|
|
47
|
+
const baseDir = config.baseDirectory ?? process.cwd();
|
|
48
|
+
const fullPath = join(baseDir, BRV_DIR, CONTEXT_TREE_DIR, parsed.overviewPath);
|
|
49
|
+
const overviewContent = await readFile(fullPath, 'utf8');
|
|
50
|
+
const originalPath = parsed.overviewPath.replace(/\.overview\.md$/, '.md');
|
|
51
|
+
return {
|
|
52
|
+
originalPath,
|
|
53
|
+
overviewContent,
|
|
54
|
+
tokenCount: estimateTokens(overviewContent),
|
|
55
|
+
};
|
|
56
|
+
}
|
|
57
|
+
if (!parsed.stubPath) {
|
|
58
|
+
throw new Error('stubPath is required when overviewPath is not provided');
|
|
59
|
+
}
|
|
33
60
|
const result = await archiveService.drillDown(parsed.stubPath, config.baseDirectory);
|
|
34
61
|
return {
|
|
35
62
|
fullContent: result.fullContent,
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import type { Tool } from '../../../core/domain/tools/types.js';
|
|
2
|
+
import type { IContentGenerator } from '../../../core/interfaces/i-content-generator.js';
|
|
3
|
+
import type { IFileSystem } from '../../../core/interfaces/i-file-system.js';
|
|
4
|
+
import type { AbstractGenerationQueue } from '../../map/abstract-queue.js';
|
|
5
|
+
export interface IngestResourceConfig {
|
|
6
|
+
abstractQueue?: AbstractGenerationQueue;
|
|
7
|
+
baseDirectory?: string;
|
|
8
|
+
contentGenerator?: IContentGenerator;
|
|
9
|
+
fileSystem?: IFileSystem;
|
|
10
|
+
}
|
|
11
|
+
/**
|
|
12
|
+
* Creates the ingest_resource tool.
|
|
13
|
+
*
|
|
14
|
+
* Bulk-ingests files from a directory into the knowledge context tree.
|
|
15
|
+
* Glob → Read → LLM extraction → Curate pipeline.
|
|
16
|
+
*/
|
|
17
|
+
export declare function createIngestResourceTool(config?: IngestResourceConfig): Tool;
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
import { realpath } from 'node:fs/promises';
|
|
2
|
+
import { basename, isAbsolute, join, relative, resolve } from 'node:path';
|
|
3
|
+
import { z } from 'zod';
|
|
4
|
+
import { BRV_DIR, CONTEXT_TREE_DIR } from '../../../../server/constants.js';
|
|
5
|
+
import { ToolName } from '../../../core/domain/tools/constants.js';
|
|
6
|
+
import { executeLlmMapMemory } from '../../map/llm-map-memory.js';
|
|
7
|
+
import { executeCurate } from './curate-tool.js';
|
|
8
|
+
const DEFAULT_INCLUDE = ['**/*.ts', '**/*.md', '**/*.json', '**/*.js', '**/*.py', '**/*.go', '**/*.rs'];
|
|
9
|
+
const DEFAULT_EXCLUDE = ['node_modules', '.git', '*.test.*', '*.spec.*', 'dist', 'build'];
|
|
10
|
+
const MAX_FILES = 200;
|
|
11
|
+
const MAX_FILE_LINES = 500;
|
|
12
|
+
const MAX_CONTENT_CHARS = 4000;
|
|
13
|
+
function toRelativeUnixPath(rootPath, filePath) {
|
|
14
|
+
const relativePath = isAbsolute(filePath) ? relative(rootPath, filePath) : filePath;
|
|
15
|
+
return relativePath.replaceAll('\\', '/');
|
|
16
|
+
}
|
|
17
|
+
function matchesExcludePattern(relativePath, pattern) {
|
|
18
|
+
const normalizedPath = relativePath.replaceAll('\\', '/');
|
|
19
|
+
const normalizedPattern = pattern.replaceAll('\\', '/');
|
|
20
|
+
if (!normalizedPattern.includes('*')) {
|
|
21
|
+
return normalizedPath.split('/').includes(normalizedPattern);
|
|
22
|
+
}
|
|
23
|
+
const regexPattern = normalizedPattern
|
|
24
|
+
.replaceAll('.', String.raw `\.`)
|
|
25
|
+
.replaceAll('**', '<<<DOUBLESTAR>>>')
|
|
26
|
+
.replaceAll('*', '[^/]*')
|
|
27
|
+
.replaceAll('<<<DOUBLESTAR>>>', '.*');
|
|
28
|
+
return new RegExp(`^${regexPattern}$|/${regexPattern}$|^${regexPattern}/|/${regexPattern}/`).test(normalizedPath);
|
|
29
|
+
}
|
|
30
|
+
function getDirectoryDepth(relativePath) {
|
|
31
|
+
if (!relativePath)
|
|
32
|
+
return 0;
|
|
33
|
+
return Math.max(0, relativePath.split('/').length - 1);
|
|
34
|
+
}
|
|
35
|
+
function extractHeading(content) {
|
|
36
|
+
const headingMatch = content.match(/^#\s+(.+)$/m);
|
|
37
|
+
return headingMatch?.[1]?.trim();
|
|
38
|
+
}
|
|
39
|
+
function normalizeInlineText(content) {
|
|
40
|
+
return content.replaceAll(/\s+/g, ' ').trim();
|
|
41
|
+
}
|
|
42
|
+
function buildFallbackHighlights(content) {
|
|
43
|
+
const lines = content
|
|
44
|
+
.split('\n')
|
|
45
|
+
.map((line) => line.trim())
|
|
46
|
+
.filter(Boolean)
|
|
47
|
+
.slice(0, 8);
|
|
48
|
+
return lines.length > 0 ? lines.join('\n') : undefined;
|
|
49
|
+
}
|
|
50
|
+
function getIngestTarget(filePath) {
|
|
51
|
+
const pathSegments = filePath.split('/');
|
|
52
|
+
const fileBaseName = pathSegments.at(-1)?.replace(/\.[^.]+$/, '') ?? 'unknown';
|
|
53
|
+
const topic = pathSegments.length > 1 ? (pathSegments.at(-2) ?? fileBaseName) : fileBaseName;
|
|
54
|
+
return { fileBaseName, topic };
|
|
55
|
+
}
|
|
56
|
+
function buildFallbackFacts(file) {
|
|
57
|
+
const { fileBaseName } = getIngestTarget(file.path);
|
|
58
|
+
const heading = extractHeading(file.content) ?? fileBaseName;
|
|
59
|
+
const preview = normalizeInlineText(file.content).slice(0, 220);
|
|
60
|
+
const subject = normalizeInlineText(heading).toLowerCase().replaceAll(/[^a-z0-9]+/g, '_').replaceAll(/^_+|_+$/g, '');
|
|
61
|
+
return [{
|
|
62
|
+
statement: preview.length > 0
|
|
63
|
+
? `${heading} is captured in ${file.path}: ${preview}`
|
|
64
|
+
: `${heading} is captured in ${file.path}.`,
|
|
65
|
+
...(subject.length > 0 && { subject }),
|
|
66
|
+
}];
|
|
67
|
+
}
|
|
68
|
+
/**
 * Converts one file plus its (possibly null) LLM-extracted facts into a
 * curate ADD operation targeting `${domain}/${topic}`.
 *
 * With at least one non-blank fact: high-confidence operation whose narrative
 * highlights render each fact as "**subject**: statement".
 * Otherwise: low-confidence fallback built from buildFallbackFacts /
 * buildFallbackHighlights, carrying the raw content as a snippet.
 * Returns null only when neither fallback source yields anything.
 */
function buildOperation(domain, sourceRoot, file, facts) {
    const { fileBaseName, topic } = getIngestTarget(file.path);
    const sourcePath = join(sourceRoot, file.path);
    const usableFacts = (facts ?? []).filter((fact) => fact.statement.trim().length > 0);
    if (usableFacts.length > 0) {
        const highlights = usableFacts
            .map((fact) => `**${fact.subject ?? 'Concept'}**: ${fact.statement}`)
            .join('\n\n');
        return {
            confidence: 'high',
            content: {
                narrative: { highlights },
                rawConcept: { files: [sourcePath] },
            },
            impact: 'low',
            path: `${domain}/${topic}`,
            reason: `Ingested from ${file.path}`,
            title: fileBaseName,
            type: 'ADD',
        };
    }
    const fallbackFacts = buildFallbackFacts(file);
    const fallbackHighlights = buildFallbackHighlights(file.content);
    if (fallbackFacts.length === 0 && !fallbackHighlights) {
        return null;
    }
    return {
        confidence: 'low',
        content: {
            facts: fallbackFacts,
            // Conditional spread keeps the key insertion order stable.
            ...(fallbackHighlights ? { narrative: { highlights: fallbackHighlights } } : {}),
            rawConcept: { files: [sourcePath] },
            snippets: [file.content],
        },
        impact: 'low',
        path: `${domain}/${topic}`,
        reason: `Fallback ingest from ${file.path}`,
        title: fileBaseName,
        type: 'ADD',
    };
}
|
|
107
|
+
// Zod input schema for the ingest_resource tool. `.strict()` rejects any
// properties not declared here, so misspelled options fail at parse time
// instead of being silently ignored.
const IngestResourceInputSchema = z
    .object({
        depth: z.number().int().min(1).max(5).optional().default(3).describe('Maximum directory depth to scan (default: 3, max: 5)'),
        domain: z.string().optional().describe('Target knowledge domain (default: inferred from directory name)'),
        exclude: z.array(z.string()).optional().describe('Glob patterns to exclude (default: node_modules, .git, *.test.*, dist, build)'),
        include: z.array(z.string()).optional().describe('Glob patterns to include (default: *.ts, *.md, *.json, etc.)'),
        path: z.string().min(1).describe('Directory path to ingest files from'),
    })
    .strict();
|
|
116
|
+
/**
 * Creates the ingest_resource tool.
 *
 * Bulk-ingests files from a directory into the knowledge context tree.
 * Glob → Read → LLM extraction → Curate pipeline.
 *
 * @param config - injected services: contentGenerator (LLM), fileSystem,
 *   optional baseDirectory (workspace root) and abstractQueue (curate hook).
 *   contentGenerator and fileSystem are required at execute() time.
 */
export function createIngestResourceTool(config = {}) {
    return {
        description: 'Bulk-ingest files from a directory into the knowledge context tree. ' +
            'Globs files, reads contents, extracts knowledge via LLM, and adds to context tree. ' +
            'Use for one-shot import of documentation, source files, or configuration.',
        async execute(input, context) {
            // Validates and applies schema defaults (e.g. depth: 3); throws on
            // unknown or invalid options because the schema is `.strict()`.
            const params = IngestResourceInputSchema.parse(input);
            const { abstractQueue, baseDirectory, contentGenerator, fileSystem } = config;
            if (!contentGenerator || !fileSystem) {
                throw new Error('ingest_resource requires contentGenerator and fileSystemService');
            }
            // Normalize to absolute using the injected workspace root so relative inputs like './src'
            // resolve against the project directory, not the agent process cwd.
            const absPath = resolve(baseDirectory ?? process.cwd(), params.path);
            // Resolve symlinks; fall back to the unresolved path if realpath fails
            // (e.g. the directory does not exist yet) rather than erroring here.
            const normalizedAbsPath = await realpath(absPath).catch(() => absPath);
            // Domain defaults to the directory name; 'imported' covers the
            // edge case where basename() is empty (e.g. filesystem root).
            const domain = params.domain ?? (basename(absPath) || 'imported');
            const include = params.include ?? DEFAULT_INCLUDE;
            const exclude = params.exclude ?? DEFAULT_EXCLUDE;
            // Step 1: Glob files — collect unique paths across all include patterns
            const seenPaths = new Set();
            const rawPaths = [];
            /* eslint-disable no-await-in-loop */
            for (const pattern of include) {
                const globResult = await fileSystem.globFiles(pattern, {
                    cwd: normalizedAbsPath,
                    maxResults: MAX_FILES,
                    respectGitignore: true,
                });
                for (const file of globResult.files) {
                    const relativePath = toRelativeUnixPath(normalizedAbsPath, file.path);
                    // Reject results that escape the target directory.
                    if (relativePath.startsWith('../'))
                        continue;
                    if (getDirectoryDepth(relativePath) > params.depth)
                        continue;
                    const excluded = exclude.some((excludePattern) => matchesExcludePattern(relativePath, excludePattern));
                    if (!excluded && !seenPaths.has(file.path)) {
                        seenPaths.add(file.path);
                        rawPaths.push(file.path);
                    }
                }
            }
            /* eslint-enable no-await-in-loop */
            // Step 2: Read file contents (limit to MAX_FILES)
            const fileItems = [];
            /* eslint-disable no-await-in-loop */
            for (const filePath of rawPaths.slice(0, MAX_FILES)) {
                try {
                    const { content } = await fileSystem.readFile(filePath, { limit: MAX_FILE_LINES });
                    if (content.trim()) {
                        const relativePath = toRelativeUnixPath(normalizedAbsPath, filePath);
                        // Re-checked here in case readFile resolved a symlink
                        // outside the root — NOTE(review): presumed intent; confirm.
                        if (relativePath.startsWith('../'))
                            continue;
                        // Cap content size so a single large file cannot blow
                        // up the LLM extraction payload.
                        fileItems.push({ content: content.slice(0, MAX_CONTENT_CHARS), path: relativePath });
                    }
                }
                catch {
                    // Skip unreadable files
                }
            }
            /* eslint-enable no-await-in-loop */
            if (fileItems.length === 0) {
                return { domains: [domain], failed: 0, ingested: 0, queued: 0 };
            }
            // Step 3: LLM extraction via executeLlmMapMemory
            const mapResult = await executeLlmMapMemory({
                // Up to 4 concurrent extractions, never more than the file count.
                concurrency: Math.min(4, Math.max(1, fileItems.length)),
                generator: contentGenerator,
                items: fileItems.map((f) => ({ content: f.content, path: f.path })),
                prompt: 'Extract 1-5 concrete reusable knowledge facts from the file provided in the map details below. ' +
                    'Focus on APIs, invariants, workflows, configuration, constraints, and implementation semantics. ' +
                    'Each fact should be a terse technical statement.',
                taskId: context?.taskId,
            });
            // Step 4: Convert to CurateOperations (fail-open: use whichever results are available)
            // Results are matched to files by index — assumes executeLlmMapMemory
            // preserves input order; TODO confirm against its contract.
            const operations = [];
            let unresolvedCount = 0;
            for (const [i, file] of fileItems.entries()) {
                const operation = buildOperation(domain, normalizedAbsPath, file, mapResult.results[i] ?? null);
                if (operation) {
                    operations.push(operation);
                }
                else {
                    unresolvedCount++;
                }
            }
            if (operations.length === 0) {
                return { domains: [domain], failed: fileItems.length, ingested: 0, queued: 0 };
            }
            // Step 5: Run curate pipeline with abstract queue hook.
            // basePath must point to the knowledge store (.brv/context-tree), not the workspace root.
            const contextTreePath = join(baseDirectory ?? process.cwd(), BRV_DIR, CONTEXT_TREE_DIR);
            const curateResult = await executeCurate({ basePath: contextTreePath, operations }, context, abstractQueue);
            return {
                domains: [domain],
                failed: unresolvedCount,
                ingested: curateResult.summary.added + curateResult.summary.updated,
                // `queued` reports operations handed to the abstract queue, 0 when none was provided.
                queued: abstractQueue ? operations.length : 0,
            };
        },
        id: ToolName.INGEST_RESOURCE,
        inputSchema: IngestResourceInputSchema,
    };
}
|
|
@@ -44,8 +44,16 @@ export interface SymbolMetadata {
|
|
|
44
44
|
*/
|
|
45
45
|
export interface SummaryDocLike {
    // NOTE(review): presumably the position of this doc in the condensation
    // sequence — confirm against the condensation pipeline.
    condensationOrder: number;
    /** First 400 chars of _index.md content, used as excerpt for propagated parent hits */
    excerpt?: string;
    /** Path to the _index.md file, e.g. "domain/topic/_index.md" */
    path: string;
    /** Frontmatter scoring parsed from _index.md — used to apply hotness/importance to propagated hits */
    scoring?: {
        importance?: number;
        maturity?: string;
        recency?: number;
    };
    // NOTE(review): looks like the token count of the summary content —
    // confirm which tokenizer/content this is measured against.
    tokenCount: number;
}
|
|
51
59
|
/**
|
|
@@ -44,7 +44,7 @@ export declare class SearchKnowledgeService implements ISearchKnowledgeService {
|
|
|
44
44
|
* Called during index rebuild to batch writes and avoid write amplification.
|
|
45
45
|
* Best-effort: errors are swallowed per file.
|
|
46
46
|
*/
|
|
47
|
-
flushAccessHits(contextTreePath: string): Promise<
|
|
47
|
+
flushAccessHits(contextTreePath: string): Promise<boolean>;
|
|
48
48
|
/**
|
|
49
49
|
* Search the knowledge base for relevant topics.
|
|
50
50
|
* Supports symbolic path queries, scoped search, kind/maturity filtering, and overview mode.
|