@illuma-ai/agents 1.4.0-alpha.0 → 1.4.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47) hide show
  1. package/dist/cjs/main.cjs +20 -10
  2. package/dist/cjs/main.cjs.map +1 -1
  3. package/dist/cjs/tools/fileSearch/formatter.cjs +95 -0
  4. package/dist/cjs/tools/fileSearch/formatter.cjs.map +1 -0
  5. package/dist/cjs/tools/fileSearch/ragClient.cjs +104 -0
  6. package/dist/cjs/tools/fileSearch/ragClient.cjs.map +1 -0
  7. package/dist/cjs/tools/fileSearch/schema.cjs +18 -0
  8. package/dist/cjs/tools/fileSearch/schema.cjs.map +1 -0
  9. package/dist/cjs/tools/fileSearch/tool.cjs +155 -0
  10. package/dist/cjs/tools/fileSearch/tool.cjs.map +1 -0
  11. package/dist/esm/main.mjs +4 -1
  12. package/dist/esm/main.mjs.map +1 -1
  13. package/dist/esm/tools/fileSearch/formatter.mjs +92 -0
  14. package/dist/esm/tools/fileSearch/formatter.mjs.map +1 -0
  15. package/dist/esm/tools/fileSearch/ragClient.mjs +100 -0
  16. package/dist/esm/tools/fileSearch/ragClient.mjs.map +1 -0
  17. package/dist/esm/tools/fileSearch/schema.mjs +15 -0
  18. package/dist/esm/tools/fileSearch/schema.mjs.map +1 -0
  19. package/dist/esm/tools/fileSearch/tool.mjs +152 -0
  20. package/dist/esm/tools/fileSearch/tool.mjs.map +1 -0
  21. package/dist/types/index.d.ts +1 -0
  22. package/dist/types/providers/index.d.ts +0 -1
  23. package/dist/types/tools/fileSearch/formatter.d.ts +25 -0
  24. package/dist/types/tools/fileSearch/index.d.ts +5 -0
  25. package/dist/types/tools/fileSearch/ragClient.d.ts +32 -0
  26. package/dist/types/tools/fileSearch/schema.d.ts +13 -0
  27. package/dist/types/tools/fileSearch/tool.d.ts +18 -0
  28. package/dist/types/tools/fileSearch/types.d.ts +139 -0
  29. package/package.json +1 -6
  30. package/src/index.ts +1 -0
  31. package/src/providers/index.ts +4 -1
  32. package/src/tools/fileSearch/__tests__/tool.test.ts +251 -0
  33. package/src/tools/fileSearch/formatter.ts +131 -0
  34. package/src/tools/fileSearch/index.ts +23 -0
  35. package/src/tools/fileSearch/ragClient.ts +141 -0
  36. package/src/tools/fileSearch/schema.ts +19 -0
  37. package/src/tools/fileSearch/tool.ts +207 -0
  38. package/src/tools/fileSearch/types.ts +147 -0
  39. package/dist/cjs/providers/composite/CompositeCapabilityProvider.cjs +0 -80
  40. package/dist/cjs/providers/composite/CompositeCapabilityProvider.cjs.map +0 -1
  41. package/dist/esm/providers/composite/CompositeCapabilityProvider.mjs +0 -78
  42. package/dist/esm/providers/composite/CompositeCapabilityProvider.mjs.map +0 -1
  43. package/dist/types/providers/composite/CompositeCapabilityProvider.d.ts +0 -22
  44. package/dist/types/providers/composite/index.d.ts +0 -1
  45. package/src/providers/__tests__/CompositeCapabilityProvider.test.ts +0 -93
  46. package/src/providers/composite/CompositeCapabilityProvider.ts +0 -112
  47. package/src/providers/composite/index.ts +0 -1
package/dist/cjs/main.cjs CHANGED
@@ -23,16 +23,19 @@ var ProgrammaticToolCalling = require('./tools/ProgrammaticToolCalling.cjs');
23
23
  var ToolSearch = require('./tools/ToolSearch.cjs');
24
24
  var ToolNode = require('./tools/ToolNode.cjs');
25
25
  var AskUser = require('./tools/AskUser.cjs');
26
- var schema$1 = require('./tools/schema.cjs');
26
+ var schema$2 = require('./tools/schema.cjs');
27
27
  var handlers$1 = require('./tools/handlers.cjs');
28
- var tool = require('./tools/search/tool.cjs');
28
+ var tool$1 = require('./tools/search/tool.cjs');
29
29
  var schema = require('./tools/search/schema.cjs');
30
30
  var index$2 = require('./tools/memory/index.cjs');
31
+ var tool = require('./tools/fileSearch/tool.cjs');
32
+ var ragClient = require('./tools/fileSearch/ragClient.cjs');
33
+ var formatter = require('./tools/fileSearch/formatter.cjs');
34
+ var schema$1 = require('./tools/fileSearch/schema.cjs');
31
35
  var proxyTool = require('./tools/proxyTool.cjs');
32
36
  var types = require('./providers/types.cjs');
33
37
  var capabilityNaming = require('./providers/capabilityNaming.cjs');
34
38
  var ToolsServerCapabilityProvider = require('./providers/tools-server/ToolsServerCapabilityProvider.cjs');
35
- var CompositeCapabilityProvider = require('./providers/composite/CompositeCapabilityProvider.cjs');
36
39
  var MCPCapabilityProvider = require('./providers/mcp/MCPCapabilityProvider.cjs');
37
40
  var transport = require('./providers/mcp/transport.cjs');
38
41
  var config$1 = require('./providers/mcp/config.cjs');
@@ -67,7 +70,7 @@ var run$1 = require('./utils/run.cjs');
67
70
  var tokens = require('./utils/tokens.cjs');
68
71
  var toonFormat = require('./utils/toonFormat.cjs');
69
72
  var contextAnalytics = require('./utils/contextAnalytics.cjs');
70
- var schema$2 = require('./utils/schema.cjs');
73
+ var schema$3 = require('./utils/schema.cjs');
71
74
  var toolCallContinuation = require('./utils/toolCallContinuation.cjs');
72
75
  var contextPressure = require('./utils/contextPressure.cjs');
73
76
  var toolDiscoveryCache = require('./utils/toolDiscoveryCache.cjs');
@@ -196,13 +199,13 @@ exports.AskUserStepSchema = AskUser.AskUserStepSchema;
196
199
  exports.AskUserToolName = AskUser.AskUserToolName;
197
200
  exports.HITL_FIELDS = AskUser.HITL_FIELDS;
198
201
  exports.createAskUserTool = AskUser.createAskUserTool;
199
- exports.createSchemaOnlyTool = schema$1.createSchemaOnlyTool;
200
- exports.createSchemaOnlyTools = schema$1.createSchemaOnlyTools;
202
+ exports.createSchemaOnlyTool = schema$2.createSchemaOnlyTool;
203
+ exports.createSchemaOnlyTools = schema$2.createSchemaOnlyTools;
201
204
  exports.handleServerToolResult = handlers$1.handleServerToolResult;
202
205
  exports.handleToolCallChunks = handlers$1.handleToolCallChunks;
203
206
  exports.handleToolCalls = handlers$1.handleToolCalls;
204
207
  exports.toolResultTypes = handlers$1.toolResultTypes;
205
- exports.createSearchTool = tool.createSearchTool;
208
+ exports.createSearchTool = tool$1.createSearchTool;
206
209
  Object.defineProperty(exports, "DATE_RANGE", {
207
210
  enumerable: true,
208
211
  get: function () { return schema.DATE_RANGE; }
@@ -220,6 +223,14 @@ exports.newsSchema = schema.newsSchema;
220
223
  exports.querySchema = schema.querySchema;
221
224
  exports.videosSchema = schema.videosSchema;
222
225
  exports.buildMemoryTools = index$2.buildMemoryTools;
226
+ exports.createFileSearchTool = tool.createFileSearchTool;
227
+ exports.HttpRagClient = ragClient.HttpRagClient;
228
+ exports.RAG_API_URL_ENV = ragClient.RAG_API_URL_ENV;
229
+ exports.getRagBaseUrl = ragClient.getRagBaseUrl;
230
+ exports.createCitationAnchorFormatter = formatter.createCitationAnchorFormatter;
231
+ exports.plainTextFormatter = formatter.plainTextFormatter;
232
+ exports.FileSearchToolName = schema$1.FileSearchToolName;
233
+ exports.fileSearchInputSchema = schema$1.fileSearchInputSchema;
223
234
  exports.buildProxyTool = proxyTool.buildProxyTool;
224
235
  Object.defineProperty(exports, "AuthSource", {
225
236
  enumerable: true,
@@ -233,7 +244,6 @@ exports.CAPABILITY_NAME_SEPARATOR = capabilityNaming.CAPABILITY_NAME_SEPARATOR;
233
244
  exports.formatCapabilityName = capabilityNaming.formatCapabilityName;
234
245
  exports.parseCapabilityName = capabilityNaming.parseCapabilityName;
235
246
  exports.ToolsServerCapabilityProvider = ToolsServerCapabilityProvider.ToolsServerCapabilityProvider;
236
- exports.CompositeCapabilityProvider = CompositeCapabilityProvider.CompositeCapabilityProvider;
237
247
  exports.MCPCapabilityProvider = MCPCapabilityProvider.MCPCapabilityProvider;
238
248
  exports.flattenToolCallResponse = MCPCapabilityProvider.flattenToolCallResponse;
239
249
  exports.createTransport = transport.createTransport;
@@ -453,8 +463,8 @@ exports.isToonFormat = toonFormat.isToonFormat;
453
463
  exports.jsonToToon = toonFormat.jsonToToon;
454
464
  exports.processToolOutput = toonFormat.processToolOutput;
455
465
  exports.buildContextAnalytics = contextAnalytics.buildContextAnalytics;
456
- exports.isZodSchema = schema$2.isZodSchema;
457
- exports.toJsonSchema = schema$2.toJsonSchema;
466
+ exports.isZodSchema = schema$3.isZodSchema;
467
+ exports.toJsonSchema = schema$3.toJsonSchema;
458
468
  exports.extractFinishReason = toolCallContinuation.extractFinishReason;
459
469
  exports.isMaxTokensFinish = toolCallContinuation.isMaxTokensFinish;
460
470
  exports.buildMultiDocHintContent = contextPressure.buildMultiDocHintContent;
@@ -1 +1 @@
1
- {"version":3,"file":"main.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
1
+ {"version":3,"file":"main.cjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
@@ -0,0 +1,95 @@
1
+ 'use strict';
2
+
3
+ /**
4
+ * Default result formatters.
5
+ *
6
+ * - `plainTextFormatter`: CLI / A2A / generic output. No citation anchors.
7
+ * - `citationAnchorFormatter`: ranger-style `\ue202turn0fileN` anchors with
8
+ * a monotonic `sourceOffset` so multi-call turns stay globally unique.
9
+ *
10
+ * Runtimes can supply their own `FileSearchResultFormatter` to override.
11
+ */
12
+ const plainTextFormatter = {
13
+ format(chunks, { files: _files }) {
14
+ if (chunks.length === 0) {
15
+ return { message: 'No relevant results found in the available files.' };
16
+ }
17
+ const body = chunks
18
+ .map((c) => {
19
+ const page = getPage(c);
20
+ const rel = (1 - c.distance).toFixed(4);
21
+ return (`File: ${c.filename}` +
22
+ (page != null ? `\nPage: ${page}` : '') +
23
+ `\nRelevance: ${rel}\nContent: ${c.page_content}\n`);
24
+ })
25
+ .join('\n---\n');
26
+ const sources = chunks.map((c) => ({
27
+ type: 'file',
28
+ fileId: c.file_id,
29
+ content: c.page_content,
30
+ fileName: c.filename,
31
+ relevance: 1 - c.distance,
32
+ pages: getPage(c) != null ? [getPage(c)] : [],
33
+ }));
34
+ return { message: body, artifact: { file_search: { sources } } };
35
+ },
36
+ };
37
+ function createCitationAnchorFormatter(opts = {}) {
38
+ const toolName = opts.toolName ?? 'file_search';
39
+ const getOffset = opts.getSourceOffset ?? (() => 0);
40
+ const advance = opts.advanceSourceOffset ?? (() => { });
41
+ return {
42
+ format(chunks) {
43
+ if (chunks.length === 0) {
44
+ return {
45
+ message: 'No results found or errors occurred while searching the files.',
46
+ };
47
+ }
48
+ const base = getOffset();
49
+ const body = chunks
50
+ .map((c, i) => {
51
+ const globalIndex = base + i;
52
+ const page = getPage(c);
53
+ const rel = (1 - c.distance).toFixed(4);
54
+ return (`[Source ${globalIndex}] File: ${c.filename} | Anchor: \\ue202turn0file${globalIndex}` +
55
+ (page != null ? ` | Page: ${page}` : '') +
56
+ ` | Relevance: ${rel}\nContent: ${c.page_content}\n` +
57
+ `↑ Cite this source using: \\ue202turn0file${globalIndex}`);
58
+ })
59
+ .join('\n---\n');
60
+ const sources = chunks.map((c) => ({
61
+ type: 'file',
62
+ fileId: c.file_id,
63
+ content: c.page_content,
64
+ fileName: c.filename,
65
+ relevance: 1 - c.distance,
66
+ pages: getPage(c) != null ? [getPage(c)] : [],
67
+ pageRelevance: getPage(c) != null
68
+ ? { [getPage(c)]: 1 - c.distance }
69
+ : {},
70
+ }));
71
+ advance(chunks.length);
72
+ return {
73
+ message: body,
74
+ artifact: { [toolName]: { sources, fileCitations: true } },
75
+ };
76
+ },
77
+ };
78
+ }
79
+ /** Extract a 1-indexed page number from the chunk metadata, or null. */
80
+ function getPage(chunk) {
81
+ const raw = chunk.metadata?.page ??
82
+ chunk.metadata?.page_number ??
83
+ null;
84
+ if (raw == null)
85
+ return null;
86
+ const parsed = typeof raw === 'number' ? raw : parseInt(String(raw), 10);
87
+ if (Number.isNaN(parsed) || parsed < 0)
88
+ return null;
89
+ // rag_api stores 0-indexed; display is 1-indexed
90
+ return parsed + 1;
91
+ }
92
+
93
+ exports.createCitationAnchorFormatter = createCitationAnchorFormatter;
94
+ exports.plainTextFormatter = plainTextFormatter;
95
+ //# sourceMappingURL=formatter.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"formatter.cjs","sources":["../../../../src/tools/fileSearch/formatter.ts"],"sourcesContent":["/**\n * Default result formatters.\n *\n * - `plainTextFormatter`: CLI / A2A / generic output. No citation anchors.\n * - `citationAnchorFormatter`: ranger-style `\\ue202turn0fileN` anchors with\n * a monotonic `sourceOffset` so multi-call turns stay globally unique.\n *\n * Runtimes can supply their own `FileSearchResultFormatter` to override.\n */\n\nimport type {\n FileSearchResultFormatter,\n FileSearchFile,\n RagChunk,\n} from './types';\n\ntype AnnotatedChunk = RagChunk & {\n filename: string;\n isCurrentMessage: boolean;\n};\n\nexport const plainTextFormatter: FileSearchResultFormatter = {\n format(chunks, { files: _files }) {\n if (chunks.length === 0) {\n return { message: 'No relevant results found in the available files.' };\n }\n const body = chunks\n .map((c) => {\n const page = getPage(c);\n const rel = (1 - c.distance).toFixed(4);\n return (\n `File: ${c.filename}` +\n (page != null ? `\\nPage: ${page}` : '') +\n `\\nRelevance: ${rel}\\nContent: ${c.page_content}\\n`\n );\n })\n .join('\\n---\\n');\n\n const sources = chunks.map((c) => ({\n type: 'file' as const,\n fileId: c.file_id,\n content: c.page_content,\n fileName: c.filename,\n relevance: 1 - c.distance,\n pages: getPage(c) != null ? [getPage(c) as number] : [],\n }));\n\n return { message: body, artifact: { file_search: { sources } } };\n },\n};\n\nexport interface CitationAnchorFormatterOptions {\n /** Tool name used in the `file_search` artifact wrapper. Defaults to `'file_search'`. */\n toolName?: string;\n /**\n * Monotonic counter for source indices within a turn. Pass the SAME\n * function to the formatter across multiple calls in the same turn so\n * anchors stay globally unique.\n */\n getSourceOffset?: () => number;\n /** Called after formatting to advance the offset. 
*/\n advanceSourceOffset?: (by: number) => void;\n}\n\nexport function createCitationAnchorFormatter(\n opts: CitationAnchorFormatterOptions = {},\n): FileSearchResultFormatter {\n const toolName = opts.toolName ?? 'file_search';\n const getOffset = opts.getSourceOffset ?? (() => 0);\n const advance = opts.advanceSourceOffset ?? (() => {});\n\n return {\n format(chunks) {\n if (chunks.length === 0) {\n return {\n message:\n 'No results found or errors occurred while searching the files.',\n };\n }\n const base = getOffset();\n const body = chunks\n .map((c, i) => {\n const globalIndex = base + i;\n const page = getPage(c);\n const rel = (1 - c.distance).toFixed(4);\n return (\n `[Source ${globalIndex}] File: ${c.filename} | Anchor: \\\\ue202turn0file${globalIndex}` +\n (page != null ? ` | Page: ${page}` : '') +\n ` | Relevance: ${rel}\\nContent: ${c.page_content}\\n` +\n `↑ Cite this source using: \\\\ue202turn0file${globalIndex}`\n );\n })\n .join('\\n---\\n');\n\n const sources = chunks.map((c) => ({\n type: 'file' as const,\n fileId: c.file_id,\n content: c.page_content,\n fileName: c.filename,\n relevance: 1 - c.distance,\n pages: getPage(c) != null ? [getPage(c) as number] : [],\n pageRelevance:\n getPage(c) != null\n ? { [getPage(c) as number]: 1 - c.distance }\n : {},\n }));\n\n advance(chunks.length);\n return {\n message: body,\n artifact: { [toolName]: { sources, fileCitations: true } },\n };\n },\n };\n}\n\n/** Extract a 1-indexed page number from the chunk metadata, or null. */\nfunction getPage(chunk: AnnotatedChunk | RagChunk): number | null {\n const raw =\n (chunk.metadata?.page as unknown) ??\n (chunk.metadata?.page_number as unknown) ??\n null;\n if (raw == null) return null;\n const parsed = typeof raw === 'number' ? 
raw : parseInt(String(raw), 10);\n if (Number.isNaN(parsed) || parsed < 0) return null;\n // rag_api stores 0-indexed; display is 1-indexed\n return parsed + 1;\n}\n\n// Re-export so consumers only import from the formatter module.\nexport type { FileSearchResultFormatter, FileSearchFile, RagChunk };\n"],"names":[],"mappings":";;AAAA;;;;;;;;AAQG;AAaI,MAAM,kBAAkB,GAA8B;AAC3D,IAAA,MAAM,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,EAAA;AAC9B,QAAA,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;AACvB,YAAA,OAAO,EAAE,OAAO,EAAE,mDAAmD,EAAE;QACzE;QACA,MAAM,IAAI,GAAG;AACV,aAAA,GAAG,CAAC,CAAC,CAAC,KAAI;AACT,YAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC;AACvB,YAAA,MAAM,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;AACvC,YAAA,QACE,CAAA,MAAA,EAAS,CAAC,CAAC,QAAQ,CAAA,CAAE;AACrB,iBAAC,IAAI,IAAI,IAAI,GAAG,CAAA,QAAA,EAAW,IAAI,CAAA,CAAE,GAAG,EAAE,CAAC;AACvC,gBAAA,CAAA,aAAA,EAAgB,GAAG,CAAA,WAAA,EAAc,CAAC,CAAC,YAAY,CAAA,EAAA,CAAI;AAEvD,QAAA,CAAC;aACA,IAAI,CAAC,SAAS,CAAC;QAElB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AACjC,YAAA,IAAI,EAAE,MAAe;YACrB,MAAM,EAAE,CAAC,CAAC,OAAO;YACjB,OAAO,EAAE,CAAC,CAAC,YAAY;YACvB,QAAQ,EAAE,CAAC,CAAC,QAAQ;AACpB,YAAA,SAAS,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ;AACzB,YAAA,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAW,CAAC,GAAG,EAAE;AACxD,SAAA,CAAC,CAAC;AAEH,QAAA,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE,WAAW,EAAE,EAAE,OAAO,EAAE,EAAE,EAAE;IAClE,CAAC;;AAgBG,SAAU,6BAA6B,CAC3C,IAAA,GAAuC,EAAE,EAAA;AAEzC,IAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,aAAa;AAC/C,IAAA,MAAM,SAAS,GAAG,IAAI,CAAC,eAAe,KAAK,MAAM,CAAC,CAAC;AACnD,IAAA,MAAM,OAAO,GAAG,IAAI,CAAC,mBAAmB,KAAK,MAAK,EAAE,CAAC,CAAC;IAEtD,OAAO;AACL,QAAA,MAAM,CAAC,MAAM,EAAA;AACX,YAAA,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;gBACvB,OAAO;AACL,oBAAA,OAAO,EACL,gEAAgE;iBACnE;YACH;AACA,YAAA,MAAM,IAAI,GAAG,SAAS,EAAE;YACxB,MAAM,IAAI,GAAG;AACV,iBAAA,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAI;AACZ,gBAAA,MAAM,WAAW,GAAG,IAAI,GAAG,CAAC;AAC5B,gBAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC;AACvB,gBAAA,MAAM,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,
CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;gBACvC,QACE,WAAW,WAAW,CAAA,QAAA,EAAW,CAAC,CAAC,QAAQ,CAAA,2BAAA,EAA8B,WAAW,CAAA,CAAE;AACtF,qBAAC,IAAI,IAAI,IAAI,GAAG,CAAA,SAAA,EAAY,IAAI,CAAA,CAAE,GAAG,EAAE,CAAC;AACxC,oBAAA,CAAA,cAAA,EAAiB,GAAG,CAAA,WAAA,EAAc,CAAC,CAAC,YAAY,CAAA,EAAA,CAAI;oBACpD,CAAA,0CAAA,EAA6C,WAAW,CAAA,CAAE;AAE9D,YAAA,CAAC;iBACA,IAAI,CAAC,SAAS,CAAC;YAElB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AACjC,gBAAA,IAAI,EAAE,MAAe;gBACrB,MAAM,EAAE,CAAC,CAAC,OAAO;gBACjB,OAAO,EAAE,CAAC,CAAC,YAAY;gBACvB,QAAQ,EAAE,CAAC,CAAC,QAAQ;AACpB,gBAAA,SAAS,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ;AACzB,gBAAA,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAW,CAAC,GAAG,EAAE;AACvD,gBAAA,aAAa,EACX,OAAO,CAAC,CAAC,CAAC,IAAI;AACZ,sBAAE,EAAE,CAAC,OAAO,CAAC,CAAC,CAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ;AAC1C,sBAAE,EAAE;AACT,aAAA,CAAC,CAAC;AAEH,YAAA,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC;YACtB,OAAO;AACL,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,QAAQ,EAAE,EAAE,CAAC,QAAQ,GAAG,EAAE,OAAO,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE;aAC3D;QACH,CAAC;KACF;AACH;AAEA;AACA,SAAS,OAAO,CAAC,KAAgC,EAAA;AAC/C,IAAA,MAAM,GAAG,GACN,KAAK,CAAC,QAAQ,EAAE,IAAgB;QAChC,KAAK,CAAC,QAAQ,EAAE,WAAuB;AACxC,QAAA,IAAI;IACN,IAAI,GAAG,IAAI,IAAI;AAAE,QAAA,OAAO,IAAI;IAC5B,MAAM,MAAM,GAAG,OAAO,GAAG,KAAK,QAAQ,GAAG,GAAG,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC;IACxE,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,MAAM,GAAG,CAAC;AAAE,QAAA,OAAO,IAAI;;IAEnD,OAAO,MAAM,GAAG,CAAC;AACnB;;;;;"}
@@ -0,0 +1,104 @@
1
+ 'use strict';
2
+
3
+ var fetch = require('node-fetch');
4
+ var env = require('@langchain/core/utils/env');
5
+
6
+ /**
7
+ * Default HTTP RAG client. Posts to `${baseUrl}/query` with the shape
8
+ * rag_api expects (`{ file_id, query, k, entity_id? }`). Runtimes that
9
+ * use a different vector backend implement their own `RagClient`.
10
+ *
11
+ * Auth is runtime-provided per call (via `authHeaders` on the params) so
12
+ * short-lived tokens can be minted per request without the client
13
+ * caching stale credentials.
14
+ */
15
+ const RAG_API_URL_ENV = 'RAG_API_URL';
16
+ /** Resolve base URL at call time so env-var changes propagate. */
17
+ function getRagBaseUrl(override) {
18
+ const url = override ??
19
+ env.getEnvironmentVariable(RAG_API_URL_ENV) ??
20
+ '';
21
+ if (!url) {
22
+ throw new Error(`file_search: ${RAG_API_URL_ENV} is not configured. ` +
23
+ `Set the env var or pass baseUrl to HttpRagClient.`);
24
+ }
25
+ return url.replace(/\/$/, '');
26
+ }
27
+ class HttpRagClient {
28
+ baseUrlOverride;
29
+ defaultHeaders;
30
+ defaultTimeoutMs;
31
+ logger;
32
+ constructor(opts = {}) {
33
+ this.baseUrlOverride = opts.baseUrl;
34
+ this.defaultHeaders = opts.defaultHeaders ?? {};
35
+ this.defaultTimeoutMs = opts.defaultTimeoutMs ?? 15_000;
36
+ this.logger = opts.logger;
37
+ }
38
+ async query(params) {
39
+ const baseUrl = getRagBaseUrl(this.baseUrlOverride);
40
+ const url = `${baseUrl}/query`;
41
+ const body = {
42
+ file_id: params.file_id,
43
+ query: params.query,
44
+ k: params.k ?? 10,
45
+ };
46
+ if (params.entity_id)
47
+ body.entity_id = params.entity_id;
48
+ if (params.scope)
49
+ body.scope = params.scope;
50
+ const headers = {
51
+ 'Content-Type': 'application/json',
52
+ ...this.defaultHeaders,
53
+ ...(params.authHeaders ?? {}),
54
+ };
55
+ const timeoutMs = params.timeoutMs ?? this.defaultTimeoutMs;
56
+ const controller = typeof AbortController !== 'undefined' ? new AbortController() : null;
57
+ const timer = controller
58
+ ? setTimeout(() => controller.abort(), timeoutMs)
59
+ : null;
60
+ this.logger?.debug('[file_search] RAG query', {
61
+ url,
62
+ file_id: params.file_id,
63
+ k: body.k,
64
+ });
65
+ try {
66
+ const res = await fetch(url, {
67
+ method: 'POST',
68
+ headers,
69
+ body: JSON.stringify(body),
70
+ signal: controller?.signal,
71
+ });
72
+ if (!res.ok) {
73
+ const text = await res.text().catch(() => '');
74
+ throw new Error(`RAG query failed: ${res.status} ${res.statusText} — ${text.slice(0, 200)}`);
75
+ }
76
+ const json = (await res.json());
77
+ return this.normalize(params.file_id, json);
78
+ }
79
+ finally {
80
+ if (timer)
81
+ clearTimeout(timer);
82
+ }
83
+ }
84
+ /** Convert rag_api's tuple format into the library's normalized shape. */
85
+ normalize(file_id, resp) {
86
+ if (!Array.isArray(resp)) {
87
+ this.logger?.warn('[file_search] RAG response not an array', { resp });
88
+ return [];
89
+ }
90
+ return resp
91
+ .filter((row) => Array.isArray(row) && row.length === 2)
92
+ .map(([doc, distance]) => ({
93
+ file_id: doc?.metadata?.file_id ?? file_id,
94
+ page_content: doc?.page_content ?? '',
95
+ distance: typeof distance === 'number' ? distance : 1,
96
+ metadata: doc?.metadata,
97
+ }));
98
+ }
99
+ }
100
+
101
+ exports.HttpRagClient = HttpRagClient;
102
+ exports.RAG_API_URL_ENV = RAG_API_URL_ENV;
103
+ exports.getRagBaseUrl = getRagBaseUrl;
104
+ //# sourceMappingURL=ragClient.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ragClient.cjs","sources":["../../../../src/tools/fileSearch/ragClient.ts"],"sourcesContent":["/**\n * Default HTTP RAG client. Posts to `${baseUrl}/query` with the shape\n * rag_api expects (`{ file_id, query, k, entity_id? }`). Runtimes that\n * use a different vector backend implement their own `RagClient`.\n *\n * Auth is runtime-provided per call (via `authHeaders` on the params) so\n * short-lived tokens can be minted per request without the client\n * caching stale credentials.\n */\n\nimport fetch from 'node-fetch';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport type {\n RagClient,\n RagQueryParams,\n RagChunk,\n FileSearchToolLogger,\n} from './types';\n\nexport const RAG_API_URL_ENV = 'RAG_API_URL';\n\n/** Resolve base URL at call time so env-var changes propagate. */\nexport function getRagBaseUrl(override?: string): string {\n const url =\n override ??\n getEnvironmentVariable(RAG_API_URL_ENV) ??\n '';\n if (!url) {\n throw new Error(\n `file_search: ${RAG_API_URL_ENV} is not configured. ` +\n `Set the env var or pass baseUrl to HttpRagClient.`,\n );\n }\n return url.replace(/\\/$/, '');\n}\n\nexport interface HttpRagClientOptions {\n /** Base URL of the RAG service (no trailing slash). Falls back to env. */\n baseUrl?: string;\n /** Default headers sent on every request (e.g., a static API key). */\n defaultHeaders?: Record<string, string>;\n /** Default timeout if params don't override. Default 15_000. */\n defaultTimeoutMs?: number;\n logger?: FileSearchToolLogger;\n}\n\n/**\n * Expected rag_api response shape: `[[{ page_content, metadata }, distance], ...]`\n * — an array of [doc, score] tuples. 
Normalized here into `RagChunk[]`.\n */\ntype RagApiResponse = Array<\n [\n {\n page_content: string;\n metadata?: Record<string, unknown>;\n },\n number,\n ]\n>;\n\nexport class HttpRagClient implements RagClient {\n private readonly baseUrlOverride?: string;\n private readonly defaultHeaders: Record<string, string>;\n private readonly defaultTimeoutMs: number;\n private readonly logger?: FileSearchToolLogger;\n\n constructor(opts: HttpRagClientOptions = {}) {\n this.baseUrlOverride = opts.baseUrl;\n this.defaultHeaders = opts.defaultHeaders ?? {};\n this.defaultTimeoutMs = opts.defaultTimeoutMs ?? 15_000;\n this.logger = opts.logger;\n }\n\n async query(params: RagQueryParams): Promise<RagChunk[]> {\n const baseUrl = getRagBaseUrl(this.baseUrlOverride);\n const url = `${baseUrl}/query`;\n\n const body: Record<string, unknown> = {\n file_id: params.file_id,\n query: params.query,\n k: params.k ?? 10,\n };\n if (params.entity_id) body.entity_id = params.entity_id;\n if (params.scope) body.scope = params.scope;\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n ...this.defaultHeaders,\n ...(params.authHeaders ?? {}),\n };\n\n const timeoutMs = params.timeoutMs ?? this.defaultTimeoutMs;\n const controller =\n typeof AbortController !== 'undefined' ? new AbortController() : null;\n const timer = controller\n ? 
setTimeout(() => controller.abort(), timeoutMs)\n : null;\n\n this.logger?.debug('[file_search] RAG query', {\n url,\n file_id: params.file_id,\n k: body.k,\n });\n\n try {\n const res = await fetch(url, {\n method: 'POST',\n headers,\n body: JSON.stringify(body),\n signal: controller?.signal as unknown as undefined,\n });\n if (!res.ok) {\n const text = await res.text().catch(() => '');\n throw new Error(\n `RAG query failed: ${res.status} ${res.statusText} — ${text.slice(0, 200)}`,\n );\n }\n const json = (await res.json()) as RagApiResponse;\n return this.normalize(params.file_id, json);\n } finally {\n if (timer) clearTimeout(timer);\n }\n }\n\n /** Convert rag_api's tuple format into the library's normalized shape. */\n private normalize(file_id: string, resp: RagApiResponse): RagChunk[] {\n if (!Array.isArray(resp)) {\n this.logger?.warn('[file_search] RAG response not an array', { resp });\n return [];\n }\n return resp\n .filter((row) => Array.isArray(row) && row.length === 2)\n .map(([doc, distance]) => ({\n file_id:\n (doc?.metadata?.file_id as string | undefined) ?? file_id,\n page_content: doc?.page_content ?? '',\n distance: typeof distance === 'number' ? 
distance : 1,\n metadata: doc?.metadata,\n }));\n }\n}\n"],"names":["getEnvironmentVariable"],"mappings":";;;;;AAAA;;;;;;;;AAQG;AAWI,MAAM,eAAe,GAAG;AAE/B;AACM,SAAU,aAAa,CAAC,QAAiB,EAAA;IAC7C,MAAM,GAAG,GACP,QAAQ;QACRA,0BAAsB,CAAC,eAAe,CAAC;AACvC,QAAA,EAAE;IACJ,IAAI,CAAC,GAAG,EAAE;AACR,QAAA,MAAM,IAAI,KAAK,CACb,CAAA,aAAA,EAAgB,eAAe,CAAA,oBAAA,CAAsB;AACnD,YAAA,CAAA,iDAAA,CAAmD,CACtD;IACH;IACA,OAAO,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC;AAC/B;MA0Ba,aAAa,CAAA;AACP,IAAA,eAAe;AACf,IAAA,cAAc;AACd,IAAA,gBAAgB;AAChB,IAAA,MAAM;AAEvB,IAAA,WAAA,CAAY,OAA6B,EAAE,EAAA;AACzC,QAAA,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,OAAO;QACnC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,IAAI,EAAE;QAC/C,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,IAAI,MAAM;AACvD,QAAA,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;IAC3B;IAEA,MAAM,KAAK,CAAC,MAAsB,EAAA;QAChC,MAAM,OAAO,GAAG,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC;AACnD,QAAA,MAAM,GAAG,GAAG,CAAA,EAAG,OAAO,QAAQ;AAE9B,QAAA,MAAM,IAAI,GAA4B;YACpC,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,KAAK,EAAE,MAAM,CAAC,KAAK;AACnB,YAAA,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI,EAAE;SAClB;QACD,IAAI,MAAM,CAAC,SAAS;AAAE,YAAA,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS;QACvD,IAAI,MAAM,CAAC,KAAK;AAAE,YAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK;AAE3C,QAAA,MAAM,OAAO,GAA2B;AACtC,YAAA,cAAc,EAAE,kBAAkB;YAClC,GAAG,IAAI,CAAC,cAAc;AACtB,YAAA,IAAI,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC;SAC9B;QAED,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,gBAAgB;AAC3D,QAAA,MAAM,UAAU,GACd,OAAO,eAAe,KAAK,WAAW,GAAG,IAAI,eAAe,EAAE,GAAG,IAAI;QACvE,MAAM,KAAK,GAAG;AACZ,cAAE,UAAU,CAAC,MAAM,UAAU,CAAC,KAAK,EAAE,EAAE,SAAS;cAC9C,IAAI;AAER,QAAA,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,yBAAyB,EAAE;YAC5C,GAAG;YACH,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,CAAC,EAAE,IAAI,CAAC,CAAC;AACV,SAAA,CAAC;AAEF,QAAA,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;AAC3B,gBAAA,MAAM,EAAE,MAAM;gBACd,OAAO;AACP,gBAAA,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;gBAC1B,MAAM,EAAE,UAAU,EAAE,MAA8B;AACnD,aAAA,CAAC;AACF,YAAA,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE;AACX,gBAAA,MAAM,IAAI,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;gBAC7C,MAAM
,IAAI,KAAK,CACb,CAAA,kBAAA,EAAqB,GAAG,CAAC,MAAM,CAAA,CAAA,EAAI,GAAG,CAAC,UAAU,MAAM,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA,CAAE,CAC5E;YACH;YACA,MAAM,IAAI,IAAI,MAAM,GAAG,CAAC,IAAI,EAAE,CAAmB;YACjD,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,EAAE,IAAI,CAAC;QAC7C;gBAAU;AACR,YAAA,IAAI,KAAK;gBAAE,YAAY,CAAC,KAAK,CAAC;QAChC;IACF;;IAGQ,SAAS,CAAC,OAAe,EAAE,IAAoB,EAAA;QACrD,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;YACxB,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,yCAAyC,EAAE,EAAE,IAAI,EAAE,CAAC;AACtE,YAAA,OAAO,EAAE;QACX;AACA,QAAA,OAAO;AACJ,aAAA,MAAM,CAAC,CAAC,GAAG,KAAK,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;aACtD,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC,MAAM;AACzB,YAAA,OAAO,EACJ,GAAG,EAAE,QAAQ,EAAE,OAA8B,IAAI,OAAO;AAC3D,YAAA,YAAY,EAAE,GAAG,EAAE,YAAY,IAAI,EAAE;AACrC,YAAA,QAAQ,EAAE,OAAO,QAAQ,KAAK,QAAQ,GAAG,QAAQ,GAAG,CAAC;YACrD,QAAQ,EAAE,GAAG,EAAE,QAAQ;AACxB,SAAA,CAAC,CAAC;IACP;AACD;;;;;;"}
@@ -0,0 +1,18 @@
1
+ 'use strict';
2
+
3
+ var zod = require('zod');
4
+
5
+ const fileSearchInputSchema = zod.z.object({
6
+ query: zod.z
7
+ .string()
8
+ .describe("A natural language query to search for relevant information in the files. Be SPECIFIC and TARGETED — use keywords for the specific section or topic you need. For comprehensive tasks (summaries, overviews), call this tool multiple times with different targeted queries (e.g., 'introduction', 'methodology', 'results', 'conclusions') rather than one broad query."),
9
+ target_files: zod.z
10
+ .array(zod.z.string())
11
+ .optional()
12
+ .describe('Optional list of filenames (or partial names) to limit the search to. When provided, only files whose name contains one of these strings will be searched. Use this to avoid searching irrelevant files. Omit to search all available files.'),
13
+ });
14
+ const FileSearchToolName = 'file_search';
15
+
16
+ exports.FileSearchToolName = FileSearchToolName;
17
+ exports.fileSearchInputSchema = fileSearchInputSchema;
18
+ //# sourceMappingURL=schema.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.cjs","sources":["../../../../src/tools/fileSearch/schema.ts"],"sourcesContent":["import { z } from 'zod';\n\nexport const fileSearchInputSchema = z.object({\n query: z\n .string()\n .describe(\n \"A natural language query to search for relevant information in the files. Be SPECIFIC and TARGETED — use keywords for the specific section or topic you need. For comprehensive tasks (summaries, overviews), call this tool multiple times with different targeted queries (e.g., 'introduction', 'methodology', 'results', 'conclusions') rather than one broad query.\",\n ),\n target_files: z\n .array(z.string())\n .optional()\n .describe(\n 'Optional list of filenames (or partial names) to limit the search to. When provided, only files whose name contains one of these strings will be searched. Use this to avoid searching irrelevant files. Omit to search all available files.',\n ),\n});\n\nexport type FileSearchInput = z.infer<typeof fileSearchInputSchema>;\n\nexport const FileSearchToolName = 'file_search';\n"],"names":["z"],"mappings":";;;;AAEO,MAAM,qBAAqB,GAAGA,KAAC,CAAC,MAAM,CAAC;AAC5C,IAAA,KAAK,EAAEA;AACJ,SAAA,MAAM;SACN,QAAQ,CACP,0WAA0W,CAC3W;AACH,IAAA,YAAY,EAAEA;AACX,SAAA,KAAK,CAACA,KAAC,CAAC,MAAM,EAAE;AAChB,SAAA,QAAQ;SACR,QAAQ,CACP,8OAA8O,CAC/O;AACJ,CAAA;AAIM,MAAM,kBAAkB,GAAG;;;;;"}
@@ -0,0 +1,155 @@
1
+ 'use strict';
2
+
3
+ var tools = require('@langchain/core/tools');
4
+ var schema = require('./schema.cjs');
5
+ var formatter = require('./formatter.cjs');
6
+
7
+ /**
8
+ * file_search tool factory — library-native equivalent of the CodeExecutor
9
+ * pattern. Runtimes supply a `RagClient`, the file list for this turn, and
10
+ * an optional formatter (ranger uses citation anchors; CLI/A2A use plain
11
+ * text).
12
+ *
13
+ * The tool itself:
14
+ * 1. Accepts `{ query, target_files? }` from the LLM.
15
+ * 2. Filters files by `target_files` substring match when provided.
16
+ * 3. Queries each file in bounded concurrent batches.
17
+ * 4. Enforces per-file timeouts (failures isolated per file).
18
+ * 5. Flattens chunks, deprioritizes stale-turn files, caps results.
19
+ * 6. Hands formatted output to the runtime's formatter for final shape.
20
+ */
21
// Per-file RAG query timeout (ms); each file's failure is isolated, so a
// timeout on one file does not abort the whole tool call.
const DEFAULT_QUERY_TIMEOUT_MS = 15_000;
// Maximum files queried in parallel per batch (protects the HTTP
// connection pool when an agent has many attached files).
const DEFAULT_CONCURRENCY = 10;
// Chunks requested per file per query (the `k` sent to the RAG client).
const DEFAULT_TOP_K = 10;
24
+ /**
25
+ * Build the tool description. Runtimes that use citation anchors supply
26
+ * `fileCitations: true` (via the formatter); the description includes the
27
+ * citation ruleset only when that's on.
28
+ */
29
/**
 * Build the LLM-facing tool description.
 *
 * The base section always explains the semantic-search behavior, the
 * `target_files` narrowing hint, and the multi-query strategy. When
 * `opts.fileCitations` is true, the mandatory citation-anchor ruleset is
 * appended as an extra section.
 */
function buildDescription(opts) {
    const sections = [
        `Performs semantic search across the attached "${schema.FileSearchToolName}" documents using natural language queries. Analyzes the content of loaded files to find relevant information, quotes, and passages matching the query.

**Use target_files to narrow the search:**
When you know which file(s) contain the relevant information, ALWAYS pass target_files. This is faster and returns more focused results. Pass partial filenames — they match via substring.

**Multiple searches for thorough analysis:**
For summaries/overviews, call this tool MULTIPLE times with DIFFERENT queries targeting different aspects (intro, methodology, results, conclusions). A single search only returns chunks from one part of the document.`,
    ];
    if (opts.fileCitations) {
        sections.push(`**CITING FILE SEARCH RESULTS — MANDATORY:**
Cite EVERY statement derived from file content. Place the citation anchor IMMEDIATELY after each paragraph using that source. Each search result has a unique source index — use DIFFERENT indices for different claims; do not reuse the same anchor for all paragraphs. Format: \`\\ue202turn0fileN\`. With a page: include \`(p. N)\` inline. Multiple sources: \`\\ue200\\ue202turn0file0\\ue202turn0file1\\ue201\`. NEVER substitute with footnotes, brackets, or symbols.`);
    }
    return sections.join('\n\n');
}
44
/**
 * Factory for the `file_search` LangChain tool.
 *
 * @param config - runtime wiring:
 *   - `ragClient`: performs the per-file semantic query via `ragClient.query`.
 *   - `files`: the file list for this turn (`file_id`, `filename`, optional
 *     `isCurrentMessage` flag).
 *   - `entity_id` / `scope` / `getAuthHeaders`: forwarded with each query.
 *   - `formatter`: shapes the final output; defaults to the plain-text
 *     formatter (no citation anchors).
 *   - `queryTimeoutMs`, `concurrencyLimit`, `topK`, `resultCap`: tuning knobs.
 *   - `callbacks` / `logger`: optional observability hooks.
 * @returns a tool with `responseFormat: 'content_and_artifact'` — its handler
 *   resolves to a `[message, artifact]` tuple.
 */
function createFileSearchTool(config) {
    const { ragClient, files, entity_id, scope, getAuthHeaders, formatter: formatter$1 = formatter.plainTextFormatter, queryTimeoutMs = DEFAULT_QUERY_TIMEOUT_MS, concurrencyLimit = DEFAULT_CONCURRENCY, topK = DEFAULT_TOP_K, resultCap, callbacks, logger, } = config;
    // Monotonic call counter used by citation-style formatters to keep source
    // indices unique across multiple invocations within a single turn.
    let callIndex = 0;
    // Any formatter other than the default plain-text one is assumed to emit
    // citation anchors; this gates the citation ruleset in the description
    // without forcing the host to declare `fileCitations` separately.
    const fileCitations = formatter$1 !== formatter.plainTextFormatter;
    return tools.tool(async (rawInput) => {
        const { query, target_files } = rawInput;
        if (files.length === 0) {
            return [
                'No files to search. Instruct the user to add files for the search.',
                undefined,
            ];
        }
        // target_files: case-insensitive substring match, fallback to all
        // files with a warning if the filter excludes everything.
        let filesToQuery = files;
        if (target_files && target_files.length > 0) {
            const lowerTargets = target_files.map((t) => t.toLowerCase());
            const matched = files.filter((f) => lowerTargets.some((t) => f.filename.toLowerCase().includes(t)));
            if (matched.length === 0) {
                logger?.warn(`[file_search] No files matched target_files ${target_files.join(', ')}; falling back to all files`);
                filesToQuery = files;
            }
            else {
                logger?.info(`[file_search] Filtered to ${matched.length}/${files.length} via target_files`);
                filesToQuery = matched;
            }
        }
        // Auth headers are resolved once per tool call and shared by every
        // file query in this invocation.
        const authHeaders = getAuthHeaders ? await getAuthHeaders() : undefined;
        // Query one file. Failures are logged and reported via callbacks, then
        // collapse to an empty chunk list so one bad file cannot fail the call.
        const queryOne = async (file) => {
            const params = {
                file_id: file.file_id,
                query,
                k: topK,
                entity_id,
                scope,
                authHeaders,
                timeoutMs: queryTimeoutMs,
            };
            try {
                const chunks = await ragClient.query(params);
                callbacks?.onFileQueried?.(file, chunks.length);
                return chunks;
            }
            catch (err) {
                const e = err instanceof Error ? err : new Error(String(err));
                logger?.error(`[file_search] Query failed for ${file.filename}: ${e.message}`);
                callbacks?.onFileError?.(file, e);
                return [];
            }
        };
        // Bounded-concurrency batching. Server-side rerankers handle their
        // own concurrency; this protects the HTTP connection pool when the
        // agent has many files.
        const allChunks = [];
        for (let i = 0; i < filesToQuery.length; i += concurrencyLimit) {
            const batch = filesToQuery.slice(i, i + concurrencyLimit);
            const batchResults = await Promise.all(batch.map(queryOne));
            for (const chunks of batchResults)
                allChunks.push(...chunks);
        }
        if (allChunks.length === 0) {
            return [
                'No content found in the files. The files may not have been processed correctly or the query may need refinement.',
                undefined,
            ];
        }
        // Build annotated results: attach filename + isCurrentMessage via
        // a file-id lookup (metadata wins, factory list is fallback).
        const fileById = new Map(files.map((f) => [f.file_id, f]));
        const annotated = allChunks.map((c) => {
            const matched = fileById.get(c.file_id);
            // Prefer the basename of metadata.source (split on / or \), then
            // the factory-supplied filename, then 'Unknown'.
            const filename = (c.metadata?.source
                ? String(c.metadata.source).split(/[/\\]/).pop()
                : undefined) ??
                matched?.filename ??
                'Unknown';
            return {
                ...c,
                filename,
                isCurrentMessage: matched?.isCurrentMessage === true,
            };
        });
        // Sort: current-turn files first, then by relevance (lower distance).
        annotated.sort((a, b) => {
            if (a.isCurrentMessage !== b.isCurrentMessage)
                return a.isCurrentMessage ? -1 : 1;
            return a.distance - b.distance;
        });
        // Default result cap scales with the number of files queried (min 10).
        const cap = resultCap ?? Math.max(10, filesToQuery.length * 3);
        const limited = annotated.slice(0, cap);
        const { message, artifact } = formatter$1.format(limited, {
            callIndex,
            files,
        });
        // Bump AFTER formatting so the formatter sees this call's own index.
        callIndex += 1;
        return [message, artifact];
    }, {
        name: schema.FileSearchToolName,
        responseFormat: 'content_and_artifact',
        description: buildDescription({ fileCitations }),
        schema: schema.fileSearchInputSchema,
    });
}
152
+
153
+ exports.FileSearchToolName = schema.FileSearchToolName;
154
+ exports.createFileSearchTool = createFileSearchTool;
155
+ //# sourceMappingURL=tool.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tool.cjs","sources":["../../../../src/tools/fileSearch/tool.ts"],"sourcesContent":["/**\n * file_search tool factory — library-native equivalent of the CodeExecutor\n * pattern. Runtimes supply a `RagClient`, the file list for this turn, and\n * an optional formatter (ranger uses citation anchors; CLI/A2A use plain\n * text).\n *\n * The tool itself:\n * 1. Accepts `{ query, target_files? }` from the LLM.\n * 2. Filters files by `target_files` substring match when provided.\n * 3. Queries each file in bounded concurrent batches.\n * 4. Enforces per-file timeouts (failures isolated per file).\n * 5. Flattens chunks, deprioritizes stale-turn files, caps results.\n * 6. Hands formatted output to the runtime's formatter for final shape.\n */\n\nimport { tool, DynamicStructuredTool } from '@langchain/core/tools';\nimport {\n fileSearchInputSchema,\n type FileSearchInput,\n FileSearchToolName,\n} from './schema';\nimport type {\n FileSearchToolConfig,\n FileSearchFile,\n RagChunk,\n RagQueryParams,\n} from './types';\nimport { plainTextFormatter } from './formatter';\n\nconst DEFAULT_QUERY_TIMEOUT_MS = 15_000;\nconst DEFAULT_CONCURRENCY = 10;\nconst DEFAULT_TOP_K = 10;\n\n/**\n * Build the tool description. Runtimes that use citation anchors supply\n * `fileCitations: true` (via the formatter); the description includes the\n * citation ruleset only when that's on.\n */\nfunction buildDescription(opts: { fileCitations: boolean }): string {\n const core = `Performs semantic search across the attached \"${FileSearchToolName}\" documents using natural language queries. Analyzes the content of loaded files to find relevant information, quotes, and passages matching the query.\n\n**Use target_files to narrow the search:**\nWhen you know which file(s) contain the relevant information, ALWAYS pass target_files. This is faster and returns more focused results. 
Pass partial filenames — they match via substring.\n\n**Multiple searches for thorough analysis:**\nFor summaries/overviews, call this tool MULTIPLE times with DIFFERENT queries targeting different aspects (intro, methodology, results, conclusions). A single search only returns chunks from one part of the document.`;\n\n if (!opts.fileCitations) return core;\n\n return `${core}\n\n**CITING FILE SEARCH RESULTS — MANDATORY:**\nCite EVERY statement derived from file content. Place the citation anchor IMMEDIATELY after each paragraph using that source. Each search result has a unique source index — use DIFFERENT indices for different claims; do not reuse the same anchor for all paragraphs. Format: \\`\\\\ue202turn0fileN\\`. With a page: include \\`(p. N)\\` inline. Multiple sources: \\`\\\\ue200\\\\ue202turn0file0\\\\ue202turn0file1\\\\ue201\\`. NEVER substitute with footnotes, brackets, or symbols.`;\n}\n\nexport function createFileSearchTool(\n config: FileSearchToolConfig,\n): DynamicStructuredTool {\n const {\n ragClient,\n files,\n entity_id,\n scope,\n getAuthHeaders,\n formatter = plainTextFormatter,\n queryTimeoutMs = DEFAULT_QUERY_TIMEOUT_MS,\n concurrencyLimit = DEFAULT_CONCURRENCY,\n topK = DEFAULT_TOP_K,\n resultCap,\n callbacks,\n logger,\n } = config;\n\n // Monotonic call counter used by citation-style formatters to keep source\n // indices unique across multiple invocations within a single turn.\n let callIndex = 0;\n\n // Infer whether the formatter wants citations from the artifact it emits\n // on an empty-chunk format. This keeps the description/behavior aligned\n // without forcing the host to declare `fileCitations` twice.\n const fileCitations = formatter !== plainTextFormatter;\n\n return tool(\n async (rawInput: FileSearchInput) => {\n const { query, target_files } = rawInput;\n\n if (files.length === 0) {\n return [\n 'No files to search. 
Instruct the user to add files for the search.',\n undefined,\n ];\n }\n\n // target_files: case-insensitive substring match, fallback to all\n // files with a warning if the filter excludes everything.\n let filesToQuery: FileSearchFile[] = files;\n if (target_files && target_files.length > 0) {\n const lowerTargets = target_files.map((t) => t.toLowerCase());\n const matched = files.filter((f) =>\n lowerTargets.some((t) => f.filename.toLowerCase().includes(t)),\n );\n if (matched.length === 0) {\n logger?.warn(\n `[file_search] No files matched target_files ${target_files.join(', ')}; falling back to all files`,\n );\n filesToQuery = files;\n } else {\n logger?.info(\n `[file_search] Filtered to ${matched.length}/${files.length} via target_files`,\n );\n filesToQuery = matched;\n }\n }\n\n const authHeaders = getAuthHeaders ? await getAuthHeaders() : undefined;\n\n const queryOne = async (file: FileSearchFile): Promise<RagChunk[]> => {\n const params: RagQueryParams = {\n file_id: file.file_id,\n query,\n k: topK,\n entity_id,\n scope,\n authHeaders,\n timeoutMs: queryTimeoutMs,\n };\n try {\n const chunks = await ragClient.query(params);\n callbacks?.onFileQueried?.(file, chunks.length);\n return chunks;\n } catch (err) {\n const e = err instanceof Error ? err : new Error(String(err));\n logger?.error(\n `[file_search] Query failed for ${file.filename}: ${e.message}`,\n );\n callbacks?.onFileError?.(file, e);\n return [];\n }\n };\n\n // Bounded-concurrency batching. 
Server-side rerankers handle their\n // own concurrency; this protects the HTTP connection pool when the\n // agent has many files.\n const allChunks: RagChunk[] = [];\n for (let i = 0; i < filesToQuery.length; i += concurrencyLimit) {\n const batch = filesToQuery.slice(i, i + concurrencyLimit);\n const batchResults = await Promise.all(batch.map(queryOne));\n for (const chunks of batchResults) allChunks.push(...chunks);\n }\n\n if (allChunks.length === 0) {\n return [\n 'No content found in the files. The files may not have been processed correctly or the query may need refinement.',\n undefined,\n ];\n }\n\n // Build annotated results: attach filename + isCurrentMessage via\n // a file-id lookup (metadata wins, factory list is fallback).\n const fileById = new Map(files.map((f) => [f.file_id, f]));\n const annotated = allChunks.map((c) => {\n const matched = fileById.get(c.file_id);\n const filename =\n (c.metadata?.source\n ? String(c.metadata.source).split(/[/\\\\]/).pop()\n : undefined) ??\n matched?.filename ??\n 'Unknown';\n return {\n ...c,\n filename,\n isCurrentMessage: matched?.isCurrentMessage === true,\n };\n });\n\n // Sort: current-turn files first, then by relevance (lower distance).\n annotated.sort((a, b) => {\n if (a.isCurrentMessage !== b.isCurrentMessage)\n return a.isCurrentMessage ? -1 : 1;\n return a.distance - b.distance;\n });\n\n const cap = resultCap ?? 
Math.max(10, filesToQuery.length * 3);\n const limited = annotated.slice(0, cap);\n\n const { message, artifact } = formatter.format(limited, {\n callIndex,\n files,\n });\n callIndex += 1;\n\n // Suppress unused-variable warning for fileCitations (currently only\n // used to gate description; kept in case formatters need it).\n void fileCitations;\n\n return [message, artifact];\n },\n {\n name: FileSearchToolName,\n responseFormat: 'content_and_artifact',\n description: buildDescription({ fileCitations }),\n schema: fileSearchInputSchema,\n },\n );\n}\n\nexport { FileSearchToolName } from './schema';\n"],"names":["FileSearchToolName","formatter","plainTextFormatter","tool","fileSearchInputSchema"],"mappings":";;;;;;AAAA;;;;;;;;;;;;;AAaG;AAgBH,MAAM,wBAAwB,GAAG,MAAM;AACvC,MAAM,mBAAmB,GAAG,EAAE;AAC9B,MAAM,aAAa,GAAG,EAAE;AAExB;;;;AAIG;AACH,SAAS,gBAAgB,CAAC,IAAgC,EAAA;IACxD,MAAM,IAAI,GAAG,CAAA,8CAAA,EAAiDA,yBAAkB,CAAA;;;;;;yNAMuI;IAEvN,IAAI,CAAC,IAAI,CAAC,aAAa;AAAE,QAAA,OAAO,IAAI;AAEpC,IAAA,OAAO,GAAG,IAAI;;;gdAGgc;AAChd;AAEM,SAAU,oBAAoB,CAClC,MAA4B,EAAA;AAE5B,IAAA,MAAM,EACJ,SAAS,EACT,KAAK,EACL,SAAS,EACT,KAAK,EACL,cAAc,aACdC,WAAS,GAAGC,4BAAkB,EAC9B,cAAc,GAAG,wBAAwB,EACzC,gBAAgB,GAAG,mBAAmB,EACtC,IAAI,GAAG,aAAa,EACpB,SAAS,EACT,SAAS,EACT,MAAM,GACP,GAAG,MAAM;;;IAIV,IAAI,SAAS,GAAG,CAAC;;;;AAKjB,IAAA,MAAM,aAAa,GAAGD,WAAS,KAAKC,4BAAkB;AAEtD,IAAA,OAAOC,UAAI,CACT,OAAO,QAAyB,KAAI;AAClC,QAAA,MAAM,EAAE,KAAK,EAAE,YAAY,EAAE,GAAG,QAAQ;AAExC,QAAA,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;YACtB,OAAO;gBACL,oEAAoE;gBACpE,SAAS;aACV;QACH;;;QAIA,IAAI,YAAY,GAAqB,KAAK;QAC1C,IAAI,YAAY,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3C,YAAA,MAAM,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;AAC7D,YAAA,MAAM,OAAO,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,KAC7B,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAC/D;AACD,YAAA,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;AACxB,gBAAA,MAAM,EAAE,IAAI,CACV,CAAA,4CAAA,EAA+C,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA,2BAAA,CAA6
B,CACpG;gBACD,YAAY,GAAG,KAAK;YACtB;iBAAO;AACL,gBAAA,MAAM,EAAE,IAAI,CACV,CAAA,0BAAA,EAA6B,OAAO,CAAC,MAAM,CAAA,CAAA,EAAI,KAAK,CAAC,MAAM,CAAA,iBAAA,CAAmB,CAC/E;gBACD,YAAY,GAAG,OAAO;YACxB;QACF;AAEA,QAAA,MAAM,WAAW,GAAG,cAAc,GAAG,MAAM,cAAc,EAAE,GAAG,SAAS;AAEvE,QAAA,MAAM,QAAQ,GAAG,OAAO,IAAoB,KAAyB;AACnE,YAAA,MAAM,MAAM,GAAmB;gBAC7B,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,KAAK;AACL,gBAAA,CAAC,EAAE,IAAI;gBACP,SAAS;gBACT,KAAK;gBACL,WAAW;AACX,gBAAA,SAAS,EAAE,cAAc;aAC1B;AACD,YAAA,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC;gBAC5C,SAAS,EAAE,aAAa,GAAG,IAAI,EAAE,MAAM,CAAC,MAAM,CAAC;AAC/C,gBAAA,OAAO,MAAM;YACf;YAAE,OAAO,GAAG,EAAE;gBACZ,MAAM,CAAC,GAAG,GAAG,YAAY,KAAK,GAAG,GAAG,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;AAC7D,gBAAA,MAAM,EAAE,KAAK,CACX,CAAA,+BAAA,EAAkC,IAAI,CAAC,QAAQ,CAAA,EAAA,EAAK,CAAC,CAAC,OAAO,CAAA,CAAE,CAChE;gBACD,SAAS,EAAE,WAAW,GAAG,IAAI,EAAE,CAAC,CAAC;AACjC,gBAAA,OAAO,EAAE;YACX;AACF,QAAA,CAAC;;;;QAKD,MAAM,SAAS,GAAe,EAAE;AAChC,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,IAAI,gBAAgB,EAAE;AAC9D,YAAA,MAAM,KAAK,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,gBAAgB,CAAC;AACzD,YAAA,MAAM,YAAY,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;YAC3D,KAAK,MAAM,MAAM,IAAI,YAAY;AAAE,gBAAA,SAAS,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC;QAC9D;AAEA,QAAA,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC1B,OAAO;gBACL,kHAAkH;gBAClH,SAAS;aACV;QACH;;;QAIA,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;QAC1D,MAAM,SAAS,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;YACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC;AACvC,YAAA,MAAM,QAAQ,GACZ,CAAC,CAAC,CAAC,QAAQ,EAAE;AACX,kBAAE,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG;kBAC5C,SAAS;AACb,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,SAAS;YACX,OAAO;AACL,gBAAA,GAAG,CAAC;gBACJ,QAAQ;AACR,gBAAA,gBAAgB,EAAE,OAAO,EAAE,gBAAgB,KAAK,IAAI;aACrD;AACH,QAAA,CAAC,CAAC;;QAGF,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAI;AACtB,YAAA,IAAI,CAAC,CAAC,gB
AAgB,KAAK,CAAC,CAAC,gBAAgB;AAC3C,gBAAA,OAAO,CAAC,CAAC,gBAAgB,GAAG,EAAE,GAAG,CAAC;AACpC,YAAA,OAAO,CAAC,CAAC,QAAQ,GAAG,CAAC,CAAC,QAAQ;AAChC,QAAA,CAAC,CAAC;AAEF,QAAA,MAAM,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;QAC9D,MAAM,OAAO,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC;QAEvC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAGF,WAAS,CAAC,MAAM,CAAC,OAAO,EAAE;YACtD,SAAS;YACT,KAAK;AACN,SAAA,CAAC;QACF,SAAS,IAAI,CAAC;AAMd,QAAA,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC;AAC5B,IAAA,CAAC,EACD;AACE,QAAA,IAAI,EAAED,yBAAkB;AACxB,QAAA,cAAc,EAAE,sBAAsB;AACtC,QAAA,WAAW,EAAE,gBAAgB,CAAC,EAAE,aAAa,EAAE,CAAC;AAChD,QAAA,MAAM,EAAEI,4BAAqB;AAC9B,KAAA,CACF;AACH;;;;;"}
package/dist/esm/main.mjs CHANGED
@@ -26,11 +26,14 @@ export { handleServerToolResult, handleToolCallChunks, handleToolCalls, toolResu
26
26
  export { createSearchTool } from './tools/search/tool.mjs';
27
27
  export { DATE_RANGE, DEFAULT_COUNTRY_DESCRIPTION, DEFAULT_QUERY_DESCRIPTION, WebSearchToolDefinition, WebSearchToolDescription, WebSearchToolName, WebSearchToolSchema, countrySchema, dateSchema, imagesSchema, newsSchema, querySchema, videosSchema } from './tools/search/schema.mjs';
28
28
  export { buildMemoryTools } from './tools/memory/index.mjs';
29
+ export { createFileSearchTool } from './tools/fileSearch/tool.mjs';
30
+ export { HttpRagClient, RAG_API_URL_ENV, getRagBaseUrl } from './tools/fileSearch/ragClient.mjs';
31
+ export { createCitationAnchorFormatter, plainTextFormatter } from './tools/fileSearch/formatter.mjs';
32
+ export { FileSearchToolName, fileSearchInputSchema } from './tools/fileSearch/schema.mjs';
29
33
  export { buildProxyTool } from './tools/proxyTool.mjs';
30
34
  export { AuthSource, CapabilityKind } from './providers/types.mjs';
31
35
  export { CAPABILITY_NAME_SEPARATOR, formatCapabilityName, parseCapabilityName } from './providers/capabilityNaming.mjs';
32
36
  export { ToolsServerCapabilityProvider } from './providers/tools-server/ToolsServerCapabilityProvider.mjs';
33
- export { CompositeCapabilityProvider } from './providers/composite/CompositeCapabilityProvider.mjs';
34
37
  export { MCPCapabilityProvider, flattenToolCallResponse } from './providers/mcp/MCPCapabilityProvider.mjs';
35
38
  export { createTransport } from './providers/mcp/transport.mjs';
36
39
  export { getMCPEnvDefaults, consoleLogger as mcpConsoleLogger } from './providers/mcp/config.mjs';
@@ -1 +1 @@
1
- {"version":3,"file":"main.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
1
+ {"version":3,"file":"main.mjs","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;"}
@@ -0,0 +1,92 @@
1
+ /**
2
+ * Default result formatters.
3
+ *
4
+ * - `plainTextFormatter`: CLI / A2A / generic output. No citation anchors.
5
+ * - `citationAnchorFormatter`: ranger-style `\ue202turn0fileN` anchors with
6
+ * a monotonic `sourceOffset` so multi-call turns stay globally unique.
7
+ *
8
+ * Runtimes can supply their own `FileSearchResultFormatter` to override.
9
+ */
10
/**
 * Plain-text formatter for CLI / A2A / generic runtimes.
 *
 * Renders each chunk as a File/Page/Relevance/Content record separated by
 * `---`, and attaches a `file_search.sources` artifact. Emits no citation
 * anchors.
 */
const plainTextFormatter = {
    format(chunks, { files: _files }) {
        if (chunks.length === 0) {
            return { message: 'No relevant results found in the available files.' };
        }
        // One human-readable record per chunk.
        const renderChunk = (chunk) => {
            const page = getPage(chunk);
            const relevance = (1 - chunk.distance).toFixed(4);
            const pageLine = page != null ? `\nPage: ${page}` : '';
            return `File: ${chunk.filename}${pageLine}\nRelevance: ${relevance}\nContent: ${chunk.page_content}\n`;
        };
        // One structured source entry per chunk for the artifact payload.
        const toSource = (chunk) => {
            const page = getPage(chunk);
            return {
                type: 'file',
                fileId: chunk.file_id,
                content: chunk.page_content,
                fileName: chunk.filename,
                relevance: 1 - chunk.distance,
                pages: page != null ? [page] : [],
            };
        };
        return {
            message: chunks.map(renderChunk).join('\n---\n'),
            artifact: { file_search: { sources: chunks.map(toSource) } },
        };
    },
};
35
/**
 * Citation-anchor formatter factory (ranger-style output).
 *
 * Every chunk gets a globally unique source index, offset by the host's
 * `getSourceOffset` so multiple tool calls within one turn never reuse an
 * index; `advanceSourceOffset` is invoked with the chunk count after each
 * format. The artifact is keyed by `toolName` and flags `fileCitations`.
 */
function createCitationAnchorFormatter(opts = {}) {
    const toolName = opts.toolName ?? 'file_search';
    const readOffset = opts.getSourceOffset ?? (() => 0);
    const bumpOffset = opts.advanceSourceOffset ?? (() => { });
    return {
        format(chunks) {
            if (chunks.length === 0) {
                return {
                    message: 'No results found or errors occurred while searching the files.',
                };
            }
            const base = readOffset();
            // Render each chunk with its anchor, repeated after the content
            // so the model sees exactly what to cite.
            const records = chunks.map((chunk, i) => {
                const index = base + i;
                const page = getPage(chunk);
                const relevance = (1 - chunk.distance).toFixed(4);
                const pagePart = page != null ? ` | Page: ${page}` : '';
                return (`[Source ${index}] File: ${chunk.filename} | Anchor: \\ue202turn0file${index}` +
                    pagePart +
                    ` | Relevance: ${relevance}\nContent: ${chunk.page_content}\n` +
                    `↑ Cite this source using: \\ue202turn0file${index}`);
            });
            // Structured artifact entries mirror the rendered records.
            const sources = chunks.map((chunk) => {
                const page = getPage(chunk);
                const relevance = 1 - chunk.distance;
                return {
                    type: 'file',
                    fileId: chunk.file_id,
                    content: chunk.page_content,
                    fileName: chunk.filename,
                    relevance,
                    pages: page != null ? [page] : [],
                    pageRelevance: page != null ? { [page]: relevance } : {},
                };
            });
            bumpOffset(chunks.length);
            return {
                message: records.join('\n---\n'),
                artifact: { [toolName]: { sources, fileCitations: true } },
            };
        },
    };
}
77
+ /** Extract a 1-indexed page number from the chunk metadata, or null. */
78
/**
 * Extract a 1-indexed page number from a chunk's metadata, or null.
 *
 * Reads `metadata.page` first, then `metadata.page_number`; accepts numbers
 * or numeric strings. Returns null when the value is absent, unparseable,
 * or negative. Stored values are 0-indexed (rag_api convention), so the
 * result is shifted by one for display.
 */
function getPage(chunk) {
    const stored = chunk.metadata?.page ?? chunk.metadata?.page_number ?? null;
    if (stored == null)
        return null;
    const numeric = typeof stored === 'number' ? stored : Number.parseInt(String(stored), 10);
    // rag_api stores pages 0-indexed; humans read them 1-indexed.
    return Number.isNaN(numeric) || numeric < 0 ? null : numeric + 1;
}
90
+
91
+ export { createCitationAnchorFormatter, plainTextFormatter };
92
+ //# sourceMappingURL=formatter.mjs.map