@illuma-ai/agents 1.4.0-alpha.1 → 1.4.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/dist/cjs/main.cjs +20 -8
  2. package/dist/cjs/main.cjs.map +1 -1
  3. package/dist/cjs/tools/fileSearch/formatter.cjs +95 -0
  4. package/dist/cjs/tools/fileSearch/formatter.cjs.map +1 -0
  5. package/dist/cjs/tools/fileSearch/ragClient.cjs +104 -0
  6. package/dist/cjs/tools/fileSearch/ragClient.cjs.map +1 -0
  7. package/dist/cjs/tools/fileSearch/schema.cjs +18 -0
  8. package/dist/cjs/tools/fileSearch/schema.cjs.map +1 -0
  9. package/dist/cjs/tools/fileSearch/tool.cjs +155 -0
  10. package/dist/cjs/tools/fileSearch/tool.cjs.map +1 -0
  11. package/dist/esm/main.mjs +4 -0
  12. package/dist/esm/main.mjs.map +1 -1
  13. package/dist/esm/tools/fileSearch/formatter.mjs +92 -0
  14. package/dist/esm/tools/fileSearch/formatter.mjs.map +1 -0
  15. package/dist/esm/tools/fileSearch/ragClient.mjs +100 -0
  16. package/dist/esm/tools/fileSearch/ragClient.mjs.map +1 -0
  17. package/dist/esm/tools/fileSearch/schema.mjs +15 -0
  18. package/dist/esm/tools/fileSearch/schema.mjs.map +1 -0
  19. package/dist/esm/tools/fileSearch/tool.mjs +152 -0
  20. package/dist/esm/tools/fileSearch/tool.mjs.map +1 -0
  21. package/dist/types/index.d.ts +1 -0
  22. package/dist/types/tools/fileSearch/formatter.d.ts +25 -0
  23. package/dist/types/tools/fileSearch/index.d.ts +5 -0
  24. package/dist/types/tools/fileSearch/ragClient.d.ts +32 -0
  25. package/dist/types/tools/fileSearch/schema.d.ts +13 -0
  26. package/dist/types/tools/fileSearch/tool.d.ts +18 -0
  27. package/dist/types/tools/fileSearch/types.d.ts +139 -0
  28. package/package.json +1 -1
  29. package/src/index.ts +1 -0
  30. package/src/tools/fileSearch/__tests__/tool.test.ts +251 -0
  31. package/src/tools/fileSearch/formatter.ts +131 -0
  32. package/src/tools/fileSearch/index.ts +23 -0
  33. package/src/tools/fileSearch/ragClient.ts +141 -0
  34. package/src/tools/fileSearch/schema.ts +19 -0
  35. package/src/tools/fileSearch/tool.ts +207 -0
  36. package/src/tools/fileSearch/types.ts +147 -0
@@ -0,0 +1 @@
1
+ {"version":3,"file":"formatter.mjs","sources":["../../../../src/tools/fileSearch/formatter.ts"],"sourcesContent":["/**\n * Default result formatters.\n *\n * - `plainTextFormatter`: CLI / A2A / generic output. No citation anchors.\n * - `citationAnchorFormatter`: ranger-style `\\ue202turn0fileN` anchors with\n * a monotonic `sourceOffset` so multi-call turns stay globally unique.\n *\n * Runtimes can supply their own `FileSearchResultFormatter` to override.\n */\n\nimport type {\n FileSearchResultFormatter,\n FileSearchFile,\n RagChunk,\n} from './types';\n\ntype AnnotatedChunk = RagChunk & {\n filename: string;\n isCurrentMessage: boolean;\n};\n\nexport const plainTextFormatter: FileSearchResultFormatter = {\n format(chunks, { files: _files }) {\n if (chunks.length === 0) {\n return { message: 'No relevant results found in the available files.' };\n }\n const body = chunks\n .map((c) => {\n const page = getPage(c);\n const rel = (1 - c.distance).toFixed(4);\n return (\n `File: ${c.filename}` +\n (page != null ? `\\nPage: ${page}` : '') +\n `\\nRelevance: ${rel}\\nContent: ${c.page_content}\\n`\n );\n })\n .join('\\n---\\n');\n\n const sources = chunks.map((c) => ({\n type: 'file' as const,\n fileId: c.file_id,\n content: c.page_content,\n fileName: c.filename,\n relevance: 1 - c.distance,\n pages: getPage(c) != null ? [getPage(c) as number] : [],\n }));\n\n return { message: body, artifact: { file_search: { sources } } };\n },\n};\n\nexport interface CitationAnchorFormatterOptions {\n /** Tool name used in the `file_search` artifact wrapper. Defaults to `'file_search'`. */\n toolName?: string;\n /**\n * Monotonic counter for source indices within a turn. Pass the SAME\n * function to the formatter across multiple calls in the same turn so\n * anchors stay globally unique.\n */\n getSourceOffset?: () => number;\n /** Called after formatting to advance the offset. 
*/\n advanceSourceOffset?: (by: number) => void;\n}\n\nexport function createCitationAnchorFormatter(\n opts: CitationAnchorFormatterOptions = {},\n): FileSearchResultFormatter {\n const toolName = opts.toolName ?? 'file_search';\n const getOffset = opts.getSourceOffset ?? (() => 0);\n const advance = opts.advanceSourceOffset ?? (() => {});\n\n return {\n format(chunks) {\n if (chunks.length === 0) {\n return {\n message:\n 'No results found or errors occurred while searching the files.',\n };\n }\n const base = getOffset();\n const body = chunks\n .map((c, i) => {\n const globalIndex = base + i;\n const page = getPage(c);\n const rel = (1 - c.distance).toFixed(4);\n return (\n `[Source ${globalIndex}] File: ${c.filename} | Anchor: \\\\ue202turn0file${globalIndex}` +\n (page != null ? ` | Page: ${page}` : '') +\n ` | Relevance: ${rel}\\nContent: ${c.page_content}\\n` +\n `↑ Cite this source using: \\\\ue202turn0file${globalIndex}`\n );\n })\n .join('\\n---\\n');\n\n const sources = chunks.map((c) => ({\n type: 'file' as const,\n fileId: c.file_id,\n content: c.page_content,\n fileName: c.filename,\n relevance: 1 - c.distance,\n pages: getPage(c) != null ? [getPage(c) as number] : [],\n pageRelevance:\n getPage(c) != null\n ? { [getPage(c) as number]: 1 - c.distance }\n : {},\n }));\n\n advance(chunks.length);\n return {\n message: body,\n artifact: { [toolName]: { sources, fileCitations: true } },\n };\n },\n };\n}\n\n/** Extract a 1-indexed page number from the chunk metadata, or null. */\nfunction getPage(chunk: AnnotatedChunk | RagChunk): number | null {\n const raw =\n (chunk.metadata?.page as unknown) ??\n (chunk.metadata?.page_number as unknown) ??\n null;\n if (raw == null) return null;\n const parsed = typeof raw === 'number' ? 
raw : parseInt(String(raw), 10);\n if (Number.isNaN(parsed) || parsed < 0) return null;\n // rag_api stores 0-indexed; display is 1-indexed\n return parsed + 1;\n}\n\n// Re-export so consumers only import from the formatter module.\nexport type { FileSearchResultFormatter, FileSearchFile, RagChunk };\n"],"names":[],"mappings":"AAAA;;;;;;;;AAQG;AAaI,MAAM,kBAAkB,GAA8B;AAC3D,IAAA,MAAM,CAAC,MAAM,EAAE,EAAE,KAAK,EAAE,MAAM,EAAE,EAAA;AAC9B,QAAA,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;AACvB,YAAA,OAAO,EAAE,OAAO,EAAE,mDAAmD,EAAE;QACzE;QACA,MAAM,IAAI,GAAG;AACV,aAAA,GAAG,CAAC,CAAC,CAAC,KAAI;AACT,YAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC;AACvB,YAAA,MAAM,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;AACvC,YAAA,QACE,CAAA,MAAA,EAAS,CAAC,CAAC,QAAQ,CAAA,CAAE;AACrB,iBAAC,IAAI,IAAI,IAAI,GAAG,CAAA,QAAA,EAAW,IAAI,CAAA,CAAE,GAAG,EAAE,CAAC;AACvC,gBAAA,CAAA,aAAA,EAAgB,GAAG,CAAA,WAAA,EAAc,CAAC,CAAC,YAAY,CAAA,EAAA,CAAI;AAEvD,QAAA,CAAC;aACA,IAAI,CAAC,SAAS,CAAC;QAElB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AACjC,YAAA,IAAI,EAAE,MAAe;YACrB,MAAM,EAAE,CAAC,CAAC,OAAO;YACjB,OAAO,EAAE,CAAC,CAAC,YAAY;YACvB,QAAQ,EAAE,CAAC,CAAC,QAAQ;AACpB,YAAA,SAAS,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ;AACzB,YAAA,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAW,CAAC,GAAG,EAAE;AACxD,SAAA,CAAC,CAAC;AAEH,QAAA,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,EAAE,WAAW,EAAE,EAAE,OAAO,EAAE,EAAE,EAAE;IAClE,CAAC;;AAgBG,SAAU,6BAA6B,CAC3C,IAAA,GAAuC,EAAE,EAAA;AAEzC,IAAA,MAAM,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,aAAa;AAC/C,IAAA,MAAM,SAAS,GAAG,IAAI,CAAC,eAAe,KAAK,MAAM,CAAC,CAAC;AACnD,IAAA,MAAM,OAAO,GAAG,IAAI,CAAC,mBAAmB,KAAK,MAAK,EAAE,CAAC,CAAC;IAEtD,OAAO;AACL,QAAA,MAAM,CAAC,MAAM,EAAA;AACX,YAAA,IAAI,MAAM,CAAC,MAAM,KAAK,CAAC,EAAE;gBACvB,OAAO;AACL,oBAAA,OAAO,EACL,gEAAgE;iBACnE;YACH;AACA,YAAA,MAAM,IAAI,GAAG,SAAS,EAAE;YACxB,MAAM,IAAI,GAAG;AACV,iBAAA,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,KAAI;AACZ,gBAAA,MAAM,WAAW,GAAG,IAAI,GAAG,CAAC;AAC5B,gBAAA,MAAM,IAAI,GAAG,OAAO,CAAC,CAAC,CAAC;AACvB,gBAAA,MAAM,GAAG,GAAG,CAAC,CAAC,GAAG,CAAC,CA
AC,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAC;gBACvC,QACE,WAAW,WAAW,CAAA,QAAA,EAAW,CAAC,CAAC,QAAQ,CAAA,2BAAA,EAA8B,WAAW,CAAA,CAAE;AACtF,qBAAC,IAAI,IAAI,IAAI,GAAG,CAAA,SAAA,EAAY,IAAI,CAAA,CAAE,GAAG,EAAE,CAAC;AACxC,oBAAA,CAAA,cAAA,EAAiB,GAAG,CAAA,WAAA,EAAc,CAAC,CAAC,YAAY,CAAA,EAAA,CAAI;oBACpD,CAAA,0CAAA,EAA6C,WAAW,CAAA,CAAE;AAE9D,YAAA,CAAC;iBACA,IAAI,CAAC,SAAS,CAAC;YAElB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,MAAM;AACjC,gBAAA,IAAI,EAAE,MAAe;gBACrB,MAAM,EAAE,CAAC,CAAC,OAAO;gBACjB,OAAO,EAAE,CAAC,CAAC,YAAY;gBACvB,QAAQ,EAAE,CAAC,CAAC,QAAQ;AACpB,gBAAA,SAAS,EAAE,CAAC,GAAG,CAAC,CAAC,QAAQ;AACzB,gBAAA,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC,IAAI,IAAI,GAAG,CAAC,OAAO,CAAC,CAAC,CAAW,CAAC,GAAG,EAAE;AACvD,gBAAA,aAAa,EACX,OAAO,CAAC,CAAC,CAAC,IAAI;AACZ,sBAAE,EAAE,CAAC,OAAO,CAAC,CAAC,CAAW,GAAG,CAAC,GAAG,CAAC,CAAC,QAAQ;AAC1C,sBAAE,EAAE;AACT,aAAA,CAAC,CAAC;AAEH,YAAA,OAAO,CAAC,MAAM,CAAC,MAAM,CAAC;YACtB,OAAO;AACL,gBAAA,OAAO,EAAE,IAAI;AACb,gBAAA,QAAQ,EAAE,EAAE,CAAC,QAAQ,GAAG,EAAE,OAAO,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE;aAC3D;QACH,CAAC;KACF;AACH;AAEA;AACA,SAAS,OAAO,CAAC,KAAgC,EAAA;AAC/C,IAAA,MAAM,GAAG,GACN,KAAK,CAAC,QAAQ,EAAE,IAAgB;QAChC,KAAK,CAAC,QAAQ,EAAE,WAAuB;AACxC,QAAA,IAAI;IACN,IAAI,GAAG,IAAI,IAAI;AAAE,QAAA,OAAO,IAAI;IAC5B,MAAM,MAAM,GAAG,OAAO,GAAG,KAAK,QAAQ,GAAG,GAAG,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,CAAC,EAAE,EAAE,CAAC;IACxE,IAAI,MAAM,CAAC,KAAK,CAAC,MAAM,CAAC,IAAI,MAAM,GAAG,CAAC;AAAE,QAAA,OAAO,IAAI;;IAEnD,OAAO,MAAM,GAAG,CAAC;AACnB;;;;"}
@@ -0,0 +1,100 @@
1
+ import fetch from 'node-fetch';
2
+ import { getEnvironmentVariable } from '@langchain/core/utils/env';
3
+
4
+ /**
5
+ * Default HTTP RAG client. Posts to `${baseUrl}/query` with the shape
6
+ * rag_api expects (`{ file_id, query, k, entity_id? }`). Runtimes that
7
+ * use a different vector backend implement their own `RagClient`.
8
+ *
9
+ * Auth is runtime-provided per call (via `authHeaders` on the params) so
10
+ * short-lived tokens can be minted per request without the client
11
+ * caching stale credentials.
12
+ */
13
+ const RAG_API_URL_ENV = 'RAG_API_URL';
14
+ /** Resolve base URL at call time so env-var changes propagate. */
15
+ function getRagBaseUrl(override) {
16
+ const url = override ??
17
+ getEnvironmentVariable(RAG_API_URL_ENV) ??
18
+ '';
19
+ if (!url) {
20
+ throw new Error(`file_search: ${RAG_API_URL_ENV} is not configured. ` +
21
+ `Set the env var or pass baseUrl to HttpRagClient.`);
22
+ }
23
+ return url.replace(/\/$/, '');
24
+ }
25
/**
 * Default HTTP implementation of the RagClient contract.
 *
 * POSTs `{ file_id, query, k, entity_id?, scope? }` to `${baseUrl}/query`
 * and normalizes the service's `[doc, distance]` tuple response into the
 * library's chunk shape. Auth headers arrive per call (`params.authHeaders`)
 * so short-lived tokens can be minted per request without this client
 * caching stale credentials.
 */
class HttpRagClient {
    baseUrlOverride;
    defaultHeaders;
    defaultTimeoutMs;
    logger;
    /**
     * @param {object} [opts] - Optional `baseUrl`, `defaultHeaders`,
     *   `defaultTimeoutMs` (defaults to 15s), and `logger`.
     */
    constructor(opts = {}) {
        this.baseUrlOverride = opts.baseUrl;
        this.defaultHeaders = opts.defaultHeaders ?? {};
        this.defaultTimeoutMs = opts.defaultTimeoutMs ?? 15_000;
        this.logger = opts.logger;
    }
    /**
     * Run one semantic query against a single file.
     * Throws on non-2xx responses; aborts the request after the timeout.
     */
    async query(params) {
        const url = `${getRagBaseUrl(this.baseUrlOverride)}/query`;
        // Request payload; optional fields are only included when present.
        const payload = {
            file_id: params.file_id,
            query: params.query,
            k: params.k ?? 10,
        };
        if (params.entity_id) {
            payload.entity_id = params.entity_id;
        }
        if (params.scope) {
            payload.scope = params.scope;
        }
        // Per-call auth headers win over instance defaults.
        const headers = Object.assign({ 'Content-Type': 'application/json' }, this.defaultHeaders, params.authHeaders ?? {});
        const timeoutMs = params.timeoutMs ?? this.defaultTimeoutMs;
        // Guarded: some runtimes may not expose AbortController.
        let controller = null;
        let timer = null;
        if (typeof AbortController !== 'undefined') {
            controller = new AbortController();
            timer = setTimeout(() => controller.abort(), timeoutMs);
        }
        this.logger?.debug('[file_search] RAG query', {
            url,
            file_id: params.file_id,
            k: payload.k,
        });
        try {
            const res = await fetch(url, {
                method: 'POST',
                headers,
                body: JSON.stringify(payload),
                signal: controller?.signal,
            });
            if (!res.ok) {
                // Best-effort body snippet for diagnostics; never throw on read.
                const text = await res.text().catch(() => '');
                throw new Error(`RAG query failed: ${res.status} ${res.statusText} — ${text.slice(0, 200)}`);
            }
            const json = await res.json();
            return this.normalize(params.file_id, json);
        }
        finally {
            if (timer)
                clearTimeout(timer);
        }
    }
    /**
     * Convert rag_api's `[doc, distance]` tuple format into the library's
     * normalized shape. Malformed rows are skipped; a non-array response is
     * logged and treated as empty.
     */
    normalize(file_id, resp) {
        if (!Array.isArray(resp)) {
            this.logger?.warn('[file_search] RAG response not an array', { resp });
            return [];
        }
        const chunks = [];
        for (const row of resp) {
            if (!Array.isArray(row) || row.length !== 2)
                continue;
            const [doc, distance] = row;
            chunks.push({
                // Prefer the id embedded in metadata; fall back to the queried id.
                file_id: doc?.metadata?.file_id ?? file_id,
                page_content: doc?.page_content ?? '',
                distance: typeof distance === 'number' ? distance : 1,
                metadata: doc?.metadata,
            });
        }
        return chunks;
    }
}
98
+
99
+ export { HttpRagClient, RAG_API_URL_ENV, getRagBaseUrl };
100
+ //# sourceMappingURL=ragClient.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"ragClient.mjs","sources":["../../../../src/tools/fileSearch/ragClient.ts"],"sourcesContent":["/**\n * Default HTTP RAG client. Posts to `${baseUrl}/query` with the shape\n * rag_api expects (`{ file_id, query, k, entity_id? }`). Runtimes that\n * use a different vector backend implement their own `RagClient`.\n *\n * Auth is runtime-provided per call (via `authHeaders` on the params) so\n * short-lived tokens can be minted per request without the client\n * caching stale credentials.\n */\n\nimport fetch from 'node-fetch';\nimport { getEnvironmentVariable } from '@langchain/core/utils/env';\nimport type {\n RagClient,\n RagQueryParams,\n RagChunk,\n FileSearchToolLogger,\n} from './types';\n\nexport const RAG_API_URL_ENV = 'RAG_API_URL';\n\n/** Resolve base URL at call time so env-var changes propagate. */\nexport function getRagBaseUrl(override?: string): string {\n const url =\n override ??\n getEnvironmentVariable(RAG_API_URL_ENV) ??\n '';\n if (!url) {\n throw new Error(\n `file_search: ${RAG_API_URL_ENV} is not configured. ` +\n `Set the env var or pass baseUrl to HttpRagClient.`,\n );\n }\n return url.replace(/\\/$/, '');\n}\n\nexport interface HttpRagClientOptions {\n /** Base URL of the RAG service (no trailing slash). Falls back to env. */\n baseUrl?: string;\n /** Default headers sent on every request (e.g., a static API key). */\n defaultHeaders?: Record<string, string>;\n /** Default timeout if params don't override. Default 15_000. */\n defaultTimeoutMs?: number;\n logger?: FileSearchToolLogger;\n}\n\n/**\n * Expected rag_api response shape: `[[{ page_content, metadata }, distance], ...]`\n * — an array of [doc, score] tuples. 
Normalized here into `RagChunk[]`.\n */\ntype RagApiResponse = Array<\n [\n {\n page_content: string;\n metadata?: Record<string, unknown>;\n },\n number,\n ]\n>;\n\nexport class HttpRagClient implements RagClient {\n private readonly baseUrlOverride?: string;\n private readonly defaultHeaders: Record<string, string>;\n private readonly defaultTimeoutMs: number;\n private readonly logger?: FileSearchToolLogger;\n\n constructor(opts: HttpRagClientOptions = {}) {\n this.baseUrlOverride = opts.baseUrl;\n this.defaultHeaders = opts.defaultHeaders ?? {};\n this.defaultTimeoutMs = opts.defaultTimeoutMs ?? 15_000;\n this.logger = opts.logger;\n }\n\n async query(params: RagQueryParams): Promise<RagChunk[]> {\n const baseUrl = getRagBaseUrl(this.baseUrlOverride);\n const url = `${baseUrl}/query`;\n\n const body: Record<string, unknown> = {\n file_id: params.file_id,\n query: params.query,\n k: params.k ?? 10,\n };\n if (params.entity_id) body.entity_id = params.entity_id;\n if (params.scope) body.scope = params.scope;\n\n const headers: Record<string, string> = {\n 'Content-Type': 'application/json',\n ...this.defaultHeaders,\n ...(params.authHeaders ?? {}),\n };\n\n const timeoutMs = params.timeoutMs ?? this.defaultTimeoutMs;\n const controller =\n typeof AbortController !== 'undefined' ? new AbortController() : null;\n const timer = controller\n ? 
setTimeout(() => controller.abort(), timeoutMs)\n : null;\n\n this.logger?.debug('[file_search] RAG query', {\n url,\n file_id: params.file_id,\n k: body.k,\n });\n\n try {\n const res = await fetch(url, {\n method: 'POST',\n headers,\n body: JSON.stringify(body),\n signal: controller?.signal as unknown as undefined,\n });\n if (!res.ok) {\n const text = await res.text().catch(() => '');\n throw new Error(\n `RAG query failed: ${res.status} ${res.statusText} — ${text.slice(0, 200)}`,\n );\n }\n const json = (await res.json()) as RagApiResponse;\n return this.normalize(params.file_id, json);\n } finally {\n if (timer) clearTimeout(timer);\n }\n }\n\n /** Convert rag_api's tuple format into the library's normalized shape. */\n private normalize(file_id: string, resp: RagApiResponse): RagChunk[] {\n if (!Array.isArray(resp)) {\n this.logger?.warn('[file_search] RAG response not an array', { resp });\n return [];\n }\n return resp\n .filter((row) => Array.isArray(row) && row.length === 2)\n .map(([doc, distance]) => ({\n file_id:\n (doc?.metadata?.file_id as string | undefined) ?? file_id,\n page_content: doc?.page_content ?? '',\n distance: typeof distance === 'number' ? 
distance : 1,\n metadata: doc?.metadata,\n }));\n }\n}\n"],"names":[],"mappings":";;;AAAA;;;;;;;;AAQG;AAWI,MAAM,eAAe,GAAG;AAE/B;AACM,SAAU,aAAa,CAAC,QAAiB,EAAA;IAC7C,MAAM,GAAG,GACP,QAAQ;QACR,sBAAsB,CAAC,eAAe,CAAC;AACvC,QAAA,EAAE;IACJ,IAAI,CAAC,GAAG,EAAE;AACR,QAAA,MAAM,IAAI,KAAK,CACb,CAAA,aAAA,EAAgB,eAAe,CAAA,oBAAA,CAAsB;AACnD,YAAA,CAAA,iDAAA,CAAmD,CACtD;IACH;IACA,OAAO,GAAG,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC;AAC/B;MA0Ba,aAAa,CAAA;AACP,IAAA,eAAe;AACf,IAAA,cAAc;AACd,IAAA,gBAAgB;AAChB,IAAA,MAAM;AAEvB,IAAA,WAAA,CAAY,OAA6B,EAAE,EAAA;AACzC,QAAA,IAAI,CAAC,eAAe,GAAG,IAAI,CAAC,OAAO;QACnC,IAAI,CAAC,cAAc,GAAG,IAAI,CAAC,cAAc,IAAI,EAAE;QAC/C,IAAI,CAAC,gBAAgB,GAAG,IAAI,CAAC,gBAAgB,IAAI,MAAM;AACvD,QAAA,IAAI,CAAC,MAAM,GAAG,IAAI,CAAC,MAAM;IAC3B;IAEA,MAAM,KAAK,CAAC,MAAsB,EAAA;QAChC,MAAM,OAAO,GAAG,aAAa,CAAC,IAAI,CAAC,eAAe,CAAC;AACnD,QAAA,MAAM,GAAG,GAAG,CAAA,EAAG,OAAO,QAAQ;AAE9B,QAAA,MAAM,IAAI,GAA4B;YACpC,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,KAAK,EAAE,MAAM,CAAC,KAAK;AACnB,YAAA,CAAC,EAAE,MAAM,CAAC,CAAC,IAAI,EAAE;SAClB;QACD,IAAI,MAAM,CAAC,SAAS;AAAE,YAAA,IAAI,CAAC,SAAS,GAAG,MAAM,CAAC,SAAS;QACvD,IAAI,MAAM,CAAC,KAAK;AAAE,YAAA,IAAI,CAAC,KAAK,GAAG,MAAM,CAAC,KAAK;AAE3C,QAAA,MAAM,OAAO,GAA2B;AACtC,YAAA,cAAc,EAAE,kBAAkB;YAClC,GAAG,IAAI,CAAC,cAAc;AACtB,YAAA,IAAI,MAAM,CAAC,WAAW,IAAI,EAAE,CAAC;SAC9B;QAED,MAAM,SAAS,GAAG,MAAM,CAAC,SAAS,IAAI,IAAI,CAAC,gBAAgB;AAC3D,QAAA,MAAM,UAAU,GACd,OAAO,eAAe,KAAK,WAAW,GAAG,IAAI,eAAe,EAAE,GAAG,IAAI;QACvE,MAAM,KAAK,GAAG;AACZ,cAAE,UAAU,CAAC,MAAM,UAAU,CAAC,KAAK,EAAE,EAAE,SAAS;cAC9C,IAAI;AAER,QAAA,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,yBAAyB,EAAE;YAC5C,GAAG;YACH,OAAO,EAAE,MAAM,CAAC,OAAO;YACvB,CAAC,EAAE,IAAI,CAAC,CAAC;AACV,SAAA,CAAC;AAEF,QAAA,IAAI;AACF,YAAA,MAAM,GAAG,GAAG,MAAM,KAAK,CAAC,GAAG,EAAE;AAC3B,gBAAA,MAAM,EAAE,MAAM;gBACd,OAAO;AACP,gBAAA,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC;gBAC1B,MAAM,EAAE,UAAU,EAAE,MAA8B;AACnD,aAAA,CAAC;AACF,YAAA,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE;AACX,gBAAA,MAAM,IAAI,GAAG,MAAM,GAAG,CAAC,IAAI,EAAE,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;gBAC7C,MAAM,IAAI,KAAK,CACb,CAAA,kBAAA,
EAAqB,GAAG,CAAC,MAAM,CAAA,CAAA,EAAI,GAAG,CAAC,UAAU,MAAM,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC,CAAA,CAAE,CAC5E;YACH;YACA,MAAM,IAAI,IAAI,MAAM,GAAG,CAAC,IAAI,EAAE,CAAmB;YACjD,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,OAAO,EAAE,IAAI,CAAC;QAC7C;gBAAU;AACR,YAAA,IAAI,KAAK;gBAAE,YAAY,CAAC,KAAK,CAAC;QAChC;IACF;;IAGQ,SAAS,CAAC,OAAe,EAAE,IAAoB,EAAA;QACrD,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE;YACxB,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,yCAAyC,EAAE,EAAE,IAAI,EAAE,CAAC;AACtE,YAAA,OAAO,EAAE;QACX;AACA,QAAA,OAAO;AACJ,aAAA,MAAM,CAAC,CAAC,GAAG,KAAK,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,GAAG,CAAC,MAAM,KAAK,CAAC;aACtD,GAAG,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC,MAAM;AACzB,YAAA,OAAO,EACJ,GAAG,EAAE,QAAQ,EAAE,OAA8B,IAAI,OAAO;AAC3D,YAAA,YAAY,EAAE,GAAG,EAAE,YAAY,IAAI,EAAE;AACrC,YAAA,QAAQ,EAAE,OAAO,QAAQ,KAAK,QAAQ,GAAG,QAAQ,GAAG,CAAC;YACrD,QAAQ,EAAE,GAAG,EAAE,QAAQ;AACxB,SAAA,CAAC,CAAC;IACP;AACD;;;;"}
@@ -0,0 +1,15 @@
1
+ import { z } from 'zod';
2
+
3
// Zod schema for the LLM-facing input of the file_search tool.
// `query` drives the semantic search; `target_files` optionally narrows
// which attached files are queried (substring matching is applied by the
// tool implementation).
const fileSearchInputSchema = z.object({
    query: z
        .string()
        .describe("A natural language query to search for relevant information in the files. Be SPECIFIC and TARGETED — use keywords for the specific section or topic you need. For comprehensive tasks (summaries, overviews), call this tool multiple times with different targeted queries (e.g., 'introduction', 'methodology', 'results', 'conclusions') rather than one broad query."),
    target_files: z
        .array(z.string())
        .optional()
        .describe('Optional list of filenames (or partial names) to limit the search to. When provided, only files whose name contains one of these strings will be searched. Use this to avoid searching irrelevant files. Omit to search all available files.'),
});
// Registered tool name; formatters also default their artifact key to this.
const FileSearchToolName = 'file_search';
13
+
14
+ export { FileSearchToolName, fileSearchInputSchema };
15
+ //# sourceMappingURL=schema.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"schema.mjs","sources":["../../../../src/tools/fileSearch/schema.ts"],"sourcesContent":["import { z } from 'zod';\n\nexport const fileSearchInputSchema = z.object({\n query: z\n .string()\n .describe(\n \"A natural language query to search for relevant information in the files. Be SPECIFIC and TARGETED — use keywords for the specific section or topic you need. For comprehensive tasks (summaries, overviews), call this tool multiple times with different targeted queries (e.g., 'introduction', 'methodology', 'results', 'conclusions') rather than one broad query.\",\n ),\n target_files: z\n .array(z.string())\n .optional()\n .describe(\n 'Optional list of filenames (or partial names) to limit the search to. When provided, only files whose name contains one of these strings will be searched. Use this to avoid searching irrelevant files. Omit to search all available files.',\n ),\n});\n\nexport type FileSearchInput = z.infer<typeof fileSearchInputSchema>;\n\nexport const FileSearchToolName = 'file_search';\n"],"names":[],"mappings":";;AAEO,MAAM,qBAAqB,GAAG,CAAC,CAAC,MAAM,CAAC;AAC5C,IAAA,KAAK,EAAE;AACJ,SAAA,MAAM;SACN,QAAQ,CACP,0WAA0W,CAC3W;AACH,IAAA,YAAY,EAAE;AACX,SAAA,KAAK,CAAC,CAAC,CAAC,MAAM,EAAE;AAChB,SAAA,QAAQ;SACR,QAAQ,CACP,8OAA8O,CAC/O;AACJ,CAAA;AAIM,MAAM,kBAAkB,GAAG;;;;"}
@@ -0,0 +1,152 @@
1
+ import { tool } from '@langchain/core/tools';
2
+ import { fileSearchInputSchema, FileSearchToolName } from './schema.mjs';
3
+ import { plainTextFormatter } from './formatter.mjs';
4
+
5
+ /**
6
+ * file_search tool factory — library-native equivalent of the CodeExecutor
7
+ * pattern. Runtimes supply a `RagClient`, the file list for this turn, and
8
+ * an optional formatter (ranger uses citation anchors; CLI/A2A use plain
9
+ * text).
10
+ *
11
+ * The tool itself:
12
+ * 1. Accepts `{ query, target_files? }` from the LLM.
13
+ * 2. Filters files by `target_files` substring match when provided.
14
+ * 3. Queries each file in bounded concurrent batches.
15
+ * 4. Enforces per-file timeouts (failures isolated per file).
16
+ * 5. Flattens chunks, deprioritizes stale-turn files, caps results.
17
+ * 6. Hands formatted output to the runtime's formatter for final shape.
18
+ */
19
// Per-file RAG query timeout (ms) when config.queryTimeoutMs is not set.
const DEFAULT_QUERY_TIMEOUT_MS = 15_000;
// Files queried per concurrent batch when config.concurrencyLimit is not set.
const DEFAULT_CONCURRENCY = 10;
// Chunks requested per file (`k`) when config.topK is not set.
const DEFAULT_TOP_K = 10;
/**
 * Build the tool description shown to the LLM.
 *
 * Runtimes that use citation anchors pass `fileCitations: true`; the
 * description then appends the mandatory citation ruleset. Plain-text
 * runtimes receive only the core description.
 *
 * @param {{ fileCitations: boolean }} opts
 * @returns {string} The complete description string.
 */
function buildDescription(opts) {
    const core = `Performs semantic search across the attached "${FileSearchToolName}" documents using natural language queries. Analyzes the content of loaded files to find relevant information, quotes, and passages matching the query.

**Use target_files to narrow the search:**
When you know which file(s) contain the relevant information, ALWAYS pass target_files. This is faster and returns more focused results. Pass partial filenames — they match via substring.

**Multiple searches for thorough analysis:**
For summaries/overviews, call this tool MULTIPLE times with DIFFERENT queries targeting different aspects (intro, methodology, results, conclusions). A single search only returns chunks from one part of the document.`;
    if (!opts.fileCitations)
        return core;
    // Citation-anchor runtimes additionally get the anchor-format rules.
    return `${core}

**CITING FILE SEARCH RESULTS — MANDATORY:**
Cite EVERY statement derived from file content. Place the citation anchor IMMEDIATELY after each paragraph using that source. Each search result has a unique source index — use DIFFERENT indices for different claims; do not reuse the same anchor for all paragraphs. Format: \`\\ue202turn0fileN\`. With a page: include \`(p. N)\` inline. Multiple sources: \`\\ue200\\ue202turn0file0\\ue202turn0file1\\ue201\`. NEVER substitute with footnotes, brackets, or symbols.`;
}
42
/**
 * Factory for the `file_search` tool.
 *
 * @param config - Host-supplied wiring: `ragClient` (vector backend), `files`
 *   (this turn's attachments), optional `formatter`, per-query limits
 *   (`queryTimeoutMs`, `concurrencyLimit`, `topK`, `resultCap`), optional
 *   `getAuthHeaders`, and observability hooks (`callbacks`, `logger`).
 * @returns A structured tool whose handler yields a `[message, artifact]`
 *   tuple (responseFormat 'content_and_artifact').
 */
function createFileSearchTool(config) {
    const { ragClient, files, entity_id, scope, getAuthHeaders, formatter = plainTextFormatter, queryTimeoutMs = DEFAULT_QUERY_TIMEOUT_MS, concurrencyLimit = DEFAULT_CONCURRENCY, topK = DEFAULT_TOP_K, resultCap, callbacks, logger, } = config;
    // Monotonic call counter used by citation-style formatters to keep source
    // indices unique across multiple invocations within a single turn.
    let callIndex = 0;
    // NOTE(review): citation mode is decided by identity comparison — ANY
    // formatter other than the default plainTextFormatter gets the citation
    // ruleset in the description. A custom plain-text formatter would still
    // receive citation instructions; confirm this is intended.
    const fileCitations = formatter !== plainTextFormatter;
    return tool(async (rawInput) => {
        const { query, target_files } = rawInput;
        // No attachments at all: short-circuit with guidance, no artifact.
        if (files.length === 0) {
            return [
                'No files to search. Instruct the user to add files for the search.',
                undefined,
            ];
        }
        // target_files: case-insensitive substring match. If the filter
        // excludes every file, warn and fall back to searching all files.
        let filesToQuery = files;
        if (target_files && target_files.length > 0) {
            const lowerTargets = target_files.map((t) => t.toLowerCase());
            const matched = files.filter((f) => lowerTargets.some((t) => f.filename.toLowerCase().includes(t)));
            if (matched.length === 0) {
                logger?.warn(`[file_search] No files matched target_files ${target_files.join(', ')}; falling back to all files`);
                filesToQuery = files;
            }
            else {
                logger?.info(`[file_search] Filtered to ${matched.length}/${files.length} via target_files`);
                filesToQuery = matched;
            }
        }
        // Auth headers are resolved once per tool call and shared across
        // all per-file queries in this invocation.
        const authHeaders = getAuthHeaders ? await getAuthHeaders() : undefined;
        // Query one file. Failures are isolated: logged, surfaced via the
        // onFileError callback, and mapped to an empty chunk list so one
        // bad file cannot fail the whole call.
        const queryOne = async (file) => {
            const params = {
                file_id: file.file_id,
                query,
                k: topK,
                entity_id,
                scope,
                authHeaders,
                timeoutMs: queryTimeoutMs,
            };
            try {
                const chunks = await ragClient.query(params);
                callbacks?.onFileQueried?.(file, chunks.length);
                return chunks;
            }
            catch (err) {
                const e = err instanceof Error ? err : new Error(String(err));
                logger?.error(`[file_search] Query failed for ${file.filename}: ${e.message}`);
                callbacks?.onFileError?.(file, e);
                return [];
            }
        };
        // Bounded-concurrency batching: at most `concurrencyLimit` files are
        // queried at once, protecting the HTTP connection pool when the
        // agent has many files attached.
        const allChunks = [];
        for (let i = 0; i < filesToQuery.length; i += concurrencyLimit) {
            const batch = filesToQuery.slice(i, i + concurrencyLimit);
            const batchResults = await Promise.all(batch.map(queryOne));
            for (const chunks of batchResults)
                allChunks.push(...chunks);
        }
        // All queries empty (or all failed): return guidance, no artifact.
        if (allChunks.length === 0) {
            return [
                'No content found in the files. The files may not have been processed correctly or the query may need refinement.',
                undefined,
            ];
        }
        // Annotate each chunk with a display filename and the current-turn
        // flag via a file-id lookup. A basename derived from
        // metadata.source wins; the factory file list is the fallback.
        const fileById = new Map(files.map((f) => [f.file_id, f]));
        const annotated = allChunks.map((c) => {
            const matched = fileById.get(c.file_id);
            const filename = (c.metadata?.source
                ? String(c.metadata.source).split(/[/\\]/).pop()
                : undefined) ??
                matched?.filename ??
                'Unknown';
            return {
                ...c,
                filename,
                isCurrentMessage: matched?.isCurrentMessage === true,
            };
        });
        // Sort: current-turn files first, then ascending distance (lower
        // distance = more relevant).
        annotated.sort((a, b) => {
            if (a.isCurrentMessage !== b.isCurrentMessage)
                return a.isCurrentMessage ? -1 : 1;
            return a.distance - b.distance;
        });
        // Default cap scales with the number of files queried (min 10).
        const cap = resultCap ?? Math.max(10, filesToQuery.length * 3);
        const limited = annotated.slice(0, cap);
        // The formatter sees a stable callIndex for this invocation; the
        // counter is advanced only after formatting completes.
        const { message, artifact } = formatter.format(limited, {
            callIndex,
            files,
        });
        callIndex += 1;
        return [message, artifact];
    }, {
        name: FileSearchToolName,
        responseFormat: 'content_and_artifact',
        description: buildDescription({ fileCitations }),
        schema: fileSearchInputSchema,
    });
}
150
+
151
+ export { FileSearchToolName, createFileSearchTool };
152
+ //# sourceMappingURL=tool.mjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"tool.mjs","sources":["../../../../src/tools/fileSearch/tool.ts"],"sourcesContent":["/**\n * file_search tool factory — library-native equivalent of the CodeExecutor\n * pattern. Runtimes supply a `RagClient`, the file list for this turn, and\n * an optional formatter (ranger uses citation anchors; CLI/A2A use plain\n * text).\n *\n * The tool itself:\n * 1. Accepts `{ query, target_files? }` from the LLM.\n * 2. Filters files by `target_files` substring match when provided.\n * 3. Queries each file in bounded concurrent batches.\n * 4. Enforces per-file timeouts (failures isolated per file).\n * 5. Flattens chunks, deprioritizes stale-turn files, caps results.\n * 6. Hands formatted output to the runtime's formatter for final shape.\n */\n\nimport { tool, DynamicStructuredTool } from '@langchain/core/tools';\nimport {\n fileSearchInputSchema,\n type FileSearchInput,\n FileSearchToolName,\n} from './schema';\nimport type {\n FileSearchToolConfig,\n FileSearchFile,\n RagChunk,\n RagQueryParams,\n} from './types';\nimport { plainTextFormatter } from './formatter';\n\nconst DEFAULT_QUERY_TIMEOUT_MS = 15_000;\nconst DEFAULT_CONCURRENCY = 10;\nconst DEFAULT_TOP_K = 10;\n\n/**\n * Build the tool description. Runtimes that use citation anchors supply\n * `fileCitations: true` (via the formatter); the description includes the\n * citation ruleset only when that's on.\n */\nfunction buildDescription(opts: { fileCitations: boolean }): string {\n const core = `Performs semantic search across the attached \"${FileSearchToolName}\" documents using natural language queries. Analyzes the content of loaded files to find relevant information, quotes, and passages matching the query.\n\n**Use target_files to narrow the search:**\nWhen you know which file(s) contain the relevant information, ALWAYS pass target_files. This is faster and returns more focused results. 
Pass partial filenames — they match via substring.\n\n**Multiple searches for thorough analysis:**\nFor summaries/overviews, call this tool MULTIPLE times with DIFFERENT queries targeting different aspects (intro, methodology, results, conclusions). A single search only returns chunks from one part of the document.`;\n\n if (!opts.fileCitations) return core;\n\n return `${core}\n\n**CITING FILE SEARCH RESULTS — MANDATORY:**\nCite EVERY statement derived from file content. Place the citation anchor IMMEDIATELY after each paragraph using that source. Each search result has a unique source index — use DIFFERENT indices for different claims; do not reuse the same anchor for all paragraphs. Format: \\`\\\\ue202turn0fileN\\`. With a page: include \\`(p. N)\\` inline. Multiple sources: \\`\\\\ue200\\\\ue202turn0file0\\\\ue202turn0file1\\\\ue201\\`. NEVER substitute with footnotes, brackets, or symbols.`;\n}\n\nexport function createFileSearchTool(\n config: FileSearchToolConfig,\n): DynamicStructuredTool {\n const {\n ragClient,\n files,\n entity_id,\n scope,\n getAuthHeaders,\n formatter = plainTextFormatter,\n queryTimeoutMs = DEFAULT_QUERY_TIMEOUT_MS,\n concurrencyLimit = DEFAULT_CONCURRENCY,\n topK = DEFAULT_TOP_K,\n resultCap,\n callbacks,\n logger,\n } = config;\n\n // Monotonic call counter used by citation-style formatters to keep source\n // indices unique across multiple invocations within a single turn.\n let callIndex = 0;\n\n // Infer whether the formatter wants citations from the artifact it emits\n // on an empty-chunk format. This keeps the description/behavior aligned\n // without forcing the host to declare `fileCitations` twice.\n const fileCitations = formatter !== plainTextFormatter;\n\n return tool(\n async (rawInput: FileSearchInput) => {\n const { query, target_files } = rawInput;\n\n if (files.length === 0) {\n return [\n 'No files to search. 
Instruct the user to add files for the search.',\n undefined,\n ];\n }\n\n // target_files: case-insensitive substring match, fallback to all\n // files with a warning if the filter excludes everything.\n let filesToQuery: FileSearchFile[] = files;\n if (target_files && target_files.length > 0) {\n const lowerTargets = target_files.map((t) => t.toLowerCase());\n const matched = files.filter((f) =>\n lowerTargets.some((t) => f.filename.toLowerCase().includes(t)),\n );\n if (matched.length === 0) {\n logger?.warn(\n `[file_search] No files matched target_files ${target_files.join(', ')}; falling back to all files`,\n );\n filesToQuery = files;\n } else {\n logger?.info(\n `[file_search] Filtered to ${matched.length}/${files.length} via target_files`,\n );\n filesToQuery = matched;\n }\n }\n\n const authHeaders = getAuthHeaders ? await getAuthHeaders() : undefined;\n\n const queryOne = async (file: FileSearchFile): Promise<RagChunk[]> => {\n const params: RagQueryParams = {\n file_id: file.file_id,\n query,\n k: topK,\n entity_id,\n scope,\n authHeaders,\n timeoutMs: queryTimeoutMs,\n };\n try {\n const chunks = await ragClient.query(params);\n callbacks?.onFileQueried?.(file, chunks.length);\n return chunks;\n } catch (err) {\n const e = err instanceof Error ? err : new Error(String(err));\n logger?.error(\n `[file_search] Query failed for ${file.filename}: ${e.message}`,\n );\n callbacks?.onFileError?.(file, e);\n return [];\n }\n };\n\n // Bounded-concurrency batching. 
Server-side rerankers handle their\n // own concurrency; this protects the HTTP connection pool when the\n // agent has many files.\n const allChunks: RagChunk[] = [];\n for (let i = 0; i < filesToQuery.length; i += concurrencyLimit) {\n const batch = filesToQuery.slice(i, i + concurrencyLimit);\n const batchResults = await Promise.all(batch.map(queryOne));\n for (const chunks of batchResults) allChunks.push(...chunks);\n }\n\n if (allChunks.length === 0) {\n return [\n 'No content found in the files. The files may not have been processed correctly or the query may need refinement.',\n undefined,\n ];\n }\n\n // Build annotated results: attach filename + isCurrentMessage via\n // a file-id lookup (metadata wins, factory list is fallback).\n const fileById = new Map(files.map((f) => [f.file_id, f]));\n const annotated = allChunks.map((c) => {\n const matched = fileById.get(c.file_id);\n const filename =\n (c.metadata?.source\n ? String(c.metadata.source).split(/[/\\\\]/).pop()\n : undefined) ??\n matched?.filename ??\n 'Unknown';\n return {\n ...c,\n filename,\n isCurrentMessage: matched?.isCurrentMessage === true,\n };\n });\n\n // Sort: current-turn files first, then by relevance (lower distance).\n annotated.sort((a, b) => {\n if (a.isCurrentMessage !== b.isCurrentMessage)\n return a.isCurrentMessage ? -1 : 1;\n return a.distance - b.distance;\n });\n\n const cap = resultCap ?? 
Math.max(10, filesToQuery.length * 3);\n const limited = annotated.slice(0, cap);\n\n const { message, artifact } = formatter.format(limited, {\n callIndex,\n files,\n });\n callIndex += 1;\n\n // Suppress unused-variable warning for fileCitations (currently only\n // used to gate description; kept in case formatters need it).\n void fileCitations;\n\n return [message, artifact];\n },\n {\n name: FileSearchToolName,\n responseFormat: 'content_and_artifact',\n description: buildDescription({ fileCitations }),\n schema: fileSearchInputSchema,\n },\n );\n}\n\nexport { FileSearchToolName } from './schema';\n"],"names":[],"mappings":";;;;AAAA;;;;;;;;;;;;;AAaG;AAgBH,MAAM,wBAAwB,GAAG,MAAM;AACvC,MAAM,mBAAmB,GAAG,EAAE;AAC9B,MAAM,aAAa,GAAG,EAAE;AAExB;;;;AAIG;AACH,SAAS,gBAAgB,CAAC,IAAgC,EAAA;IACxD,MAAM,IAAI,GAAG,CAAA,8CAAA,EAAiD,kBAAkB,CAAA;;;;;;yNAMuI;IAEvN,IAAI,CAAC,IAAI,CAAC,aAAa;AAAE,QAAA,OAAO,IAAI;AAEpC,IAAA,OAAO,GAAG,IAAI;;;gdAGgc;AAChd;AAEM,SAAU,oBAAoB,CAClC,MAA4B,EAAA;AAE5B,IAAA,MAAM,EACJ,SAAS,EACT,KAAK,EACL,SAAS,EACT,KAAK,EACL,cAAc,EACd,SAAS,GAAG,kBAAkB,EAC9B,cAAc,GAAG,wBAAwB,EACzC,gBAAgB,GAAG,mBAAmB,EACtC,IAAI,GAAG,aAAa,EACpB,SAAS,EACT,SAAS,EACT,MAAM,GACP,GAAG,MAAM;;;IAIV,IAAI,SAAS,GAAG,CAAC;;;;AAKjB,IAAA,MAAM,aAAa,GAAG,SAAS,KAAK,kBAAkB;AAEtD,IAAA,OAAO,IAAI,CACT,OAAO,QAAyB,KAAI;AAClC,QAAA,MAAM,EAAE,KAAK,EAAE,YAAY,EAAE,GAAG,QAAQ;AAExC,QAAA,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE;YACtB,OAAO;gBACL,oEAAoE;gBACpE,SAAS;aACV;QACH;;;QAIA,IAAI,YAAY,GAAqB,KAAK;QAC1C,IAAI,YAAY,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE;AAC3C,YAAA,MAAM,YAAY,GAAG,YAAY,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,WAAW,EAAE,CAAC;AAC7D,YAAA,MAAM,OAAO,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,KAC7B,YAAY,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,WAAW,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAC/D;AACD,YAAA,IAAI,OAAO,CAAC,MAAM,KAAK,CAAC,EAAE;AACxB,gBAAA,MAAM,EAAE,IAAI,CACV,CAAA,4CAAA,EAA+C,YAAY,CAAC,IAAI,CAAC,IAAI,CAAC,CAAA,2BAAA,CAA6B,CACpG;gBACD,YAAY,GAAG,KAAK;YACtB;iBAAO;AACL,gBAAA,MAAM,EAAE,IAAI,CACV,CAAA,0BAAA,EAA6B,OAA
O,CAAC,MAAM,CAAA,CAAA,EAAI,KAAK,CAAC,MAAM,CAAA,iBAAA,CAAmB,CAC/E;gBACD,YAAY,GAAG,OAAO;YACxB;QACF;AAEA,QAAA,MAAM,WAAW,GAAG,cAAc,GAAG,MAAM,cAAc,EAAE,GAAG,SAAS;AAEvE,QAAA,MAAM,QAAQ,GAAG,OAAO,IAAoB,KAAyB;AACnE,YAAA,MAAM,MAAM,GAAmB;gBAC7B,OAAO,EAAE,IAAI,CAAC,OAAO;gBACrB,KAAK;AACL,gBAAA,CAAC,EAAE,IAAI;gBACP,SAAS;gBACT,KAAK;gBACL,WAAW;AACX,gBAAA,SAAS,EAAE,cAAc;aAC1B;AACD,YAAA,IAAI;gBACF,MAAM,MAAM,GAAG,MAAM,SAAS,CAAC,KAAK,CAAC,MAAM,CAAC;gBAC5C,SAAS,EAAE,aAAa,GAAG,IAAI,EAAE,MAAM,CAAC,MAAM,CAAC;AAC/C,gBAAA,OAAO,MAAM;YACf;YAAE,OAAO,GAAG,EAAE;gBACZ,MAAM,CAAC,GAAG,GAAG,YAAY,KAAK,GAAG,GAAG,GAAG,IAAI,KAAK,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC;AAC7D,gBAAA,MAAM,EAAE,KAAK,CACX,CAAA,+BAAA,EAAkC,IAAI,CAAC,QAAQ,CAAA,EAAA,EAAK,CAAC,CAAC,OAAO,CAAA,CAAE,CAChE;gBACD,SAAS,EAAE,WAAW,GAAG,IAAI,EAAE,CAAC,CAAC;AACjC,gBAAA,OAAO,EAAE;YACX;AACF,QAAA,CAAC;;;;QAKD,MAAM,SAAS,GAAe,EAAE;AAChC,QAAA,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,YAAY,CAAC,MAAM,EAAE,CAAC,IAAI,gBAAgB,EAAE;AAC9D,YAAA,MAAM,KAAK,GAAG,YAAY,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,GAAG,gBAAgB,CAAC;AACzD,YAAA,MAAM,YAAY,GAAG,MAAM,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;YAC3D,KAAK,MAAM,MAAM,IAAI,YAAY;AAAE,gBAAA,SAAS,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC;QAC9D;AAEA,QAAA,IAAI,SAAS,CAAC,MAAM,KAAK,CAAC,EAAE;YAC1B,OAAO;gBACL,kHAAkH;gBAClH,SAAS;aACV;QACH;;;QAIA,MAAM,QAAQ,GAAG,IAAI,GAAG,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,CAAC;QAC1D,MAAM,SAAS,GAAG,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,KAAI;YACpC,MAAM,OAAO,GAAG,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,OAAO,CAAC;AACvC,YAAA,MAAM,QAAQ,GACZ,CAAC,CAAC,CAAC,QAAQ,EAAE;AACX,kBAAE,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,GAAG;kBAC5C,SAAS;AACb,gBAAA,OAAO,EAAE,QAAQ;AACjB,gBAAA,SAAS;YACX,OAAO;AACL,gBAAA,GAAG,CAAC;gBACJ,QAAQ;AACR,gBAAA,gBAAgB,EAAE,OAAO,EAAE,gBAAgB,KAAK,IAAI;aACrD;AACH,QAAA,CAAC,CAAC;;QAGF,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,KAAI;AACtB,YAAA,IAAI,CAAC,CAAC,gBAAgB,KAAK,CAAC,CAAC,gBAAgB;AAC3C,gBAAA,OAAO,CAAC,CAAC,gBAAgB,GAAG,EAAE,GAAG,CAAC;AACpC,YAAA,
OAAO,CAAC,CAAC,QAAQ,GAAG,CAAC,CAAC,QAAQ;AAChC,QAAA,CAAC,CAAC;AAEF,QAAA,MAAM,GAAG,GAAG,SAAS,IAAI,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,YAAY,CAAC,MAAM,GAAG,CAAC,CAAC;QAC9D,MAAM,OAAO,GAAG,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,GAAG,CAAC;QAEvC,MAAM,EAAE,OAAO,EAAE,QAAQ,EAAE,GAAG,SAAS,CAAC,MAAM,CAAC,OAAO,EAAE;YACtD,SAAS;YACT,KAAK;AACN,SAAA,CAAC;QACF,SAAS,IAAI,CAAC;AAMd,QAAA,OAAO,CAAC,OAAO,EAAE,QAAQ,CAAC;AAC5B,IAAA,CAAC,EACD;AACE,QAAA,IAAI,EAAE,kBAAkB;AACxB,QAAA,cAAc,EAAE,sBAAsB;AACtC,QAAA,WAAW,EAAE,gBAAgB,CAAC,EAAE,aAAa,EAAE,CAAC;AAChD,QAAA,MAAM,EAAE,qBAAqB;AAC9B,KAAA,CACF;AACH;;;;"}
@@ -16,6 +16,7 @@ export * from './tools/schema';
16
16
  export * from './tools/handlers';
17
17
  export * from './tools/search';
18
18
  export * from './tools/memory';
19
+ export * from './tools/fileSearch';
19
20
  export * from './tools/proxyTool';
20
21
  export * from './providers';
21
22
  export * from './memory';
@@ -0,0 +1,25 @@
1
+ /**
2
+ * Default result formatters.
3
+ *
4
+ * - `plainTextFormatter`: CLI / A2A / generic output. No citation anchors.
5
+ * - `citationAnchorFormatter`: ranger-style `\ue202turn0fileN` anchors with
6
+ * a monotonic `sourceOffset` so multi-call turns stay globally unique.
7
+ *
8
+ * Runtimes can supply their own `FileSearchResultFormatter` to override.
9
+ */
10
+ import type { FileSearchResultFormatter, FileSearchFile, RagChunk } from './types';
11
+ export declare const plainTextFormatter: FileSearchResultFormatter;
12
+ export interface CitationAnchorFormatterOptions {
13
+ /** Tool name used in the `file_search` artifact wrapper. Defaults to `'file_search'`. */
14
+ toolName?: string;
15
+ /**
16
+ * Monotonic counter for source indices within a turn. Pass the SAME
17
+ * function to the formatter across multiple calls in the same turn so
18
+ * anchors stay globally unique.
19
+ */
20
+ getSourceOffset?: () => number;
21
+ /** Called after formatting to advance the offset. */
22
+ advanceSourceOffset?: (by: number) => void;
23
+ }
24
+ export declare function createCitationAnchorFormatter(opts?: CitationAnchorFormatterOptions): FileSearchResultFormatter;
25
+ export type { FileSearchResultFormatter, FileSearchFile, RagChunk };
@@ -0,0 +1,5 @@
1
+ export { createFileSearchTool, FileSearchToolName } from './tool';
2
+ export { HttpRagClient, getRagBaseUrl, RAG_API_URL_ENV, type HttpRagClientOptions, } from './ragClient';
3
+ export { plainTextFormatter, createCitationAnchorFormatter, type CitationAnchorFormatterOptions, } from './formatter';
4
+ export { fileSearchInputSchema, type FileSearchInput } from './schema';
5
+ export type { FileSearchFile, RagChunk, RagClient, RagQueryParams, FileSearchResultFormatter, FileSearchToolCallbacks, FileSearchToolConfig, FileSearchToolLogger, } from './types';
@@ -0,0 +1,32 @@
1
+ /**
2
+ * Default HTTP RAG client. Posts to `${baseUrl}/query` with the shape
3
+ * rag_api expects (`{ file_id, query, k, entity_id? }`). Runtimes that
4
+ * use a different vector backend implement their own `RagClient`.
5
+ *
6
+ * Auth is runtime-provided per call (via `authHeaders` on the params) so
7
+ * short-lived tokens can be minted per request without the client
8
+ * caching stale credentials.
9
+ */
10
+ import type { RagClient, RagQueryParams, RagChunk, FileSearchToolLogger } from './types';
11
+ export declare const RAG_API_URL_ENV = "RAG_API_URL";
12
+ /** Resolve base URL at call time so env-var changes propagate. */
13
+ export declare function getRagBaseUrl(override?: string): string;
14
+ export interface HttpRagClientOptions {
15
+ /** Base URL of the RAG service (no trailing slash). Falls back to env. */
16
+ baseUrl?: string;
17
+ /** Default headers sent on every request (e.g., a static API key). */
18
+ defaultHeaders?: Record<string, string>;
19
+ /** Default timeout if params don't override. Default 15_000. */
20
+ defaultTimeoutMs?: number;
21
+ logger?: FileSearchToolLogger;
22
+ }
23
+ export declare class HttpRagClient implements RagClient {
24
+ private readonly baseUrlOverride?;
25
+ private readonly defaultHeaders;
26
+ private readonly defaultTimeoutMs;
27
+ private readonly logger?;
28
+ constructor(opts?: HttpRagClientOptions);
29
+ query(params: RagQueryParams): Promise<RagChunk[]>;
30
+ /** Convert rag_api's tuple format into the library's normalized shape. */
31
+ private normalize;
32
+ }
@@ -0,0 +1,13 @@
1
+ import { z } from 'zod';
2
+ export declare const fileSearchInputSchema: z.ZodObject<{
3
+ query: z.ZodString;
4
+ target_files: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
5
+ }, "strip", z.ZodTypeAny, {
6
+ query: string;
7
+ target_files?: string[] | undefined;
8
+ }, {
9
+ query: string;
10
+ target_files?: string[] | undefined;
11
+ }>;
12
+ export type FileSearchInput = z.infer<typeof fileSearchInputSchema>;
13
+ export declare const FileSearchToolName = "file_search";
@@ -0,0 +1,18 @@
1
+ /**
2
+ * file_search tool factory — library-native equivalent of the CodeExecutor
3
+ * pattern. Runtimes supply a `RagClient`, the file list for this turn, and
4
+ * an optional formatter (ranger uses citation anchors; CLI/A2A use plain
5
+ * text).
6
+ *
7
+ * The tool itself:
8
+ * 1. Accepts `{ query, target_files? }` from the LLM.
9
+ * 2. Filters files by `target_files` substring match when provided.
10
+ * 3. Queries each file in bounded concurrent batches.
11
+ * 4. Enforces per-file timeouts (failures isolated per file).
12
+ * 5. Flattens chunks, deprioritizes stale-turn files, caps results.
13
+ * 6. Hands formatted output to the runtime's formatter for final shape.
14
+ */
15
+ import { DynamicStructuredTool } from '@langchain/core/tools';
16
+ import type { FileSearchToolConfig } from './types';
17
+ export declare function createFileSearchTool(config: FileSearchToolConfig): DynamicStructuredTool;
18
+ export { FileSearchToolName } from './schema';
@@ -0,0 +1,139 @@
1
+ /**
2
+ * File-search tool types. Mirrors the web_search / code_executor split —
3
+ * the library owns tool logic; the host supplies a `RagClient` + config
4
+ * shaped to its own deployment (auth strategy, scope identity, file set).
5
+ */
6
+ export interface FileSearchFile {
7
+ /** Stable identifier within the RAG backend. */
8
+ file_id: string;
9
+ /** Human-readable name surfaced in tool results and prompts. */
10
+ filename: string;
11
+ /**
12
+ * Hint that this file arrived on the *current* conversation turn (as
13
+ * opposed to an earlier turn). Hosts that don't distinguish leave this
14
+ * undefined; the formatter deprioritizes older files when set.
15
+ */
16
+ isCurrentMessage?: boolean;
17
+ }
18
+ /**
19
+ * A single chunk returned by the RAG backend for a query.
20
+ * Shape is normalized here so the library stays independent of any
21
+ * specific RAG service's response format — the `RagClient` is responsible
22
+ * for translating the backend response into this shape.
23
+ */
24
+ export interface RagChunk {
25
+ file_id: string;
26
+ page_content: string;
27
+ distance: number;
28
+ metadata?: Record<string, unknown>;
29
+ }
30
+ export interface RagQueryParams {
31
+ file_id: string;
32
+ query: string;
33
+ /** Top-K chunks to return per file. Default 10. */
34
+ k?: number;
35
+ /** Optional tenant/entity ID — forwarded to the backend verbatim. */
36
+ entity_id?: string;
37
+ /**
38
+ * Scope identifier. Hosts use this for per-tenant isolation in the RAG
39
+ * backend (ranger → userId, cli → agentId, a2a → task-id).
40
+ */
41
+ scope?: string;
42
+ /**
43
+ * Per-request auth headers — the host builds these (e.g., ranger sends
44
+ * `Authorization: Bearer <short-lived-JWT-for-userId>`). Allows
45
+ * different runtimes to use different auth strategies without the
46
+ * library knowing.
47
+ */
48
+ authHeaders?: Record<string, string>;
49
+ /** Optional per-call timeout override. */
50
+ timeoutMs?: number;
51
+ }
52
+ /**
53
+ * Pluggable RAG backend. Runtimes provide an implementation that speaks
54
+ * to whatever vector DB / search service they've deployed (rag_api is the
55
+ * default; azure search, pinecone, etc. are valid alternates).
56
+ */
57
+ export interface RagClient {
58
+ query(params: RagQueryParams): Promise<RagChunk[]>;
59
+ }
60
+ /**
61
+ * Formatter callback that shapes raw chunks into the runtime's preferred
62
+ * presentation. Ranger uses citation anchors (`\ue202turn0fileN`), CLI
63
+ * uses plain text, A2A server can return structured parts.
64
+ */
65
+ export interface FileSearchResultFormatter {
66
+ format(chunks: Array<RagChunk & {
67
+ filename: string;
68
+ isCurrentMessage: boolean;
69
+ }>, context: {
70
+ /**
71
+ * Monotonic call index within a single turn. Ranger uses this to
72
+ * keep citation source indices globally unique across multiple
73
+ * file_search invocations.
74
+ */
75
+ callIndex: number;
76
+ /** Same files list the factory was seeded with (for lookups). */
77
+ files: FileSearchFile[];
78
+ }): {
79
+ /** Message returned to the LLM (content). */
80
+ message: string;
81
+ /** Optional artifact payload (sources, metadata) returned alongside. */
82
+ artifact?: unknown;
83
+ };
84
+ }
85
+ export interface FileSearchToolCallbacks {
86
+ /**
87
+ * Fires when a RAG query completes successfully for one file. Hosts can
88
+ * use this for telemetry, streaming UI updates, etc.
89
+ */
90
+ onFileQueried?: (file: FileSearchFile, chunkCount: number) => void;
91
+ /** Fires when a RAG query fails. Hosts can log or surface via UI. */
92
+ onFileError?: (file: FileSearchFile, error: Error) => void;
93
+ }
94
+ export interface FileSearchToolLogger {
95
+ debug: (msg: string, ...args: unknown[]) => void;
96
+ info: (msg: string, ...args: unknown[]) => void;
97
+ warn: (msg: string, ...args: unknown[]) => void;
98
+ error: (msg: string, ...args: unknown[]) => void;
99
+ }
100
+ export interface FileSearchToolConfig {
101
+ /** The RAG backend client. Required. */
102
+ ragClient: RagClient;
103
+ /** Files the agent has access to this turn. Empty = tool self-reports so. */
104
+ files: FileSearchFile[];
105
+ /** Tenant/entity ID forwarded to the backend per-query. */
106
+ entity_id?: string;
107
+ /**
108
+ * Per-turn scope identity. Most runtimes pin this once (userId/agentId);
109
+ * if omitted, no scope is sent to the backend.
110
+ */
111
+ scope?: string;
112
+ /**
113
+ * Per-call auth header builder. Called on every tool invocation so
114
+ * the host can mint fresh short-lived tokens (ranger's JWT pattern).
115
+ * When omitted, no auth headers are sent.
116
+ */
117
+ getAuthHeaders?: () => Record<string, string> | Promise<Record<string, string>>;
118
+ /**
119
+ * Result formatter. When omitted, the default plain-text formatter is
120
+ * used (suitable for CLI/A2A runtimes that don't need citation anchors).
121
+ */
122
+ formatter?: FileSearchResultFormatter;
123
+ /** Per-file query timeout in ms. Default 15_000. */
124
+ queryTimeoutMs?: number;
125
+ /**
126
+ * Max concurrent in-flight RAG queries. Protects HTTP connection pools
127
+ * under heavy file counts. Default 10.
128
+ */
129
+ concurrencyLimit?: number;
130
+ /** Top-K chunks per file. Default 10. */
131
+ topK?: number;
132
+ /**
133
+ * Cap on total chunks returned to the LLM after sorting. Default is
134
+ * `max(10, filesCount * 3)`.
135
+ */
136
+ resultCap?: number;
137
+ callbacks?: FileSearchToolCallbacks;
138
+ logger?: FileSearchToolLogger;
139
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@illuma-ai/agents",
3
- "version": "1.4.0-alpha.1",
3
+ "version": "1.4.0-alpha.2",
4
4
  "main": "./dist/cjs/main.cjs",
5
5
  "module": "./dist/esm/main.mjs",
6
6
  "types": "./dist/types/index.d.ts",
package/src/index.ts CHANGED
@@ -26,6 +26,7 @@ export * from './tools/schema';
26
26
  export * from './tools/handlers';
27
27
  export * from './tools/search';
28
28
  export * from './tools/memory';
29
+ export * from './tools/fileSearch';
29
30
  export * from './tools/proxyTool';
30
31
 
31
32
  /* Capability Providers */