@gmickel/gno 0.16.0 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37) hide show
  1. package/README.md +36 -1
  2. package/package.json +4 -1
  3. package/src/cli/commands/ask.ts +9 -0
  4. package/src/cli/commands/query.ts +3 -2
  5. package/src/cli/pager.ts +1 -1
  6. package/src/cli/program.ts +89 -0
  7. package/src/core/links.ts +92 -20
  8. package/src/ingestion/sync.ts +267 -23
  9. package/src/ingestion/types.ts +2 -0
  10. package/src/ingestion/walker.ts +2 -1
  11. package/src/mcp/tools/index.ts +30 -1
  12. package/src/mcp/tools/query.ts +22 -2
  13. package/src/mcp/tools/search.ts +8 -0
  14. package/src/mcp/tools/vsearch.ts +8 -0
  15. package/src/pipeline/answer.ts +324 -7
  16. package/src/pipeline/expansion.ts +243 -7
  17. package/src/pipeline/explain.ts +93 -5
  18. package/src/pipeline/hybrid.ts +240 -57
  19. package/src/pipeline/query-modes.ts +125 -0
  20. package/src/pipeline/rerank.ts +34 -13
  21. package/src/pipeline/search.ts +41 -3
  22. package/src/pipeline/temporal.ts +257 -0
  23. package/src/pipeline/types.ts +58 -0
  24. package/src/pipeline/vsearch.ts +107 -9
  25. package/src/serve/public/app.tsx +1 -3
  26. package/src/serve/public/globals.built.css +2 -2
  27. package/src/serve/public/lib/retrieval-filters.ts +167 -0
  28. package/src/serve/public/pages/Ask.tsx +339 -109
  29. package/src/serve/public/pages/Browse.tsx +71 -5
  30. package/src/serve/public/pages/DocView.tsx +2 -21
  31. package/src/serve/public/pages/Search.tsx +507 -120
  32. package/src/serve/routes/api.ts +202 -2
  33. package/src/store/migrations/006-document-metadata.ts +104 -0
  34. package/src/store/migrations/007-document-date-fields.ts +24 -0
  35. package/src/store/migrations/index.ts +3 -1
  36. package/src/store/sqlite/adapter.ts +218 -5
  37. package/src/store/types.ts +46 -0
@@ -7,7 +7,12 @@
7
7
 
8
8
  import type { GenerationPort } from "../llm/types";
9
9
  import type { StorePort } from "../store/types";
10
- import type { Citation, SearchResult } from "./types";
10
+ import type {
11
+ AnswerContextEntry,
12
+ AnswerContextExplain,
13
+ Citation,
14
+ SearchResult,
15
+ } from "./types";
11
16
 
12
17
  // ─────────────────────────────────────────────────────────────────────────────
13
18
  // Constants
@@ -39,12 +44,65 @@ export const ABSTENTION_MESSAGE =
39
44
  /** Max characters per document (~8K tokens) */
40
45
  const MAX_DOC_CHARS = 32_000;
41
46
 
42
- /** Max number of sources - fewer docs but full content */
43
- const MAX_CONTEXT_SOURCES = 3;
47
+ /** Max number of sources selected for grounded answer context */
48
+ const MAX_CONTEXT_SOURCES = 5;
49
+ /** Default source target for non-comparative queries */
50
+ const BASE_CONTEXT_SOURCES = 3;
51
+ /** Candidate pool before adaptive selection */
52
+ const CONTEXT_CANDIDATE_POOL = 12;
44
53
 
45
54
  /** Fallback snippet limit when full content unavailable */
46
55
  const MAX_SNIPPET_CHARS = 1500;
47
56
 
57
+ const FACET_SPLIT_RE = /\b(?:and|or|vs|versus)\b|[,;]+/gi;
58
+ const COMPARISON_QUERY_RE =
59
+ /\b(?:compare|comparison|difference|different|vs|versus|trade-?off|pros|cons|conflict|between)\b/i;
60
+ const TOKEN_SPLIT_RE = /[^\p{L}\p{N}]+/u;
61
+ const QUERY_STOPWORDS = new Set([
62
+ "a",
63
+ "an",
64
+ "and",
65
+ "are",
66
+ "as",
67
+ "at",
68
+ "be",
69
+ "by",
70
+ "for",
71
+ "from",
72
+ "how",
73
+ "i",
74
+ "in",
75
+ "is",
76
+ "it",
77
+ "of",
78
+ "on",
79
+ "or",
80
+ "that",
81
+ "the",
82
+ "to",
83
+ "vs",
84
+ "versus",
85
+ "what",
86
+ "when",
87
+ "where",
88
+ "which",
89
+ "who",
90
+ "why",
91
+ "with",
92
+ ]);
93
+
94
/** A retrieval result enriched with coverage signals for adaptive source selection. */
interface SourceCandidate {
  result: SearchResult;
  // Score clamped into [0, 1] by normalizeScore (non-finite scores become 0).
  normalizedScore: number;
  // Query tokens (stopword-filtered) found in the candidate's title/snippet text.
  matchedQueryTokens: Set<string>;
  // Indexes into the facet list whose tokens appear in the candidate's title/snippet.
  matchedFacetIndexes: Set<number>;
}

/** A candidate chosen for answer context, tagged with why it was picked. */
interface SelectedSource {
  candidate: SourceCandidate;
  // Selection rationale, e.g. "new_facet_coverage", "new_query_coverage", "relevance".
  reason: string;
}
105
+
48
106
  // ─────────────────────────────────────────────────────────────────────────────
49
107
  // Citation Processing
50
108
  // ─────────────────────────────────────────────────────────────────────────────
@@ -114,6 +172,7 @@ export function renumberAnswerCitations(
114
172
  export interface AnswerGenerationResult {
115
173
  answer: string;
116
174
  citations: Citation[];
175
+ answerContext: AnswerContextExplain;
117
176
  }
118
177
 
119
178
  export interface AnswerGenerationDeps {
@@ -121,6 +180,250 @@ export interface AnswerGenerationDeps {
121
180
  store: StorePort | null;
122
181
  }
123
182
 
183
+ function normalizeScore(score: number): number {
184
+ if (!Number.isFinite(score)) {
185
+ return 0;
186
+ }
187
+ return Math.max(0, Math.min(1, score));
188
+ }
189
+
190
+ function tokenize(text: string): string[] {
191
+ return text
192
+ .trim()
193
+ .toLowerCase()
194
+ .split(TOKEN_SPLIT_RE)
195
+ .map((token) => token.trim())
196
+ .filter((token) => token.length >= 2 && !QUERY_STOPWORDS.has(token));
197
+ }
198
+
199
+ function uniqueFacetTexts(query: string): string[] {
200
+ const segments = query
201
+ .split(FACET_SPLIT_RE)
202
+ .map((segment) => segment.trim())
203
+ .filter((segment) => segment.length > 0);
204
+
205
+ if (segments.length <= 1) {
206
+ return query.trim().length > 0 ? [query.trim()] : [];
207
+ }
208
+
209
+ return [...new Set(segments)];
210
+ }
211
+
212
+ function buildCandidates(
213
+ queryTokenSet: Set<string>,
214
+ facetTokenSets: Set<string>[],
215
+ results: SearchResult[]
216
+ ): SourceCandidate[] {
217
+ return results.map((result) => {
218
+ const signalText = `${result.title ?? ""}\n${result.snippet ?? ""}`;
219
+ const signalTokenSet = new Set(tokenize(signalText));
220
+
221
+ const matchedQueryTokens = new Set<string>();
222
+ for (const token of queryTokenSet) {
223
+ if (signalTokenSet.has(token)) {
224
+ matchedQueryTokens.add(token);
225
+ }
226
+ }
227
+
228
+ const matchedFacetIndexes = new Set<number>();
229
+ for (const [index, facetTokenSet] of facetTokenSets.entries()) {
230
+ for (const token of facetTokenSet) {
231
+ if (signalTokenSet.has(token)) {
232
+ matchedFacetIndexes.add(index);
233
+ break;
234
+ }
235
+ }
236
+ }
237
+
238
+ return {
239
+ result,
240
+ normalizedScore: normalizeScore(result.score),
241
+ matchedQueryTokens,
242
+ matchedFacetIndexes,
243
+ };
244
+ });
245
+ }
246
+
247
+ function dedupeByDocidBestScore(results: SearchResult[]): SearchResult[] {
248
+ const bestByDocid = new Map<string, SearchResult>();
249
+
250
+ for (const result of results) {
251
+ const existing = bestByDocid.get(result.docid);
252
+ if (!existing || result.score > existing.score) {
253
+ bestByDocid.set(result.docid, result);
254
+ }
255
+ }
256
+
257
+ return [...bestByDocid.values()].sort((a, b) => {
258
+ const scoreDiff = b.score - a.score;
259
+ if (Math.abs(scoreDiff) > 1e-9) {
260
+ return scoreDiff;
261
+ }
262
+ return a.docid.localeCompare(b.docid);
263
+ });
264
+ }
265
+
266
/**
 * Adaptively pick which search results to feed into the grounded-answer
 * context. Dedupes by docid, caps the pool at CONTEXT_CANDIDATE_POOL, then
 * greedily selects up to targetSources candidates by a weighted gain of
 * normalized score (0.6), new query-token coverage (0.25) and new facet
 * coverage (0.15). Comparison-style queries get a larger target and a
 * penalty for candidates that add no new facet. Returns the chosen results
 * plus an explain payload describing the selection.
 */
function selectAdaptiveSources(
  query: string,
  results: SearchResult[]
): { selected: SearchResult[]; explain: AnswerContextExplain } {
  const dedupedResults = dedupeByDocidBestScore(results).slice(
    0,
    CONTEXT_CANDIDATE_POOL
  );
  const queryTokens = tokenize(query);
  const queryTokenSet = new Set(queryTokens);
  const facets = uniqueFacetTexts(query);
  const facetTokenSets = facets.map((facet) => new Set(tokenize(facet)));
  const candidates = buildCandidates(
    queryTokenSet,
    facetTokenSets,
    dedupedResults
  );

  // Comparison queries and multi-facet queries warrant more sources
  // (bounded by MAX_CONTEXT_SOURCES and the candidate count).
  const comparisonIntent = COMPARISON_QUERY_RE.test(query);
  let targetSources = BASE_CONTEXT_SOURCES;
  if (comparisonIntent || facets.length >= 3) {
    targetSources = 5;
  } else if (facets.length >= 2) {
    targetSources = 4;
  }
  targetSources = Math.min(
    targetSources,
    MAX_CONTEXT_SOURCES,
    candidates.length
  );

  const coveredTokens = new Set<string>();
  const coveredFacets = new Set<number>();
  const selected: SelectedSource[] = [];
  const selectedDocids = new Set<string>();

  // Greedy loop: each round picks the unselected candidate with the best
  // marginal gain; ties break toward the higher normalized score.
  while (selected.length < targetSources) {
    let bestCandidate: SourceCandidate | null = null;
    let bestGain = Number.NEGATIVE_INFINITY;
    let bestReason = "relevance";

    for (const candidate of candidates) {
      const docid = candidate.result.docid;
      if (selectedDocids.has(docid)) {
        continue;
      }

      // Coverage contribution not already provided by earlier picks.
      const newTokenHits = [...candidate.matchedQueryTokens].filter(
        (token) => !coveredTokens.has(token)
      ).length;
      const newFacetHits = [...candidate.matchedFacetIndexes].filter(
        (index) => !coveredFacets.has(index)
      ).length;

      const tokenGain =
        queryTokenSet.size > 0 ? newTokenHits / queryTokenSet.size : 0;
      const facetGain =
        facetTokenSets.length > 0 ? newFacetHits / facetTokenSets.length : 0;

      let gain =
        candidate.normalizedScore * 0.6 + tokenGain * 0.25 + facetGain * 0.15;

      // For comparisons, discourage piling on sources for the same facet.
      if (comparisonIntent && selected.length > 0 && newFacetHits === 0) {
        gain -= 0.2;
      }

      let reason = "relevance";
      if (newFacetHits > 0) {
        reason = "new_facet_coverage";
      } else if (newTokenHits > 0) {
        reason = "new_query_coverage";
      }

      if (
        !bestCandidate ||
        gain > bestGain ||
        (Math.abs(gain - bestGain) <= 1e-9 &&
          candidate.normalizedScore > bestCandidate.normalizedScore)
      ) {
        bestCandidate = candidate;
        bestGain = gain;
        bestReason = reason;
      }
    }

    if (!bestCandidate) {
      break;
    }

    // Keep selection compact when marginal gain is exhausted.
    if (
      bestGain <= 0 &&
      selected.length >= 1 &&
      !comparisonIntent &&
      selected.length >= BASE_CONTEXT_SOURCES
    ) {
      break;
    }

    selected.push({ candidate: bestCandidate, reason: bestReason });
    selectedDocids.add(bestCandidate.result.docid);
    for (const token of bestCandidate.matchedQueryTokens) {
      coveredTokens.add(token);
    }
    for (const index of bestCandidate.matchedFacetIndexes) {
      coveredFacets.add(index);
    }
  }

  // A comparison needs at least two sources; top up from remaining candidates.
  if (comparisonIntent && selected.length < 2) {
    for (const candidate of candidates) {
      if (selectedDocids.has(candidate.result.docid)) {
        continue;
      }
      selected.push({ candidate, reason: "comparison_balance" });
      selectedDocids.add(candidate.result.docid);
      if (selected.length >= 2) {
        break;
      }
    }
  }

  // Never return empty-handed when any candidate exists.
  if (selected.length === 0 && candidates.length > 0) {
    const first = candidates[0];
    if (first) {
      selected.push({ candidate: first, reason: "fallback_top_result" });
      selectedDocids.add(first.result.docid);
    }
  }

  // Shape a candidate into an explain entry for the answerContext payload.
  const toEntry = (
    candidate: SourceCandidate,
    reason: string
  ): AnswerContextEntry => ({
    docid: candidate.result.docid,
    uri: candidate.result.uri,
    score: candidate.normalizedScore,
    queryTokenHits: candidate.matchedQueryTokens.size,
    facetHits: candidate.matchedFacetIndexes.size,
    reason,
  });

  const selectedEntries = selected.map(({ candidate, reason }) =>
    toEntry(candidate, reason)
  );
  const droppedEntries = candidates
    .filter((candidate) => !selectedDocids.has(candidate.result.docid))
    .map((candidate) => toEntry(candidate, "lower_marginal_gain"));

  return {
    selected: selected.map((entry) => entry.candidate.result),
    explain: {
      strategy: "adaptive_coverage_v1",
      targetSources,
      facets,
      selected: selectedEntries,
      dropped: droppedEntries,
    },
  };
}
426
+
124
427
  /**
125
428
  * Generate a grounded answer from search results.
126
429
  * Returns null if no valid context or generation fails.
@@ -136,11 +439,12 @@ export async function generateGroundedAnswer(
136
439
  maxTokens: number
137
440
  ): Promise<AnswerGenerationResult | null> {
138
441
  const { genPort, store } = deps;
442
+ const sourceSelection = selectAdaptiveSources(query, results);
139
443
  const contextParts: string[] = [];
140
444
  const citations: Citation[] = [];
141
445
  let citationIndex = 0;
142
446
 
143
- for (const r of results.slice(0, MAX_CONTEXT_SOURCES)) {
447
+ for (const r of sourceSelection.selected) {
144
448
  let content: string | null = null;
145
449
  let usedFullContent = false;
146
450
 
@@ -197,7 +501,11 @@ export async function generateGroundedAnswer(
197
501
  return null;
198
502
  }
199
503
 
200
- return { answer: result.value, citations };
504
+ return {
505
+ answer: result.value,
506
+ citations,
507
+ answerContext: sourceSelection.explain,
508
+ };
201
509
  }
202
510
 
203
511
  /**
@@ -207,6 +515,7 @@ export async function generateGroundedAnswer(
207
515
  export function processAnswerResult(rawResult: AnswerGenerationResult): {
208
516
  answer: string;
209
517
  citations: Citation[];
518
+ answerContext: AnswerContextExplain;
210
519
  } {
211
520
  const maxCitation = rawResult.citations.length;
212
521
  const validUsedNums = extractValidCitationNumbers(
@@ -219,9 +528,17 @@ export function processAnswerResult(rawResult: AnswerGenerationResult): {
219
528
  );
220
529
 
221
530
  if (validUsedNums.length === 0 || filteredCitations.length === 0) {
222
- return { answer: ABSTENTION_MESSAGE, citations: [] };
531
+ return {
532
+ answer: ABSTENTION_MESSAGE,
533
+ citations: [],
534
+ answerContext: rawResult.answerContext,
535
+ };
223
536
  }
224
537
 
225
538
  const answer = renumberAnswerCitations(rawResult.answer, validUsedNums);
226
- return { answer, citations: filteredCitations };
539
+ return {
540
+ answer,
541
+ citations: filteredCitations,
542
+ answerContext: rawResult.answerContext,
543
+ };
227
544
  }
@@ -17,10 +17,44 @@ import { ok } from "../store/types";
17
17
  // Constants
18
18
  // ─────────────────────────────────────────────────────────────────────────────
19
19
 
20
- const EXPANSION_PROMPT_VERSION = "v2";
20
+ const EXPANSION_PROMPT_VERSION = "v3";
21
21
  const DEFAULT_TIMEOUT_MS = 5000;
22
22
  // Non-greedy to avoid matching from first { to last } across multiple objects
23
23
  const JSON_EXTRACT_PATTERN = /\{[\s\S]*?\}/;
24
+ const QUOTED_PHRASE_PATTERN = /"([^"]+)"/g;
25
+ const NEGATION_PATTERN = /-(?:"([^"]+)"|([^\s]+))/g;
26
+ const TOKEN_PATTERN = /[A-Za-z0-9][A-Za-z0-9.+#_-]*/g;
27
+ const MAX_VARIANTS = 5;
28
+ const STOPWORDS = new Set([
29
+ "a",
30
+ "an",
31
+ "and",
32
+ "are",
33
+ "as",
34
+ "at",
35
+ "be",
36
+ "by",
37
+ "for",
38
+ "from",
39
+ "how",
40
+ "in",
41
+ "is",
42
+ "it",
43
+ "of",
44
+ "on",
45
+ "or",
46
+ "that",
47
+ "the",
48
+ "this",
49
+ "to",
50
+ "what",
51
+ "when",
52
+ "where",
53
+ "which",
54
+ "who",
55
+ "why",
56
+ "with",
57
+ ]);
24
58
 
25
59
  // ─────────────────────────────────────────────────────────────────────────────
26
60
  // Cache Key Generation
@@ -54,6 +88,8 @@ Generate JSON with:
54
88
 
55
89
  Rules:
56
90
  - Keep proper nouns exactly as written
91
+ - Preserve quoted phrases and negated terms from the query in lexicalQueries
92
+ - Keep symbol-heavy technical entities exactly (for example: C++, C#, Node.js)
57
93
  - Be concise - each variation 3-8 words
58
94
  - HyDE should read like actual documentation, not a question
59
95
 
@@ -70,6 +106,8 @@ Generiere JSON mit:
70
106
 
71
107
  Regeln:
72
108
  - Eigennamen exakt beibehalten
109
+ - Zitierte Phrasen und negierte Begriffe in lexicalQueries beibehalten
110
+ - Technische Begriffe mit Symbolen exakt halten (z. B. C++, C#, Node.js)
73
111
  - Kurz halten - jede Variation 3-8 Wörter
74
112
  - HyDE soll wie echte Dokumentation klingen, nicht wie eine Frage
75
113
 
@@ -86,6 +124,8 @@ Generate JSON with:
86
124
 
87
125
  Rules:
88
126
  - Keep proper nouns exactly as written
127
+ - Preserve quoted phrases and negated terms from the query in lexicalQueries
128
+ - Keep symbol-heavy technical entities exactly (for example: C++, C#, Node.js)
89
129
  - Be concise - each variation 3-8 words
90
130
  - HyDE should read like actual documentation, not a question
91
131
 
@@ -110,6 +150,199 @@ function getPromptTemplate(lang?: string): string {
110
150
  }
111
151
  }
112
152
 
153
/** Lexical anchors extracted from the raw query, used to guard LLM expansions against drift. */
interface QuerySignals {
  // Phrases the user wrapped in double quotes, without the surrounding quotes.
  quotedPhrases: string[];
  // Negated terms re-serialized with their leading "-" (and quotes for phrases).
  negations: string[];
  // Tokens that look like named entities: contain uppercase, symbols (+ # .), or letter/digit mixes.
  criticalEntities: string[];
  // Lowercased, stopword-filtered tokens of the whole query for overlap checks.
  overlapTokens: Set<string>;
}
159
+
160
+ function normalizeToken(token: string): string {
161
+ return token.toLowerCase().trim();
162
+ }
163
+
164
+ function extractOverlapTokens(text: string): Set<string> {
165
+ const matches = text.match(TOKEN_PATTERN) ?? [];
166
+ const tokens: string[] = [];
167
+ for (const rawToken of matches) {
168
+ const token = normalizeToken(rawToken);
169
+ if (token.length < 2) {
170
+ continue;
171
+ }
172
+ if (STOPWORDS.has(token)) {
173
+ continue;
174
+ }
175
+ tokens.push(token);
176
+ }
177
+ return new Set(tokens);
178
+ }
179
+
180
+ function dedupeStrings(values: string[]): string[] {
181
+ const out: string[] = [];
182
+ const seen = new Set<string>();
183
+ for (const value of values) {
184
+ const trimmed = value.trim();
185
+ if (!trimmed) {
186
+ continue;
187
+ }
188
+ const key = trimmed.toLowerCase();
189
+ if (seen.has(key)) {
190
+ continue;
191
+ }
192
+ seen.add(key);
193
+ out.push(trimmed);
194
+ }
195
+ return out;
196
+ }
197
+
198
+ function extractQuerySignals(query: string): QuerySignals {
199
+ const quotedPhrases = dedupeStrings(
200
+ [...query.matchAll(QUOTED_PHRASE_PATTERN)]
201
+ .map((m) => m[1]?.trim() ?? "")
202
+ .filter(Boolean)
203
+ );
204
+
205
+ const negations = dedupeStrings(
206
+ [...query.matchAll(NEGATION_PATTERN)]
207
+ .map((m) => {
208
+ const phrase = m[1]?.trim();
209
+ if (phrase) {
210
+ return `-"${phrase}"`;
211
+ }
212
+ const token = m[2]?.trim();
213
+ return token ? `-${token}` : "";
214
+ })
215
+ .filter(Boolean)
216
+ );
217
+
218
+ const criticalEntities = dedupeStrings(
219
+ (query.match(TOKEN_PATTERN) ?? []).filter((token) => {
220
+ // Preserve common entity signals: uppercase/mixed case, acronyms, symbol-heavy technical terms.
221
+ return (
222
+ /[A-Z]/.test(token) ||
223
+ /[+#.]/.test(token) ||
224
+ /[A-Za-z]\d|\d[A-Za-z]/.test(token)
225
+ );
226
+ })
227
+ );
228
+
229
+ return {
230
+ quotedPhrases,
231
+ negations,
232
+ criticalEntities,
233
+ overlapTokens: extractOverlapTokens(query),
234
+ };
235
+ }
236
+
237
+ function hasCaseInsensitiveSubstring(text: string, part: string): boolean {
238
+ return text.toLowerCase().includes(part.toLowerCase());
239
+ }
240
+
241
+ function hasSufficientOverlap(
242
+ querySignals: QuerySignals,
243
+ candidate: string
244
+ ): boolean {
245
+ if (!candidate.trim()) {
246
+ return false;
247
+ }
248
+
249
+ for (const phrase of querySignals.quotedPhrases) {
250
+ if (hasCaseInsensitiveSubstring(candidate, phrase)) {
251
+ return true;
252
+ }
253
+ }
254
+ for (const entity of querySignals.criticalEntities) {
255
+ if (hasCaseInsensitiveSubstring(candidate, entity)) {
256
+ return true;
257
+ }
258
+ }
259
+ for (const negation of querySignals.negations) {
260
+ if (hasCaseInsensitiveSubstring(candidate, negation)) {
261
+ return true;
262
+ }
263
+ }
264
+
265
+ const candidateTokens = extractOverlapTokens(candidate);
266
+ for (const token of candidateTokens) {
267
+ if (querySignals.overlapTokens.has(token)) {
268
+ return true;
269
+ }
270
+ }
271
+
272
+ return false;
273
+ }
274
+
275
+ function buildAnchorLexicalQuery(
276
+ query: string,
277
+ querySignals: QuerySignals
278
+ ): string {
279
+ const parts: string[] = [];
280
+
281
+ for (const entity of querySignals.criticalEntities) {
282
+ parts.push(entity);
283
+ }
284
+ for (const phrase of querySignals.quotedPhrases) {
285
+ parts.push(`"${phrase}"`);
286
+ }
287
+ for (const negation of querySignals.negations) {
288
+ parts.push(negation);
289
+ }
290
+
291
+ const anchored = dedupeStrings(parts).join(" ").trim();
292
+ return anchored || query.trim();
293
+ }
294
+
295
+ function normalizeVariants(
296
+ variants: string[],
297
+ querySignals: QuerySignals
298
+ ): string[] {
299
+ const deduped = dedupeStrings(variants);
300
+ return deduped.filter((variant) =>
301
+ hasSufficientOverlap(querySignals, variant)
302
+ );
303
+ }
304
+
305
+ /**
306
+ * Apply deterministic expansion guardrails:
307
+ * - preserve entities/phrases/negations in lexical variants
308
+ * - filter drifted variants with no overlap
309
+ * - provide fallbacks when filtering removes all variants
310
+ */
311
+ export function applyExpansionGuardrails(
312
+ query: string,
313
+ expansion: ExpansionResult
314
+ ): ExpansionResult {
315
+ const querySignals = extractQuerySignals(query);
316
+ const anchorLexical = buildAnchorLexicalQuery(query, querySignals);
317
+
318
+ const lexicalCandidates = [anchorLexical, ...expansion.lexicalQueries];
319
+ const guardedLexical = normalizeVariants(lexicalCandidates, querySignals);
320
+ const guardedVector = normalizeVariants(
321
+ expansion.vectorQueries,
322
+ querySignals
323
+ );
324
+
325
+ const lexicalQueries = (
326
+ guardedLexical.length > 0 ? guardedLexical : [query.trim()]
327
+ ).slice(0, MAX_VARIANTS);
328
+ const vectorQueries = (
329
+ guardedVector.length > 0 ? guardedVector : [query.trim()]
330
+ ).slice(0, MAX_VARIANTS);
331
+
332
+ const hyde =
333
+ typeof expansion.hyde === "string" &&
334
+ hasSufficientOverlap(querySignals, expansion.hyde)
335
+ ? expansion.hyde.trim()
336
+ : undefined;
337
+
338
+ return {
339
+ lexicalQueries,
340
+ vectorQueries,
341
+ hyde,
342
+ notes: expansion.notes,
343
+ };
344
+ }
345
+
113
346
  // ─────────────────────────────────────────────────────────────────────────────
114
347
  // Schema Validation
115
348
  // ─────────────────────────────────────────────────────────────────────────────
@@ -145,13 +378,13 @@ function parseExpansionResult(output: string): ExpansionResult | null {
145
378
 
146
379
  // Limit array sizes
147
380
  const result: ExpansionResult = {
148
- lexicalQueries: lexicalQueries.slice(0, 5),
149
- vectorQueries: vectorQueries.slice(0, 5),
381
+ lexicalQueries: lexicalQueries.slice(0, MAX_VARIANTS),
382
+ vectorQueries: vectorQueries.slice(0, MAX_VARIANTS),
150
383
  };
151
384
 
152
385
  // Optional fields
153
- if (typeof parsed.hyde === "string" && parsed.hyde.length > 0) {
154
- result.hyde = parsed.hyde;
386
+ if (typeof parsed.hyde === "string" && parsed.hyde.trim().length > 0) {
387
+ result.hyde = parsed.hyde.trim();
155
388
  }
156
389
  if (typeof parsed.notes === "string") {
157
390
  result.notes = parsed.notes;
@@ -222,7 +455,10 @@ export async function expandQuery(
222
455
 
223
456
  // Parse result
224
457
  const parsed = parseExpansionResult(result.value);
225
- return ok(parsed);
458
+ if (!parsed) {
459
+ return ok(null);
460
+ }
461
+ return ok(applyExpansionGuardrails(query, parsed));
226
462
  } catch {
227
463
  if (timeoutId) {
228
464
  clearTimeout(timeoutId);
@@ -257,7 +493,7 @@ export async function expandQueryCached(
257
493
  if (cached) {
258
494
  const parsed = parseExpansionResult(cached);
259
495
  if (parsed) {
260
- return ok(parsed);
496
+ return ok(applyExpansionGuardrails(query, parsed));
261
497
  }
262
498
  }
263
499