@cue-dev/retrieval-core 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,983 @@
+ import Parser from "tree-sitter";
+ import Go from "tree-sitter-go";
+ import JavaScriptV023 from "tree-sitter-javascript-v023";
+ import Java from "tree-sitter-java";
+ import PythonV023 from "tree-sitter-python-v023";
+ import Rust from "tree-sitter-rust";
+ import TypeScript from "tree-sitter-typescript";
+ const DEFAULT_BOUNDARY_NODE_TYPES_LEGACY = {
+     typescript: new Set([
+         "function_declaration",
+         "generator_function_declaration",
+         "class_declaration",
+         "interface_declaration",
+         "enum_declaration",
+         "type_alias_declaration"
+     ]),
+     tsx: new Set([
+         "function_declaration",
+         "generator_function_declaration",
+         "class_declaration",
+         "interface_declaration",
+         "enum_declaration",
+         "type_alias_declaration"
+     ]),
+     javascript: new Set([
+         "function_declaration",
+         "generator_function_declaration",
+         "class_declaration",
+         "function_expression",
+         "arrow_function"
+     ]),
+     jsx: new Set([
+         "function_declaration",
+         "generator_function_declaration",
+         "class_declaration",
+         "function_expression",
+         "arrow_function"
+     ]),
+     python: new Set(["function_definition", "class_definition"]),
+     go: new Set(["function_declaration", "method_declaration", "type_declaration"]),
+     rust: new Set(["function_item", "impl_item", "trait_item", "struct_item", "enum_item"]),
+     java: new Set(["class_declaration", "interface_declaration", "enum_declaration", "method_declaration"])
+ };
+ const DEFAULT_BOUNDARY_NODE_TYPES_SEMANTIC_JS_TS = {
+     ...DEFAULT_BOUNDARY_NODE_TYPES_LEGACY,
+     typescript: new Set([
+         ...DEFAULT_BOUNDARY_NODE_TYPES_LEGACY.typescript,
+         "function_expression",
+         "arrow_function",
+         "method_definition",
+         "class"
+     ]),
+     tsx: new Set([
+         ...DEFAULT_BOUNDARY_NODE_TYPES_LEGACY.tsx,
+         "function_expression",
+         "arrow_function",
+         "method_definition",
+         "class"
+     ]),
+     javascript: new Set([...DEFAULT_BOUNDARY_NODE_TYPES_LEGACY.javascript, "method_definition", "class"]),
+     jsx: new Set([...DEFAULT_BOUNDARY_NODE_TYPES_LEGACY.jsx, "method_definition", "class"])
+ };
+ const parserAvailabilityCache = new Map();
+ const parserInitAttempts = new Map();
+ const parserLanguageLoaderOverrides = new Map();
+ const CANONICAL_TO_PARSER_LANGUAGE = {
+     typescript: "typescript",
+     javascript: "javascript",
+     python: "python",
+     go: "go",
+     rust: "rust",
+     java: "java"
+ };
+ const JAVASCRIPT_EXPRESSION_BOUNDARY_PARENT_TYPES = new Set([
+     "assignment_expression",
+     "variable_declarator",
+     "pair",
+     "export_statement",
+     "public_field_definition",
+     "property_definition"
+ ]);
+ const JAVASCRIPT_EXPRESSION_BOUNDARY_NODE_TYPES = new Set(["function_expression", "arrow_function", "class"]);
+ const JAVASCRIPT_EXPRESSION_BOUNDARY_WRAPPER_TYPES = new Set([
+     "parenthesized_expression",
+     "as_expression",
+     "satisfies_expression",
+     "type_assertion",
+     "non_null_expression"
+ ]);
+ const SEMANTIC_JS_TS_SOFT_MAX_MULTIPLIER = 1.35;
+ function parserLanguageToCanonical(language) {
+     if (language === "tsx") {
+         return "typescript";
+     }
+     if (language === "jsx") {
+         return "javascript";
+     }
+     return language;
+ }
+ function normalizeLanguageAlias(language) {
+     const normalized = language.trim().toLowerCase();
+     if (normalized === "typescript" || normalized === "ts") {
+         return "typescript";
+     }
+     if (normalized === "tsx") {
+         return "tsx";
+     }
+     if (normalized === "javascript" || normalized === "js") {
+         return "javascript";
+     }
+     if (normalized === "jsx") {
+         return "jsx";
+     }
+     if (normalized === "python" || normalized === "py") {
+         return "python";
+     }
+     if (normalized === "go" || normalized === "golang") {
+         return "go";
+     }
+     if (normalized === "rust" || normalized === "rs") {
+         return "rust";
+     }
+     if (normalized === "java") {
+         return "java";
+     }
+     return undefined;
+ }
+ function parserLanguageFromPath(path) {
+     const normalized = path.toLowerCase();
+     if (normalized.endsWith(".tsx")) {
+         return "tsx";
+     }
+     if (normalized.endsWith(".ts") || normalized.endsWith(".mts") || normalized.endsWith(".cts")) {
+         return "typescript";
+     }
+     if (normalized.endsWith(".jsx")) {
+         return "jsx";
+     }
+     if (normalized.endsWith(".js") || normalized.endsWith(".mjs") || normalized.endsWith(".cjs")) {
+         return "javascript";
+     }
+     if (normalized.endsWith(".py")) {
+         return "python";
+     }
+     if (normalized.endsWith(".go")) {
+         return "go";
+     }
+     if (normalized.endsWith(".rs")) {
+         return "rust";
+     }
+     if (normalized.endsWith(".java")) {
+         return "java";
+     }
+     return undefined;
+ }
+ function resolveParserLanguage(file) {
+     const explicit = file.language ? normalizeLanguageAlias(file.language) : undefined;
+     if (explicit) {
+         return explicit;
+     }
+     return parserLanguageFromPath(file.path);
+ }
+ function formatErrorMessage(error) {
+     if (error instanceof Error && error.message) {
+         return error.message;
+     }
+     return String(error);
+ }
+ function resolveTreeSitterLanguageHandle(moduleValue) {
+     if (moduleValue && typeof moduleValue === "object" && "language" in moduleValue) {
+         return moduleValue.language;
+     }
+     return moduleValue;
+ }
+ function loadParserLanguage(language) {
+     const overrideLoader = parserLanguageLoaderOverrides.get(language);
+     if (overrideLoader) {
+         return overrideLoader();
+     }
+     if (language === "typescript") {
+         return TypeScript.typescript;
+     }
+     if (language === "tsx") {
+         return TypeScript.tsx;
+     }
+     if (language === "javascript" || language === "jsx") {
+         // Bun is currently most reliable with tree-sitter 0.23-compatible JS/Python grammars.
+         return resolveTreeSitterLanguageHandle(JavaScriptV023);
+     }
+     if (language === "python") {
+         return resolveTreeSitterLanguageHandle(PythonV023);
+     }
+     if (language === "rust") {
+         return resolveTreeSitterLanguageHandle(Rust);
+     }
+     if (language === "java") {
+         return resolveTreeSitterLanguageHandle(Java);
+     }
+     return resolveTreeSitterLanguageHandle(Go);
+ }
+ function getParserAvailability(language) {
+     const cached = parserAvailabilityCache.get(language);
+     if (cached) {
+         return cached;
+     }
+     parserInitAttempts.set(language, (parserInitAttempts.get(language) ?? 0) + 1);
+     try {
+         const parser = new Parser();
+         parser.setLanguage(loadParserLanguage(language));
+         const availability = {
+             status: "available",
+             parser
+         };
+         parserAvailabilityCache.set(language, availability);
+         return availability;
+     }
+     catch (error) {
+         const availability = {
+             status: "unavailable",
+             error: formatErrorMessage(error)
+         };
+         parserAvailabilityCache.set(language, availability);
+         return availability;
+     }
+ }
+ function getParser(language) {
+     const availability = getParserAvailability(language);
+     if (availability.status !== "available") {
+         return undefined;
+     }
+     return availability.parser;
+ }
+ function toChunkingParserLanguage(canonicalLanguage) {
+     return CANONICAL_TO_PARSER_LANGUAGE[canonicalLanguage];
+ }
+ export function getChunkingParserAvailabilitySnapshot(input) {
+     const canonicalLanguages = new Set();
+     if (input?.enabled_languages && input.enabled_languages.length > 0) {
+         for (const language of input.enabled_languages) {
+             const parsed = normalizeLanguageAlias(language);
+             if (!parsed) {
+                 continue;
+             }
+             const canonical = parserLanguageToCanonical(parsed);
+             if (toChunkingParserLanguage(canonical)) {
+                 canonicalLanguages.add(canonical);
+             }
+         }
+     }
+     if (canonicalLanguages.size === 0) {
+         for (const canonical of Object.keys(CANONICAL_TO_PARSER_LANGUAGE)) {
+             canonicalLanguages.add(canonical);
+         }
+     }
+     return [...canonicalLanguages]
+         .sort((a, b) => a.localeCompare(b))
+         .map((canonicalLanguage) => {
+             const parserLanguage = toChunkingParserLanguage(canonicalLanguage);
+             if (!parserLanguage) {
+                 return {
+                     language: canonicalLanguage,
+                     status: "unavailable",
+                     error: "no parser mapping for language"
+                 };
+             }
+             const availability = getParserAvailability(parserLanguage);
+             return {
+                 language: canonicalLanguage,
+                 status: availability.status,
+                 ...(availability.error ? { error: availability.error } : {})
+             };
+         });
+ }
+ export function __resetChunkingParserStateForTests() {
+     parserAvailabilityCache.clear();
+     parserInitAttempts.clear();
+     parserLanguageLoaderOverrides.clear();
+ }
+ export function __setChunkingParserLanguageLoaderForTests(language, loader) {
+     if (loader) {
+         parserLanguageLoaderOverrides.set(language, loader);
+     }
+     else {
+         parserLanguageLoaderOverrides.delete(language);
+     }
+     parserAvailabilityCache.delete(language);
+ }
+ export function __getChunkingParserInitAttemptsForTests() {
+     return Object.fromEntries(parserInitAttempts.entries());
+ }
+ function toInclusiveEndRow(node) {
+     const end = node.endPosition;
+     if (end.column === 0 && end.row > node.startPosition.row) {
+         return end.row - 1;
+     }
+     return end.row;
+ }
+ function trimLineRange(lines, startRow, endRow) {
+     let start = startRow;
+     let end = endRow;
+     while (start <= end && (lines[start] ?? "").trim().length === 0) {
+         start += 1;
+     }
+     while (end >= start && (lines[end] ?? "").trim().length === 0) {
+         end -= 1;
+     }
+     if (end < start) {
+         return undefined;
+     }
+     return { start, end };
+ }
+ function splitRangeWithBudget(input) {
+     const rangeTokenCount = (startRow, endRow) => {
+         let total = 0;
+         for (let row = startRow; row <= endRow; row += 1) {
+             total += input.lineTokenCounts[row] ?? 0;
+         }
+         return total;
+     };
+     const isSafeSplitBoundaryLine = (line) => {
+         const trimmed = line.trim();
+         if (trimmed.length === 0) {
+             return true;
+         }
+         return trimmed.endsWith(";") || trimmed.endsWith("}") || trimmed.endsWith("{");
+     };
+     const segments = [];
+     let start = input.startRow;
+     while (start <= input.endRow && segments.length < input.maxChunks) {
+         let tokens = 0;
+         let end = start - 1;
+         while (end < input.endRow) {
+             const nextEnd = end + 1;
+             tokens += input.lineTokenCounts[nextEnd] ?? 0;
+             end = nextEnd;
+             if (tokens >= input.targetChunkTokens && end >= start) {
+                 break;
+             }
+         }
+         let safeEnd = Math.min(Math.max(start, end), input.endRow);
+         if (input.preferSafeBoundarySplit && safeEnd > start) {
+             let adjusted = safeEnd;
+             for (let row = safeEnd; row > start; row -= 1) {
+                 if (isSafeSplitBoundaryLine(input.lines[row] ?? "")) {
+                     adjusted = row;
+                     break;
+                 }
+             }
+             if (adjusted === safeEnd && typeof input.softMaxChunkTokens === "number" && input.softMaxChunkTokens > input.targetChunkTokens) {
+                 for (let row = safeEnd + 1; row <= input.endRow; row += 1) {
+                     if (rangeTokenCount(start, row) > input.softMaxChunkTokens) {
+                         break;
+                     }
+                     if (isSafeSplitBoundaryLine(input.lines[row] ?? "")) {
+                         adjusted = row;
+                         break;
+                     }
+                 }
+             }
+             safeEnd = Math.max(start, adjusted);
+         }
+         if (safeEnd >= start) {
+             segments.push({ startRow: start, endRow: safeEnd });
+         }
+         if (safeEnd >= input.endRow) {
+             break;
+         }
+         let nextStart = safeEnd + 1;
+         if (input.overlapTokens > 0) {
+             let overlap = 0;
+             let cursor = safeEnd;
+             while (cursor >= start && overlap < input.overlapTokens) {
+                 overlap += input.lineTokenCounts[cursor] ?? 0;
+                 cursor -= 1;
+             }
+             nextStart = Math.max(start + 1, cursor + 1);
+         }
+         start = Math.max(start + 1, nextStart);
+     }
+     return segments;
+ }
+ function buildSlidingChunks(input) {
+     const lineTokenCounts = input.lineTokenCounts ?? computeLineTokenCounts(input.lines, input.tokenize);
+     const rawSegments = splitRangeWithBudget({
+         lines: input.lines,
+         lineTokenCounts,
+         startRow: 0,
+         endRow: Math.max(0, input.lines.length - 1),
+         targetChunkTokens: input.targetChunkTokens,
+         overlapTokens: input.overlapTokens,
+         maxChunks: input.maxChunks
+     });
+     const chunks = [];
+     for (const segment of rawSegments) {
+         const trimmed = trimLineRange(input.lines, segment.startRow, segment.endRow);
+         if (!trimmed) {
+             continue;
+         }
+         chunks.push({
+             start_line: trimmed.start + 1,
+             end_line: trimmed.end + 1,
+             snippet: input.lines.slice(trimmed.start, trimmed.end + 1).join("\n")
+         });
+     }
+     return chunks;
+ }
+ function hasBoundaryAncestor(node, boundaryTypes) {
+     let current = node.parent;
+     while (current) {
+         if (boundaryTypes.has(current.type)) {
+             return true;
+         }
+         current = current.parent;
+     }
+     return false;
+ }
+ function getBoundaryTypes(parserLanguage, boundaryStrictness) {
+     if (boundaryStrictness === "semantic_js_ts") {
+         return DEFAULT_BOUNDARY_NODE_TYPES_SEMANTIC_JS_TS[parserLanguage];
+     }
+     return DEFAULT_BOUNDARY_NODE_TYPES_LEGACY[parserLanguage];
+ }
+ function isExpressionBoundaryLanguage(parserLanguage, boundaryStrictness) {
+     if (boundaryStrictness === "semantic_js_ts") {
+         return (parserLanguage === "javascript" ||
+             parserLanguage === "jsx" ||
+             parserLanguage === "typescript" ||
+             parserLanguage === "tsx");
+     }
+     return parserLanguage === "javascript" || parserLanguage === "jsx";
+ }
+ function isLanguageBoundaryCandidate(parserLanguage, node, boundaryStrictness) {
+     if (!isExpressionBoundaryLanguage(parserLanguage, boundaryStrictness)) {
+         return true;
+     }
+     if (!JAVASCRIPT_EXPRESSION_BOUNDARY_NODE_TYPES.has(node.type)) {
+         return true;
+     }
+     let owner = node.parent;
+     while (owner && JAVASCRIPT_EXPRESSION_BOUNDARY_WRAPPER_TYPES.has(owner.type)) {
+         owner = owner.parent;
+     }
+     const ownerType = owner?.type;
+     if (!ownerType) {
+         return false;
+     }
+     return JAVASCRIPT_EXPRESSION_BOUNDARY_PARENT_TYPES.has(ownerType);
+ }
+ export function __isChunkingBoundaryCandidateForTests(input) {
+     const strictness = input.boundaryStrictness ?? "legacy";
+     if (!isExpressionBoundaryLanguage(input.parserLanguage, strictness)) {
+         return true;
+     }
+     if (!JAVASCRIPT_EXPRESSION_BOUNDARY_NODE_TYPES.has(input.nodeType)) {
+         return true;
+     }
+     const ancestorChain = [];
+     if (input.parentType) {
+         ancestorChain.push(input.parentType);
+     }
+     if (input.ancestorTypes && input.ancestorTypes.length > 0) {
+         ancestorChain.push(...input.ancestorTypes);
+     }
+     const ownerType = ancestorChain.find((type) => !JAVASCRIPT_EXPRESSION_BOUNDARY_WRAPPER_TYPES.has(type));
+     if (!ownerType) {
+         return false;
+     }
+     return JAVASCRIPT_EXPRESSION_BOUNDARY_PARENT_TYPES.has(ownerType);
+ }
+ function computeLineTokenCounts(lines, tokenize) {
+     return lines.map((line) => tokenize(line ?? "").length);
+ }
+ function rangeTokenCount(lineTokenCounts, startRow, endRow) {
+     let total = 0;
+     for (let row = startRow; row <= endRow; row += 1) {
+         total += lineTokenCounts[row] ?? 0;
+     }
+     return total;
+ }
+ function listNamedChildren(node) {
+     const children = [];
+     for (let index = 0; index < node.namedChildCount; index += 1) {
+         const child = node.namedChild(index);
+         if (child) {
+             children.push(child);
+         }
+     }
+     return children;
+ }
+ function normalizeNodeWindow(input) {
+     const startRow = Math.max(0, Math.min(input.lastRow, input.node.startPosition.row));
+     const endRow = Math.max(startRow, Math.min(input.lastRow, toInclusiveEndRow(input.node)));
+     const trimmed = trimLineRange(input.lines, startRow, endRow);
+     if (!trimmed) {
+         return undefined;
+     }
+     return {
+         startRow: trimmed.start,
+         endRow: trimmed.end
+     };
+ }
+ function buildRecursiveSemanticWindows(input) {
+     const lastRow = Math.max(0, input.lines.length - 1);
+     const windows = [];
+     const softMaxChunkTokens = Math.floor(input.targetChunkTokens * SEMANTIC_JS_TS_SOFT_MAX_MULTIPLIER);
+     const seen = new Set();
+     const pushSplitWindows = (startRow, endRow) => {
+         if (startRow > endRow || windows.length >= input.maxChunks) {
+             return;
+         }
+         const segments = splitRangeWithBudget({
+             lines: input.lines,
+             lineTokenCounts: input.lineTokenCounts,
+             startRow,
+             endRow,
+             targetChunkTokens: input.targetChunkTokens,
+             overlapTokens: 0,
+             maxChunks: input.maxChunks - windows.length,
+             preferSafeBoundarySplit: input.boundaryStrictness === "semantic_js_ts",
+             softMaxChunkTokens
+         });
+         for (const segment of segments) {
+             const trimmed = trimLineRange(input.lines, segment.startRow, segment.endRow);
+             if (!trimmed) {
+                 continue;
+             }
+             const key = `${trimmed.start}:${trimmed.end}`;
+             if (seen.has(key)) {
+                 continue;
+             }
+             seen.add(key);
+             windows.push({ startRow: trimmed.start, endRow: trimmed.end });
+             if (windows.length >= input.maxChunks) {
+                 return;
+             }
+         }
+     };
+     const visitNode = (node) => {
+         if (windows.length >= input.maxChunks) {
+             return;
+         }
+         const range = normalizeNodeWindow({
+             node,
+             lines: input.lines,
+             lastRow
+         });
+         if (!range) {
+             return;
+         }
+         const tokenCount = rangeTokenCount(input.lineTokenCounts, range.startRow, range.endRow);
+         if (tokenCount <= input.targetChunkTokens) {
+             const key = `${range.startRow}:${range.endRow}`;
+             if (!seen.has(key)) {
+                 seen.add(key);
+                 windows.push(range);
+             }
+             return;
+         }
+         const children = listNamedChildren(node)
+             .map((child) => ({
+                 node: child,
+                 range: normalizeNodeWindow({
+                     node: child,
+                     lines: input.lines,
+                     lastRow
+                 })
+             }))
+             .filter((child) => Boolean(child.range))
+             .sort((a, b) => a.range.startRow - b.range.startRow || a.range.endRow - b.range.endRow);
+         if (children.length === 0) {
+             pushSplitWindows(range.startRow, range.endRow);
+             return;
+         }
+         let cursor = range.startRow;
+         for (const child of children) {
+             if (windows.length >= input.maxChunks) {
+                 return;
+             }
+             if (child.range.endRow < cursor) {
+                 continue;
+             }
+             if (child.range.startRow > cursor) {
+                 pushSplitWindows(cursor, child.range.startRow - 1);
+             }
+             visitNode(child.node);
+             cursor = Math.max(cursor, child.range.endRow + 1);
+             if (cursor > range.endRow) {
+                 return;
+             }
+         }
+         if (cursor <= range.endRow) {
+             pushSplitWindows(cursor, range.endRow);
+         }
+     };
+     visitNode(input.root);
+     return windows.sort((a, b) => a.startRow - b.startRow || a.endRow - b.endRow);
+ }
+ function mergeSemanticWindows(input) {
+     if (input.windows.length <= 1) {
+         return [...input.windows];
+     }
+     const ordered = [...input.windows].sort((a, b) => a.startRow - b.startRow || a.endRow - b.endRow);
+     const merged = [];
+     const mergeTokenBudget = Math.floor(input.targetChunkTokens * SEMANTIC_JS_TS_SOFT_MAX_MULTIPLIER);
+     for (const window of ordered) {
+         const last = merged[merged.length - 1];
+         if (!last) {
+             merged.push({ ...window });
+             continue;
+         }
+         const gapLines = Math.max(0, window.startRow - last.endRow - 1);
+         const nextStartRow = Math.min(last.startRow, window.startRow);
+         const nextEndRow = Math.max(last.endRow, window.endRow);
+         const nextSpanLines = nextEndRow - nextStartRow + 1;
+         const mergedTokenCount = rangeTokenCount(input.lineTokenCounts, nextStartRow, nextEndRow);
+         const canMerge = gapLines <= input.semanticMergeGapLines &&
+             nextSpanLines <= input.semanticMergeMaxSpanLines &&
+             mergedTokenCount <= mergeTokenBudget;
+         if (!canMerge) {
+             merged.push({ ...window });
+             continue;
+         }
+         last.startRow = nextStartRow;
+         last.endRow = nextEndRow;
+     }
+     return merged;
+ }
+ function isCommentOnlyLine(line) {
+     const trimmed = line.trim();
+     if (trimmed.length === 0) {
+         return true;
+     }
+     return (trimmed.startsWith("//") ||
+         trimmed.startsWith("/*") ||
+         trimmed.startsWith("*") ||
+         trimmed.startsWith("*/") ||
+         trimmed.startsWith("#"));
+ }
+ function windowLooksCommentOnly(input) {
+     for (let row = input.startRow; row <= input.endRow; row += 1) {
+         if (!isCommentOnlyLine(input.lines[row] ?? "")) {
+             return false;
+         }
+     }
+     return true;
+ }
+ function absorbForwardCommentWindows(input) {
+     if (input.windows.length <= 1) {
+         return [...input.windows];
+     }
+     const output = [];
+     const mergeTokenBudget = Math.floor(input.targetChunkTokens * SEMANTIC_JS_TS_SOFT_MAX_MULTIPLIER);
+     for (let index = 0; index < input.windows.length; index += 1) {
+         const current = input.windows[index];
+         const next = input.windows[index + 1];
+         if (!current) {
+             continue;
+         }
+         if (!next) {
+             output.push({ ...current });
+             continue;
+         }
+         if (!windowLooksCommentOnly({ lines: input.lines, startRow: current.startRow, endRow: current.endRow })) {
+             output.push({ ...current });
+             continue;
+         }
+         const gapLines = Math.max(0, next.startRow - current.endRow - 1);
+         const nextSpanLines = next.endRow - current.startRow + 1;
+         const mergedTokenCount = rangeTokenCount(input.lineTokenCounts, current.startRow, next.endRow);
+         const canAbsorb = gapLines <= 1 && nextSpanLines <= input.semanticMergeMaxSpanLines && mergedTokenCount <= mergeTokenBudget;
+         if (!canAbsorb) {
+             output.push({ ...current });
+             continue;
+         }
+         output.push({
+             startRow: current.startRow,
+             endRow: next.endRow
+         });
+         index += 1;
+     }
+     return output;
+ }
+ function windowsToChunks(input) {
+     const chunks = [];
+     for (const window of input.windows) {
+         if (chunks.length >= input.maxChunks) {
+             break;
+         }
+         const trimmed = trimLineRange(input.lines, window.startRow, window.endRow);
+         if (!trimmed) {
+             continue;
+         }
+         chunks.push({
+             start_line: trimmed.start + 1,
+             end_line: trimmed.end + 1,
+             snippet: input.lines.slice(trimmed.start, trimmed.end + 1).join("\n")
+         });
+     }
+     return chunks;
+ }
+ function buildLanguageAwareChunks(input) {
+     const languageAwareAttemptStart = Date.now();
+     const lineTokenCounts = computeLineTokenCounts(input.lines, input.tokenize);
+     const parser = getParser(input.parserLanguage);
+     if (!parser) {
+         const fallbackStart = Date.now();
+         const chunks = buildSlidingChunks({
+             lines: input.lines,
+             tokenize: input.tokenize,
+             targetChunkTokens: input.config.target_chunk_tokens,
+             overlapTokens: input.config.chunk_overlap_tokens,
+             maxChunks: input.config.max_chunks_per_file,
+             lineTokenCounts
+         });
+         return {
+             chunks,
+             strategy: "sliding",
+             fallback_reason: "parser_unavailable",
+             language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+             fallback_path_latency_ms: Date.now() - fallbackStart,
+             language: parserLanguageToCanonical(input.parserLanguage)
+         };
+     }
+     try {
+         parser.setTimeoutMicros(input.config.parse_timeout_ms * 1_000);
+         const parseStart = Date.now();
+         const tree = parser.parse(input.file.content);
+         const parseLatencyMs = Date.now() - parseStart;
+         if (parseLatencyMs > input.config.parse_timeout_ms) {
+             const fallbackStart = Date.now();
+             const chunks = buildSlidingChunks({
+                 lines: input.lines,
+                 tokenize: input.tokenize,
+                 targetChunkTokens: input.config.target_chunk_tokens,
+                 overlapTokens: input.config.chunk_overlap_tokens,
+                 maxChunks: input.config.max_chunks_per_file,
+                 lineTokenCounts
+             });
+             return {
+                 chunks,
+                 strategy: "sliding",
+                 fallback_reason: "parse_timeout_exceeded",
+                 parse_latency_ms: parseLatencyMs,
+                 language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+                 fallback_path_latency_ms: Date.now() - fallbackStart,
+                 language: parserLanguageToCanonical(input.parserLanguage)
+             };
+         }
+         const root = tree?.rootNode;
+         if (!root) {
+             const fallbackStart = Date.now();
+             const chunks = buildSlidingChunks({
+                 lines: input.lines,
+                 tokenize: input.tokenize,
+                 targetChunkTokens: input.config.target_chunk_tokens,
+                 overlapTokens: input.config.chunk_overlap_tokens,
+                 maxChunks: input.config.max_chunks_per_file,
+                 lineTokenCounts
+             });
+             return {
+                 chunks,
+                 strategy: "sliding",
+                 fallback_reason: "parse_error",
+                 parse_latency_ms: parseLatencyMs,
+                 language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+                 fallback_path_latency_ms: Date.now() - fallbackStart,
+                 language: parserLanguageToCanonical(input.parserLanguage)
+             };
+         }
+         let chunks = [];
+         let recursiveSemanticChunkingUsed = false;
+         if (input.config.recursive_semantic_chunking_enabled) {
+             const semanticMergeGapLines = input.config.semantic_merge_gap_lines ?? 6;
+             const semanticMergeMaxSpanLines = input.config.semantic_merge_max_span_lines ?? 220;
+             const recursiveWindows = buildRecursiveSemanticWindows({
+                 root,
+                 lines: input.lines,
+                 lineTokenCounts,
+                 targetChunkTokens: input.config.target_chunk_tokens,
+                 maxChunks: input.config.max_chunks_per_file,
+                 boundaryStrictness: input.config.boundary_strictness
+             });
+             const mergedWindows = mergeSemanticWindows({
+                 windows: recursiveWindows,
+                 lineTokenCounts,
+                 targetChunkTokens: input.config.target_chunk_tokens,
+                 semanticMergeGapLines,
+                 semanticMergeMaxSpanLines
+             });
+             const absorbedWindows = input.config.comment_forward_absorb_enabled === false
+                 ? mergedWindows
+                 : absorbForwardCommentWindows({
+                     windows: mergedWindows,
+                     lines: input.lines,
+                     lineTokenCounts,
+                     targetChunkTokens: input.config.target_chunk_tokens,
+                     semanticMergeMaxSpanLines
+                 });
+             chunks = windowsToChunks({
+                 windows: absorbedWindows,
+                 lines: input.lines,
+                 maxChunks: input.config.max_chunks_per_file
+             });
+             recursiveSemanticChunkingUsed = chunks.length > 0;
+         }
+         else {
+             const boundaryTypes = getBoundaryTypes(input.parserLanguage, input.config.boundary_strictness);
+             const candidates = root.descendantsOfType([...boundaryTypes]);
+             const boundaryNodes = candidates
+                 .filter((node) => !hasBoundaryAncestor(node, boundaryTypes))
+                 .filter((node) => isLanguageBoundaryCandidate(input.parserLanguage, node, input.config.boundary_strictness))
+                 .sort((a, b) => a.startPosition.row - b.startPosition.row || a.startPosition.column - b.startPosition.column);
+             if (boundaryNodes.length === 0) {
+                 const fallbackStart = Date.now();
+                 const fallbackChunks = buildSlidingChunks({
+                     lines: input.lines,
+                     tokenize: input.tokenize,
+                     targetChunkTokens: input.config.target_chunk_tokens,
+                     overlapTokens: input.config.chunk_overlap_tokens,
+                     maxChunks: input.config.max_chunks_per_file,
+                     lineTokenCounts
+                 });
+                 return {
+                     chunks: fallbackChunks,
+                     strategy: "sliding",
+                     fallback_reason: "empty_language_boundaries",
+                     parse_latency_ms: parseLatencyMs,
+                     language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+                     fallback_path_latency_ms: Date.now() - fallbackStart,
+                     language: parserLanguageToCanonical(input.parserLanguage)
+                 };
+             }
+             const segments = [];
+             let cursor = 0;
+             const lastRow = Math.max(0, input.lines.length - 1);
+             for (const node of boundaryNodes) {
+                 const startRow = Math.max(0, Math.min(lastRow, node.startPosition.row));
+                 const endRow = Math.max(startRow, Math.min(lastRow, toInclusiveEndRow(node)));
+                 if (startRow > cursor) {
+                     segments.push({ startRow: cursor, endRow: startRow - 1, boundary: false });
+                 }
+                 segments.push({ startRow, endRow, boundary: true });
+                 cursor = endRow + 1;
+                 if (cursor > lastRow) {
+                     break;
+                 }
+             }
+             if (cursor <= lastRow) {
+                 segments.push({ startRow: cursor, endRow: lastRow, boundary: false });
+             }
+             for (const segment of segments) {
+                 if (segment.endRow < segment.startRow || chunks.length >= input.config.max_chunks_per_file) {
+                     continue;
+                 }
+                 const segmentTokenCount = lineTokenCounts
+                     .slice(segment.startRow, segment.endRow + 1)
+                     .reduce((sum, value) => sum + value, 0);
+                 const enableSemanticBoundarySplits = input.config.boundary_strictness === "semantic_js_ts" &&
+                     (input.parserLanguage === "javascript" ||
+                         input.parserLanguage === "jsx" ||
+                         input.parserLanguage === "typescript" ||
+                         input.parserLanguage === "tsx") &&
+                     segment.boundary;
+                 const softMaxChunkTokens = Math.floor(input.config.target_chunk_tokens * SEMANTIC_JS_TS_SOFT_MAX_MULTIPLIER);
+                 const pieces = enableSemanticBoundarySplits && segmentTokenCount <= softMaxChunkTokens
+                     ? [{ startRow: segment.startRow, endRow: segment.endRow }]
+                     : splitRangeWithBudget({
+                         lines: input.lines,
+                         lineTokenCounts,
+                         startRow: segment.startRow,
+                         endRow: segment.endRow,
+                         targetChunkTokens: input.config.target_chunk_tokens,
+                         overlapTokens: input.config.chunk_overlap_tokens,
+                         maxChunks: input.config.max_chunks_per_file - chunks.length,
+                         preferSafeBoundarySplit: enableSemanticBoundarySplits,
+                         softMaxChunkTokens
+                     });
+                 for (const piece of pieces) {
+                     const trimmed = trimLineRange(input.lines, piece.startRow, piece.endRow);
+                     if (!trimmed) {
+                         continue;
+                     }
+                     chunks.push({
+                         start_line: trimmed.start + 1,
+                         end_line: trimmed.end + 1,
+                         snippet: input.lines.slice(trimmed.start, trimmed.end + 1).join("\n")
+                     });
+                     if (chunks.length >= input.config.max_chunks_per_file) {
+                         break;
+                     }
+                 }
+             }
+         }
+         if (chunks.length === 0) {
+             const fallbackStart = Date.now();
+             const slidingChunks = buildSlidingChunks({
+                 lines: input.lines,
+                 tokenize: input.tokenize,
+                 targetChunkTokens: input.config.target_chunk_tokens,
+                 overlapTokens: input.config.chunk_overlap_tokens,
+                 maxChunks: input.config.max_chunks_per_file,
+                 lineTokenCounts
+             });
+             return {
+                 chunks: slidingChunks,
+                 strategy: "sliding",
+                 fallback_reason: "empty_language_boundaries",
+                 parse_latency_ms: parseLatencyMs,
+                 language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+                 fallback_path_latency_ms: Date.now() - fallbackStart,
+                 language: parserLanguageToCanonical(input.parserLanguage)
+             };
+         }
+         return {
+             chunks,
+             strategy: "language_aware",
+             parse_latency_ms: parseLatencyMs,
+             language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+             language: parserLanguageToCanonical(input.parserLanguage),
+             recursive_semantic_chunking_used: recursiveSemanticChunkingUsed
+         };
+     }
+     catch {
+         const fallbackStart = Date.now();
+         const chunks = buildSlidingChunks({
+             lines: input.lines,
+             tokenize: input.tokenize,
+             targetChunkTokens: input.config.target_chunk_tokens,
+             overlapTokens: input.config.chunk_overlap_tokens,
+             maxChunks: input.config.max_chunks_per_file,
+             lineTokenCounts
+         });
+         return {
+             chunks,
+             strategy: "sliding",
+             fallback_reason: "parse_error",
+             language_aware_attempt_latency_ms: Date.now() - languageAwareAttemptStart,
+             fallback_path_latency_ms: Date.now() - fallbackStart,
+             language: parserLanguageToCanonical(input.parserLanguage)
+         };
+     }
+ }
+ export function buildChunksForFile(input) {
+     const lines = input.file.content.split("\n");
+     const language = resolveParserLanguage(input.file);
+     const enabledLanguageSet = new Set(input.config.enabled_languages.map((value) => value.trim().toLowerCase()));
+     if (input.config.strategy === "sliding") {
+         return {
+             chunks: buildSlidingChunks({
+                 lines,
+                 tokenize: input.tokenize,
+                 targetChunkTokens: input.config.target_chunk_tokens,
+                 overlapTokens: input.config.chunk_overlap_tokens,
+                 maxChunks: input.config.max_chunks_per_file
+             }),
+             strategy: "sliding",
+             language: language ? parserLanguageToCanonical(language) : undefined
+         };
+     }
+     if (!language || !enabledLanguageSet.has(parserLanguageToCanonical(language))) {
+         const fallbackStart = Date.now();
+         const chunks = buildSlidingChunks({
+             lines,
+             tokenize: input.tokenize,
+             targetChunkTokens: input.config.target_chunk_tokens,
+             overlapTokens: input.config.chunk_overlap_tokens,
+             maxChunks: input.config.max_chunks_per_file
+         });
+         return {
+             chunks,
+             strategy: "sliding",
+             fallback_reason: "unsupported_language",
+             fallback_path_latency_ms: Date.now() - fallbackStart
+         };
+     }
+     return buildLanguageAwareChunks({
+         file: input.file,
+         lines,
+         parserLanguage: language,
+         config: input.config,
+         tokenize: input.tokenize
+     });
+ }
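
A minimal consumption sketch of the two exported entry points above, buildChunksForFile and getChunkingParserAvailabilitySnapshot. It relies only on the shapes visible in this diff (the file/config/tokenize input object and the snake_case config keys); the import specifier, the whitespace tokenizer, and the budget values are illustrative assumptions, not package defaults.

// Sketch only: the entry-point specifier is assumed; this diff does not show the export map.
import { buildChunksForFile, getChunkingParserAvailabilitySnapshot } from "@cue-dev/retrieval-core";

// The chunker only uses tokenize(line).length, so any line -> token-array
// function will do; whitespace splitting here is a stand-in.
const tokenize = (line) => line.split(/\s+/).filter(Boolean);

const config = {
    strategy: "language_aware",            // any value other than "sliding" attempts the parser path
    enabled_languages: ["typescript", "go"],
    target_chunk_tokens: 160,              // illustrative budgets
    chunk_overlap_tokens: 24,
    max_chunks_per_file: 64,
    parse_timeout_ms: 50,
    boundary_strictness: "semantic_js_ts", // or "legacy"
    recursive_semantic_chunking_enabled: false
};

const result = buildChunksForFile({
    file: {
        path: "src/example.ts",
        content: "export function add(a: number, b: number) {\n    return a + b;\n}\n"
    },
    config,
    tokenize
});
// strategy is "language_aware" when the tree-sitter grammar loads; otherwise the
// result falls back to "sliding" and carries a fallback_reason such as "parser_unavailable".
console.log(result.strategy, result.fallback_reason, result.chunks);

// Per canonical language, reports whether the native grammar initialized in this runtime.
console.log(getChunkingParserAvailabilitySnapshot({ enabled_languages: config.enabled_languages }));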