@intlayer/cli 5.5.10 → 5.5.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/cli.cjs +78 -6
- package/dist/cjs/cli.cjs.map +1 -1
- package/dist/cjs/cli.test.cjs +435 -0
- package/dist/cjs/cli.test.cjs.map +1 -0
- package/dist/cjs/fill.cjs +8 -12
- package/dist/cjs/fill.cjs.map +1 -1
- package/dist/cjs/index.cjs +5 -1
- package/dist/cjs/index.cjs.map +1 -1
- package/dist/cjs/prompts/REVIEW_PROMPT.md +37 -0
- package/dist/cjs/prompts/TRANSLATE_PROMPT.md +38 -0
- package/dist/cjs/pull.cjs +10 -2
- package/dist/cjs/pull.cjs.map +1 -1
- package/dist/cjs/pushConfig.cjs +5 -1
- package/dist/cjs/pushConfig.cjs.map +1 -1
- package/dist/cjs/reviewDoc.cjs +203 -0
- package/dist/cjs/reviewDoc.cjs.map +1 -0
- package/dist/cjs/translateDoc.cjs +201 -0
- package/dist/cjs/translateDoc.cjs.map +1 -0
- package/dist/cjs/utils/calculateChunks.cjs +120 -0
- package/dist/cjs/utils/calculateChunks.cjs.map +1 -0
- package/dist/cjs/utils/calculateChunks.test.cjs +104 -0
- package/dist/cjs/utils/calculateChunks.test.cjs.map +1 -0
- package/dist/cjs/utils/calculrateChunkTest.md +9 -0
- package/dist/cjs/utils/checkAIAccess.cjs +40 -0
- package/dist/cjs/utils/checkAIAccess.cjs.map +1 -0
- package/dist/cjs/utils/checkFileModifiedRange.cjs +97 -0
- package/dist/cjs/utils/checkFileModifiedRange.cjs.map +1 -0
- package/dist/cjs/utils/checkFileModifiedRange.test.cjs +175 -0
- package/dist/cjs/utils/checkFileModifiedRange.test.cjs.map +1 -0
- package/dist/cjs/utils/checkLastUpdateTime.cjs +33 -0
- package/dist/cjs/utils/checkLastUpdateTime.cjs.map +1 -0
- package/dist/cjs/utils/chunkInference.cjs +58 -0
- package/dist/cjs/utils/chunkInference.cjs.map +1 -0
- package/dist/cjs/utils/fixChunkStartEndChars.cjs +47 -0
- package/dist/cjs/utils/fixChunkStartEndChars.cjs.map +1 -0
- package/dist/cjs/utils/fixChunkStartEndChars.test.cjs +81 -0
- package/dist/cjs/utils/fixChunkStartEndChars.test.cjs.map +1 -0
- package/dist/cjs/utils/formatTimeDiff.cjs +46 -0
- package/dist/cjs/utils/formatTimeDiff.cjs.map +1 -0
- package/dist/cjs/utils/formatTimeDiff.test.cjs +32 -0
- package/dist/cjs/utils/formatTimeDiff.test.cjs.map +1 -0
- package/dist/cjs/utils/getChunk.cjs +77 -0
- package/dist/cjs/utils/getChunk.cjs.map +1 -0
- package/dist/cjs/utils/getChunk.test.cjs +46 -0
- package/dist/cjs/utils/getChunk.test.cjs.map +1 -0
- package/dist/cjs/utils/getIsFileUpdatedRecently.cjs +36 -0
- package/dist/cjs/utils/getIsFileUpdatedRecently.cjs.map +1 -0
- package/dist/cjs/utils/getOutputFilePath.cjs +89 -0
- package/dist/cjs/utils/getOutputFilePath.cjs.map +1 -0
- package/dist/cjs/utils/getOutputFilePath.test.cjs +73 -0
- package/dist/cjs/utils/getOutputFilePath.test.cjs.map +1 -0
- package/dist/cjs/utils/getParentPackageJSON.cjs +47 -0
- package/dist/cjs/utils/getParentPackageJSON.cjs.map +1 -0
- package/dist/cjs/utils/listSpecialChars.cjs +78 -0
- package/dist/cjs/utils/listSpecialChars.cjs.map +1 -0
- package/dist/cjs/utils/listSpecialChars.test.cjs +58 -0
- package/dist/cjs/utils/listSpecialChars.test.cjs.map +1 -0
- package/dist/cjs/utils/reorderParagraphs.cjs +125 -0
- package/dist/cjs/utils/reorderParagraphs.cjs.map +1 -0
- package/dist/cjs/utils/reorderParagraphs.test.cjs +71 -0
- package/dist/cjs/utils/reorderParagraphs.test.cjs.map +1 -0
- package/dist/cjs/utils/splitTextByLine.cjs +35 -0
- package/dist/cjs/utils/splitTextByLine.cjs.map +1 -0
- package/dist/cjs/utils/splitTextByLine.test.cjs +14 -0
- package/dist/cjs/utils/splitTextByLine.test.cjs.map +1 -0
- package/dist/esm/cli.mjs +79 -7
- package/dist/esm/cli.mjs.map +1 -1
- package/dist/esm/cli.test.mjs +412 -0
- package/dist/esm/cli.test.mjs.map +1 -0
- package/dist/esm/fill.mjs +8 -12
- package/dist/esm/fill.mjs.map +1 -1
- package/dist/esm/index.mjs +2 -0
- package/dist/esm/index.mjs.map +1 -1
- package/dist/esm/prompts/REVIEW_PROMPT.md +37 -0
- package/dist/esm/prompts/TRANSLATE_PROMPT.md +38 -0
- package/dist/esm/pull.mjs +10 -2
- package/dist/esm/pull.mjs.map +1 -1
- package/dist/esm/pushConfig.mjs +5 -1
- package/dist/esm/pushConfig.mjs.map +1 -1
- package/dist/esm/reviewDoc.mjs +172 -0
- package/dist/esm/reviewDoc.mjs.map +1 -0
- package/dist/esm/translateDoc.mjs +170 -0
- package/dist/esm/translateDoc.mjs.map +1 -0
- package/dist/esm/utils/calculateChunks.mjs +96 -0
- package/dist/esm/utils/calculateChunks.mjs.map +1 -0
- package/dist/esm/utils/calculateChunks.test.mjs +103 -0
- package/dist/esm/utils/calculateChunks.test.mjs.map +1 -0
- package/dist/esm/utils/calculrateChunkTest.md +9 -0
- package/dist/esm/utils/checkAIAccess.mjs +16 -0
- package/dist/esm/utils/checkAIAccess.mjs.map +1 -0
- package/dist/esm/utils/checkFileModifiedRange.mjs +73 -0
- package/dist/esm/utils/checkFileModifiedRange.mjs.map +1 -0
- package/dist/esm/utils/checkFileModifiedRange.test.mjs +181 -0
- package/dist/esm/utils/checkFileModifiedRange.test.mjs.map +1 -0
- package/dist/esm/utils/checkLastUpdateTime.mjs +9 -0
- package/dist/esm/utils/checkLastUpdateTime.mjs.map +1 -0
- package/dist/esm/utils/chunkInference.mjs +34 -0
- package/dist/esm/utils/chunkInference.mjs.map +1 -0
- package/dist/esm/utils/fixChunkStartEndChars.mjs +23 -0
- package/dist/esm/utils/fixChunkStartEndChars.mjs.map +1 -0
- package/dist/esm/utils/fixChunkStartEndChars.test.mjs +80 -0
- package/dist/esm/utils/fixChunkStartEndChars.test.mjs.map +1 -0
- package/dist/esm/utils/formatTimeDiff.mjs +22 -0
- package/dist/esm/utils/formatTimeDiff.mjs.map +1 -0
- package/dist/esm/utils/formatTimeDiff.test.mjs +31 -0
- package/dist/esm/utils/formatTimeDiff.test.mjs.map +1 -0
- package/dist/esm/utils/getChunk.mjs +53 -0
- package/dist/esm/utils/getChunk.mjs.map +1 -0
- package/dist/esm/utils/getChunk.test.mjs +45 -0
- package/dist/esm/utils/getChunk.test.mjs.map +1 -0
- package/dist/esm/utils/getIsFileUpdatedRecently.mjs +12 -0
- package/dist/esm/utils/getIsFileUpdatedRecently.mjs.map +1 -0
- package/dist/esm/utils/getOutputFilePath.mjs +65 -0
- package/dist/esm/utils/getOutputFilePath.mjs.map +1 -0
- package/dist/esm/utils/getOutputFilePath.test.mjs +72 -0
- package/dist/esm/utils/getOutputFilePath.test.mjs.map +1 -0
- package/dist/esm/utils/getParentPackageJSON.mjs +23 -0
- package/dist/esm/utils/getParentPackageJSON.mjs.map +1 -0
- package/dist/esm/utils/listSpecialChars.mjs +54 -0
- package/dist/esm/utils/listSpecialChars.mjs.map +1 -0
- package/dist/esm/utils/listSpecialChars.test.mjs +57 -0
- package/dist/esm/utils/listSpecialChars.test.mjs.map +1 -0
- package/dist/esm/utils/reorderParagraphs.mjs +101 -0
- package/dist/esm/utils/reorderParagraphs.mjs.map +1 -0
- package/dist/esm/utils/reorderParagraphs.test.mjs +70 -0
- package/dist/esm/utils/reorderParagraphs.test.mjs.map +1 -0
- package/dist/esm/utils/splitTextByLine.mjs +11 -0
- package/dist/esm/utils/splitTextByLine.mjs.map +1 -0
- package/dist/esm/utils/splitTextByLine.test.mjs +13 -0
- package/dist/esm/utils/splitTextByLine.test.mjs.map +1 -0
- package/dist/types/cli.d.ts.map +1 -1
- package/dist/types/cli.test.d.ts +2 -0
- package/dist/types/cli.test.d.ts.map +1 -0
- package/dist/types/fill.d.ts.map +1 -1
- package/dist/types/index.d.ts +2 -0
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/pull.d.ts.map +1 -1
- package/dist/types/pushConfig.d.ts.map +1 -1
- package/dist/types/reviewDoc.d.ts +25 -0
- package/dist/types/reviewDoc.d.ts.map +1 -0
- package/dist/types/translateDoc.d.ts +25 -0
- package/dist/types/translateDoc.d.ts.map +1 -0
- package/dist/types/utils/calculateChunks.d.ts +9 -0
- package/dist/types/utils/calculateChunks.d.ts.map +1 -0
- package/dist/types/utils/calculateChunks.test.d.ts +2 -0
- package/dist/types/utils/calculateChunks.test.d.ts.map +1 -0
- package/dist/types/utils/checkAIAccess.d.ts +4 -0
- package/dist/types/utils/checkAIAccess.d.ts.map +1 -0
- package/dist/types/utils/checkFileModifiedRange.d.ts +11 -0
- package/dist/types/utils/checkFileModifiedRange.d.ts.map +1 -0
- package/dist/types/utils/checkFileModifiedRange.test.d.ts +2 -0
- package/dist/types/utils/checkFileModifiedRange.test.d.ts.map +1 -0
- package/dist/types/utils/checkLastUpdateTime.d.ts +9 -0
- package/dist/types/utils/checkLastUpdateTime.d.ts.map +1 -0
- package/dist/types/utils/chunkInference.d.ts +12 -0
- package/dist/types/utils/chunkInference.d.ts.map +1 -0
- package/dist/types/utils/fixChunkStartEndChars.d.ts +2 -0
- package/dist/types/utils/fixChunkStartEndChars.d.ts.map +1 -0
- package/dist/types/utils/fixChunkStartEndChars.test.d.ts +2 -0
- package/dist/types/utils/fixChunkStartEndChars.test.d.ts.map +1 -0
- package/dist/types/utils/formatTimeDiff.d.ts +2 -0
- package/dist/types/utils/formatTimeDiff.d.ts.map +1 -0
- package/dist/types/utils/formatTimeDiff.test.d.ts +2 -0
- package/dist/types/utils/formatTimeDiff.test.d.ts.map +1 -0
- package/dist/types/utils/getChunk.d.ts +9 -0
- package/dist/types/utils/getChunk.d.ts.map +1 -0
- package/dist/types/utils/getChunk.test.d.ts +2 -0
- package/dist/types/utils/getChunk.test.d.ts.map +1 -0
- package/dist/types/utils/getIsFileUpdatedRecently.d.ts +5 -0
- package/dist/types/utils/getIsFileUpdatedRecently.d.ts.map +1 -0
- package/dist/types/utils/getOutputFilePath.d.ts +26 -0
- package/dist/types/utils/getOutputFilePath.d.ts.map +1 -0
- package/dist/types/utils/getOutputFilePath.test.d.ts +2 -0
- package/dist/types/utils/getOutputFilePath.test.d.ts.map +1 -0
- package/dist/types/utils/getParentPackageJSON.d.ts +32 -0
- package/dist/types/utils/getParentPackageJSON.d.ts.map +1 -0
- package/dist/types/utils/listSpecialChars.d.ts +10 -0
- package/dist/types/utils/listSpecialChars.d.ts.map +1 -0
- package/dist/types/utils/listSpecialChars.test.d.ts +2 -0
- package/dist/types/utils/listSpecialChars.test.d.ts.map +1 -0
- package/dist/types/utils/reorderParagraphs.d.ts +8 -0
- package/dist/types/utils/reorderParagraphs.d.ts.map +1 -0
- package/dist/types/utils/reorderParagraphs.test.d.ts +2 -0
- package/dist/types/utils/reorderParagraphs.test.d.ts.map +1 -0
- package/dist/types/utils/splitTextByLine.d.ts +2 -0
- package/dist/types/utils/splitTextByLine.d.ts.map +1 -0
- package/dist/types/utils/splitTextByLine.test.d.ts +2 -0
- package/dist/types/utils/splitTextByLine.test.d.ts.map +1 -0
- package/package.json +16 -14
@@ -0,0 +1,96 @@
+import { splitTextByLines } from "./splitTextByLine.mjs";
+const DEFAULT_MAX_CHARS_PER_CHUNK = 800;
+const DEFAULT_OVERLAP_CHARS = 0;
+const chunkText = (text, maxCharsPerChunk = DEFAULT_MAX_CHARS_PER_CHUNK, overlapChars = DEFAULT_OVERLAP_CHARS) => {
+  if (maxCharsPerChunk <= 0) {
+    throw new Error("maxCharsPerChunk must be greater than 0");
+  }
+  const splittedText = splitTextByLines(text);
+  const lines = [];
+  let charStartAcc = 0;
+  splittedText.forEach((line, index) => {
+    lines.push({
+      content: line,
+      lineStart: index,
+      lineLength: 1,
+      charStart: charStartAcc,
+      charLength: line.length
+    });
+    charStartAcc += line.length;
+  });
+  const groupedLines = lines.reduce(
+    (acc, line) => {
+      if (line.content.length > maxCharsPerChunk) {
+        acc.push(line);
+        return acc;
+      }
+      if (acc.length === 0) {
+        acc.push(line);
+        return acc;
+      }
+      const lastChunk = acc[acc.length - 1];
+      const combinedLength = lastChunk.content.length + line.content.length;
+      if (combinedLength > maxCharsPerChunk) {
+        acc.push(line);
+        return acc;
+      }
+      const combinedContent = lastChunk.content + line.content;
+      const updatedChunk = {
+        content: combinedContent,
+        lineStart: lastChunk.lineStart,
+        lineLength: lastChunk.lineLength + line.lineLength,
+        charStart: lastChunk.charStart,
+        charLength: combinedContent.length
+      };
+      acc[acc.length - 1] = updatedChunk;
+      return acc;
+    },
+    []
+  );
+  const splittedLines = groupedLines.flatMap((line) => {
+    const chunk = [];
+    if (line.content.length <= maxCharsPerChunk) {
+      chunk.push(line);
+      return chunk;
+    }
+    for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {
+      const slicedContent = line.content.slice(i, i + maxCharsPerChunk);
+      chunk.push({
+        content: slicedContent,
+        lineStart: line.lineStart,
+        lineLength: 1,
+        charStart: line.charStart + i,
+        charLength: slicedContent.length
+      });
+    }
+    return chunk;
+  });
+  if (overlapChars === 0) return splittedLines;
+  const overlapChunks = splittedLines.length > 0 ? [splittedLines[0]] : [];
+  for (let i = 1; i < splittedLines.length; i++) {
+    const previousChunk = splittedLines[i - 1];
+    const chunk = splittedLines[i];
+    const overlapContent = previousChunk.content.slice(-overlapChars);
+    const overlapLineNb = splitTextByLines(overlapContent).length;
+    const overlapContentWithoutPartialLine = overlapContent.slice(
+      overlapLineNb > 1 ? overlapContent.indexOf("\n") + 1 : 0,
+      overlapContent.length
+    );
+    const newContent = overlapContentWithoutPartialLine + chunk.content;
+    const newLineLength = splitTextByLines(newContent).length;
+    const lineDiff = chunk.lineLength - newLineLength;
+    const overlappedChunk = {
+      content: newContent,
+      lineStart: chunk.lineStart + lineDiff,
+      lineLength: chunk.lineLength - lineDiff,
+      charStart: chunk.charStart - overlapContentWithoutPartialLine.length,
+      charLength: newContent.length
+    };
+    overlapChunks.push(overlappedChunk);
+  }
+  return overlapChunks;
+};
+export {
+  chunkText
+};
+//# sourceMappingURL=calculateChunks.mjs.map
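
For orientation, the `chunkText` helper added above splits a text into line-aligned chunks of at most `maxCharsPerChunk` characters (single lines longer than the cap are sliced), and can prepend an overlap taken from the end of the previous chunk, trimmed so it starts at a line boundary. A minimal usage sketch follows; the deep `dist` import path is an assumption for illustration, not a documented entry point of `@intlayer/cli`.

```ts
// Sketch only: the deep dist import below is an assumption, not a documented
// public entry point of @intlayer/cli.
import { chunkText } from '@intlayer/cli/dist/esm/utils/calculateChunks.mjs';

const text = [
  'First line of a document.',
  'Second line of the same document.',
  'A third line that is a little bit longer than the other two lines.',
].join('\n');

// Cap each chunk at 80 characters and carry up to 20 characters of overlap
// (trimmed to whole lines) from the end of the previous chunk.
const chunks = chunkText(text, 80, 20);

for (const chunk of chunks) {
  // Each chunk records where it sits in the original text.
  console.log(chunk.lineStart, chunk.charStart, chunk.charLength);
  console.log(chunk.content);
}
```

Each returned entry carries `lineStart`, `lineLength`, `charStart`, and `charLength`, which is what the companion `getChunk` helper uses (see the tests below) to slice the original text back out.
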
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/calculateChunks.ts"],"sourcesContent":["import { splitTextByLines } from './splitTextByLine';\n\nexport type ChunkLineResult = {\n lineStart: number;\n lineLength: number;\n charStart: number;\n charLength: number;\n content: string;\n};\n\nconst DEFAULT_MAX_CHARS_PER_CHUNK = 800;\nconst DEFAULT_OVERLAP_CHARS = 0;\n\nexport const chunkText = (\n text: string,\n maxCharsPerChunk: number = DEFAULT_MAX_CHARS_PER_CHUNK,\n overlapChars: number = DEFAULT_OVERLAP_CHARS\n): ChunkLineResult[] => {\n if (maxCharsPerChunk <= 0) {\n throw new Error('maxCharsPerChunk must be greater than 0');\n }\n\n const splittedText = splitTextByLines(text);\n\n // Split text into lines to faciliate the translation\n const lines: ChunkLineResult[] = [];\n let charStartAcc = 0;\n\n splittedText.forEach((line, index) => {\n lines.push({\n content: line,\n lineStart: index,\n lineLength: 1,\n charStart: charStartAcc,\n charLength: line.length,\n });\n charStartAcc += line.length;\n });\n\n // Group lines\n // as long as the chunk length is less than maxCharsPerChunk\n // if a line longer than maxCharsPerChunk, keep it alone\n // if a line is not longer than maxCharsPerChunk, it is grouped\n const groupedLines: ChunkLineResult[] = lines.reduce(\n (acc: ChunkLineResult[], line) => {\n // If this line alone exceeds maxCharsPerChunk, keep it separate\n if (line.content.length > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // If we have no chunks yet, start with this line\n if (acc.length === 0) {\n acc.push(line);\n return acc;\n }\n\n // Get the last chunk\n const lastChunk = acc[acc.length - 1];\n\n // Calculate what the combined length would be (including newline character)\n const combinedLength = lastChunk.content.length + line.content.length;\n\n // If combining would exceed the limit, start a new chunk\n if (combinedLength > maxCharsPerChunk) {\n acc.push(line);\n return acc;\n }\n\n // Otherwise, combine with the last chunk\n const combinedContent = lastChunk.content + line.content;\n const updatedChunk = {\n content: combinedContent,\n lineStart: lastChunk.lineStart,\n lineLength: lastChunk.lineLength + line.lineLength,\n charStart: lastChunk.charStart,\n charLength: combinedContent.length,\n };\n\n acc[acc.length - 1] = updatedChunk;\n return acc;\n },\n []\n );\n\n // If one line is longer than maxCharsPerChunk, split it into multiple chunks\n const splittedLines: ChunkLineResult[] = groupedLines.flatMap((line) => {\n const chunk: ChunkLineResult[] = [];\n\n if (line.content.length <= maxCharsPerChunk) {\n chunk.push(line);\n return chunk;\n }\n\n for (let i = 0; i < line.content.length; i += maxCharsPerChunk) {\n const slicedContent = line.content.slice(i, i + maxCharsPerChunk);\n chunk.push({\n content: slicedContent,\n lineStart: line.lineStart,\n lineLength: 1,\n charStart: line.charStart + i,\n charLength: slicedContent.length,\n });\n }\n return chunk;\n });\n\n if (overlapChars === 0) return splittedLines;\n\n const overlapChunks: ChunkLineResult[] =\n splittedLines.length > 0 ? [splittedLines[0]] : [];\n\n for (let i = 1; i < splittedLines.length; i++) {\n const previousChunk = splittedLines[i - 1];\n const chunk = splittedLines[i];\n\n const overlapContent = previousChunk.content.slice(-overlapChars);\n const overlapLineNb = splitTextByLines(overlapContent).length;\n\n const overlapContentWithoutPartialLine = overlapContent.slice(\n overlapLineNb > 1 ? 
overlapContent.indexOf('\\n') + 1 : 0,\n overlapContent.length\n );\n\n const newContent = overlapContentWithoutPartialLine + chunk.content;\n const newLineLength = splitTextByLines(newContent).length;\n const lineDiff = chunk.lineLength - newLineLength;\n\n const overlappedChunk = {\n content: newContent,\n lineStart: chunk.lineStart + lineDiff,\n lineLength: chunk.lineLength - lineDiff,\n charStart: chunk.charStart - overlapContentWithoutPartialLine.length,\n charLength: newContent.length,\n };\n\n overlapChunks.push(overlappedChunk);\n }\n\n return overlapChunks;\n};\n"],"mappings":"AAAA,SAAS,wBAAwB;AAUjC,MAAM,8BAA8B;AACpC,MAAM,wBAAwB;AAEvB,MAAM,YAAY,CACvB,MACA,mBAA2B,6BAC3B,eAAuB,0BACD;AACtB,MAAI,oBAAoB,GAAG;AACzB,UAAM,IAAI,MAAM,yCAAyC;AAAA,EAC3D;AAEA,QAAM,eAAe,iBAAiB,IAAI;AAG1C,QAAM,QAA2B,CAAC;AAClC,MAAI,eAAe;AAEnB,eAAa,QAAQ,CAAC,MAAM,UAAU;AACpC,UAAM,KAAK;AAAA,MACT,SAAS;AAAA,MACT,WAAW;AAAA,MACX,YAAY;AAAA,MACZ,WAAW;AAAA,MACX,YAAY,KAAK;AAAA,IACnB,CAAC;AACD,oBAAgB,KAAK;AAAA,EACvB,CAAC;AAMD,QAAM,eAAkC,MAAM;AAAA,IAC5C,CAAC,KAAwB,SAAS;AAEhC,UAAI,KAAK,QAAQ,SAAS,kBAAkB;AAC1C,YAAI,KAAK,IAAI;AACb,eAAO;AAAA,MACT;AAGA,UAAI,IAAI,WAAW,GAAG;AACpB,YAAI,KAAK,IAAI;AACb,eAAO;AAAA,MACT;AAGA,YAAM,YAAY,IAAI,IAAI,SAAS,CAAC;AAGpC,YAAM,iBAAiB,UAAU,QAAQ,SAAS,KAAK,QAAQ;AAG/D,UAAI,iBAAiB,kBAAkB;AACrC,YAAI,KAAK,IAAI;AACb,eAAO;AAAA,MACT;AAGA,YAAM,kBAAkB,UAAU,UAAU,KAAK;AACjD,YAAM,eAAe;AAAA,QACnB,SAAS;AAAA,QACT,WAAW,UAAU;AAAA,QACrB,YAAY,UAAU,aAAa,KAAK;AAAA,QACxC,WAAW,UAAU;AAAA,QACrB,YAAY,gBAAgB;AAAA,MAC9B;AAEA,UAAI,IAAI,SAAS,CAAC,IAAI;AACtB,aAAO;AAAA,IACT;AAAA,IACA,CAAC;AAAA,EACH;AAGA,QAAM,gBAAmC,aAAa,QAAQ,CAAC,SAAS;AACtE,UAAM,QAA2B,CAAC;AAElC,QAAI,KAAK,QAAQ,UAAU,kBAAkB;AAC3C,YAAM,KAAK,IAAI;AACf,aAAO;AAAA,IACT;AAEA,aAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,QAAQ,KAAK,kBAAkB;AAC9D,YAAM,gBAAgB,KAAK,QAAQ,MAAM,GAAG,IAAI,gBAAgB;AAChE,YAAM,KAAK;AAAA,QACT,SAAS;AAAA,QACT,WAAW,KAAK;AAAA,QAChB,YAAY;AAAA,QACZ,WAAW,KAAK,YAAY;AAAA,QAC5B,YAAY,cAAc;AAAA,MAC5B,CAAC;AAAA,IACH;AACA,WAAO;AAAA,EACT,CAAC;AAED,MAAI,iBAAiB,EAAG,QAAO;AAE/B,QAAM,gBACJ,cAAc,SAAS,IAAI,CAAC,cAAc,CAAC,CAAC,IAAI,CAAC;AAEnD,WAAS,IAAI,GAAG,IAAI,cAAc,QAAQ,KAAK;AAC7C,UAAM,gBAAgB,cAAc,IAAI,CAAC;AACzC,UAAM,QAAQ,cAAc,CAAC;AAE7B,UAAM,iBAAiB,cAAc,QAAQ,MAAM,CAAC,YAAY;AAChE,UAAM,gBAAgB,iBAAiB,cAAc,EAAE;AAEvD,UAAM,mCAAmC,eAAe;AAAA,MACtD,gBAAgB,IAAI,eAAe,QAAQ,IAAI,IAAI,IAAI;AAAA,MACvD,eAAe;AAAA,IACjB;AAEA,UAAM,aAAa,mCAAmC,MAAM;AAC5D,UAAM,gBAAgB,iBAAiB,UAAU,EAAE;AACnD,UAAM,WAAW,MAAM,aAAa;AAEpC,UAAM,kBAAkB;AAAA,MACtB,SAAS;AAAA,MACT,WAAW,MAAM,YAAY;AAAA,MAC7B,YAAY,MAAM,aAAa;AAAA,MAC/B,WAAW,MAAM,YAAY,iCAAiC;AAAA,MAC9D,YAAY,WAAW;AAAA,IACzB;AAEA,kBAAc,KAAK,eAAe;AAAA,EACpC;AAEA,SAAO;AACT;","names":[]}
@@ -0,0 +1,103 @@
+import { readFileSync } from "fs";
+import { join } from "path";
+import { describe, expect, it } from "vitest";
+import { chunkText } from "./calculateChunks.mjs";
+import { getChunk } from "./getChunk.mjs";
+const sampleText = [
+  "Line 0: The quick brown fox jumps over the lazy dog.",
+  "Line 1: Pack my box with five dozen liquor jugs.",
+  "Line 2: How razorback-jumping frogs can level six piqued gymnasts!",
+  "Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. ",
+  "Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent."
+].join("\n");
+describe("calculateChunks", () => {
+  it.skip("creates chunks with custom parameters", () => {
+    const chunks = chunkText(sampleText, 200);
+    expect(chunks).toStrictEqual([
+      {
+        content: "Line 0: The quick brown fox jumps over the lazy dog.\nLine 1: Pack my box with five dozen liquor jugs.\nLine 2: How razorback-jumping frogs can level six piqued gymnasts!",
+        lineStart: 0,
+        lineEnd: 2,
+        charStart: 0,
+        charEnd: 167
+      },
+      {
+        content: "Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5",
+        lineStart: 3,
+        lineEnd: 3,
+        charStart: 0,
+        charEnd: 199
+      },
+      {
+        content: "1. 52. 53. 54. 55. 56. 57. 58. 59. 60. ",
+        lineStart: 3,
+        lineEnd: 3,
+        charStart: 200,
+        charEnd: 238
+      },
+      {
+        content: "Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.",
+        lineStart: 4,
+        lineEnd: 4,
+        charStart: 0,
+        charEnd: 77
+      }
+    ]);
+  });
+  it("creates chunks with overlap", () => {
+    const chunks = chunkText(sampleText, 200, 100);
+    expect(chunks).toStrictEqual([
+      {
+        content: "Line 0: The quick brown fox jumps over the lazy dog.\nLine 1: Pack my box with five dozen liquor jugs.\nLine 2: How razorback-jumping frogs can level six piqued gymnasts!\n",
+        lineStart: 0,
+        lineLength: 3,
+        charStart: 0,
+        charLength: 169
+      },
+      {
+        content: "Line 2: How razorback-jumping frogs can level six piqued gymnasts!\nLine 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5",
+        lineStart: 2,
+        lineLength: 2,
+        charStart: 102,
+        charLength: 267
+      },
+      {
+        content: "6. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. \n",
+        lineStart: 3,
+        lineLength: 1,
+        charStart: 269,
+        charLength: 140
+      },
+      {
+        content: "1. 52. 53. 54. 55. 56. 57. 58. 59. 60. \nLine 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.",
+        lineStart: 3,
+        lineLength: 2,
+        charStart: 369,
+        charLength: 118
+      }
+    ]);
+  });
+  it("Line and char stats correspond", () => {
+    const chunks = chunkText(sampleText, 200, 100);
+    const firstChunk = chunks[0];
+    const thirdChunk = chunks[2];
+    const retrievedFirstChunk = getChunk(sampleText, firstChunk);
+    const retrievedThirdChunk = getChunk(sampleText, thirdChunk);
+    expect(retrievedFirstChunk).toBe(firstChunk.content);
+    expect(retrievedThirdChunk).toBe(thirdChunk.content);
+  });
+  it("Validates chunking with real file content", () => {
+    const fileContent = readFileSync(
+      join(__dirname, "./calculrateChunkTest.md"),
+      "utf-8"
+    );
+    const chunks = chunkText(fileContent, 200, 100);
+    const firstChunk = chunks[8];
+    const thirdChunk = chunks[25];
+    const retrievedFirstChunk = getChunk(fileContent, firstChunk);
+    const retrievedThirdChunk = getChunk(fileContent, thirdChunk);
+    expect(retrievedFirstChunk).toBe(firstChunk.content);
+    expect(retrievedThirdChunk).toBe(thirdChunk.content);
+  });
+});
+//# sourceMappingURL=calculateChunks.test.mjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/calculateChunks.test.ts"],"sourcesContent":["import { readFileSync } from 'fs';\nimport { join } from 'path';\nimport { describe, expect, it } from 'vitest';\nimport { chunkText } from './calculateChunks';\nimport { getChunk } from './getChunk';\n\n// Sample multiline string reused across test cases\nconst sampleText = [\n 'Line 0: The quick brown fox jumps over the lazy dog.',\n 'Line 1: Pack my box with five dozen liquor jugs.',\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!',\n 'Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. ',\n 'Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.',\n].join('\\n');\n\ndescribe('calculateChunks', () => {\n it.skip('creates chunks with custom parameters', () => {\n const chunks = chunkText(sampleText, 200);\n\n expect(chunks).toStrictEqual([\n {\n content:\n 'Line 0: The quick brown fox jumps over the lazy dog.\\n' +\n 'Line 1: Pack my box with five dozen liquor jugs.\\n' +\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!',\n lineStart: 0,\n lineEnd: 2,\n charStart: 0,\n charEnd: 167,\n },\n {\n content:\n 'Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5',\n lineStart: 3,\n lineEnd: 3,\n charStart: 0,\n charEnd: 199,\n },\n {\n content: '1. 52. 53. 54. 55. 56. 57. 58. 59. 60. ',\n lineStart: 3,\n lineEnd: 3,\n charStart: 200,\n charEnd: 238,\n },\n {\n content:\n 'Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.',\n lineStart: 4,\n lineEnd: 4,\n charStart: 0,\n charEnd: 77,\n },\n ]);\n });\n\n it('creates chunks with overlap', () => {\n const chunks = chunkText(sampleText, 200, 100);\n\n expect(chunks).toStrictEqual([\n {\n content:\n 'Line 0: The quick brown fox jumps over the lazy dog.\\n' +\n 'Line 1: Pack my box with five dozen liquor jugs.\\n' +\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!\\n',\n lineStart: 0,\n lineLength: 3,\n charStart: 0,\n charLength: 169,\n },\n {\n content:\n 'Line 2: How razorback-jumping frogs can level six piqued gymnasts!\\n' +\n 'Line 3: 1. 2. 3. 4. 5. 6. 7. 8. 9. 10. 11. 12. 13. 14. 15. 16. 17. 18. 19. 20. 21. 22. 23. 24. 25. 26. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 5',\n lineStart: 2,\n lineLength: 2,\n charStart: 102,\n charLength: 267,\n },\n {\n content:\n '6. 27. 28. 29. 30. 31. 32. 33. 34. 35. 36. 37. 38. 39. 40. 41. 42. 43. 44. 45. 46. 47. 48. 49. 50. 51. 52. 53. 54. 55. 56. 57. 58. 59. 60. \\n',\n lineStart: 3,\n lineLength: 1,\n charStart: 269,\n charLength: 140,\n },\n {\n content:\n '1. 52. 53. 54. 55. 56. 57. 58. 59. 60. 
\\n' +\n 'Line 4: A mad boxer shot a quick, gloved jab to the jaw of his dizzy opponent.',\n lineStart: 3,\n lineLength: 2,\n charStart: 369,\n charLength: 118,\n },\n ]);\n });\n\n it('Line and char stats correspond', () => {\n const chunks = chunkText(sampleText, 200, 100);\n\n const firstChunk = chunks[0];\n const thirdChunk = chunks[2];\n\n const retrievedFirstChunk = getChunk(sampleText, firstChunk);\n const retrievedThirdChunk = getChunk(sampleText, thirdChunk);\n\n expect(retrievedFirstChunk).toBe(firstChunk.content);\n expect(retrievedThirdChunk).toBe(thirdChunk.content);\n });\n\n it('Validates chunking with real file content', () => {\n const fileContent = readFileSync(\n join(__dirname, './calculrateChunkTest.md'),\n 'utf-8'\n );\n\n const chunks = chunkText(fileContent, 200, 100);\n\n const firstChunk = chunks[8];\n const thirdChunk = chunks[25];\n\n const retrievedFirstChunk = getChunk(fileContent, firstChunk);\n const retrievedThirdChunk = getChunk(fileContent, thirdChunk);\n\n expect(retrievedFirstChunk).toBe(firstChunk.content);\n expect(retrievedThirdChunk).toBe(thirdChunk.content);\n });\n});\n"],"mappings":"AAAA,SAAS,oBAAoB;AAC7B,SAAS,YAAY;AACrB,SAAS,UAAU,QAAQ,UAAU;AACrC,SAAS,iBAAiB;AAC1B,SAAS,gBAAgB;AAGzB,MAAM,aAAa;AAAA,EACjB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,EAAE,KAAK,IAAI;AAEX,SAAS,mBAAmB,MAAM;AAChC,KAAG,KAAK,yCAAyC,MAAM;AACrD,UAAM,SAAS,UAAU,YAAY,GAAG;AAExC,WAAO,MAAM,EAAE,cAAc;AAAA,MAC3B;AAAA,QACE,SACE;AAAA,QAGF,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,MACA;AAAA,QACE,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,WAAW;AAAA,QACX,SAAS;AAAA,QACT,WAAW;AAAA,QACX,SAAS;AAAA,MACX;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,KAAG,+BAA+B,MAAM;AACtC,UAAM,SAAS,UAAU,YAAY,KAAK,GAAG;AAE7C,WAAO,MAAM,EAAE,cAAc;AAAA,MAC3B;AAAA,QACE,SACE;AAAA,QAGF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA;AAAA,QACE,SACE;AAAA,QAEF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA;AAAA,QACE,SACE;AAAA,QACF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,MACA;AAAA,QACE,SACE;AAAA,QAEF,WAAW;AAAA,QACX,YAAY;AAAA,QACZ,WAAW;AAAA,QACX,YAAY;AAAA,MACd;AAAA,IACF,CAAC;AAAA,EACH,CAAC;AAED,KAAG,kCAAkC,MAAM;AACzC,UAAM,SAAS,UAAU,YAAY,KAAK,GAAG;AAE7C,UAAM,aAAa,OAAO,CAAC;AAC3B,UAAM,aAAa,OAAO,CAAC;AAE3B,UAAM,sBAAsB,SAAS,YAAY,UAAU;AAC3D,UAAM,sBAAsB,SAAS,YAAY,UAAU;AAE3D,WAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AACnD,WAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AAAA,EACrD,CAAC;AAED,KAAG,6CAA6C,MAAM;AACpD,UAAM,cAAc;AAAA,MAClB,KAAK,WAAW,0BAA0B;AAAA,MAC1C;AAAA,IACF;AAEA,UAAM,SAAS,UAAU,aAAa,KAAK,GAAG;AAE9C,UAAM,aAAa,OAAO,CAAC;AAC3B,UAAM,aAAa,OAAO,EAAE;AAE5B,UAAM,sBAAsB,SAAS,aAAa,UAAU;AAC5D,UAAM,sBAAsB,SAAS,aAAa,UAAU;AAE5D,WAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AACnD,WAAO,mBAAmB,EAAE,KAAK,WAAW,OAAO;AAAA,EACrD,CAAC;AACH,CAAC;","names":[]}
@@ -0,0 +1,9 @@
+Lorem ipsum dolor sit amet consectetur adipiscing elit. Blandit quis suspendisse aliquet nisi sodales consequat magna. Sem placerat in id cursus mi pretium tellus. Finibus facilisis dapibus etiam interdum tortor ligula congue. Sed diam urna tempor pulvinar vivamus fringilla lacus. Porta elementum a enim euismod quam justo lectus. Nisl malesuada lacinia integer nunc posuere ut hendrerit. Imperdiet mollis nullam volutpat porttitor ullamcorper rutrum gravida. Ad litora torquent per conubia nostra inceptos himenaeos. Ornare sagittis vehicula praesent dui felis venenatis ultrices. Dis parturient montes nascetur ridiculus mus donec rhoncus. Potenti ultricies habitant morbi senectus netus suscipit auctor. Maximus eget fermentum odio phasellus non purus est. Platea dictumst lorem ipsum dolor sit amet consectetur. Dictum risus blandit quis suspendisse aliquet nisi sodales. Vitae pellentesque sem placerat in id cursus mi. Luctus nibh finibus facilisis dapibus etiam interdum tortor. Eu aenean sed diam urna tempor pulvinar vivamus. Tincidunt nam porta elementum a enim euismod quam. Iaculis massa nisl malesuada lacinia integer nunc posuere. Velit aliquam imperdiet mollis nullam volutpat porttitor ullamcorper. Taciti sociosqu ad litora torquent per conubia nostra.
+
+Primis vulputate ornare sagittis vehicula praesent dui felis. Et magnis dis parturient montes nascetur ridiculus mus. Accumsan maecenas potenti ultricies habitant morbi senectus netus. Mattis scelerisque maximus eget fermentum odio phasellus non. Hac habitasse platea dictumst lorem ipsum dolor sit. Vestibulum fusce dictum risus blandit quis suspendisse aliquet. Ex sapien vitae pellentesque sem placerat in id. Neque at luctus nibh finibus facilisis dapibus etiam. Tempus leo eu aenean sed diam urna tempor. Viverra ac tincidunt nam porta elementum a enim. Bibendum egestas iaculis massa nisl malesuada lacinia integer. Arcu dignissim velit aliquam imperdiet mollis nullam volutpat. Class aptent taciti sociosqu ad litora torquent per. Turpis fames primis vulputate ornare sagittis vehicula praesent. Natoque penatibus et magnis dis parturient montes nascetur. Feugiat tristique accumsan maecenas potenti ultricies habitant morbi. Nulla molestie mattis scelerisque maximus eget fermentum odio. Cubilia curae hac habitasse platea dictumst lorem ipsum. Mauris pharetra vestibulum fusce dictum risus blandit quis. Quisque faucibus ex sapien vitae pellentesque sem placerat. Ante condimentum neque at luctus nibh finibus facilisis. Duis convallis tempus leo eu aenean sed diam. Sollicitudin erat viverra ac tincidunt nam porta elementum. Nec metus bibendum egestas iaculis massa nisl malesuada.
+
+Commodo augue arcu dignissim velit aliquam imperdiet mollis. Semper vel class aptent taciti sociosqu ad litora. Cras eleifend turpis fames primis vulputate ornare sagittis. Orci varius natoque penatibus et magnis dis parturient. Proin libero feugiat tristique accumsan maecenas potenti ultricies. Eros lobortis nulla molestie mattis scelerisque maximus eget. Curabitur facilisi cubilia curae hac habitasse platea dictumst. Efficitur laoreet mauris pharetra vestibulum fusce dictum risus. Adipiscing elit quisque faucibus ex sapien vitae pellentesque. Consequat magna ante condimentum neque at luctus nibh. Pretium tellus duis convallis tempus leo eu aenean. Ligula congue sollicitudin erat viverra ac tincidunt nam. Fringilla lacus nec metus bibendum egestas iaculis massa. Justo lectus commodo augue arcu dignissim velit aliquam. Ut hendrerit semper vel class aptent taciti sociosqu. Rutrum gravida cras eleifend turpis fames primis vulputate. Inceptos himenaeos orci varius natoque penatibus et magnis. Venenatis ultrices proin libero feugiat tristique accumsan maecenas. Donec rhoncus eros lobortis nulla molestie mattis scelerisque. Suscipit auctor curabitur facilisi cubilia curae hac habitasse. Purus est efficitur laoreet mauris pharetra vestibulum fusce. Amet consectetur adipiscing elit quisque faucibus ex sapien. Nisi sodales consequat magna ante condimentum neque at. Cursus mi pretium tellus duis convallis tempus leo. Interdum tortor ligula congue sollicitudin erat viverra ac.
+
+Pulvinar vivamus fringilla lacus nec metus bibendum egestas. Euismod quam justo lectus commodo augue arcu dignissim. Nunc posuere ut hendrerit semper vel class aptent. Porttitor ullamcorper rutrum gravida cras eleifend turpis fames. Conubia nostra inceptos himenaeos orci varius natoque penatibus. Dui felis venenatis ultrices proin libero feugiat tristique. Ridiculus mus donec rhoncus eros lobortis nulla molestie. Senectus netus suscipit auctor curabitur facilisi cubilia curae. Phasellus non purus est efficitur laoreet mauris pharetra. Dolor sit amet consectetur adipiscing elit quisque faucibus. Suspendisse aliquet nisi sodales consequat magna ante condimentum. In id cursus mi pretium tellus duis convallis.
+
+Dapibus etiam interdum tortor ligula congue sollicitudin erat. Urna tempor pulvinar vivamus fringilla lacus nec metus. Aenim euismod quam justo lectus commodo augue. Lacinia integer nunc posuere ut hendrerit semper vel. Nullam volutpat porttitor ullamcorper rutrum gravida cras eleifend. Torquent per conubia nostra inceptos himenaeos orci varius. Vehicula praesent dui felis venenatis ultrices proin libero. Montes nascetur ridiculus mus donec rhoncus eros lobortis. Habitant morbi senectus netus suscipit auctor curabitur facilisi. Fermentum odio phasellus non purus est efficitur laoreet. Lorem ipsum dolor sit amet consectetur adipiscing elit. Blandit quis suspendisse aliquet nisi sodales consequat magna. Sem placerat in id cursus mi pretium tellus. Finibus facilisis dapibus etiam interdum tortor ligula congue. Sed diam urna tempor pulvinar vivamus fringilla lacus. Porta elementum a enim euismod quam justo lectus. Nisl malesuada lacinia integer nunc posuere ut hendrerit.
@@ -0,0 +1,16 @@
+import { getAppLogger } from "@intlayer/config";
+const checkAIAccess = (configuration, aiOptions) => {
+  const appLogger = getAppLogger(configuration);
+  if (!configuration.editor.clientId && !configuration.editor.clientSecret && !configuration.ai?.apiKey && !aiOptions?.apiKey) {
+    appLogger("AI options or API key not provided. Skipping AI translation.", {
+      level: "error"
+    });
+    throw new Error(
+      "AI options or API key not provided. Skipping AI translation."
+    );
+  }
+};
+export {
+  checkAIAccess
+};
+//# sourceMappingURL=checkAIAccess.mjs.map
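
The `checkAIAccess` guard added above fails fast when neither the editor credentials (`clientId`/`clientSecret`) nor an AI API key (from the configuration or the per-call `aiOptions`) is available. A hedged sketch of calling it with a deliberately partial configuration object; a real call would pass the full `IntlayerConfig` loaded by `@intlayer/config`, and the environment variable name below is hypothetical.

```ts
// Sketch only: this object carries just the fields checkAIAccess reads; a real
// call would pass the complete IntlayerConfig from @intlayer/config.
import { checkAIAccess } from './checkAIAccess.mjs';

const configuration = {
  editor: { clientId: undefined, clientSecret: undefined },
  ai: { apiKey: process.env.MY_AI_API_KEY }, // hypothetical env variable name
} as any;

try {
  // Throws (and logs an error) when neither editor credentials nor an API key
  // are available; otherwise it returns without a value.
  checkAIAccess(configuration, { apiKey: process.env.MY_AI_API_KEY });
} catch {
  // Fall back to a non-AI code path here.
}
```
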
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/checkAIAccess.ts"],"sourcesContent":["import type { AIOptions } from '@intlayer/api';\nimport { getAppLogger, type IntlayerConfig } from '@intlayer/config';\n\nexport const checkAIAccess = (\n configuration: IntlayerConfig,\n aiOptions?: AIOptions\n) => {\n const appLogger = getAppLogger(configuration);\n\n if (\n !configuration.editor.clientId &&\n !configuration.editor.clientSecret &&\n !configuration.ai?.apiKey &&\n !aiOptions?.apiKey\n ) {\n appLogger('AI options or API key not provided. Skipping AI translation.', {\n level: 'error',\n });\n // Potentially handle this case differently, e.g., by using a different translation method or stopping.\n\n throw new Error(\n 'AI options or API key not provided. Skipping AI translation.'\n );\n }\n};\n"],"mappings":"AACA,SAAS,oBAAyC;AAE3C,MAAM,gBAAgB,CAC3B,eACA,cACG;AACH,QAAM,YAAY,aAAa,aAAa;AAE5C,MACE,CAAC,cAAc,OAAO,YACtB,CAAC,cAAc,OAAO,gBACtB,CAAC,cAAc,IAAI,UACnB,CAAC,WAAW,QACZ;AACA,cAAU,gEAAgE;AAAA,MACxE,OAAO;AAAA,IACT,CAAC;AAGD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
@@ -0,0 +1,73 @@
+import { checkLastUpdateTime } from "./checkLastUpdateTime.mjs";
+import { formatTimeDiff } from "./formatTimeDiff.mjs";
+const TIMESTAMP_THRESHOLD_MS = 50 * 365 * 24 * 60 * 60 * 1e3;
+const normaliseInputDate = (date, now = /* @__PURE__ */ new Date()) => {
+  if (date instanceof Date) {
+    return {
+      absoluteTime: date,
+      relativeTime: new Date(now.getTime() - date.getTime())
+    };
+  }
+  if (typeof date === "number") {
+    if (date > TIMESTAMP_THRESHOLD_MS) {
+      const absoluteTime = new Date(date);
+      return {
+        absoluteTime,
+        relativeTime: new Date(now.getTime() - absoluteTime.getTime())
+      };
+    }
+    const relativeMs = date;
+    return {
+      // Relative duration expressed as a Date object starting at the epoch
+      relativeTime: new Date(relativeMs),
+      // The concrete date obtained by subtracting the duration from *now*
+      absoluteTime: new Date(now.getTime() - relativeMs)
+    };
+  }
+  if (typeof date === "string") {
+    const absoluteTime = new Date(date);
+    if (Number.isNaN(absoluteTime.getTime())) {
+      throw new Error(`Invalid date string provided: ${date}`);
+    }
+    return {
+      absoluteTime,
+      relativeTime: new Date(now.getTime() - absoluteTime.getTime())
+    };
+  }
+  throw new Error(`Unsupported date format: ${date}`);
+};
+const checkFileModifiedRange = (filePath, options) => {
+  const fileLastUpdateTime = checkLastUpdateTime(filePath);
+  const { skipIfModifiedBefore, skipIfModifiedAfter } = options;
+  const now = /* @__PURE__ */ new Date();
+  const minDate = skipIfModifiedBefore ? normaliseInputDate(skipIfModifiedBefore, now).absoluteTime : void 0;
+  const maxDate = skipIfModifiedAfter ? normaliseInputDate(skipIfModifiedAfter, now).absoluteTime : void 0;
+  let shouldSkip = false;
+  if (minDate instanceof Date && maxDate instanceof Date) {
+    shouldSkip = fileLastUpdateTime >= minDate && fileLastUpdateTime <= maxDate;
+  } else if (minDate instanceof Date) {
+    shouldSkip = fileLastUpdateTime >= minDate;
+  } else if (maxDate) {
+    shouldSkip = fileLastUpdateTime >= maxDate;
+  }
+  if (shouldSkip) {
+    const referenceDate = minDate && maxDate ? (
+      // When both bounds are present, the *range* caused the skip so we use
+      // the distance between the two bounds as the relative duration.
+      new Date(Math.abs(maxDate.getTime() - minDate.getTime()))
+    ) : minDate ?? maxDate;
+    const relativeTime = new Date(now.getTime() - referenceDate.getTime());
+    return {
+      isSkipped: true,
+      message: `Skipping file because it has been modified within the last ${formatTimeDiff(relativeTime)} - ${filePath}`
+    };
+  }
+  return {
+    isSkipped: false,
+    message: `File ${filePath} can be processed - ${filePath}`
+  };
+};
+export {
+  checkFileModifiedRange
+};
+//# sourceMappingURL=checkFileModifiedRange.mjs.map
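
The `checkFileModifiedRange` helper added above accepts each bound as a `Date`, a date string, or a number; numbers below a roughly 50-year threshold are interpreted as a duration in milliseconds before now, larger numbers as a Unix timestamp. A short sketch of both forms, with a hypothetical file path:

```ts
// Sketch only: 'docs/en/intro.md' is a placeholder path, and the relative
// import assumes the module is reachable from the calling file.
import { checkFileModifiedRange } from './checkFileModifiedRange.mjs';

// Absolute bounds: skip when the file was last modified inside the window.
checkFileModifiedRange('docs/en/intro.md', {
  skipIfModifiedBefore: '2024-01-01',
  skipIfModifiedAfter: new Date('2026-01-01'),
});

// Relative bound: a small number is read as "milliseconds ago", so this skips
// any file modified within the last 24 hours.
const { isSkipped, message } = checkFileModifiedRange('docs/en/intro.md', {
  skipIfModifiedAfter: 24 * 60 * 60 * 1000,
});
if (isSkipped) console.log(message);
```
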
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/checkFileModifiedRange.ts"],"sourcesContent":["import { checkLastUpdateTime } from './checkLastUpdateTime';\nimport { formatTimeDiff } from './formatTimeDiff';\n\ntype GetTimeRangeResult = {\n relativeTime: Date;\n absoluteTime: Date;\n};\n\n/**\n * Threshold that helps us differentiate between a numeric *timestamp* (ms from epoch)\n * and a numeric *duration* (ms ago).\n * 50 years expressed in milliseconds is far greater than any reasonable\n * \"relative\" duration we expect the helper to receive (e.g. a couple of years).\n */\nconst TIMESTAMP_THRESHOLD_MS = 50 * 365 * 24 * 60 * 60 * 1000; // 50 years\n\n/**\n * Normalises the input date representation into a pair:\n * 1. `relativeTime` – a Date instance whose epoch-time equals the duration\n * between `now` and the absolute date.\n * 2. `absoluteTime` – the concrete point in time represented as a Date.\n *\n * Rules for interpreting the input:\n * • Date => treated as an absolute time.\n * • string => parsed via the Date constructor => absolute time.\n * • number:\n * – if the value is larger than the TIMESTAMP_THRESHOLD_MS we assume it\n * is a unix timestamp (absolute time).\n * – otherwise we treat it as a *relative* duration expressed in\n * milliseconds.\n */\nconst normaliseInputDate = (\n date: Date | number | string,\n now: Date = new Date()\n): GetTimeRangeResult => {\n // Case 1: Already a Date instance\n if (date instanceof Date) {\n return {\n absoluteTime: date,\n relativeTime: new Date(now.getTime() - date.getTime()),\n };\n }\n\n // Case 2: Numeric value – decide between timestamp vs relative ms.\n if (typeof date === 'number') {\n if (date > TIMESTAMP_THRESHOLD_MS) {\n // Treat as *unix timestamp* (absolute)\n const absoluteTime = new Date(date);\n return {\n absoluteTime,\n relativeTime: new Date(now.getTime() - absoluteTime.getTime()),\n };\n }\n\n // Treat as a *relative* duration (milliseconds in the past)\n const relativeMs = date;\n return {\n // Relative duration expressed as a Date object starting at the epoch\n relativeTime: new Date(relativeMs),\n // The concrete date obtained by subtracting the duration from *now*\n absoluteTime: new Date(now.getTime() - relativeMs),\n };\n }\n\n // Case 3: String representation – let Date parse it.\n if (typeof date === 'string') {\n const absoluteTime = new Date(date);\n if (Number.isNaN(absoluteTime.getTime())) {\n throw new Error(`Invalid date string provided: ${date}`);\n }\n\n return {\n absoluteTime,\n relativeTime: new Date(now.getTime() - absoluteTime.getTime()),\n };\n }\n\n throw new Error(`Unsupported date format: ${date}`);\n};\n\ntype CheckFileModifiedRangeResult = {\n isSkipped: boolean;\n message: string;\n};\n\ntype CheckFileModifiedRangeOptions = {\n skipIfModifiedBefore?: Date | number | string;\n skipIfModifiedAfter?: Date | number | string;\n};\n\nexport const checkFileModifiedRange = (\n filePath: string,\n options: CheckFileModifiedRangeOptions\n): CheckFileModifiedRangeResult => {\n const fileLastUpdateTime = checkLastUpdateTime(filePath);\n const { skipIfModifiedBefore, skipIfModifiedAfter } = options;\n\n // Normalise the provided thresholds to concrete dates.\n const now = new Date();\n const minDate = skipIfModifiedBefore\n ? normaliseInputDate(skipIfModifiedBefore, now).absoluteTime\n : undefined;\n const maxDate = skipIfModifiedAfter\n ? 
normaliseInputDate(skipIfModifiedAfter, now).absoluteTime\n : undefined;\n\n // Determine if the file should be skipped.\n let shouldSkip = false;\n\n if (minDate instanceof Date && maxDate instanceof Date) {\n // Skip when the modification time falls *within* the range [minDate, maxDate]\n shouldSkip = fileLastUpdateTime >= minDate && fileLastUpdateTime <= maxDate;\n } else if (minDate instanceof Date) {\n // Only lower bound – skip when the file was modified *after* minDate\n shouldSkip = fileLastUpdateTime >= minDate;\n } else if (maxDate) {\n // Only upper bound – skip when the file was modified *after* maxDate\n shouldSkip = fileLastUpdateTime >= maxDate;\n }\n\n if (shouldSkip) {\n // For the sake of the message we compute the relative time to *now* using\n // whichever bound was responsible for the skip logic.\n const referenceDate = (\n minDate && maxDate\n ? // When both bounds are present, the *range* caused the skip so we use\n // the distance between the two bounds as the relative duration.\n new Date(Math.abs(maxDate.getTime() - minDate.getTime()))\n : (minDate ?? maxDate)\n )!;\n\n const relativeTime = new Date(now.getTime() - referenceDate.getTime());\n\n return {\n isSkipped: true,\n message: `Skipping file because it has been modified within the last ${formatTimeDiff(relativeTime)} - ${filePath}`,\n };\n }\n\n return {\n isSkipped: false,\n message: `File ${filePath} can be processed - ${filePath}`,\n };\n};\n"],"mappings":"AAAA,SAAS,2BAA2B;AACpC,SAAS,sBAAsB;AAa/B,MAAM,yBAAyB,KAAK,MAAM,KAAK,KAAK,KAAK;AAiBzD,MAAM,qBAAqB,CACzB,MACA,MAAY,oBAAI,KAAK,MACE;AAEvB,MAAI,gBAAgB,MAAM;AACxB,WAAO;AAAA,MACL,cAAc;AAAA,MACd,cAAc,IAAI,KAAK,IAAI,QAAQ,IAAI,KAAK,QAAQ,CAAC;AAAA,IACvD;AAAA,EACF;AAGA,MAAI,OAAO,SAAS,UAAU;AAC5B,QAAI,OAAO,wBAAwB;AAEjC,YAAM,eAAe,IAAI,KAAK,IAAI;AAClC,aAAO;AAAA,QACL;AAAA,QACA,cAAc,IAAI,KAAK,IAAI,QAAQ,IAAI,aAAa,QAAQ,CAAC;AAAA,MAC/D;AAAA,IACF;AAGA,UAAM,aAAa;AACnB,WAAO;AAAA;AAAA,MAEL,cAAc,IAAI,KAAK,UAAU;AAAA;AAAA,MAEjC,cAAc,IAAI,KAAK,IAAI,QAAQ,IAAI,UAAU;AAAA,IACnD;AAAA,EACF;AAGA,MAAI,OAAO,SAAS,UAAU;AAC5B,UAAM,eAAe,IAAI,KAAK,IAAI;AAClC,QAAI,OAAO,MAAM,aAAa,QAAQ,CAAC,GAAG;AACxC,YAAM,IAAI,MAAM,iCAAiC,IAAI,EAAE;AAAA,IACzD;AAEA,WAAO;AAAA,MACL;AAAA,MACA,cAAc,IAAI,KAAK,IAAI,QAAQ,IAAI,aAAa,QAAQ,CAAC;AAAA,IAC/D;AAAA,EACF;AAEA,QAAM,IAAI,MAAM,4BAA4B,IAAI,EAAE;AACpD;AAYO,MAAM,yBAAyB,CACpC,UACA,YACiC;AACjC,QAAM,qBAAqB,oBAAoB,QAAQ;AACvD,QAAM,EAAE,sBAAsB,oBAAoB,IAAI;AAGtD,QAAM,MAAM,oBAAI,KAAK;AACrB,QAAM,UAAU,uBACZ,mBAAmB,sBAAsB,GAAG,EAAE,eAC9C;AACJ,QAAM,UAAU,sBACZ,mBAAmB,qBAAqB,GAAG,EAAE,eAC7C;AAGJ,MAAI,aAAa;AAEjB,MAAI,mBAAmB,QAAQ,mBAAmB,MAAM;AAEtD,iBAAa,sBAAsB,WAAW,sBAAsB;AAAA,EACtE,WAAW,mBAAmB,MAAM;AAElC,iBAAa,sBAAsB;AAAA,EACrC,WAAW,SAAS;AAElB,iBAAa,sBAAsB;AAAA,EACrC;AAEA,MAAI,YAAY;AAGd,UAAM,gBACJ,WAAW;AAAA;AAAA;AAAA,MAGP,IAAI,KAAK,KAAK,IAAI,QAAQ,QAAQ,IAAI,QAAQ,QAAQ,CAAC,CAAC;AAAA,QACvD,WAAW;AAGlB,UAAM,eAAe,IAAI,KAAK,IAAI,QAAQ,IAAI,cAAc,QAAQ,CAAC;AAErE,WAAO;AAAA,MACL,WAAW;AAAA,MACX,SAAS,8DAA8D,eAAe,YAAY,CAAC,MAAM,QAAQ;AAAA,IACnH;AAAA,EACF;AAEA,SAAO;AAAA,IACL,WAAW;AAAA,IACX,SAAS,QAAQ,QAAQ,uBAAuB,QAAQ;AAAA,EAC1D;AACF;","names":[]}
@@ -0,0 +1,181 @@
+import {
+  afterEach,
+  beforeEach,
+  describe,
+  expect,
+  it,
+  vi
+} from "vitest";
+vi.mock("./checkLastUpdateTime", () => {
+  return {
+    checkLastUpdateTime: vi.fn(() => /* @__PURE__ */ new Date())
+    // pretend the file was just modified
+  };
+});
+import { checkFileModifiedRange } from "./checkFileModifiedRange.mjs";
+import { checkLastUpdateTime } from "./checkLastUpdateTime.mjs";
+describe("checkFileModifiedRange", () => {
+  beforeEach(() => {
+    vi.useFakeTimers();
+    vi.setSystemTime(/* @__PURE__ */ new Date("2025-01-01"));
+  });
+  afterEach(() => {
+    vi.useRealTimers();
+    vi.clearAllMocks();
+  });
+  const testList = [
+    {
+      // |---a---|
+      fileModifiedTime: /* @__PURE__ */ new Date("2025-01-01"),
+      // a
+      isSkipped: false
+      // No constraints
+    },
+    {
+      // |-min-a-max-|
+      skipIfModifiedBefore: "2024-01-01",
+      // min
+      skipIfModifiedAfter: "2026-01-01",
+      // max
+      fileModifiedTime: /* @__PURE__ */ new Date("2025-01-01"),
+      // a
+      isSkipped: true
+      // During the range
+    },
+    {
+      // |-min-max-a-|
+      skipIfModifiedBefore: "2024-01-01",
+      // min
+      skipIfModifiedAfter: "2026-01-01",
+      // max
+      fileModifiedTime: /* @__PURE__ */ new Date("2027-01-01"),
+      // a
+      isSkipped: false
+      // After the range
+    },
+    {
+      // |-a-min-max-|
+      skipIfModifiedBefore: "2024-01-01",
+      // min
+      skipIfModifiedAfter: "2026-01-01",
+      // max
+      fileModifiedTime: /* @__PURE__ */ new Date("2023-01-01"),
+      // a
+      isSkipped: false
+      // Before the range
+    },
+    {
+      // |-a-min-|
+      skipIfModifiedBefore: "2024-01-01",
+      // min
+      fileModifiedTime: /* @__PURE__ */ new Date("2023-01-01"),
+      // a
+      isSkipped: false
+      // Before the skipIfModifiedBefore
+    },
+    {
+      // |-min-a-|
+      skipIfModifiedBefore: "2024-01-01",
+      // min
+      fileModifiedTime: /* @__PURE__ */ new Date("2025-01-01"),
+      // a
+      isSkipped: true
+      // After the skipIfModifiedBefore
+    },
+    {
+      // |-a-max-|
+      skipIfModifiedAfter: "2026-01-01",
+      // max
+      fileModifiedTime: /* @__PURE__ */ new Date("2027-01-01"),
+      // a
+      isSkipped: true
+      // Before the skipIfModifiedAfter
+    },
+    {
+      // |-max-a-|
+      skipIfModifiedAfter: "2026-01-01",
+      // max
+      fileModifiedTime: /* @__PURE__ */ new Date("2027-01-01"),
+      // a
+      isSkipped: true
+      // After the skipIfModifiedAfter
+    },
+    {
+      // |-min-max-a-|
+      skipIfModifiedBefore: 3 * 365 * 24 * 60 * 60 * 1e3,
+      // min // 3 year ago
+      skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1e3,
+      // max // 1 year
+      fileModifiedTime: /* @__PURE__ */ new Date("2025-01-01"),
+      // a
+      isSkipped: false
+      // Before the range
+    },
+    {
+      // |-a-max-|
+      skipIfModifiedAfter: 0,
+      // max // now
+      fileModifiedTime: /* @__PURE__ */ new Date("2024-01-01"),
+      // a
+      isSkipped: false
+      // Before the skipIfModifiedAfter
+    },
+    {
+      // |-min-a-|
+      skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1e3,
+      // max // 1 year
+      fileModifiedTime: /* @__PURE__ */ new Date("2025-01-01"),
+      // a
+      isSkipped: true
+      // Before the skipIfModifiedAfter
+    },
+    {
+      // |-min-a-max-|
+      skipIfModifiedBefore: 3 * 365 * 24 * 60 * 60 * 1e3,
+      // min // 3 year ago
+      skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1e3,
+      // max // 1 year
+      fileModifiedTime: /* @__PURE__ */ new Date("2023-01-01"),
+      // a
+      isSkipped: true
+      // After the range
+    },
+    {
+      // |-a-min-max-|
+      skipIfModifiedBefore: 3 * 365 * 24 * 60 * 60 * 1e3,
+      // min // 3 year ago
+      skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1e3,
+      // max // 1 year
+      fileModifiedTime: /* @__PURE__ */ new Date("2020-01-01"),
+      // a
+      isSkipped: false
+      // Before the range
+    },
+    {
+      // |-min-max-a-|
+      skipIfModifiedBefore: "2024-01-01T00:00:00Z",
+      // min
+      skipIfModifiedAfter: "2026-01-01T00:00:00Z",
+      // max
+      fileModifiedTime: /* @__PURE__ */ new Date("2027-01-01"),
+      // a
+      isSkipped: false
+      // After the range
+    }
+  ];
+  testList.forEach((test) => {
+    it(`should correctly handle string date inputs (absolute time) outside of the range`, () => {
+      const filePath = "test/file.txt";
+      checkLastUpdateTime.mockReturnValue(
+        // If a specific mocked modification time is provided, use it. Otherwise, fall back to "now".
+        test.fileModifiedTime ?? /* @__PURE__ */ new Date()
+      );
+      const { isSkipped } = checkFileModifiedRange(filePath, {
+        skipIfModifiedBefore: test.skipIfModifiedBefore,
+        skipIfModifiedAfter: test.skipIfModifiedAfter
+      });
+      expect(isSkipped).toBe(test.isSkipped);
+    });
+  });
+});
+//# sourceMappingURL=checkFileModifiedRange.test.mjs.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/checkFileModifiedRange.test.ts"],"sourcesContent":["import {\n afterEach,\n beforeEach,\n describe,\n expect,\n it,\n vi,\n type Mock,\n} from 'vitest';\n\n// We are testing the public helper. The helper itself internally relies on\n// `checkLastUpdateTime`, which hits the file-system. To keep the test\n// deterministic (and independent from the real FS) we mock this dependency so\n// that we can control the last modification date that the helper receives.\n\n// NOTE: `vi.mock` must be called *before* the module under test is imported.\nvi.mock('./checkLastUpdateTime', () => {\n return {\n checkLastUpdateTime: vi.fn(() => new Date()), // pretend the file was just modified\n };\n});\n\nimport { checkFileModifiedRange } from './checkFileModifiedRange';\nimport { checkLastUpdateTime } from './checkLastUpdateTime';\n\ndescribe('checkFileModifiedRange', () => {\n beforeEach(() => {\n // Use fake timers so Date.now() is deterministic.\n vi.useFakeTimers();\n // Pin the system time to an arbitrary date for reproducibility\n vi.setSystemTime(new Date('2025-01-01'));\n });\n\n afterEach(() => {\n vi.useRealTimers();\n vi.clearAllMocks();\n });\n\n const testList = [\n {\n // |---a---|\n fileModifiedTime: new Date('2025-01-01'), // a\n isSkipped: false, // No constraints\n },\n {\n // |-min-a-max-|\n skipIfModifiedBefore: '2024-01-01', // min\n skipIfModifiedAfter: '2026-01-01', // max\n fileModifiedTime: new Date('2025-01-01'), // a\n isSkipped: true, // During the range\n },\n {\n // |-min-max-a-|\n skipIfModifiedBefore: '2024-01-01', // min\n skipIfModifiedAfter: '2026-01-01', // max\n fileModifiedTime: new Date('2027-01-01'), // a\n isSkipped: false, // After the range\n },\n {\n // |-a-min-max-|\n skipIfModifiedBefore: '2024-01-01', // min\n skipIfModifiedAfter: '2026-01-01', // max\n fileModifiedTime: new Date('2023-01-01'), // a\n isSkipped: false, // Before the range\n },\n {\n // |-a-min-|\n skipIfModifiedBefore: '2024-01-01', // min\n fileModifiedTime: new Date('2023-01-01'), // a\n isSkipped: false, // Before the skipIfModifiedBefore\n },\n {\n // |-min-a-|\n skipIfModifiedBefore: '2024-01-01', // min\n fileModifiedTime: new Date('2025-01-01'), // a\n isSkipped: true, // After the skipIfModifiedBefore\n },\n {\n // |-a-max-|\n skipIfModifiedAfter: '2026-01-01', // max\n fileModifiedTime: new Date('2027-01-01'), // a\n isSkipped: true, // Before the skipIfModifiedAfter\n },\n {\n // |-max-a-|\n skipIfModifiedAfter: '2026-01-01', // max\n fileModifiedTime: new Date('2027-01-01'), // a\n isSkipped: true, // After the skipIfModifiedAfter\n },\n {\n // |-min-max-a-|\n skipIfModifiedBefore: 3 * 365 * 24 * 60 * 60 * 1000, // min // 3 year ago\n skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1000, // max // 1 year\n fileModifiedTime: new Date('2025-01-01'), // a\n isSkipped: false, // Before the range\n },\n {\n // |-a-max-|\n skipIfModifiedAfter: 0, // max // now\n fileModifiedTime: new Date('2024-01-01'), // a\n isSkipped: false, // Before the skipIfModifiedAfter\n },\n {\n // |-min-a-|\n skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1000, // max // 1 year\n fileModifiedTime: new Date('2025-01-01'), // a\n isSkipped: true, // Before the skipIfModifiedAfter\n },\n {\n // |-min-a-max-|\n skipIfModifiedBefore: 3 * 365 * 24 * 60 * 60 * 1000, // min // 3 year ago\n skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1000, // max // 1 year\n fileModifiedTime: new Date('2023-01-01'), // a\n isSkipped: true, // After the range\n },\n {\n // |-a-min-max-|\n 
skipIfModifiedBefore: 3 * 365 * 24 * 60 * 60 * 1000, // min // 3 year ago\n skipIfModifiedAfter: 1 * 365 * 24 * 60 * 60 * 1000, // max // 1 year\n fileModifiedTime: new Date('2020-01-01'), // a\n isSkipped: false, // Before the range\n },\n {\n // |-min-max-a-|\n skipIfModifiedBefore: '2024-01-01T00:00:00Z', // min\n skipIfModifiedAfter: '2026-01-01T00:00:00Z', // max\n fileModifiedTime: new Date('2027-01-01'), // a\n isSkipped: false, // After the range\n },\n ];\n\n testList.forEach((test) => {\n it(`should correctly handle string date inputs (absolute time) outside of the range`, () => {\n const filePath = 'test/file.txt';\n\n // Simulate the file's last modification time for this test case\n (checkLastUpdateTime as unknown as Mock).mockReturnValue(\n // If a specific mocked modification time is provided, use it. Otherwise, fall back to \"now\".\n test.fileModifiedTime ?? new Date()\n );\n\n const { isSkipped } = checkFileModifiedRange(filePath, {\n skipIfModifiedBefore: test.skipIfModifiedBefore,\n skipIfModifiedAfter: test.skipIfModifiedAfter,\n });\n\n expect(isSkipped).toBe(test.isSkipped);\n });\n });\n});\n"],"mappings":"AAAA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AAQP,GAAG,KAAK,yBAAyB,MAAM;AACrC,SAAO;AAAA,IACL,qBAAqB,GAAG,GAAG,MAAM,oBAAI,KAAK,CAAC;AAAA;AAAA,EAC7C;AACF,CAAC;AAED,SAAS,8BAA8B;AACvC,SAAS,2BAA2B;AAEpC,SAAS,0BAA0B,MAAM;AACvC,aAAW,MAAM;AAEf,OAAG,cAAc;AAEjB,OAAG,cAAc,oBAAI,KAAK,YAAY,CAAC;AAAA,EACzC,CAAC;AAED,YAAU,MAAM;AACd,OAAG,cAAc;AACjB,OAAG,cAAc;AAAA,EACnB,CAAC;AAED,QAAM,WAAW;AAAA,IACf;AAAA;AAAA,MAEE,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB;AAAA;AAAA,MACtB,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB;AAAA;AAAA,MACtB,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB;AAAA;AAAA,MACtB,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB;AAAA;AAAA,MACtB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB;AAAA;AAAA,MACtB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC/C,qBAAqB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC9C,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,qBAAqB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC9C,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC/C,qBAAqB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC9C,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC/C,qBAAqB,IAAI,MAAM,KAAK,KAAK,KAAK;AAAA;AAAA,MAC9C,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,IACA;AAAA;AAAA,MAEE,sBAAsB;AAAA;AAAA,MACtB,qBAAqB;AAAA;AAAA,MACrB,kBAAkB,oBAAI,KAAK,YAAY;AAAA;AAAA,MACvC,WAAW;AAAA;AAAA,IACb;AAAA,EACF;AAEA,WAAS,QAAQ,CAAC,SAAS;AACzB,OAAG,mFAAmF,MAAM;AAC1F,YAAM,WAAW;AAGjB,MAAC,oBAAwC;AAAA;AAAA,QAEvC,KAAK,oBAAoB,oBAAI,KAAK;AAAA,MACpC;AAEA,YAAM,EAAE,UAAU,IAAI,uBAAuB,UAAU;AAAA,QACrD,
sBAAsB,KAAK;AAAA,QAC3B,qBAAqB,KAAK;AAAA,MAC5B,CAAC;AAED,aAAO,SAAS,EAAE,KAAK,KAAK,SAAS;AAAA,IACvC,CAAC;AAAA,EACH,CAAC;AACH,CAAC;","names":[]}
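For readers who do not want to decode the embedded source map, the test above exercises `checkFileModifiedRange` by mocking `checkLastUpdateTime` (so the file system is never touched) and pinning the clock with fake timers. Below is a condensed sketch of one of its cases, reusing only APIs and values that appear in the embedded source; it is illustrative, not an addition to the package.

```ts
import { afterEach, beforeEach, expect, it, vi, type Mock } from 'vitest';

// vi.mock must be registered before the module under test is imported.
vi.mock('./checkLastUpdateTime', () => ({
  checkLastUpdateTime: vi.fn(() => new Date()),
}));

import { checkFileModifiedRange } from './checkFileModifiedRange';
import { checkLastUpdateTime } from './checkLastUpdateTime';

beforeEach(() => {
  vi.useFakeTimers();
  vi.setSystemTime(new Date('2025-01-01')); // deterministic "now"
});

afterEach(() => {
  vi.useRealTimers();
  vi.clearAllMocks();
});

it('skips a file whose mtime sits inside the configured window', () => {
  // Pretend the file was last modified on 2025-01-01.
  (checkLastUpdateTime as unknown as Mock).mockReturnValue(new Date('2025-01-01'));

  const { isSkipped } = checkFileModifiedRange('test/file.txt', {
    skipIfModifiedBefore: '2024-01-01', // lower bound (absolute date)
    skipIfModifiedAfter: '2026-01-01', // upper bound (absolute date)
  });

  expect(isSkipped).toBe(true);
});
```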
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/checkLastUpdateTime.ts"],"sourcesContent":["import { statSync } from 'fs';\n\n/**\n * Returns the last modification date of a file.\n *\n * @param filePath - Absolute or relative path to the file to inspect.\n * @returns Date instance representing the file's last modified time (mtime).\n * @throws Will propagate any error thrown by fs.statSync (e.g., file not found).\n */\nexport const checkLastUpdateTime = (filePath: string): Date => {\n const stats = statSync(filePath);\n return new Date(stats.mtime);\n};\n"],"mappings":"AAAA,SAAS,gBAAgB;AASlB,MAAM,sBAAsB,CAAC,aAA2B;AAC7D,QAAM,QAAQ,SAAS,QAAQ;AAC/B,SAAO,IAAI,KAAK,MAAM,KAAK;AAC7B;","names":[]}
@@ -0,0 +1,34 @@
+import { getAiAPI } from "@intlayer/api";
+import { retryManager } from "@intlayer/config";
+const chunkInference = async (messages, aiOptions, oAuth2AccessToken) => {
+  let lastResult;
+  await retryManager(async () => {
+    const response = await getAiAPI().customQuery(
+      {
+        aiOptions,
+        messages
+      },
+      {
+        ...oAuth2AccessToken && {
+          headers: {
+            Authorization: `Bearer ${oAuth2AccessToken}`
+          }
+        }
+      }
+    );
+    if (!response.data) {
+      throw new Error("No response from AI API");
+    }
+    const { fileContent, tokenUsed } = response.data;
+    const newContent = fileContent.replaceAll("///chunksStart///", "").replaceAll("///chunkStart///", "").replaceAll("///chunksEnd///", "").replaceAll("///chunkEnd///", "");
+    lastResult = {
+      fileContent: newContent,
+      tokenUsed
+    };
+  })();
+  return lastResult;
+};
+export {
+  chunkInference
+};
+//# sourceMappingURL=chunkInference.mjs.map
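A sketch of how this helper might be invoked, assuming `Messages` follows the usual chat shape of `{ role, content }` objects; the message contents and the environment-variable name are illustrative assumptions, not taken from this diff:

```ts
import { chunkInference } from './chunkInference';

const run = async () => {
  // Hypothetical chat-style payload; real callers build these from their prompt templates.
  const messages = [
    { role: 'system', content: 'Translate the chunk delimited by ///chunkStart/// and ///chunkEnd///.' },
    { role: 'user', content: '///chunkStart///Hello world///chunkEnd///' },
  ] as any;

  // aiOptions is optional; the OAuth token, when present, is sent as a Bearer header.
  const { fileContent, tokenUsed } = await chunkInference(
    messages,
    undefined,
    process.env.INTLAYER_OAUTH_TOKEN // illustrative variable name
  );

  // The chunk delimiter markers are stripped from the returned content.
  console.log(fileContent, tokenUsed);
};

run().catch(console.error);
```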
@@ -0,0 +1 @@
{"version":3,"sources":["../../../src/utils/chunkInference.ts"],"sourcesContent":["import { type AIOptions, type Messages, getAiAPI } from '@intlayer/api';\nimport { retryManager } from '@intlayer/config';\n\ntype ChunkInferenceResult = {\n fileContent: string;\n tokenUsed: number;\n};\n\n/**\n * Translates a single chunk via the OpenAI API.\n * Includes retry logic if the call fails.\n */\nexport const chunkInference = async (\n messages: Messages,\n aiOptions?: AIOptions,\n oAuth2AccessToken?: string\n): Promise<ChunkInferenceResult> => {\n let lastResult: ChunkInferenceResult;\n\n await retryManager(async () => {\n const response = await getAiAPI().customQuery(\n {\n aiOptions,\n messages,\n },\n {\n ...(oAuth2AccessToken && {\n headers: {\n Authorization: `Bearer ${oAuth2AccessToken}`,\n },\n }),\n }\n );\n\n if (!response.data) {\n throw new Error('No response from AI API');\n }\n\n const { fileContent, tokenUsed } = response.data;\n\n const newContent = fileContent\n .replaceAll('///chunksStart///', '')\n .replaceAll('///chunkStart///', '')\n .replaceAll('///chunksEnd///', '')\n .replaceAll('///chunkEnd///', '');\n\n lastResult = {\n fileContent: newContent,\n tokenUsed,\n };\n })();\n\n return lastResult!;\n};\n"],"mappings":"AAAA,SAAwC,gBAAgB;AACxD,SAAS,oBAAoB;AAWtB,MAAM,iBAAiB,OAC5B,UACA,WACA,sBACkC;AAClC,MAAI;AAEJ,QAAM,aAAa,YAAY;AAC7B,UAAM,WAAW,MAAM,SAAS,EAAE;AAAA,MAChC;AAAA,QACE;AAAA,QACA;AAAA,MACF;AAAA,MACA;AAAA,QACE,GAAI,qBAAqB;AAAA,UACvB,SAAS;AAAA,YACP,eAAe,UAAU,iBAAiB;AAAA,UAC5C;AAAA,QACF;AAAA,MACF;AAAA,IACF;AAEA,QAAI,CAAC,SAAS,MAAM;AAClB,YAAM,IAAI,MAAM,yBAAyB;AAAA,IAC3C;AAEA,UAAM,EAAE,aAAa,UAAU,IAAI,SAAS;AAE5C,UAAM,aAAa,YAChB,WAAW,qBAAqB,EAAE,EAClC,WAAW,oBAAoB,EAAE,EACjC,WAAW,mBAAmB,EAAE,EAChC,WAAW,kBAAkB,EAAE;AAElC,iBAAa;AAAA,MACX,aAAa;AAAA,MACb;AAAA,IACF;AAAA,EACF,CAAC,EAAE;AAEH,SAAO;AACT;","names":[]}
@@ -0,0 +1,23 @@
+const CHAR_TO_CHECK_FORMATTING = ["```", "\n\n", "\n", "---", "{{", "}}"];
+const escapeForRegExp = (str) => str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&").replace(/\n/g, "\\n");
+const FORMATTING_REGEX_SOURCE = `(?:${CHAR_TO_CHECK_FORMATTING.map(escapeForRegExp).join("|")})+`;
+const LEADING_FORMATTING_REGEX = new RegExp(`^${FORMATTING_REGEX_SOURCE}`);
+const TRAILING_FORMATTING_REGEX = new RegExp(`${FORMATTING_REGEX_SOURCE}$`);
+const fixChunkStartEndChars = (reviewedChunkResult, baseChunkContext) => {
+  let result = reviewedChunkResult;
+  const baseLeading = baseChunkContext.match(LEADING_FORMATTING_REGEX)?.[0] ?? "";
+  const baseTrailing = baseChunkContext.match(TRAILING_FORMATTING_REGEX)?.[0] ?? "";
+  const resultLeading = result.match(LEADING_FORMATTING_REGEX)?.[0] ?? "";
+  const resultTrailing = result.match(TRAILING_FORMATTING_REGEX)?.[0] ?? "";
+  if (baseLeading !== resultLeading) {
+    result = baseLeading + result.slice(resultLeading.length);
+  }
+  if (baseTrailing !== resultTrailing) {
+    result = result.slice(0, result.length - resultTrailing.length) + baseTrailing;
+  }
+  return result;
+};
+export {
+  fixChunkStartEndChars
+};
+//# sourceMappingURL=fixChunkStartEndChars.mjs.map
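A small usage sketch of the helper above: when the AI-reviewed chunk drops leading or trailing formatting characters tracked by CHAR_TO_CHECK_FORMATTING (code fences, newlines, "---", "{{", "}}") that the original chunk had, the helper copies them back from the base chunk so the pieces re-join cleanly. The sample strings are illustrative:

```ts
import { fixChunkStartEndChars } from './fixChunkStartEndChars';

// The base chunk starts with a blank line that the model's output dropped.
const baseChunk = '\n\n## Heading\n\nOriginal content.\n';
const reviewedChunk = '## Heading\n\nReviewed content.\n';

const fixed = fixChunkStartEndChars(reviewedChunk, baseChunk);
// -> '\n\n## Heading\n\nReviewed content.\n'
// The leading "\n\n" is restored from the base chunk; the trailing "\n" already matched.
```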