@ai-sdk-tool/middleware 0.0.0 → 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright 2025 Woonggi Min (https://github.com/minpeter)
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
@@ -1,4 +1,4 @@
- // src/reasoning-parser/index.ts
+ // src/reasoning-parser.ts
  function getPotentialStartIndex(text, searchedText) {
  if (searchedText.length === 0) {
  return null;
@@ -207,4 +207,4 @@ export {
  * @license
  * Copyright (c) 2021-present, FriendliAI Inc. All rights reserved.
  */
- //# sourceMappingURL=chunk-Q2TVVB35.js.map
+ //# sourceMappingURL=chunk-R4PZN7IW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/reasoning-parser.ts"],"sourcesContent":["/**\n * @license\n * Copyright (c) 2021-present, FriendliAI Inc. All rights reserved.\n */\n\nimport type {\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\n/**\n * All code below is forked from the following link:\n * https://github.com/vercel/ai/blob/v5/packages/ai/core/middleware/extract-reasoning-middleware.ts\n */\n\n/**\n * Returns the index of the start of the searchedText in the text, or null if it\n * is not found.\n */\nexport function getPotentialStartIndex(\n text: string,\n searchedText: string\n): number | null {\n // Return null immediately if searchedText is empty.\n if (searchedText.length === 0) {\n return null;\n }\n\n // Check if the searchedText exists as a direct substring of text.\n const directIndex = text.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n\n // Otherwise, look for the largest suffix of \"text\" that matches\n // a prefix of \"searchedText\". We go from the end of text inward.\n for (let i = text.length - 1; i >= 0; i -= 1) {\n const suffix = text.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n\n return null;\n}\n\n/**\n * Extract an XML-tagged reasoning section from the generated text and exposes it\n * as a `reasoning` property on the result.\n *\n * @param openingTag - The opening XML tag to extract reasoning from.\n * @param closingTag - The closing XML tag to extract reasoning from.\n * @param separator - The separator to use between reasoning and text sections.\n * @param startWithReasoning - Whether to start with reasoning tokens.\n */\nexport function extractReasoningMiddleware({\n openingTag,\n closingTag,\n separator = \"\\n\",\n startWithReasoning = false,\n}: {\n openingTag: string;\n closingTag: string;\n separator?: string;\n startWithReasoning?: boolean;\n}): LanguageModelV3Middleware {\n function processTextPart(\n text: string,\n transformedContent: LanguageModelV3Content[]\n ) {\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n return;\n }\n\n const reasoningText = matches.map((match) => match[1]).join(separator);\n\n let textWithoutReasoning = text;\n for (let i = matches.length - 1; i >= 0; i -= 1) {\n const match = matches[i];\n\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const matchIndex = match.index ?? 0;\n const afterMatch = textWithoutReasoning.slice(\n matchIndex + match[0].length\n );\n\n textWithoutReasoning =\n beforeMatch +\n (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : \"\") +\n afterMatch;\n }\n\n transformedContent.push({\n type: \"reasoning\",\n text: reasoningText,\n });\n\n transformedContent.push({\n type: \"text\",\n text: textWithoutReasoning,\n });\n }\n\n return {\n specificationVersion: \"v3\",\n wrapGenerate: async ({ doGenerate }) => {\n const { content, ...rest } = await doGenerate();\n\n const transformedContent: LanguageModelV3Content[] = [];\n for (const part of content) {\n if (part.type !== \"text\") {\n transformedContent.push(part);\n continue;\n }\n\n const text = startWithReasoning ? 
openingTag + part.text : part.text;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n transformedContent.push(part);\n continue;\n }\n\n processTextPart(text, transformedContent);\n }\n\n return { content: transformedContent, ...rest };\n },\n\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n\n interface ExtractionState {\n isFirstReasoning: boolean;\n isFirstText: boolean;\n afterSwitch: boolean;\n isReasoning: boolean;\n buffer: string;\n idCounter: number;\n textId: string;\n }\n\n const reasoningExtractions: Record<string, ExtractionState> = {};\n\n function createPublisher(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n return (text: string) => {\n if (text.length === 0) {\n return;\n }\n\n const prefix = getPrefix(activeExtraction);\n enqueueReasoningStart(activeExtraction, controller);\n enqueueDelta(activeExtraction, controller, prefix, text);\n updateExtractionState(activeExtraction);\n };\n }\n\n function getPrefix(activeExtraction: ExtractionState): string {\n return activeExtraction.afterSwitch &&\n (activeExtraction.isReasoning\n ? !activeExtraction.isFirstReasoning\n : !activeExtraction.isFirstText)\n ? separator\n : \"\";\n }\n\n function enqueueReasoningStart(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n if (\n (activeExtraction.afterSwitch && activeExtraction.isReasoning) ||\n activeExtraction.isFirstReasoning\n ) {\n controller.enqueue({\n type: \"reasoning-start\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n }\n }\n\n function enqueueDelta(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n prefix: string,\n text: string\n ) {\n controller.enqueue(\n activeExtraction.isReasoning\n ? 
{\n type: \"reasoning-delta\",\n delta: prefix + text,\n id: `reasoning-${activeExtraction.idCounter}`,\n }\n : {\n type: \"text-delta\",\n delta: prefix + text,\n id: activeExtraction.textId,\n }\n );\n }\n\n function updateExtractionState(activeExtraction: ExtractionState) {\n activeExtraction.afterSwitch = false;\n if (activeExtraction.isReasoning) {\n activeExtraction.isFirstReasoning = false;\n } else {\n activeExtraction.isFirstText = false;\n }\n }\n\n function handleFullMatch(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n startIndex: number,\n nextTag: string\n ) {\n activeExtraction.buffer = activeExtraction.buffer.slice(\n startIndex + nextTag.length\n );\n\n if (activeExtraction.isReasoning) {\n controller.enqueue({\n type: \"reasoning-end\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n activeExtraction.idCounter += 1;\n }\n\n activeExtraction.isReasoning = !activeExtraction.isReasoning;\n activeExtraction.afterSwitch = true;\n }\n\n function processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n }: {\n activeExtraction: ExtractionState;\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>;\n publish: (text: string) => void;\n startIndex: number;\n nextTag: string;\n }): boolean {\n publish(activeExtraction.buffer.slice(0, startIndex));\n\n const foundFullMatch =\n startIndex + nextTag.length <= activeExtraction.buffer.length;\n\n if (foundFullMatch) {\n handleFullMatch(activeExtraction, controller, startIndex, nextTag);\n return true;\n }\n\n activeExtraction.buffer = activeExtraction.buffer.slice(startIndex);\n return false;\n }\n\n function processBuffer(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n const publish = createPublisher(activeExtraction, controller);\n let continueProcessing = true;\n\n while (continueProcessing) {\n const nextTag = activeExtraction.isReasoning\n ? 
closingTag\n : openingTag;\n const startIndex = getPotentialStartIndex(\n activeExtraction.buffer,\n nextTag\n );\n\n if (startIndex == null) {\n publish(activeExtraction.buffer);\n activeExtraction.buffer = \"\";\n break;\n }\n\n continueProcessing = processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n });\n }\n }\n\n return {\n stream: stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n\n if (reasoningExtractions[chunk.id] == null) {\n reasoningExtractions[chunk.id] = {\n isFirstReasoning: true,\n isFirstText: true,\n afterSwitch: false,\n isReasoning: startWithReasoning,\n buffer: \"\",\n idCounter: 0,\n textId: chunk.id,\n };\n }\n\n const activeExtraction = reasoningExtractions[chunk.id];\n activeExtraction.buffer += chunk.delta;\n processBuffer(activeExtraction, controller);\n },\n })\n ),\n ...rest,\n };\n },\n };\n}\n"],"mappings":";AAoBO,SAAS,uBACd,MACA,cACe;AAEf,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,KAAK,QAAQ,YAAY;AAC7C,MAAI,gBAAgB,IAAI;AACtB,WAAO;AAAA,EACT;AAIA,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC5C,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,aAAa,WAAW,MAAM,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,qBAAqB;AACvB,GAK8B;AAC5B,WAAS,gBACP,MACA,oBACA;AAtEJ;AAuEI,UAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,UAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,QAAI,CAAC,QAAQ,QAAQ;AACnB;AAAA,IACF;AAEA,UAAM,gBAAgB,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK,SAAS;AAErE,QAAI,uBAAuB;AAC3B,aAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC/C,YAAM,QAAQ,QAAQ,CAAC;AAEvB,YAAM,cAAc,qBAAqB,MAAM,GAAG,MAAM,KAAK;AAC7D,YAAM,cAAa,WAAM,UAAN,YAAe;AAClC,YAAM,aAAa,qBAAqB;AAAA,QACtC,aAAa,MAAM,CAAC,EAAE;AAAA,MACxB;AAEA,6BACE,eACC,YAAY,SAAS,KAAK,WAAW,SAAS,IAAI,YAAY,MAC/D;AAAA,IACJ;AAEA,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAED,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,cAAc,OAAO,EAAE,WAAW,MAAM;AACtC,YAAM,EAAE,SAAS,GAAG,KAAK,IAAI,MAAM,WAAW;AAE9C,YAAM,qBAA+C,CAAC;AACtD,iBAAW,QAAQ,SAAS;AAC1B,YAAI,KAAK,SAAS,QAAQ;AACxB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,cAAM,OAAO,qBAAqB,aAAa,KAAK,OAAO,KAAK;AAChE,cAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,cAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,YAAI,CAAC,QAAQ,QAAQ;AACnB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,wBAAgB,MAAM,kBAAkB;AAAA,MAC1C;AAEA,aAAO,EAAE,SAAS,oBAAoB,GAAG,KAAK;AAAA,IAChD;AAAA,IAEA,YAAY,OAAO,EAAE,SAAS,MAAM;AAClC,YAAM,EAAE,QAAQ,GAAG,KAAK,IAAI,MAAM,SAAS;AAY3C,YAAM,uBAAwD,CAAC;AAE/D,eAAS,gBACP,kBACA,YACA;AACA,eAAO,CAAC,SAAiB;AACvB,cAAI,KAAK,WAAW,GAAG;AACrB;AAAA,UACF;AAEA,gBAAM,SAAS,UAAU,gBAAgB;AACzC,gCAAsB,kBAAkB,UAAU;AAClD,uBAAa,kBAAkB,YAAY,QAAQ,IAAI;AACvD,gCAAsB,gBAAgB;AAAA,QACxC;AAAA,MACF;AAEA,eAAS,UAAU,kBAA2C;AAC5D,eAAO,iBAAiB,gBACrB,iBAAiB,cACd,CAAC,iBAAiB,mBAClB,CAAC,iBAAiB,eACpB,YACA;AAAA,MACN;AAEA,eAAS,sBACP,kBACA,YACA;AACA,YACG,iBAAiB,eAAe,iBAAiB,eAClD,iBAAiB,kBACjB;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,eAAS,aACP,kBACA,YACA,QACA,MACA;AACA,mBAAW;AAAA,UACT,iBAAiB,cACb;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,IACA;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,iBAAiB;AAAA,UACvB;AAAA,QACN;AAAA,MACF;AAEA,eAAS,sBAAsB,kBAAmC;AAChE,yBAAiB,cAAc;AAC/B,YAAI,iBAA
iB,aAAa;AAChC,2BAAiB,mBAAmB;AAAA,QACtC,OAAO;AACL,2BAAiB,cAAc;AAAA,QACjC;AAAA,MACF;AAEA,eAAS,gBACP,kBACA,YACA,YACA,SACA;AACA,yBAAiB,SAAS,iBAAiB,OAAO;AAAA,UAChD,aAAa,QAAQ;AAAA,QACvB;AAEA,YAAI,iBAAiB,aAAa;AAChC,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AACD,2BAAiB,aAAa;AAAA,QAChC;AAEA,yBAAiB,cAAc,CAAC,iBAAiB;AACjD,yBAAiB,cAAc;AAAA,MACjC;AAEA,eAAS,gBAAgB;AAAA,QACvB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,GAMY;AACV,gBAAQ,iBAAiB,OAAO,MAAM,GAAG,UAAU,CAAC;AAEpD,cAAM,iBACJ,aAAa,QAAQ,UAAU,iBAAiB,OAAO;AAEzD,YAAI,gBAAgB;AAClB,0BAAgB,kBAAkB,YAAY,YAAY,OAAO;AACjE,iBAAO;AAAA,QACT;AAEA,yBAAiB,SAAS,iBAAiB,OAAO,MAAM,UAAU;AAClE,eAAO;AAAA,MACT;AAEA,eAAS,cACP,kBACA,YACA;AACA,cAAM,UAAU,gBAAgB,kBAAkB,UAAU;AAC5D,YAAI,qBAAqB;AAEzB,eAAO,oBAAoB;AACzB,gBAAM,UAAU,iBAAiB,cAC7B,aACA;AACJ,gBAAM,aAAa;AAAA,YACjB,iBAAiB;AAAA,YACjB;AAAA,UACF;AAEA,cAAI,cAAc,MAAM;AACtB,oBAAQ,iBAAiB,MAAM;AAC/B,6BAAiB,SAAS;AAC1B;AAAA,UACF;AAEA,+BAAqB,gBAAgB;AAAA,YACnC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ,OAAO;AAAA,UACb,IAAI,gBAGF;AAAA,YACA,WAAW,CAAC,OAAO,eAAe;AAChC,kBAAI,MAAM,SAAS,cAAc;AAC/B,2BAAW,QAAQ,KAAK;AACxB;AAAA,cACF;AAEA,kBAAI,qBAAqB,MAAM,EAAE,KAAK,MAAM;AAC1C,qCAAqB,MAAM,EAAE,IAAI;AAAA,kBAC/B,kBAAkB;AAAA,kBAClB,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR,WAAW;AAAA,kBACX,QAAQ,MAAM;AAAA,gBAChB;AAAA,cACF;AAEA,oBAAM,mBAAmB,qBAAqB,MAAM,EAAE;AACtD,+BAAiB,UAAU,MAAM;AACjC,4BAAc,kBAAkB,UAAU;AAAA,YAC5C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
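The sourcemap above carries the full source of the forked extractReasoningMiddleware. As a minimal sketch of how it is meant to be wired up, the snippet below wraps a model with the middleware via wrapLanguageModel from the AI SDK, assuming an AI SDK version that accepts the v3 middleware spec; the provider, model id, and <think> tags are placeholders, not part of this package.

```ts
// Sketch only: openingTag/closingTag options and the package export are taken
// from the diff above; provider and model id are hypothetical placeholders.
import { generateText, wrapLanguageModel } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider choice
import { extractReasoningMiddleware } from "@ai-sdk-tool/middleware";

const model = wrapLanguageModel({
  model: openai("gpt-4o-mini"), // placeholder model id
  middleware: extractReasoningMiddleware({
    openingTag: "<think>",   // placeholder tags; must match the model's output
    closingTag: "</think>",
  }),
});

const result = await generateText({ model, prompt: "Why is the sky blue?" });
// Text between the tags is stripped from `result.text` and surfaced as
// separate "reasoning" content parts by the middleware.
console.log(result.text);
```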
@@ -0,0 +1,223 @@
1
+ // src/disk-cache.ts
2
+ import { createHash } from "crypto";
3
+ import {
4
+ mkdir,
5
+ readdir,
6
+ readFile,
7
+ rm,
8
+ stat,
9
+ writeFile
10
+ } from "fs/promises";
11
+ import { dirname, join, resolve } from "path";
12
+ function defaultGenerateKey(modelId, params) {
13
+ const serialized = JSON.stringify(
14
+ { version: "0.0.1", modelId, params },
15
+ (_key, value) => {
16
+ if (typeof value === "function") {
17
+ return "[function]";
18
+ }
19
+ if (value instanceof RegExp) {
20
+ return value.toString();
21
+ }
22
+ return value;
23
+ }
24
+ );
25
+ return createHash("sha256").update(serialized).digest("hex");
26
+ }
27
+ function getCachePath(cacheDir, key) {
28
+ return join(cacheDir, key.slice(0, 2), `${key}.json`);
29
+ }
30
+ async function readCache(cachePath) {
31
+ try {
32
+ const content = await readFile(cachePath, "utf-8");
33
+ const parsed = JSON.parse(content);
34
+ if (parsed.response && typeof parsed.response === "object") {
35
+ const resp = parsed.response;
36
+ if (typeof resp.timestamp === "string") {
37
+ resp.timestamp = new Date(resp.timestamp);
38
+ }
39
+ }
40
+ return parsed;
41
+ } catch (e) {
42
+ return null;
43
+ }
44
+ }
45
+ async function writeCache(cachePath, result) {
46
+ try {
47
+ await mkdir(dirname(cachePath), { recursive: true });
48
+ await writeFile(cachePath, JSON.stringify(result), "utf-8");
49
+ } catch (e) {
50
+ }
51
+ }
52
+ function createStreamFromParts(parts) {
53
+ let index = 0;
54
+ return new ReadableStream({
55
+ pull(controller) {
56
+ if (index < parts.length) {
57
+ controller.enqueue(parts[index++]);
58
+ } else {
59
+ controller.close();
60
+ }
61
+ }
62
+ });
63
+ }
64
+ function isErrorFinishReason(finishReason) {
65
+ if (!finishReason) {
66
+ return false;
67
+ }
68
+ const unified = typeof finishReason === "string" ? finishReason : finishReason.unified;
69
+ return unified === "error" || unified === "other";
70
+ }
71
+ function createDiskCacheMiddleware(options = {}) {
72
+ var _a, _b, _c, _d, _e;
73
+ const generateKey = (_a = options.generateKey) != null ? _a : defaultGenerateKey;
74
+ const resolvedCacheDir = resolve((_b = options.cacheDir) != null ? _b : ".ai-cache");
75
+ const envEnabled = process.env.AI_CACHE_ENABLED;
76
+ const enabled = envEnabled !== void 0 ? envEnabled.toLowerCase() === "true" || envEnabled === "1" : (_c = options.enabled) != null ? _c : true;
77
+ const envDebug = process.env.AI_CACHE_DEBUG;
78
+ const debug = envDebug !== void 0 ? envDebug.toLowerCase() === "true" || envDebug === "1" : (_d = options.debug) != null ? _d : false;
79
+ const envForceRefresh = process.env.AI_CACHE_FORCE_REFRESH;
80
+ const forceRefresh = envForceRefresh !== void 0 ? envForceRefresh.toLowerCase() === "true" || envForceRefresh === "1" : (_e = options.forceRefresh) != null ? _e : false;
81
+ const log = debug ? (msg, data) => console.log(`[ai-cache] ${msg}`, data != null ? data : "") : () => void 0;
82
+ if (!enabled) {
83
+ return { specificationVersion: "v3" };
84
+ }
85
+ return {
86
+ specificationVersion: "v3",
87
+ wrapGenerate: async ({ doGenerate, params, model }) => {
88
+ const cacheKey = generateKey(model.modelId, params);
89
+ const cachePath = getCachePath(resolvedCacheDir, cacheKey);
90
+ if (!forceRefresh) {
91
+ const cached = await readCache(cachePath);
92
+ if ((cached == null ? void 0 : cached.type) === "generate") {
93
+ log("HIT generate", cacheKey.slice(0, 8));
94
+ return {
95
+ content: cached.content,
96
+ finishReason: cached.finishReason,
97
+ usage: cached.usage,
98
+ warnings: cached.warnings,
99
+ response: cached.response,
100
+ providerMetadata: cached.providerMetadata,
101
+ request: cached.request
102
+ };
103
+ }
104
+ }
105
+ log(
106
+ forceRefresh ? "REFRESH generate" : "MISS generate",
107
+ cacheKey.slice(0, 8)
108
+ );
109
+ const result = await doGenerate();
110
+ if (isErrorFinishReason(result.finishReason)) {
111
+ log("SKIP cache (error response)", result.finishReason);
112
+ } else {
113
+ await writeCache(cachePath, {
114
+ type: "generate",
115
+ content: result.content,
116
+ finishReason: result.finishReason,
117
+ usage: result.usage,
118
+ warnings: result.warnings,
119
+ response: result.response,
120
+ providerMetadata: result.providerMetadata,
121
+ request: result.request
122
+ });
123
+ }
124
+ return result;
125
+ },
126
+ wrapStream: async ({ doStream, params, model }) => {
127
+ const cacheKey = generateKey(model.modelId, params);
128
+ const cachePath = getCachePath(resolvedCacheDir, cacheKey);
129
+ if (!forceRefresh) {
130
+ const cached = await readCache(cachePath);
131
+ if ((cached == null ? void 0 : cached.type) === "stream") {
132
+ log("HIT stream", {
133
+ key: cacheKey.slice(0, 8),
134
+ parts: cached.parts.length
135
+ });
136
+ return {
137
+ stream: createStreamFromParts(cached.parts),
138
+ response: cached.response,
139
+ request: cached.request
140
+ };
141
+ }
142
+ }
143
+ log(
144
+ forceRefresh ? "REFRESH stream" : "MISS stream",
145
+ cacheKey.slice(0, 8)
146
+ );
147
+ const result = await doStream();
148
+ const collectedParts = [];
149
+ const cachedStream = result.stream.pipeThrough(
150
+ new TransformStream({
151
+ transform(chunk, controller) {
152
+ collectedParts.push(chunk);
153
+ controller.enqueue(chunk);
154
+ },
155
+ flush() {
156
+ const finishPart = collectedParts.find((p) => p.type === "finish");
157
+ if (finishPart && isErrorFinishReason(finishPart.finishReason)) {
158
+ return;
159
+ }
160
+ writeCache(cachePath, {
161
+ type: "stream",
162
+ parts: collectedParts,
163
+ response: result.response,
164
+ request: result.request
165
+ });
166
+ }
167
+ })
168
+ );
169
+ return { ...result, stream: cachedStream };
170
+ }
171
+ };
172
+ }
173
+ async function clearDiskCache(cacheDir = ".ai-cache") {
174
+ try {
175
+ await rm(resolve(cacheDir), { recursive: true, force: true });
176
+ } catch (e) {
177
+ }
178
+ }
179
+ async function getCacheStats(cacheDir = ".ai-cache") {
180
+ const resolvedDir = resolve(cacheDir);
181
+ let totalFiles = 0;
182
+ let totalSizeBytes = 0;
183
+ let generateCount = 0;
184
+ let streamCount = 0;
185
+ async function walkDir(dir) {
186
+ try {
187
+ const entries = await readdir(dir, { withFileTypes: true });
188
+ await Promise.all(
189
+ entries.map(async (entry) => {
190
+ const fullPath = join(dir, entry.name);
191
+ if (entry.isDirectory()) {
192
+ await walkDir(fullPath);
193
+ } else if (entry.name.endsWith(".json")) {
194
+ totalFiles++;
195
+ const fileStat = await stat(fullPath);
196
+ totalSizeBytes += fileStat.size;
197
+ try {
198
+ const content = JSON.parse(
199
+ await readFile(fullPath, "utf-8")
200
+ );
201
+ if (content.type === "generate") {
202
+ generateCount++;
203
+ } else if (content.type === "stream") {
204
+ streamCount++;
205
+ }
206
+ } catch (e) {
207
+ }
208
+ }
209
+ })
210
+ );
211
+ } catch (e) {
212
+ }
213
+ }
214
+ await walkDir(resolvedDir);
215
+ return { totalFiles, totalSizeBytes, generateCount, streamCount };
216
+ }
217
+
218
+ export {
219
+ createDiskCacheMiddleware,
220
+ clearDiskCache,
221
+ getCacheStats
222
+ };
223
+ //# sourceMappingURL=chunk-XJIXXAOA.js.map
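The chunk above introduces createDiskCacheMiddleware. A minimal usage sketch follows, again assuming wrapLanguageModel from the AI SDK and a placeholder provider/model; the cacheDir default, key sharding, and the AI_CACHE_* environment overrides are taken from the source shown above.

```ts
// Sketch only: caches doGenerate/doStream results on disk between runs.
import { generateText, wrapLanguageModel } from "ai";
import { openai } from "@ai-sdk/openai"; // assumed provider choice
import { createDiskCacheMiddleware } from "@ai-sdk-tool/middleware";

const cachedModel = wrapLanguageModel({
  model: openai("gpt-4o-mini"), // placeholder model id
  middleware: createDiskCacheMiddleware({
    cacheDir: ".ai-cache", // default; entries land at <cacheDir>/<xx>/<sha256>.json
    debug: true,           // or set AI_CACHE_DEBUG=1; AI_CACHE_FORCE_REFRESH bypasses reads
  }),
});

// First call hits the provider and writes the cache file; repeating the
// identical call (same modelId + params) replays the stored result.
const { text } = await generateText({ model: cachedModel, prompt: "ping" });
console.log(text);
```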
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/disk-cache.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport {\n mkdir,\n readdir,\n readFile,\n rm,\n stat,\n writeFile,\n} from \"node:fs/promises\";\nimport { dirname, join, resolve } from \"node:path\";\nimport type {\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\ndeclare const __PACKAGE_VERSION__: string;\n\nexport interface DiskCacheMiddlewareOptions {\n cacheDir?: string;\n enabled?: boolean;\n forceRefresh?: boolean;\n generateKey?: (modelId: string, params: unknown) => string;\n debug?: boolean;\n}\n\ninterface CachedGenerateResult {\n type: \"generate\";\n content: unknown;\n finishReason: unknown;\n usage: unknown;\n warnings: unknown;\n response: unknown;\n providerMetadata: unknown;\n request: unknown;\n}\n\ninterface CachedStreamResult {\n type: \"stream\";\n parts: LanguageModelV3StreamPart[];\n response: unknown;\n request: unknown;\n}\n\ntype CachedResult = CachedGenerateResult | CachedStreamResult;\n\nfunction defaultGenerateKey(modelId: string, params: unknown): string {\n const serialized = JSON.stringify(\n { version: __PACKAGE_VERSION__, modelId, params },\n (_key, value) => {\n if (typeof value === \"function\") {\n return \"[function]\";\n }\n if (value instanceof RegExp) {\n return value.toString();\n }\n return value;\n }\n );\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n}\n\nfunction getCachePath(cacheDir: string, key: string): string {\n return join(cacheDir, key.slice(0, 2), `${key}.json`);\n}\n\nasync function readCache(cachePath: string): Promise<CachedResult | null> {\n try {\n const content = await readFile(cachePath, \"utf-8\");\n const parsed = JSON.parse(content) as CachedResult;\n if (parsed.response && typeof parsed.response === \"object\") {\n const resp = parsed.response as Record<string, unknown>;\n if (typeof resp.timestamp === \"string\") {\n resp.timestamp = new Date(resp.timestamp);\n }\n }\n return parsed;\n } catch {\n return null;\n }\n}\n\nasync function writeCache(\n cachePath: string,\n result: CachedResult\n): Promise<void> {\n try {\n await mkdir(dirname(cachePath), { recursive: true });\n await writeFile(cachePath, JSON.stringify(result), \"utf-8\");\n } catch {\n // Silent fail\n }\n}\n\nfunction createStreamFromParts(\n parts: LanguageModelV3StreamPart[]\n): ReadableStream<LanguageModelV3StreamPart> {\n let index = 0;\n return new ReadableStream({\n pull(controller) {\n if (index < parts.length) {\n controller.enqueue(parts[index++]);\n } else {\n controller.close();\n }\n },\n });\n}\n\ntype FinishReasonLike = { unified?: string } | string | null | undefined;\n\nfunction isErrorFinishReason(finishReason: FinishReasonLike): boolean {\n if (!finishReason) {\n return false;\n }\n const unified =\n typeof finishReason === \"string\" ? finishReason : finishReason.unified;\n return unified === \"error\" || unified === \"other\";\n}\n\nexport function createDiskCacheMiddleware(\n options: DiskCacheMiddlewareOptions = {}\n): LanguageModelV3Middleware {\n const generateKey = options.generateKey ?? defaultGenerateKey;\n const resolvedCacheDir = resolve(options.cacheDir ?? \".ai-cache\");\n\n const envEnabled = process.env.AI_CACHE_ENABLED;\n const enabled =\n envEnabled !== undefined\n ? envEnabled.toLowerCase() === \"true\" || envEnabled === \"1\"\n : (options.enabled ?? true);\n\n const envDebug = process.env.AI_CACHE_DEBUG;\n const debug =\n envDebug !== undefined\n ? 
envDebug.toLowerCase() === \"true\" || envDebug === \"1\"\n : (options.debug ?? false);\n\n const envForceRefresh = process.env.AI_CACHE_FORCE_REFRESH;\n const forceRefresh =\n envForceRefresh !== undefined\n ? envForceRefresh.toLowerCase() === \"true\" || envForceRefresh === \"1\"\n : (options.forceRefresh ?? false);\n\n const log = debug\n ? (msg: string, data?: unknown) =>\n console.log(`[ai-cache] ${msg}`, data ?? \"\")\n : () => undefined;\n\n if (!enabled) {\n return { specificationVersion: \"v3\" };\n }\n\n return {\n specificationVersion: \"v3\",\n\n wrapGenerate: async ({ doGenerate, params, model }) => {\n const cacheKey = generateKey(model.modelId, params);\n const cachePath = getCachePath(resolvedCacheDir, cacheKey);\n\n if (!forceRefresh) {\n const cached = await readCache(cachePath);\n if (cached?.type === \"generate\") {\n log(\"HIT generate\", cacheKey.slice(0, 8));\n return {\n content: cached.content,\n finishReason: cached.finishReason,\n usage: cached.usage,\n warnings: cached.warnings,\n response: cached.response,\n providerMetadata: cached.providerMetadata,\n request: cached.request,\n } as Awaited<ReturnType<typeof doGenerate>>;\n }\n }\n\n log(\n forceRefresh ? \"REFRESH generate\" : \"MISS generate\",\n cacheKey.slice(0, 8)\n );\n const result = await doGenerate();\n\n if (isErrorFinishReason(result.finishReason)) {\n log(\"SKIP cache (error response)\", result.finishReason);\n } else {\n await writeCache(cachePath, {\n type: \"generate\",\n content: result.content,\n finishReason: result.finishReason,\n usage: result.usage,\n warnings: result.warnings,\n response: result.response,\n providerMetadata: result.providerMetadata,\n request: result.request,\n });\n }\n\n return result;\n },\n\n wrapStream: async ({ doStream, params, model }) => {\n const cacheKey = generateKey(model.modelId, params);\n const cachePath = getCachePath(resolvedCacheDir, cacheKey);\n\n if (!forceRefresh) {\n const cached = await readCache(cachePath);\n if (cached?.type === \"stream\") {\n log(\"HIT stream\", {\n key: cacheKey.slice(0, 8),\n parts: cached.parts.length,\n });\n return {\n stream: createStreamFromParts(cached.parts),\n response: cached.response,\n request: cached.request,\n } as Awaited<ReturnType<typeof doStream>>;\n }\n }\n\n log(\n forceRefresh ? 
\"REFRESH stream\" : \"MISS stream\",\n cacheKey.slice(0, 8)\n );\n const result = await doStream();\n\n const collectedParts: LanguageModelV3StreamPart[] = [];\n\n const cachedStream = result.stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform(chunk, controller) {\n collectedParts.push(chunk);\n controller.enqueue(chunk);\n },\n flush() {\n const finishPart = collectedParts.find((p) => p.type === \"finish\");\n if (finishPart && isErrorFinishReason(finishPart.finishReason)) {\n return;\n }\n\n writeCache(cachePath, {\n type: \"stream\",\n parts: collectedParts,\n response: result.response,\n request: result.request,\n });\n },\n })\n );\n\n return { ...result, stream: cachedStream };\n },\n };\n}\n\nexport async function clearDiskCache(cacheDir = \".ai-cache\"): Promise<void> {\n try {\n await rm(resolve(cacheDir), { recursive: true, force: true });\n } catch {\n // Directory doesn't exist\n }\n}\n\nexport async function getCacheStats(cacheDir = \".ai-cache\"): Promise<{\n totalFiles: number;\n totalSizeBytes: number;\n generateCount: number;\n streamCount: number;\n}> {\n const resolvedDir = resolve(cacheDir);\n let totalFiles = 0;\n let totalSizeBytes = 0;\n let generateCount = 0;\n let streamCount = 0;\n\n async function walkDir(dir: string): Promise<void> {\n try {\n const entries = await readdir(dir, { withFileTypes: true });\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n await walkDir(fullPath);\n } else if (entry.name.endsWith(\".json\")) {\n totalFiles++;\n const fileStat = await stat(fullPath);\n totalSizeBytes += fileStat.size;\n\n try {\n const content = JSON.parse(\n await readFile(fullPath, \"utf-8\")\n ) as CachedResult;\n if (content.type === \"generate\") {\n generateCount++;\n } else if (content.type === \"stream\") {\n streamCount++;\n }\n } catch {\n // Skip malformed\n }\n }\n })\n );\n } catch {\n // Directory doesn't exist\n }\n }\n\n await walkDir(resolvedDir);\n return { totalFiles, totalSizeBytes, generateCount, streamCount 
};\n}\n"],"mappings":";AAAA,SAAS,kBAAkB;AAC3B;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AACP,SAAS,SAAS,MAAM,eAAe;AAoCvC,SAAS,mBAAmB,SAAiB,QAAyB;AACpE,QAAM,aAAa,KAAK;AAAA,IACtB,EAAE,SAAS,SAAqB,SAAS,OAAO;AAAA,IAChD,CAAC,MAAM,UAAU;AACf,UAAI,OAAO,UAAU,YAAY;AAC/B,eAAO;AAAA,MACT;AACA,UAAI,iBAAiB,QAAQ;AAC3B,eAAO,MAAM,SAAS;AAAA,MACxB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,SAAO,WAAW,QAAQ,EAAE,OAAO,UAAU,EAAE,OAAO,KAAK;AAC7D;AAEA,SAAS,aAAa,UAAkB,KAAqB;AAC3D,SAAO,KAAK,UAAU,IAAI,MAAM,GAAG,CAAC,GAAG,GAAG,GAAG,OAAO;AACtD;AAEA,eAAe,UAAU,WAAiD;AACxE,MAAI;AACF,UAAM,UAAU,MAAM,SAAS,WAAW,OAAO;AACjD,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,QAAI,OAAO,YAAY,OAAO,OAAO,aAAa,UAAU;AAC1D,YAAM,OAAO,OAAO;AACpB,UAAI,OAAO,KAAK,cAAc,UAAU;AACtC,aAAK,YAAY,IAAI,KAAK,KAAK,SAAS;AAAA,MAC1C;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAe,WACb,WACA,QACe;AACf,MAAI;AACF,UAAM,MAAM,QAAQ,SAAS,GAAG,EAAE,WAAW,KAAK,CAAC;AACnD,UAAM,UAAU,WAAW,KAAK,UAAU,MAAM,GAAG,OAAO;AAAA,EAC5D,SAAQ;AAAA,EAER;AACF;AAEA,SAAS,sBACP,OAC2C;AAC3C,MAAI,QAAQ;AACZ,SAAO,IAAI,eAAe;AAAA,IACxB,KAAK,YAAY;AACf,UAAI,QAAQ,MAAM,QAAQ;AACxB,mBAAW,QAAQ,MAAM,OAAO,CAAC;AAAA,MACnC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAIA,SAAS,oBAAoB,cAAyC;AACpE,MAAI,CAAC,cAAc;AACjB,WAAO;AAAA,EACT;AACA,QAAM,UACJ,OAAO,iBAAiB,WAAW,eAAe,aAAa;AACjE,SAAO,YAAY,WAAW,YAAY;AAC5C;AAEO,SAAS,0BACd,UAAsC,CAAC,GACZ;AAzH7B;AA0HE,QAAM,eAAc,aAAQ,gBAAR,YAAuB;AAC3C,QAAM,mBAAmB,SAAQ,aAAQ,aAAR,YAAoB,WAAW;AAEhE,QAAM,aAAa,QAAQ,IAAI;AAC/B,QAAM,UACJ,eAAe,SACX,WAAW,YAAY,MAAM,UAAU,eAAe,OACrD,aAAQ,YAAR,YAAmB;AAE1B,QAAM,WAAW,QAAQ,IAAI;AAC7B,QAAM,QACJ,aAAa,SACT,SAAS,YAAY,MAAM,UAAU,aAAa,OACjD,aAAQ,UAAR,YAAiB;AAExB,QAAM,kBAAkB,QAAQ,IAAI;AACpC,QAAM,eACJ,oBAAoB,SAChB,gBAAgB,YAAY,MAAM,UAAU,oBAAoB,OAC/D,aAAQ,iBAAR,YAAwB;AAE/B,QAAM,MAAM,QACR,CAAC,KAAa,SACZ,QAAQ,IAAI,cAAc,GAAG,IAAI,sBAAQ,EAAE,IAC7C,MAAM;AAEV,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,sBAAsB,KAAK;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IAEtB,cAAc,OAAO,EAAE,YAAY,QAAQ,MAAM,MAAM;AACrD,YAAM,WAAW,YAAY,MAAM,SAAS,MAAM;AAClD,YAAM,YAAY,aAAa,kBAAkB,QAAQ;AAEzD,UAAI,CAAC,cAAc;AACjB,cAAM,SAAS,MAAM,UAAU,SAAS;AACxC,aAAI,iCAAQ,UAAS,YAAY;AAC/B,cAAI,gBAAgB,SAAS,MAAM,GAAG,CAAC,CAAC;AACxC,iBAAO;AAAA,YACL,SAAS,OAAO;AAAA,YAChB,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,YACzB,SAAS,OAAO;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAEA;AAAA,QACE,eAAe,qBAAqB;AAAA,QACpC,SAAS,MAAM,GAAG,CAAC;AAAA,MACrB;AACA,YAAM,SAAS,MAAM,WAAW;AAEhC,UAAI,oBAAoB,OAAO,YAAY,GAAG;AAC5C,YAAI,+BAA+B,OAAO,YAAY;AAAA,MACxD,OAAO;AACL,cAAM,WAAW,WAAW;AAAA,UAC1B,MAAM;AAAA,UACN,SAAS,OAAO;AAAA,UAChB,cAAc,OAAO;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,UAAU,OAAO;AAAA,UACjB,UAAU,OAAO;AAAA,UACjB,kBAAkB,OAAO;AAAA,UACzB,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,YAAY,OAAO,EAAE,UAAU,QAAQ,MAAM,MAAM;AACjD,YAAM,WAAW,YAAY,MAAM,SAAS,MAAM;AAClD,YAAM,YAAY,aAAa,kBAAkB,QAAQ;AAEzD,UAAI,CAAC,cAAc;AACjB,cAAM,SAAS,MAAM,UAAU,SAAS;AACxC,aAAI,iCAAQ,UAAS,UAAU;AAC7B,cAAI,cAAc;AAAA,YAChB,KAAK,SAAS,MAAM,GAAG,CAAC;AAAA,YACxB,OAAO,OAAO,MAAM;AAAA,UACtB,CAAC;AACD,iBAAO;AAAA,YACL,QAAQ,sBAAsB,OAAO,KAAK;AAAA,YAC1C,UAAU,OAAO;AAAA,YACjB,SAAS,OAAO;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAEA;AAAA,QACE,eAAe,mBAAmB;AAAA,QAClC,SAAS,MAAM,GAAG,CAAC;AAAA,MACrB;AACA,YAAM,SAAS,MAAM,SAAS;AAE9B,YAAM,iBAA8C,CAAC;AAErD,YAAM,eAAe,OAAO,OAAO;AAAA,QACjC,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,2BAAe,KAAK,KAAK;AACzB,uBAAW,QAAQ,KAAK;AAAA,UAC1B;AAAA,UACA,QAAQ;AACN,kBAAM,aAAa,eAAe,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AACjE,gBAAI,cAAc,oBAAoB,WAAW,YAAY,GAAG;AAC9D;AAAA,YACF;AAEA,uBAAW,WAAW;AAAA,cAC
pB,MAAM;AAAA,cACN,OAAO;AAAA,cACP,UAAU,OAAO;AAAA,cACjB,SAAS,OAAO;AAAA,YAClB,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,EAAE,GAAG,QAAQ,QAAQ,aAAa;AAAA,IAC3C;AAAA,EACF;AACF;AAEA,eAAsB,eAAe,WAAW,aAA4B;AAC1E,MAAI;AACF,UAAM,GAAG,QAAQ,QAAQ,GAAG,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAC9D,SAAQ;AAAA,EAER;AACF;AAEA,eAAsB,cAAc,WAAW,aAK5C;AACD,QAAM,cAAc,QAAQ,QAAQ;AACpC,MAAI,aAAa;AACjB,MAAI,iBAAiB;AACrB,MAAI,gBAAgB;AACpB,MAAI,cAAc;AAElB,iBAAe,QAAQ,KAA4B;AACjD,QAAI;AACF,YAAM,UAAU,MAAM,QAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,YAAM,QAAQ;AAAA,QACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,gBAAM,WAAW,KAAK,KAAK,MAAM,IAAI;AACrC,cAAI,MAAM,YAAY,GAAG;AACvB,kBAAM,QAAQ,QAAQ;AAAA,UACxB,WAAW,MAAM,KAAK,SAAS,OAAO,GAAG;AACvC;AACA,kBAAM,WAAW,MAAM,KAAK,QAAQ;AACpC,8BAAkB,SAAS;AAE3B,gBAAI;AACF,oBAAM,UAAU,KAAK;AAAA,gBACnB,MAAM,SAAS,UAAU,OAAO;AAAA,cAClC;AACA,kBAAI,QAAQ,SAAS,YAAY;AAC/B;AAAA,cACF,WAAW,QAAQ,SAAS,UAAU;AACpC;AAAA,cACF;AAAA,YACF,SAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,QAAQ,WAAW;AACzB,SAAO,EAAE,YAAY,gBAAgB,eAAe,YAAY;AAClE;","names":[]}
@@ -0,0 +1,242 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __export = (target, all) => {
7
+ for (var name in all)
8
+ __defProp(target, name, { get: all[name], enumerable: true });
9
+ };
10
+ var __copyProps = (to, from, except, desc) => {
11
+ if (from && typeof from === "object" || typeof from === "function") {
12
+ for (let key of __getOwnPropNames(from))
13
+ if (!__hasOwnProp.call(to, key) && key !== except)
14
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
15
+ }
16
+ return to;
17
+ };
18
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
+ // src/disk-cache.ts
21
+ var disk_cache_exports = {};
22
+ __export(disk_cache_exports, {
23
+ clearDiskCache: () => clearDiskCache,
24
+ createDiskCacheMiddleware: () => createDiskCacheMiddleware,
25
+ getCacheStats: () => getCacheStats
26
+ });
27
+ module.exports = __toCommonJS(disk_cache_exports);
28
+ var import_node_crypto = require("crypto");
29
+ var import_promises = require("fs/promises");
30
+ var import_node_path = require("path");
31
+ function defaultGenerateKey(modelId, params) {
32
+ const serialized = JSON.stringify(
33
+ { version: "0.0.1", modelId, params },
34
+ (_key, value) => {
35
+ if (typeof value === "function") {
36
+ return "[function]";
37
+ }
38
+ if (value instanceof RegExp) {
39
+ return value.toString();
40
+ }
41
+ return value;
42
+ }
43
+ );
44
+ return (0, import_node_crypto.createHash)("sha256").update(serialized).digest("hex");
45
+ }
46
+ function getCachePath(cacheDir, key) {
47
+ return (0, import_node_path.join)(cacheDir, key.slice(0, 2), `${key}.json`);
48
+ }
49
+ async function readCache(cachePath) {
50
+ try {
51
+ const content = await (0, import_promises.readFile)(cachePath, "utf-8");
52
+ const parsed = JSON.parse(content);
53
+ if (parsed.response && typeof parsed.response === "object") {
54
+ const resp = parsed.response;
55
+ if (typeof resp.timestamp === "string") {
56
+ resp.timestamp = new Date(resp.timestamp);
57
+ }
58
+ }
59
+ return parsed;
60
+ } catch (e) {
61
+ return null;
62
+ }
63
+ }
64
+ async function writeCache(cachePath, result) {
65
+ try {
66
+ await (0, import_promises.mkdir)((0, import_node_path.dirname)(cachePath), { recursive: true });
67
+ await (0, import_promises.writeFile)(cachePath, JSON.stringify(result), "utf-8");
68
+ } catch (e) {
69
+ }
70
+ }
71
+ function createStreamFromParts(parts) {
72
+ let index = 0;
73
+ return new ReadableStream({
74
+ pull(controller) {
75
+ if (index < parts.length) {
76
+ controller.enqueue(parts[index++]);
77
+ } else {
78
+ controller.close();
79
+ }
80
+ }
81
+ });
82
+ }
83
+ function isErrorFinishReason(finishReason) {
84
+ if (!finishReason) {
85
+ return false;
86
+ }
87
+ const unified = typeof finishReason === "string" ? finishReason : finishReason.unified;
88
+ return unified === "error" || unified === "other";
89
+ }
90
+ function createDiskCacheMiddleware(options = {}) {
91
+ var _a, _b, _c, _d, _e;
92
+ const generateKey = (_a = options.generateKey) != null ? _a : defaultGenerateKey;
93
+ const resolvedCacheDir = (0, import_node_path.resolve)((_b = options.cacheDir) != null ? _b : ".ai-cache");
94
+ const envEnabled = process.env.AI_CACHE_ENABLED;
95
+ const enabled = envEnabled !== void 0 ? envEnabled.toLowerCase() === "true" || envEnabled === "1" : (_c = options.enabled) != null ? _c : true;
96
+ const envDebug = process.env.AI_CACHE_DEBUG;
97
+ const debug = envDebug !== void 0 ? envDebug.toLowerCase() === "true" || envDebug === "1" : (_d = options.debug) != null ? _d : false;
98
+ const envForceRefresh = process.env.AI_CACHE_FORCE_REFRESH;
99
+ const forceRefresh = envForceRefresh !== void 0 ? envForceRefresh.toLowerCase() === "true" || envForceRefresh === "1" : (_e = options.forceRefresh) != null ? _e : false;
100
+ const log = debug ? (msg, data) => console.log(`[ai-cache] ${msg}`, data != null ? data : "") : () => void 0;
101
+ if (!enabled) {
102
+ return { specificationVersion: "v3" };
103
+ }
104
+ return {
105
+ specificationVersion: "v3",
106
+ wrapGenerate: async ({ doGenerate, params, model }) => {
107
+ const cacheKey = generateKey(model.modelId, params);
108
+ const cachePath = getCachePath(resolvedCacheDir, cacheKey);
109
+ if (!forceRefresh) {
110
+ const cached = await readCache(cachePath);
111
+ if ((cached == null ? void 0 : cached.type) === "generate") {
112
+ log("HIT generate", cacheKey.slice(0, 8));
113
+ return {
114
+ content: cached.content,
115
+ finishReason: cached.finishReason,
116
+ usage: cached.usage,
117
+ warnings: cached.warnings,
118
+ response: cached.response,
119
+ providerMetadata: cached.providerMetadata,
120
+ request: cached.request
121
+ };
122
+ }
123
+ }
124
+ log(
125
+ forceRefresh ? "REFRESH generate" : "MISS generate",
126
+ cacheKey.slice(0, 8)
127
+ );
128
+ const result = await doGenerate();
129
+ if (isErrorFinishReason(result.finishReason)) {
130
+ log("SKIP cache (error response)", result.finishReason);
131
+ } else {
132
+ await writeCache(cachePath, {
133
+ type: "generate",
134
+ content: result.content,
135
+ finishReason: result.finishReason,
136
+ usage: result.usage,
137
+ warnings: result.warnings,
138
+ response: result.response,
139
+ providerMetadata: result.providerMetadata,
140
+ request: result.request
141
+ });
142
+ }
143
+ return result;
144
+ },
145
+ wrapStream: async ({ doStream, params, model }) => {
146
+ const cacheKey = generateKey(model.modelId, params);
147
+ const cachePath = getCachePath(resolvedCacheDir, cacheKey);
148
+ if (!forceRefresh) {
149
+ const cached = await readCache(cachePath);
150
+ if ((cached == null ? void 0 : cached.type) === "stream") {
151
+ log("HIT stream", {
152
+ key: cacheKey.slice(0, 8),
153
+ parts: cached.parts.length
154
+ });
155
+ return {
156
+ stream: createStreamFromParts(cached.parts),
157
+ response: cached.response,
158
+ request: cached.request
159
+ };
160
+ }
161
+ }
162
+ log(
163
+ forceRefresh ? "REFRESH stream" : "MISS stream",
164
+ cacheKey.slice(0, 8)
165
+ );
166
+ const result = await doStream();
167
+ const collectedParts = [];
168
+ const cachedStream = result.stream.pipeThrough(
169
+ new TransformStream({
170
+ transform(chunk, controller) {
171
+ collectedParts.push(chunk);
172
+ controller.enqueue(chunk);
173
+ },
174
+ flush() {
175
+ const finishPart = collectedParts.find((p) => p.type === "finish");
176
+ if (finishPart && isErrorFinishReason(finishPart.finishReason)) {
177
+ return;
178
+ }
179
+ writeCache(cachePath, {
180
+ type: "stream",
181
+ parts: collectedParts,
182
+ response: result.response,
183
+ request: result.request
184
+ });
185
+ }
186
+ })
187
+ );
188
+ return { ...result, stream: cachedStream };
189
+ }
190
+ };
191
+ }
192
+ async function clearDiskCache(cacheDir = ".ai-cache") {
193
+ try {
194
+ await (0, import_promises.rm)((0, import_node_path.resolve)(cacheDir), { recursive: true, force: true });
195
+ } catch (e) {
196
+ }
197
+ }
198
+ async function getCacheStats(cacheDir = ".ai-cache") {
199
+ const resolvedDir = (0, import_node_path.resolve)(cacheDir);
200
+ let totalFiles = 0;
201
+ let totalSizeBytes = 0;
202
+ let generateCount = 0;
203
+ let streamCount = 0;
204
+ async function walkDir(dir) {
205
+ try {
206
+ const entries = await (0, import_promises.readdir)(dir, { withFileTypes: true });
207
+ await Promise.all(
208
+ entries.map(async (entry) => {
209
+ const fullPath = (0, import_node_path.join)(dir, entry.name);
210
+ if (entry.isDirectory()) {
211
+ await walkDir(fullPath);
212
+ } else if (entry.name.endsWith(".json")) {
213
+ totalFiles++;
214
+ const fileStat = await (0, import_promises.stat)(fullPath);
215
+ totalSizeBytes += fileStat.size;
216
+ try {
217
+ const content = JSON.parse(
218
+ await (0, import_promises.readFile)(fullPath, "utf-8")
219
+ );
220
+ if (content.type === "generate") {
221
+ generateCount++;
222
+ } else if (content.type === "stream") {
223
+ streamCount++;
224
+ }
225
+ } catch (e) {
226
+ }
227
+ }
228
+ })
229
+ );
230
+ } catch (e) {
231
+ }
232
+ }
233
+ await walkDir(resolvedDir);
234
+ return { totalFiles, totalSizeBytes, generateCount, streamCount };
235
+ }
236
+ // Annotate the CommonJS export names for ESM import in node:
237
+ 0 && (module.exports = {
238
+ clearDiskCache,
239
+ createDiskCacheMiddleware,
240
+ getCacheStats
241
+ });
242
+ //# sourceMappingURL=disk-cache.cjs.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/disk-cache.ts"],"sourcesContent":["import { createHash } from \"node:crypto\";\nimport {\n mkdir,\n readdir,\n readFile,\n rm,\n stat,\n writeFile,\n} from \"node:fs/promises\";\nimport { dirname, join, resolve } from \"node:path\";\nimport type {\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\ndeclare const __PACKAGE_VERSION__: string;\n\nexport interface DiskCacheMiddlewareOptions {\n cacheDir?: string;\n enabled?: boolean;\n forceRefresh?: boolean;\n generateKey?: (modelId: string, params: unknown) => string;\n debug?: boolean;\n}\n\ninterface CachedGenerateResult {\n type: \"generate\";\n content: unknown;\n finishReason: unknown;\n usage: unknown;\n warnings: unknown;\n response: unknown;\n providerMetadata: unknown;\n request: unknown;\n}\n\ninterface CachedStreamResult {\n type: \"stream\";\n parts: LanguageModelV3StreamPart[];\n response: unknown;\n request: unknown;\n}\n\ntype CachedResult = CachedGenerateResult | CachedStreamResult;\n\nfunction defaultGenerateKey(modelId: string, params: unknown): string {\n const serialized = JSON.stringify(\n { version: __PACKAGE_VERSION__, modelId, params },\n (_key, value) => {\n if (typeof value === \"function\") {\n return \"[function]\";\n }\n if (value instanceof RegExp) {\n return value.toString();\n }\n return value;\n }\n );\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n}\n\nfunction getCachePath(cacheDir: string, key: string): string {\n return join(cacheDir, key.slice(0, 2), `${key}.json`);\n}\n\nasync function readCache(cachePath: string): Promise<CachedResult | null> {\n try {\n const content = await readFile(cachePath, \"utf-8\");\n const parsed = JSON.parse(content) as CachedResult;\n if (parsed.response && typeof parsed.response === \"object\") {\n const resp = parsed.response as Record<string, unknown>;\n if (typeof resp.timestamp === \"string\") {\n resp.timestamp = new Date(resp.timestamp);\n }\n }\n return parsed;\n } catch {\n return null;\n }\n}\n\nasync function writeCache(\n cachePath: string,\n result: CachedResult\n): Promise<void> {\n try {\n await mkdir(dirname(cachePath), { recursive: true });\n await writeFile(cachePath, JSON.stringify(result), \"utf-8\");\n } catch {\n // Silent fail\n }\n}\n\nfunction createStreamFromParts(\n parts: LanguageModelV3StreamPart[]\n): ReadableStream<LanguageModelV3StreamPart> {\n let index = 0;\n return new ReadableStream({\n pull(controller) {\n if (index < parts.length) {\n controller.enqueue(parts[index++]);\n } else {\n controller.close();\n }\n },\n });\n}\n\ntype FinishReasonLike = { unified?: string } | string | null | undefined;\n\nfunction isErrorFinishReason(finishReason: FinishReasonLike): boolean {\n if (!finishReason) {\n return false;\n }\n const unified =\n typeof finishReason === \"string\" ? finishReason : finishReason.unified;\n return unified === \"error\" || unified === \"other\";\n}\n\nexport function createDiskCacheMiddleware(\n options: DiskCacheMiddlewareOptions = {}\n): LanguageModelV3Middleware {\n const generateKey = options.generateKey ?? defaultGenerateKey;\n const resolvedCacheDir = resolve(options.cacheDir ?? \".ai-cache\");\n\n const envEnabled = process.env.AI_CACHE_ENABLED;\n const enabled =\n envEnabled !== undefined\n ? envEnabled.toLowerCase() === \"true\" || envEnabled === \"1\"\n : (options.enabled ?? true);\n\n const envDebug = process.env.AI_CACHE_DEBUG;\n const debug =\n envDebug !== undefined\n ? 
envDebug.toLowerCase() === \"true\" || envDebug === \"1\"\n : (options.debug ?? false);\n\n const envForceRefresh = process.env.AI_CACHE_FORCE_REFRESH;\n const forceRefresh =\n envForceRefresh !== undefined\n ? envForceRefresh.toLowerCase() === \"true\" || envForceRefresh === \"1\"\n : (options.forceRefresh ?? false);\n\n const log = debug\n ? (msg: string, data?: unknown) =>\n console.log(`[ai-cache] ${msg}`, data ?? \"\")\n : () => undefined;\n\n if (!enabled) {\n return { specificationVersion: \"v3\" };\n }\n\n return {\n specificationVersion: \"v3\",\n\n wrapGenerate: async ({ doGenerate, params, model }) => {\n const cacheKey = generateKey(model.modelId, params);\n const cachePath = getCachePath(resolvedCacheDir, cacheKey);\n\n if (!forceRefresh) {\n const cached = await readCache(cachePath);\n if (cached?.type === \"generate\") {\n log(\"HIT generate\", cacheKey.slice(0, 8));\n return {\n content: cached.content,\n finishReason: cached.finishReason,\n usage: cached.usage,\n warnings: cached.warnings,\n response: cached.response,\n providerMetadata: cached.providerMetadata,\n request: cached.request,\n } as Awaited<ReturnType<typeof doGenerate>>;\n }\n }\n\n log(\n forceRefresh ? \"REFRESH generate\" : \"MISS generate\",\n cacheKey.slice(0, 8)\n );\n const result = await doGenerate();\n\n if (isErrorFinishReason(result.finishReason)) {\n log(\"SKIP cache (error response)\", result.finishReason);\n } else {\n await writeCache(cachePath, {\n type: \"generate\",\n content: result.content,\n finishReason: result.finishReason,\n usage: result.usage,\n warnings: result.warnings,\n response: result.response,\n providerMetadata: result.providerMetadata,\n request: result.request,\n });\n }\n\n return result;\n },\n\n wrapStream: async ({ doStream, params, model }) => {\n const cacheKey = generateKey(model.modelId, params);\n const cachePath = getCachePath(resolvedCacheDir, cacheKey);\n\n if (!forceRefresh) {\n const cached = await readCache(cachePath);\n if (cached?.type === \"stream\") {\n log(\"HIT stream\", {\n key: cacheKey.slice(0, 8),\n parts: cached.parts.length,\n });\n return {\n stream: createStreamFromParts(cached.parts),\n response: cached.response,\n request: cached.request,\n } as Awaited<ReturnType<typeof doStream>>;\n }\n }\n\n log(\n forceRefresh ? 
\"REFRESH stream\" : \"MISS stream\",\n cacheKey.slice(0, 8)\n );\n const result = await doStream();\n\n const collectedParts: LanguageModelV3StreamPart[] = [];\n\n const cachedStream = result.stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform(chunk, controller) {\n collectedParts.push(chunk);\n controller.enqueue(chunk);\n },\n flush() {\n const finishPart = collectedParts.find((p) => p.type === \"finish\");\n if (finishPart && isErrorFinishReason(finishPart.finishReason)) {\n return;\n }\n\n writeCache(cachePath, {\n type: \"stream\",\n parts: collectedParts,\n response: result.response,\n request: result.request,\n });\n },\n })\n );\n\n return { ...result, stream: cachedStream };\n },\n };\n}\n\nexport async function clearDiskCache(cacheDir = \".ai-cache\"): Promise<void> {\n try {\n await rm(resolve(cacheDir), { recursive: true, force: true });\n } catch {\n // Directory doesn't exist\n }\n}\n\nexport async function getCacheStats(cacheDir = \".ai-cache\"): Promise<{\n totalFiles: number;\n totalSizeBytes: number;\n generateCount: number;\n streamCount: number;\n}> {\n const resolvedDir = resolve(cacheDir);\n let totalFiles = 0;\n let totalSizeBytes = 0;\n let generateCount = 0;\n let streamCount = 0;\n\n async function walkDir(dir: string): Promise<void> {\n try {\n const entries = await readdir(dir, { withFileTypes: true });\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n await walkDir(fullPath);\n } else if (entry.name.endsWith(\".json\")) {\n totalFiles++;\n const fileStat = await stat(fullPath);\n totalSizeBytes += fileStat.size;\n\n try {\n const content = JSON.parse(\n await readFile(fullPath, \"utf-8\")\n ) as CachedResult;\n if (content.type === \"generate\") {\n generateCount++;\n } else if (content.type === \"stream\") {\n streamCount++;\n }\n } catch {\n // Skip malformed\n }\n }\n })\n );\n } catch {\n // Directory doesn't exist\n }\n }\n\n await walkDir(resolvedDir);\n return { totalFiles, totalSizeBytes, generateCount, streamCount 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAA2B;AAC3B,sBAOO;AACP,uBAAuC;AAoCvC,SAAS,mBAAmB,SAAiB,QAAyB;AACpE,QAAM,aAAa,KAAK;AAAA,IACtB,EAAE,SAAS,SAAqB,SAAS,OAAO;AAAA,IAChD,CAAC,MAAM,UAAU;AACf,UAAI,OAAO,UAAU,YAAY;AAC/B,eAAO;AAAA,MACT;AACA,UAAI,iBAAiB,QAAQ;AAC3B,eAAO,MAAM,SAAS;AAAA,MACxB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,aAAO,+BAAW,QAAQ,EAAE,OAAO,UAAU,EAAE,OAAO,KAAK;AAC7D;AAEA,SAAS,aAAa,UAAkB,KAAqB;AAC3D,aAAO,uBAAK,UAAU,IAAI,MAAM,GAAG,CAAC,GAAG,GAAG,GAAG,OAAO;AACtD;AAEA,eAAe,UAAU,WAAiD;AACxE,MAAI;AACF,UAAM,UAAU,UAAM,0BAAS,WAAW,OAAO;AACjD,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,QAAI,OAAO,YAAY,OAAO,OAAO,aAAa,UAAU;AAC1D,YAAM,OAAO,OAAO;AACpB,UAAI,OAAO,KAAK,cAAc,UAAU;AACtC,aAAK,YAAY,IAAI,KAAK,KAAK,SAAS;AAAA,MAC1C;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA,eAAe,WACb,WACA,QACe;AACf,MAAI;AACF,cAAM,2BAAM,0BAAQ,SAAS,GAAG,EAAE,WAAW,KAAK,CAAC;AACnD,cAAM,2BAAU,WAAW,KAAK,UAAU,MAAM,GAAG,OAAO;AAAA,EAC5D,SAAQ;AAAA,EAER;AACF;AAEA,SAAS,sBACP,OAC2C;AAC3C,MAAI,QAAQ;AACZ,SAAO,IAAI,eAAe;AAAA,IACxB,KAAK,YAAY;AACf,UAAI,QAAQ,MAAM,QAAQ;AACxB,mBAAW,QAAQ,MAAM,OAAO,CAAC;AAAA,MACnC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAIA,SAAS,oBAAoB,cAAyC;AACpE,MAAI,CAAC,cAAc;AACjB,WAAO;AAAA,EACT;AACA,QAAM,UACJ,OAAO,iBAAiB,WAAW,eAAe,aAAa;AACjE,SAAO,YAAY,WAAW,YAAY;AAC5C;AAEO,SAAS,0BACd,UAAsC,CAAC,GACZ;AAzH7B;AA0HE,QAAM,eAAc,aAAQ,gBAAR,YAAuB;AAC3C,QAAM,uBAAmB,2BAAQ,aAAQ,aAAR,YAAoB,WAAW;AAEhE,QAAM,aAAa,QAAQ,IAAI;AAC/B,QAAM,UACJ,eAAe,SACX,WAAW,YAAY,MAAM,UAAU,eAAe,OACrD,aAAQ,YAAR,YAAmB;AAE1B,QAAM,WAAW,QAAQ,IAAI;AAC7B,QAAM,QACJ,aAAa,SACT,SAAS,YAAY,MAAM,UAAU,aAAa,OACjD,aAAQ,UAAR,YAAiB;AAExB,QAAM,kBAAkB,QAAQ,IAAI;AACpC,QAAM,eACJ,oBAAoB,SAChB,gBAAgB,YAAY,MAAM,UAAU,oBAAoB,OAC/D,aAAQ,iBAAR,YAAwB;AAE/B,QAAM,MAAM,QACR,CAAC,KAAa,SACZ,QAAQ,IAAI,cAAc,GAAG,IAAI,sBAAQ,EAAE,IAC7C,MAAM;AAEV,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,sBAAsB,KAAK;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IAEtB,cAAc,OAAO,EAAE,YAAY,QAAQ,MAAM,MAAM;AACrD,YAAM,WAAW,YAAY,MAAM,SAAS,MAAM;AAClD,YAAM,YAAY,aAAa,kBAAkB,QAAQ;AAEzD,UAAI,CAAC,cAAc;AACjB,cAAM,SAAS,MAAM,UAAU,SAAS;AACxC,aAAI,iCAAQ,UAAS,YAAY;AAC/B,cAAI,gBAAgB,SAAS,MAAM,GAAG,CAAC,CAAC;AACxC,iBAAO;AAAA,YACL,SAAS,OAAO;AAAA,YAChB,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,YACzB,SAAS,OAAO;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAEA;AAAA,QACE,eAAe,qBAAqB;AAAA,QACpC,SAAS,MAAM,GAAG,CAAC;AAAA,MACrB;AACA,YAAM,SAAS,MAAM,WAAW;AAEhC,UAAI,oBAAoB,OAAO,YAAY,GAAG;AAC5C,YAAI,+BAA+B,OAAO,YAAY;AAAA,MACxD,OAAO;AACL,cAAM,WAAW,WAAW;AAAA,UAC1B,MAAM;AAAA,UACN,SAAS,OAAO;AAAA,UAChB,cAAc,OAAO;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,UAAU,OAAO;AAAA,UACjB,UAAU,OAAO;AAAA,UACjB,kBAAkB,OAAO;AAAA,UACzB,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,YAAY,OAAO,EAAE,UAAU,QAAQ,MAAM,MAAM;AACjD,YAAM,WAAW,YAAY,MAAM,SAAS,MAAM;AAClD,YAAM,YAAY,aAAa,kBAAkB,QAAQ;AAEzD,UAAI,CAAC,cAAc;AACjB,cAAM,SAAS,MAAM,UAAU,SAAS;AACxC,aAAI,iCAAQ,UAAS,UAAU;AAC7B,cAAI,cAAc;AAAA,YAChB,KAAK,SAAS,MAAM,GAAG,CAAC;AAAA,YACxB,OAAO,OAAO,MAAM;AAAA,UACtB,CAAC;AACD,iBAAO;AAAA,YACL,QAAQ,sBAAsB,OAAO,KAAK;AAAA,YAC1C,UAAU,OAAO;AAAA,YACjB,SAAS,OAAO;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAEA;AAAA,QACE,eAAe,mBAAmB;AAAA,QAClC,SAAS,MAAM,GAAG,CAAC;AAAA,MACrB;AACA,YAAM,SAAS,MAAM,SAAS;AAE9B,YAAM,iBAA8C,CAAC;AAErD,YAAM,eAAe,OAAO,OAAO;AAAA,QACjC,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,2BAAe,KAAK,KAAK;AACzB,uBAAW,QAAQ,KAAK;AAAA,UAC1B;AAAA,UACA,QAAQ;AACN,kBAAM,aAAa,eAAe,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AACjE,gBAAI,cAAc,oBAAoB,WAAW,YAAY,GAAG;AAC9D;AAAA,YACF;AAEA,uBAAW,WAAW;AAAA,cACpB,MAAM;AAAA,cACN,OAA
O;AAAA,cACP,UAAU,OAAO;AAAA,cACjB,SAAS,OAAO;AAAA,YAClB,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,EAAE,GAAG,QAAQ,QAAQ,aAAa;AAAA,IAC3C;AAAA,EACF;AACF;AAEA,eAAsB,eAAe,WAAW,aAA4B;AAC1E,MAAI;AACF,cAAM,wBAAG,0BAAQ,QAAQ,GAAG,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAC9D,SAAQ;AAAA,EAER;AACF;AAEA,eAAsB,cAAc,WAAW,aAK5C;AACD,QAAM,kBAAc,0BAAQ,QAAQ;AACpC,MAAI,aAAa;AACjB,MAAI,iBAAiB;AACrB,MAAI,gBAAgB;AACpB,MAAI,cAAc;AAElB,iBAAe,QAAQ,KAA4B;AACjD,QAAI;AACF,YAAM,UAAU,UAAM,yBAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,YAAM,QAAQ;AAAA,QACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,gBAAM,eAAW,uBAAK,KAAK,MAAM,IAAI;AACrC,cAAI,MAAM,YAAY,GAAG;AACvB,kBAAM,QAAQ,QAAQ;AAAA,UACxB,WAAW,MAAM,KAAK,SAAS,OAAO,GAAG;AACvC;AACA,kBAAM,WAAW,UAAM,sBAAK,QAAQ;AACpC,8BAAkB,SAAS;AAE3B,gBAAI;AACF,oBAAM,UAAU,KAAK;AAAA,gBACnB,UAAM,0BAAS,UAAU,OAAO;AAAA,cAClC;AACA,kBAAI,QAAQ,SAAS,YAAY;AAC/B;AAAA,cACF,WAAW,QAAQ,SAAS,UAAU;AACpC;AAAA,cACF;AAAA,YACF,SAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,QAAQ,WAAW;AACzB,SAAO,EAAE,YAAY,gBAAgB,eAAe,YAAY;AAClE;","names":[]}
@@ -0,0 +1,19 @@
+ import { LanguageModelV3Middleware } from '@ai-sdk/provider';
+
+ interface DiskCacheMiddlewareOptions {
+ cacheDir?: string;
+ enabled?: boolean;
+ forceRefresh?: boolean;
+ generateKey?: (modelId: string, params: unknown) => string;
+ debug?: boolean;
+ }
+ declare function createDiskCacheMiddleware(options?: DiskCacheMiddlewareOptions): LanguageModelV3Middleware;
+ declare function clearDiskCache(cacheDir?: string): Promise<void>;
+ declare function getCacheStats(cacheDir?: string): Promise<{
+ totalFiles: number;
+ totalSizeBytes: number;
+ generateCount: number;
+ streamCount: number;
+ }>;
+
+ export { type DiskCacheMiddlewareOptions, clearDiskCache, createDiskCacheMiddleware, getCacheStats };
@@ -0,0 +1,19 @@
+ import { LanguageModelV3Middleware } from '@ai-sdk/provider';
+
+ interface DiskCacheMiddlewareOptions {
+ cacheDir?: string;
+ enabled?: boolean;
+ forceRefresh?: boolean;
+ generateKey?: (modelId: string, params: unknown) => string;
+ debug?: boolean;
+ }
+ declare function createDiskCacheMiddleware(options?: DiskCacheMiddlewareOptions): LanguageModelV3Middleware;
+ declare function clearDiskCache(cacheDir?: string): Promise<void>;
+ declare function getCacheStats(cacheDir?: string): Promise<{
+ totalFiles: number;
+ totalSizeBytes: number;
+ generateCount: number;
+ streamCount: number;
+ }>;
+
+ export { type DiskCacheMiddlewareOptions, clearDiskCache, createDiskCacheMiddleware, getCacheStats };
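The declarations above also expose two maintenance helpers. A small sketch of a cleanup script using them, based only on the declared signatures and the behavior shown in the disk-cache source:

```ts
// Sketch only: inspect and clear the on-disk cache.
import { clearDiskCache, getCacheStats } from "@ai-sdk-tool/middleware";

const stats = await getCacheStats(".ai-cache"); // ".ai-cache" is the default directory
console.log(
  `${stats.totalFiles} entries (${stats.generateCount} generate / ${stats.streamCount} stream), ` +
    `${(stats.totalSizeBytes / 1024).toFixed(1)} KiB`
);

// Recursively removes the cache directory; a missing directory is ignored.
await clearDiskCache(".ai-cache");
```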
@@ -0,0 +1,11 @@
+ import {
+ clearDiskCache,
+ createDiskCacheMiddleware,
+ getCacheStats
+ } from "./chunk-XJIXXAOA.js";
+ export {
+ clearDiskCache,
+ createDiskCacheMiddleware,
+ getCacheStats
+ };
+ //# sourceMappingURL=disk-cache.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":[],"sourcesContent":[],"mappings":"","names":[]}
package/dist/index.cjs CHANGED
@@ -20,8 +20,11 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
20
20
  // src/index.ts
21
21
  var src_exports = {};
22
22
  __export(src_exports, {
23
+ clearDiskCache: () => clearDiskCache,
24
+ createDiskCacheMiddleware: () => createDiskCacheMiddleware,
23
25
  defaultSystemPromptMiddleware: () => defaultSystemPromptMiddleware,
24
26
  extractReasoningMiddleware: () => extractReasoningMiddleware,
27
+ getCacheStats: () => getCacheStats,
25
28
  getPotentialStartIndex: () => getPotentialStartIndex
26
29
  });
27
30
  module.exports = __toCommonJS(src_exports);
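The hunk above adds the disk-cache helpers to the CommonJS export map, so require-based consumers get the same surface as the ESM build. An illustrative require form (the package name is taken from this diff's header):

const {
  createDiskCacheMiddleware,
  clearDiskCache,
  getCacheStats,
} = require("@ai-sdk-tool/middleware");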
@@ -126,7 +129,217 @@ function defaultSystemPromptMiddleware({
126
129
  };
127
130
  }
128
131
 
129
- // src/reasoning-parser/index.ts
132
+ // src/disk-cache.ts
133
+ var import_node_crypto = require("crypto");
134
+ var import_promises = require("fs/promises");
135
+ var import_node_path = require("path");
136
+ function defaultGenerateKey(modelId, params) {
137
+ const serialized = JSON.stringify(
138
+ { version: "0.0.1", modelId, params },
139
+ (_key, value) => {
140
+ if (typeof value === "function") {
141
+ return "[function]";
142
+ }
143
+ if (value instanceof RegExp) {
144
+ return value.toString();
145
+ }
146
+ return value;
147
+ }
148
+ );
149
+ return (0, import_node_crypto.createHash)("sha256").update(serialized).digest("hex");
150
+ }
151
+ function getCachePath(cacheDir, key) {
152
+ return (0, import_node_path.join)(cacheDir, key.slice(0, 2), `${key}.json`);
153
+ }
154
+ async function readCache(cachePath) {
155
+ try {
156
+ const content = await (0, import_promises.readFile)(cachePath, "utf-8");
157
+ const parsed = JSON.parse(content);
158
+ if (parsed.response && typeof parsed.response === "object") {
159
+ const resp = parsed.response;
160
+ if (typeof resp.timestamp === "string") {
161
+ resp.timestamp = new Date(resp.timestamp);
162
+ }
163
+ }
164
+ return parsed;
165
+ } catch (e) {
166
+ return null;
167
+ }
168
+ }
169
+ async function writeCache(cachePath, result) {
170
+ try {
171
+ await (0, import_promises.mkdir)((0, import_node_path.dirname)(cachePath), { recursive: true });
172
+ await (0, import_promises.writeFile)(cachePath, JSON.stringify(result), "utf-8");
173
+ } catch (e) {
174
+ }
175
+ }
176
+ function createStreamFromParts(parts) {
177
+ let index = 0;
178
+ return new ReadableStream({
179
+ pull(controller) {
180
+ if (index < parts.length) {
181
+ controller.enqueue(parts[index++]);
182
+ } else {
183
+ controller.close();
184
+ }
185
+ }
186
+ });
187
+ }
188
+ function isErrorFinishReason(finishReason) {
189
+ if (!finishReason) {
190
+ return false;
191
+ }
192
+ const unified = typeof finishReason === "string" ? finishReason : finishReason.unified;
193
+ return unified === "error" || unified === "other";
194
+ }
195
+ function createDiskCacheMiddleware(options = {}) {
196
+ var _a, _b, _c, _d, _e;
197
+ const generateKey = (_a = options.generateKey) != null ? _a : defaultGenerateKey;
198
+ const resolvedCacheDir = (0, import_node_path.resolve)((_b = options.cacheDir) != null ? _b : ".ai-cache");
199
+ const envEnabled = process.env.AI_CACHE_ENABLED;
200
+ const enabled = envEnabled !== void 0 ? envEnabled.toLowerCase() === "true" || envEnabled === "1" : (_c = options.enabled) != null ? _c : true;
201
+ const envDebug = process.env.AI_CACHE_DEBUG;
202
+ const debug = envDebug !== void 0 ? envDebug.toLowerCase() === "true" || envDebug === "1" : (_d = options.debug) != null ? _d : false;
203
+ const envForceRefresh = process.env.AI_CACHE_FORCE_REFRESH;
204
+ const forceRefresh = envForceRefresh !== void 0 ? envForceRefresh.toLowerCase() === "true" || envForceRefresh === "1" : (_e = options.forceRefresh) != null ? _e : false;
205
+ const log = debug ? (msg, data) => console.log(`[ai-cache] ${msg}`, data != null ? data : "") : () => void 0;
206
+ if (!enabled) {
207
+ return { specificationVersion: "v3" };
208
+ }
209
+ return {
210
+ specificationVersion: "v3",
211
+ wrapGenerate: async ({ doGenerate, params, model }) => {
212
+ const cacheKey = generateKey(model.modelId, params);
213
+ const cachePath = getCachePath(resolvedCacheDir, cacheKey);
214
+ if (!forceRefresh) {
215
+ const cached = await readCache(cachePath);
216
+ if ((cached == null ? void 0 : cached.type) === "generate") {
217
+ log("HIT generate", cacheKey.slice(0, 8));
218
+ return {
219
+ content: cached.content,
220
+ finishReason: cached.finishReason,
221
+ usage: cached.usage,
222
+ warnings: cached.warnings,
223
+ response: cached.response,
224
+ providerMetadata: cached.providerMetadata,
225
+ request: cached.request
226
+ };
227
+ }
228
+ }
229
+ log(
230
+ forceRefresh ? "REFRESH generate" : "MISS generate",
231
+ cacheKey.slice(0, 8)
232
+ );
233
+ const result = await doGenerate();
234
+ if (isErrorFinishReason(result.finishReason)) {
235
+ log("SKIP cache (error response)", result.finishReason);
236
+ } else {
237
+ await writeCache(cachePath, {
238
+ type: "generate",
239
+ content: result.content,
240
+ finishReason: result.finishReason,
241
+ usage: result.usage,
242
+ warnings: result.warnings,
243
+ response: result.response,
244
+ providerMetadata: result.providerMetadata,
245
+ request: result.request
246
+ });
247
+ }
248
+ return result;
249
+ },
250
+ wrapStream: async ({ doStream, params, model }) => {
251
+ const cacheKey = generateKey(model.modelId, params);
252
+ const cachePath = getCachePath(resolvedCacheDir, cacheKey);
253
+ if (!forceRefresh) {
254
+ const cached = await readCache(cachePath);
255
+ if ((cached == null ? void 0 : cached.type) === "stream") {
256
+ log("HIT stream", {
257
+ key: cacheKey.slice(0, 8),
258
+ parts: cached.parts.length
259
+ });
260
+ return {
261
+ stream: createStreamFromParts(cached.parts),
262
+ response: cached.response,
263
+ request: cached.request
264
+ };
265
+ }
266
+ }
267
+ log(
268
+ forceRefresh ? "REFRESH stream" : "MISS stream",
269
+ cacheKey.slice(0, 8)
270
+ );
271
+ const result = await doStream();
272
+ const collectedParts = [];
273
+ const cachedStream = result.stream.pipeThrough(
274
+ new TransformStream({
275
+ transform(chunk, controller) {
276
+ collectedParts.push(chunk);
277
+ controller.enqueue(chunk);
278
+ },
279
+ flush() {
280
+ const finishPart = collectedParts.find((p) => p.type === "finish");
281
+ if (finishPart && isErrorFinishReason(finishPart.finishReason)) {
282
+ return;
283
+ }
284
+ writeCache(cachePath, {
285
+ type: "stream",
286
+ parts: collectedParts,
287
+ response: result.response,
288
+ request: result.request
289
+ });
290
+ }
291
+ })
292
+ );
293
+ return { ...result, stream: cachedStream };
294
+ }
295
+ };
296
+ }
297
+ async function clearDiskCache(cacheDir = ".ai-cache") {
298
+ try {
299
+ await (0, import_promises.rm)((0, import_node_path.resolve)(cacheDir), { recursive: true, force: true });
300
+ } catch (e) {
301
+ }
302
+ }
303
+ async function getCacheStats(cacheDir = ".ai-cache") {
304
+ const resolvedDir = (0, import_node_path.resolve)(cacheDir);
305
+ let totalFiles = 0;
306
+ let totalSizeBytes = 0;
307
+ let generateCount = 0;
308
+ let streamCount = 0;
309
+ async function walkDir(dir) {
310
+ try {
311
+ const entries = await (0, import_promises.readdir)(dir, { withFileTypes: true });
312
+ await Promise.all(
313
+ entries.map(async (entry) => {
314
+ const fullPath = (0, import_node_path.join)(dir, entry.name);
315
+ if (entry.isDirectory()) {
316
+ await walkDir(fullPath);
317
+ } else if (entry.name.endsWith(".json")) {
318
+ totalFiles++;
319
+ const fileStat = await (0, import_promises.stat)(fullPath);
320
+ totalSizeBytes += fileStat.size;
321
+ try {
322
+ const content = JSON.parse(
323
+ await (0, import_promises.readFile)(fullPath, "utf-8")
324
+ );
325
+ if (content.type === "generate") {
326
+ generateCount++;
327
+ } else if (content.type === "stream") {
328
+ streamCount++;
329
+ }
330
+ } catch (e) {
331
+ }
332
+ }
333
+ })
334
+ );
335
+ } catch (e) {
336
+ }
337
+ }
338
+ await walkDir(resolvedDir);
339
+ return { totalFiles, totalSizeBytes, generateCount, streamCount };
340
+ }
341
+
342
+ // src/reasoning-parser.ts
130
343
  function getPotentialStartIndex(text, searchedText) {
131
344
  if (searchedText.length === 0) {
132
345
  return null;
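The src/disk-cache.ts code added in the hunk above derives cache keys with defaultGenerateKey and shards them on disk via getCachePath. The sketch below reproduces that derivation outside the middleware, which can help when writing a custom generateKey option or locating a cache entry by hand (illustrative only; cacheKeyFor is a hypothetical helper, and the real default embeds the package version shown in the compiled output):

import { createHash } from "node:crypto";
import { join } from "node:path";

// Same shape as defaultGenerateKey in the diff: functions and RegExp values
// are normalized so the payload serializes deterministically before hashing.
function cacheKeyFor(modelId: string, params: unknown, version = "0.0.1"): string {
  const serialized = JSON.stringify({ version, modelId, params }, (_key, value) => {
    if (typeof value === "function") return "[function]";
    if (value instanceof RegExp) return value.toString();
    return value;
  });
  return createHash("sha256").update(serialized).digest("hex");
}

const key = cacheKeyFor("gpt-4o-mini", { prompt: "Hello!" });
// getCachePath shards entries by the first two hex characters of the key:
const entryPath = join(".ai-cache", key.slice(0, 2), `${key}.json`);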
@@ -328,8 +541,11 @@ function extractReasoningMiddleware({
328
541
  }
329
542
  // Annotate the CommonJS export names for ESM import in node:
330
543
  0 && (module.exports = {
544
+ clearDiskCache,
545
+ createDiskCacheMiddleware,
331
546
  defaultSystemPromptMiddleware,
332
547
  extractReasoningMiddleware,
548
+ getCacheStats,
333
549
  getPotentialStartIndex
334
550
  });
335
551
  /**
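Also worth noting from the same hunk: wrapStream caches by teeing the live stream through a TransformStream that collects every part and persists them in flush() (unless the finish part reports an error finish reason), while cache hits are replayed from a ReadableStream built over the stored parts. A reduced sketch of that record-and-replay pattern, with the part type simplified to a generic:

// Record: pass chunks through untouched while keeping a copy for the cache.
function recordStream<T>(
  source: ReadableStream<T>,
  persist: (parts: T[]) => void
): ReadableStream<T> {
  const collected: T[] = [];
  return source.pipeThrough(
    new TransformStream<T, T>({
      transform(chunk, controller) {
        collected.push(chunk);
        controller.enqueue(chunk);
      },
      flush() {
        persist(collected); // runs once the upstream closes
      },
    })
  );
}

// Replay: turn previously collected parts back into a stream for cache hits.
function replayStream<T>(parts: T[]): ReadableStream<T> {
  let index = 0;
  return new ReadableStream<T>({
    pull(controller) {
      if (index < parts.length) {
        controller.enqueue(parts[index++]);
      } else {
        controller.close();
      }
    },
  });
}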
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/index.ts","../src/default-system-prompt.ts","../src/reasoning-parser/index.ts"],"sourcesContent":["// biome-ignore lint/performance/noBarrelFile: Package entrypoint - must re-export for public API\nexport * from \"./default-system-prompt\";\nexport * from \"./reasoning-parser\";\n","import type {\n LanguageModelV3CallOptions,\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3Prompt,\n} from \"@ai-sdk/provider\";\n\ntype SystemPromptPlacement = \"first\" | \"last\";\n\ninterface DefaultSystemPromptMiddlewareOptions {\n systemPrompt: string;\n placement?: SystemPromptPlacement;\n}\n\nfunction extractSystemText(content: unknown): string | undefined {\n if (typeof content === \"string\") {\n return content;\n }\n\n if (!Array.isArray(content)) {\n if (content == null) {\n return;\n }\n return String(content);\n }\n\n const parts = (content as LanguageModelV3Content[]).map((part) => {\n if (part?.type === \"text\" && \"text\" in part) {\n return String(part.text ?? \"\");\n }\n\n return JSON.stringify(part);\n });\n\n const textParts = parts.filter((value) => value.length > 0);\n if (textParts.length === 0) {\n return;\n }\n\n return textParts.join(\"\\n\");\n}\n\nfunction mergeSystemPrompts({\n base,\n addition,\n placement,\n}: {\n base?: string;\n addition: string;\n placement: SystemPromptPlacement;\n}): string {\n if (!base) {\n return addition;\n }\n\n if (addition.length === 0) {\n return base;\n }\n\n return placement === \"first\"\n ? `${addition}\\n\\n${base}`\n : `${base}\\n\\n${addition}`;\n}\n\nfunction ensurePromptArray(\n prompt?: LanguageModelV3Prompt\n): LanguageModelV3Prompt {\n if (!prompt) {\n return [];\n }\n\n return [...prompt];\n}\n\nexport function defaultSystemPromptMiddleware({\n systemPrompt,\n placement = \"first\",\n}: DefaultSystemPromptMiddlewareOptions): LanguageModelV3Middleware {\n return {\n specificationVersion: \"v3\",\n transformParams: ({ params }) => {\n const prompt = ensurePromptArray(params.prompt);\n const systemIndex = prompt.findIndex(\n (message) => message.role === \"system\"\n );\n\n if (systemIndex === -1) {\n const promptWithSystem =\n placement === \"first\"\n ? ([\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ...prompt,\n ] as LanguageModelV3Prompt)\n : ([\n ...prompt,\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ] as LanguageModelV3Prompt);\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: promptWithSystem,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n }\n\n const systemMessage = prompt[systemIndex];\n const baseText = extractSystemText(systemMessage.content);\n const mergedContent = mergeSystemPrompts({\n base: baseText,\n addition: systemPrompt,\n placement,\n });\n\n const updatedPrompt = prompt.map((message, index) =>\n index === systemIndex\n ? {\n ...message,\n content: mergedContent,\n }\n : message\n ) as LanguageModelV3Prompt;\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: updatedPrompt,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n },\n };\n}\n","/**\n * @license\n * Copyright (c) 2021-present, FriendliAI Inc. 
All rights reserved.\n */\n\nimport type {\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\n/**\n * All code below is forked from the following link:\n * https://github.com/vercel/ai/blob/v5/packages/ai/core/middleware/extract-reasoning-middleware.ts\n */\n\n/**\n * Returns the index of the start of the searchedText in the text, or null if it\n * is not found.\n */\nexport function getPotentialStartIndex(\n text: string,\n searchedText: string\n): number | null {\n // Return null immediately if searchedText is empty.\n if (searchedText.length === 0) {\n return null;\n }\n\n // Check if the searchedText exists as a direct substring of text.\n const directIndex = text.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n\n // Otherwise, look for the largest suffix of \"text\" that matches\n // a prefix of \"searchedText\". We go from the end of text inward.\n for (let i = text.length - 1; i >= 0; i -= 1) {\n const suffix = text.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n\n return null;\n}\n\n/**\n * Extract an XML-tagged reasoning section from the generated text and exposes it\n * as a `reasoning` property on the result.\n *\n * @param openingTag - The opening XML tag to extract reasoning from.\n * @param closingTag - The closing XML tag to extract reasoning from.\n * @param separator - The separator to use between reasoning and text sections.\n * @param startWithReasoning - Whether to start with reasoning tokens.\n */\nexport function extractReasoningMiddleware({\n openingTag,\n closingTag,\n separator = \"\\n\",\n startWithReasoning = false,\n}: {\n openingTag: string;\n closingTag: string;\n separator?: string;\n startWithReasoning?: boolean;\n}): LanguageModelV3Middleware {\n function processTextPart(\n text: string,\n transformedContent: LanguageModelV3Content[]\n ) {\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n return;\n }\n\n const reasoningText = matches.map((match) => match[1]).join(separator);\n\n let textWithoutReasoning = text;\n for (let i = matches.length - 1; i >= 0; i -= 1) {\n const match = matches[i];\n\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const matchIndex = match.index ?? 0;\n const afterMatch = textWithoutReasoning.slice(\n matchIndex + match[0].length\n );\n\n textWithoutReasoning =\n beforeMatch +\n (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : \"\") +\n afterMatch;\n }\n\n transformedContent.push({\n type: \"reasoning\",\n text: reasoningText,\n });\n\n transformedContent.push({\n type: \"text\",\n text: textWithoutReasoning,\n });\n }\n\n return {\n specificationVersion: \"v3\",\n wrapGenerate: async ({ doGenerate }) => {\n const { content, ...rest } = await doGenerate();\n\n const transformedContent: LanguageModelV3Content[] = [];\n for (const part of content) {\n if (part.type !== \"text\") {\n transformedContent.push(part);\n continue;\n }\n\n const text = startWithReasoning ? 
openingTag + part.text : part.text;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n transformedContent.push(part);\n continue;\n }\n\n processTextPart(text, transformedContent);\n }\n\n return { content: transformedContent, ...rest };\n },\n\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n\n interface ExtractionState {\n isFirstReasoning: boolean;\n isFirstText: boolean;\n afterSwitch: boolean;\n isReasoning: boolean;\n buffer: string;\n idCounter: number;\n textId: string;\n }\n\n const reasoningExtractions: Record<string, ExtractionState> = {};\n\n function createPublisher(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n return (text: string) => {\n if (text.length === 0) {\n return;\n }\n\n const prefix = getPrefix(activeExtraction);\n enqueueReasoningStart(activeExtraction, controller);\n enqueueDelta(activeExtraction, controller, prefix, text);\n updateExtractionState(activeExtraction);\n };\n }\n\n function getPrefix(activeExtraction: ExtractionState): string {\n return activeExtraction.afterSwitch &&\n (activeExtraction.isReasoning\n ? !activeExtraction.isFirstReasoning\n : !activeExtraction.isFirstText)\n ? separator\n : \"\";\n }\n\n function enqueueReasoningStart(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n if (\n (activeExtraction.afterSwitch && activeExtraction.isReasoning) ||\n activeExtraction.isFirstReasoning\n ) {\n controller.enqueue({\n type: \"reasoning-start\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n }\n }\n\n function enqueueDelta(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n prefix: string,\n text: string\n ) {\n controller.enqueue(\n activeExtraction.isReasoning\n ? 
{\n type: \"reasoning-delta\",\n delta: prefix + text,\n id: `reasoning-${activeExtraction.idCounter}`,\n }\n : {\n type: \"text-delta\",\n delta: prefix + text,\n id: activeExtraction.textId,\n }\n );\n }\n\n function updateExtractionState(activeExtraction: ExtractionState) {\n activeExtraction.afterSwitch = false;\n if (activeExtraction.isReasoning) {\n activeExtraction.isFirstReasoning = false;\n } else {\n activeExtraction.isFirstText = false;\n }\n }\n\n function handleFullMatch(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n startIndex: number,\n nextTag: string\n ) {\n activeExtraction.buffer = activeExtraction.buffer.slice(\n startIndex + nextTag.length\n );\n\n if (activeExtraction.isReasoning) {\n controller.enqueue({\n type: \"reasoning-end\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n activeExtraction.idCounter += 1;\n }\n\n activeExtraction.isReasoning = !activeExtraction.isReasoning;\n activeExtraction.afterSwitch = true;\n }\n\n function processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n }: {\n activeExtraction: ExtractionState;\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>;\n publish: (text: string) => void;\n startIndex: number;\n nextTag: string;\n }): boolean {\n publish(activeExtraction.buffer.slice(0, startIndex));\n\n const foundFullMatch =\n startIndex + nextTag.length <= activeExtraction.buffer.length;\n\n if (foundFullMatch) {\n handleFullMatch(activeExtraction, controller, startIndex, nextTag);\n return true;\n }\n\n activeExtraction.buffer = activeExtraction.buffer.slice(startIndex);\n return false;\n }\n\n function processBuffer(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n const publish = createPublisher(activeExtraction, controller);\n let continueProcessing = true;\n\n while (continueProcessing) {\n const nextTag = activeExtraction.isReasoning\n ? 
closingTag\n : openingTag;\n const startIndex = getPotentialStartIndex(\n activeExtraction.buffer,\n nextTag\n );\n\n if (startIndex == null) {\n publish(activeExtraction.buffer);\n activeExtraction.buffer = \"\";\n break;\n }\n\n continueProcessing = processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n });\n }\n }\n\n return {\n stream: stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n\n if (reasoningExtractions[chunk.id] == null) {\n reasoningExtractions[chunk.id] = {\n isFirstReasoning: true,\n isFirstText: true,\n afterSwitch: false,\n isReasoning: startWithReasoning,\n buffer: \"\",\n idCounter: 0,\n textId: chunk.id,\n };\n }\n\n const activeExtraction = reasoningExtractions[chunk.id];\n activeExtraction.buffer += chunk.delta;\n processBuffer(activeExtraction, controller);\n },\n })\n ),\n ...rest,\n };\n },\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,kBAAkB,SAAsC;AAC/D,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAC3B,QAAI,WAAW,MAAM;AACnB;AAAA,IACF;AACA,WAAO,OAAO,OAAO;AAAA,EACvB;AAEA,QAAM,QAAS,QAAqC,IAAI,CAAC,SAAS;AA1BpE;AA2BI,SAAI,6BAAM,UAAS,UAAU,UAAU,MAAM;AAC3C,aAAO,QAAO,UAAK,SAAL,YAAa,EAAE;AAAA,IAC/B;AAEA,WAAO,KAAK,UAAU,IAAI;AAAA,EAC5B,CAAC;AAED,QAAM,YAAY,MAAM,OAAO,CAAC,UAAU,MAAM,SAAS,CAAC;AAC1D,MAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,EACF;AAEA,SAAO,UAAU,KAAK,IAAI;AAC5B;AAEA,SAAS,mBAAmB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AACF,GAIW;AACT,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,SAAO,cAAc,UACjB,GAAG,QAAQ;AAAA;AAAA,EAAO,IAAI,KACtB,GAAG,IAAI;AAAA;AAAA,EAAO,QAAQ;AAC5B;AAEA,SAAS,kBACP,QACuB;AACvB,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,CAAC,GAAG,MAAM;AACnB;AAEO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA,YAAY;AACd,GAAoE;AAClE,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,iBAAiB,CAAC,EAAE,OAAO,MAAM;AAC/B,YAAM,SAAS,kBAAkB,OAAO,MAAM;AAC9C,YAAM,cAAc,OAAO;AAAA,QACzB,CAAC,YAAY,QAAQ,SAAS;AAAA,MAChC;AAEA,UAAI,gBAAgB,IAAI;AACtB,cAAM,mBACJ,cAAc,UACT;AAAA,UACC;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,GAAG;AAAA,QACL,IACC;AAAA,UACC,GAAG;AAAA,UACH;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAEN,cAAMA,cAAyC;AAAA,UAC7C,GAAG;AAAA,UACH,QAAQ;AAAA,QACV;AAEA,eAAO,QAAQ,QAAoCA,WAAU;AAAA,MAC/D;AAEA,YAAM,gBAAgB,OAAO,WAAW;AACxC,YAAM,WAAW,kBAAkB,cAAc,OAAO;AACxD,YAAM,gBAAgB,mBAAmB;AAAA,QACvC,MAAM;AAAA,QACN,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAED,YAAM,gBAAgB,OAAO;AAAA,QAAI,CAAC,SAAS,UACzC,UAAU,cACN;AAAA,UACE,GAAG;AAAA,UACH,SAAS;AAAA,QACX,IACA;AAAA,MACN;AAEA,YAAM,aAAyC;AAAA,QAC7C,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAEA,aAAO,QAAQ,QAAoC,UAAU;AAAA,IAC/D;AAAA,EACF;AACF;;;ACrHO,SAAS,uBACd,MACA,cACe;AAEf,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,KAAK,QAAQ,YAAY;AAC7C,MAAI,gBAAgB,IAAI;AACtB,WAAO;AAAA,EACT;AAIA,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC5C,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,aAAa,WAAW,MAAM,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,qBAAqB;AACvB,GAK8B;AAC5B,WAAS,gBACP,MACA,oBACA;AAtEJ;AAuEI,UAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,UAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,QAAI,CAAC,QAAQ,QAAQ;AACnB;AAAA,IACF;AAEA,UAAM,gBAAgB,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK,SAAS;AAErE,QAAI,uBAAuB;AAC3B,aAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC/C,YAAM,QAAQ,QAAQ,CAAC;AAEvB,YAAM,cAAc,qBAAqB,MA
AM,GAAG,MAAM,KAAK;AAC7D,YAAM,cAAa,WAAM,UAAN,YAAe;AAClC,YAAM,aAAa,qBAAqB;AAAA,QACtC,aAAa,MAAM,CAAC,EAAE;AAAA,MACxB;AAEA,6BACE,eACC,YAAY,SAAS,KAAK,WAAW,SAAS,IAAI,YAAY,MAC/D;AAAA,IACJ;AAEA,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAED,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,cAAc,OAAO,EAAE,WAAW,MAAM;AACtC,YAAM,EAAE,SAAS,GAAG,KAAK,IAAI,MAAM,WAAW;AAE9C,YAAM,qBAA+C,CAAC;AACtD,iBAAW,QAAQ,SAAS;AAC1B,YAAI,KAAK,SAAS,QAAQ;AACxB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,cAAM,OAAO,qBAAqB,aAAa,KAAK,OAAO,KAAK;AAChE,cAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,cAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,YAAI,CAAC,QAAQ,QAAQ;AACnB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,wBAAgB,MAAM,kBAAkB;AAAA,MAC1C;AAEA,aAAO,EAAE,SAAS,oBAAoB,GAAG,KAAK;AAAA,IAChD;AAAA,IAEA,YAAY,OAAO,EAAE,SAAS,MAAM;AAClC,YAAM,EAAE,QAAQ,GAAG,KAAK,IAAI,MAAM,SAAS;AAY3C,YAAM,uBAAwD,CAAC;AAE/D,eAAS,gBACP,kBACA,YACA;AACA,eAAO,CAAC,SAAiB;AACvB,cAAI,KAAK,WAAW,GAAG;AACrB;AAAA,UACF;AAEA,gBAAM,SAAS,UAAU,gBAAgB;AACzC,gCAAsB,kBAAkB,UAAU;AAClD,uBAAa,kBAAkB,YAAY,QAAQ,IAAI;AACvD,gCAAsB,gBAAgB;AAAA,QACxC;AAAA,MACF;AAEA,eAAS,UAAU,kBAA2C;AAC5D,eAAO,iBAAiB,gBACrB,iBAAiB,cACd,CAAC,iBAAiB,mBAClB,CAAC,iBAAiB,eACpB,YACA;AAAA,MACN;AAEA,eAAS,sBACP,kBACA,YACA;AACA,YACG,iBAAiB,eAAe,iBAAiB,eAClD,iBAAiB,kBACjB;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,eAAS,aACP,kBACA,YACA,QACA,MACA;AACA,mBAAW;AAAA,UACT,iBAAiB,cACb;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,IACA;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,iBAAiB;AAAA,UACvB;AAAA,QACN;AAAA,MACF;AAEA,eAAS,sBAAsB,kBAAmC;AAChE,yBAAiB,cAAc;AAC/B,YAAI,iBAAiB,aAAa;AAChC,2BAAiB,mBAAmB;AAAA,QACtC,OAAO;AACL,2BAAiB,cAAc;AAAA,QACjC;AAAA,MACF;AAEA,eAAS,gBACP,kBACA,YACA,YACA,SACA;AACA,yBAAiB,SAAS,iBAAiB,OAAO;AAAA,UAChD,aAAa,QAAQ;AAAA,QACvB;AAEA,YAAI,iBAAiB,aAAa;AAChC,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AACD,2BAAiB,aAAa;AAAA,QAChC;AAEA,yBAAiB,cAAc,CAAC,iBAAiB;AACjD,yBAAiB,cAAc;AAAA,MACjC;AAEA,eAAS,gBAAgB;AAAA,QACvB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,GAMY;AACV,gBAAQ,iBAAiB,OAAO,MAAM,GAAG,UAAU,CAAC;AAEpD,cAAM,iBACJ,aAAa,QAAQ,UAAU,iBAAiB,OAAO;AAEzD,YAAI,gBAAgB;AAClB,0BAAgB,kBAAkB,YAAY,YAAY,OAAO;AACjE,iBAAO;AAAA,QACT;AAEA,yBAAiB,SAAS,iBAAiB,OAAO,MAAM,UAAU;AAClE,eAAO;AAAA,MACT;AAEA,eAAS,cACP,kBACA,YACA;AACA,cAAM,UAAU,gBAAgB,kBAAkB,UAAU;AAC5D,YAAI,qBAAqB;AAEzB,eAAO,oBAAoB;AACzB,gBAAM,UAAU,iBAAiB,cAC7B,aACA;AACJ,gBAAM,aAAa;AAAA,YACjB,iBAAiB;AAAA,YACjB;AAAA,UACF;AAEA,cAAI,cAAc,MAAM;AACtB,oBAAQ,iBAAiB,MAAM;AAC/B,6BAAiB,SAAS;AAC1B;AAAA,UACF;AAEA,+BAAqB,gBAAgB;AAAA,YACnC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ,OAAO;AAAA,UACb,IAAI,gBAGF;AAAA,YACA,WAAW,CAAC,OAAO,eAAe;AAChC,kBAAI,MAAM,SAAS,cAAc;AAC/B,2BAAW,QAAQ,KAAK;AACxB;AAAA,cACF;AAEA,kBAAI,qBAAqB,MAAM,EAAE,KAAK,MAAM;AAC1C,qCAAqB,MAAM,EAAE,IAAI;AAAA,kBAC/B,kBAAkB;AAAA,kBAClB,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR,WAAW;AAAA,kBACX,QAAQ,MAAM;AAAA,gBAChB;AAAA,cACF;AAEA,oBAAM,mBAAmB,qBAAqB,MAAM,EAAE;AACtD,+BAAiB,UAAU,MAAM;AACjC,4BAAc,kBAAkB,UAAU;AAAA,YAC5C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;","names":["nextParams"]}
1
+ {"version":3,"sources":["../src/index.ts","../src/default-system-prompt.ts","../src/disk-cache.ts","../src/reasoning-parser.ts"],"sourcesContent":["// biome-ignore lint/performance/noBarrelFile: Package entrypoint - must re-export for public API\nexport * from \"./default-system-prompt\";\nexport * from \"./disk-cache\";\nexport * from \"./reasoning-parser\";\n","import type {\n LanguageModelV3CallOptions,\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3Prompt,\n} from \"@ai-sdk/provider\";\n\ntype SystemPromptPlacement = \"first\" | \"last\";\n\ninterface DefaultSystemPromptMiddlewareOptions {\n systemPrompt: string;\n placement?: SystemPromptPlacement;\n}\n\nfunction extractSystemText(content: unknown): string | undefined {\n if (typeof content === \"string\") {\n return content;\n }\n\n if (!Array.isArray(content)) {\n if (content == null) {\n return;\n }\n return String(content);\n }\n\n const parts = (content as LanguageModelV3Content[]).map((part) => {\n if (part?.type === \"text\" && \"text\" in part) {\n return String(part.text ?? \"\");\n }\n\n return JSON.stringify(part);\n });\n\n const textParts = parts.filter((value) => value.length > 0);\n if (textParts.length === 0) {\n return;\n }\n\n return textParts.join(\"\\n\");\n}\n\nfunction mergeSystemPrompts({\n base,\n addition,\n placement,\n}: {\n base?: string;\n addition: string;\n placement: SystemPromptPlacement;\n}): string {\n if (!base) {\n return addition;\n }\n\n if (addition.length === 0) {\n return base;\n }\n\n return placement === \"first\"\n ? `${addition}\\n\\n${base}`\n : `${base}\\n\\n${addition}`;\n}\n\nfunction ensurePromptArray(\n prompt?: LanguageModelV3Prompt\n): LanguageModelV3Prompt {\n if (!prompt) {\n return [];\n }\n\n return [...prompt];\n}\n\nexport function defaultSystemPromptMiddleware({\n systemPrompt,\n placement = \"first\",\n}: DefaultSystemPromptMiddlewareOptions): LanguageModelV3Middleware {\n return {\n specificationVersion: \"v3\",\n transformParams: ({ params }) => {\n const prompt = ensurePromptArray(params.prompt);\n const systemIndex = prompt.findIndex(\n (message) => message.role === \"system\"\n );\n\n if (systemIndex === -1) {\n const promptWithSystem =\n placement === \"first\"\n ? ([\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ...prompt,\n ] as LanguageModelV3Prompt)\n : ([\n ...prompt,\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ] as LanguageModelV3Prompt);\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: promptWithSystem,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n }\n\n const systemMessage = prompt[systemIndex];\n const baseText = extractSystemText(systemMessage.content);\n const mergedContent = mergeSystemPrompts({\n base: baseText,\n addition: systemPrompt,\n placement,\n });\n\n const updatedPrompt = prompt.map((message, index) =>\n index === systemIndex\n ? 
{\n ...message,\n content: mergedContent,\n }\n : message\n ) as LanguageModelV3Prompt;\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: updatedPrompt,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n },\n };\n}\n","import { createHash } from \"node:crypto\";\nimport {\n mkdir,\n readdir,\n readFile,\n rm,\n stat,\n writeFile,\n} from \"node:fs/promises\";\nimport { dirname, join, resolve } from \"node:path\";\nimport type {\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\ndeclare const __PACKAGE_VERSION__: string;\n\nexport interface DiskCacheMiddlewareOptions {\n cacheDir?: string;\n enabled?: boolean;\n forceRefresh?: boolean;\n generateKey?: (modelId: string, params: unknown) => string;\n debug?: boolean;\n}\n\ninterface CachedGenerateResult {\n type: \"generate\";\n content: unknown;\n finishReason: unknown;\n usage: unknown;\n warnings: unknown;\n response: unknown;\n providerMetadata: unknown;\n request: unknown;\n}\n\ninterface CachedStreamResult {\n type: \"stream\";\n parts: LanguageModelV3StreamPart[];\n response: unknown;\n request: unknown;\n}\n\ntype CachedResult = CachedGenerateResult | CachedStreamResult;\n\nfunction defaultGenerateKey(modelId: string, params: unknown): string {\n const serialized = JSON.stringify(\n { version: __PACKAGE_VERSION__, modelId, params },\n (_key, value) => {\n if (typeof value === \"function\") {\n return \"[function]\";\n }\n if (value instanceof RegExp) {\n return value.toString();\n }\n return value;\n }\n );\n return createHash(\"sha256\").update(serialized).digest(\"hex\");\n}\n\nfunction getCachePath(cacheDir: string, key: string): string {\n return join(cacheDir, key.slice(0, 2), `${key}.json`);\n}\n\nasync function readCache(cachePath: string): Promise<CachedResult | null> {\n try {\n const content = await readFile(cachePath, \"utf-8\");\n const parsed = JSON.parse(content) as CachedResult;\n if (parsed.response && typeof parsed.response === \"object\") {\n const resp = parsed.response as Record<string, unknown>;\n if (typeof resp.timestamp === \"string\") {\n resp.timestamp = new Date(resp.timestamp);\n }\n }\n return parsed;\n } catch {\n return null;\n }\n}\n\nasync function writeCache(\n cachePath: string,\n result: CachedResult\n): Promise<void> {\n try {\n await mkdir(dirname(cachePath), { recursive: true });\n await writeFile(cachePath, JSON.stringify(result), \"utf-8\");\n } catch {\n // Silent fail\n }\n}\n\nfunction createStreamFromParts(\n parts: LanguageModelV3StreamPart[]\n): ReadableStream<LanguageModelV3StreamPart> {\n let index = 0;\n return new ReadableStream({\n pull(controller) {\n if (index < parts.length) {\n controller.enqueue(parts[index++]);\n } else {\n controller.close();\n }\n },\n });\n}\n\ntype FinishReasonLike = { unified?: string } | string | null | undefined;\n\nfunction isErrorFinishReason(finishReason: FinishReasonLike): boolean {\n if (!finishReason) {\n return false;\n }\n const unified =\n typeof finishReason === \"string\" ? finishReason : finishReason.unified;\n return unified === \"error\" || unified === \"other\";\n}\n\nexport function createDiskCacheMiddleware(\n options: DiskCacheMiddlewareOptions = {}\n): LanguageModelV3Middleware {\n const generateKey = options.generateKey ?? defaultGenerateKey;\n const resolvedCacheDir = resolve(options.cacheDir ?? \".ai-cache\");\n\n const envEnabled = process.env.AI_CACHE_ENABLED;\n const enabled =\n envEnabled !== undefined\n ? 
envEnabled.toLowerCase() === \"true\" || envEnabled === \"1\"\n : (options.enabled ?? true);\n\n const envDebug = process.env.AI_CACHE_DEBUG;\n const debug =\n envDebug !== undefined\n ? envDebug.toLowerCase() === \"true\" || envDebug === \"1\"\n : (options.debug ?? false);\n\n const envForceRefresh = process.env.AI_CACHE_FORCE_REFRESH;\n const forceRefresh =\n envForceRefresh !== undefined\n ? envForceRefresh.toLowerCase() === \"true\" || envForceRefresh === \"1\"\n : (options.forceRefresh ?? false);\n\n const log = debug\n ? (msg: string, data?: unknown) =>\n console.log(`[ai-cache] ${msg}`, data ?? \"\")\n : () => undefined;\n\n if (!enabled) {\n return { specificationVersion: \"v3\" };\n }\n\n return {\n specificationVersion: \"v3\",\n\n wrapGenerate: async ({ doGenerate, params, model }) => {\n const cacheKey = generateKey(model.modelId, params);\n const cachePath = getCachePath(resolvedCacheDir, cacheKey);\n\n if (!forceRefresh) {\n const cached = await readCache(cachePath);\n if (cached?.type === \"generate\") {\n log(\"HIT generate\", cacheKey.slice(0, 8));\n return {\n content: cached.content,\n finishReason: cached.finishReason,\n usage: cached.usage,\n warnings: cached.warnings,\n response: cached.response,\n providerMetadata: cached.providerMetadata,\n request: cached.request,\n } as Awaited<ReturnType<typeof doGenerate>>;\n }\n }\n\n log(\n forceRefresh ? \"REFRESH generate\" : \"MISS generate\",\n cacheKey.slice(0, 8)\n );\n const result = await doGenerate();\n\n if (isErrorFinishReason(result.finishReason)) {\n log(\"SKIP cache (error response)\", result.finishReason);\n } else {\n await writeCache(cachePath, {\n type: \"generate\",\n content: result.content,\n finishReason: result.finishReason,\n usage: result.usage,\n warnings: result.warnings,\n response: result.response,\n providerMetadata: result.providerMetadata,\n request: result.request,\n });\n }\n\n return result;\n },\n\n wrapStream: async ({ doStream, params, model }) => {\n const cacheKey = generateKey(model.modelId, params);\n const cachePath = getCachePath(resolvedCacheDir, cacheKey);\n\n if (!forceRefresh) {\n const cached = await readCache(cachePath);\n if (cached?.type === \"stream\") {\n log(\"HIT stream\", {\n key: cacheKey.slice(0, 8),\n parts: cached.parts.length,\n });\n return {\n stream: createStreamFromParts(cached.parts),\n response: cached.response,\n request: cached.request,\n } as Awaited<ReturnType<typeof doStream>>;\n }\n }\n\n log(\n forceRefresh ? 
\"REFRESH stream\" : \"MISS stream\",\n cacheKey.slice(0, 8)\n );\n const result = await doStream();\n\n const collectedParts: LanguageModelV3StreamPart[] = [];\n\n const cachedStream = result.stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform(chunk, controller) {\n collectedParts.push(chunk);\n controller.enqueue(chunk);\n },\n flush() {\n const finishPart = collectedParts.find((p) => p.type === \"finish\");\n if (finishPart && isErrorFinishReason(finishPart.finishReason)) {\n return;\n }\n\n writeCache(cachePath, {\n type: \"stream\",\n parts: collectedParts,\n response: result.response,\n request: result.request,\n });\n },\n })\n );\n\n return { ...result, stream: cachedStream };\n },\n };\n}\n\nexport async function clearDiskCache(cacheDir = \".ai-cache\"): Promise<void> {\n try {\n await rm(resolve(cacheDir), { recursive: true, force: true });\n } catch {\n // Directory doesn't exist\n }\n}\n\nexport async function getCacheStats(cacheDir = \".ai-cache\"): Promise<{\n totalFiles: number;\n totalSizeBytes: number;\n generateCount: number;\n streamCount: number;\n}> {\n const resolvedDir = resolve(cacheDir);\n let totalFiles = 0;\n let totalSizeBytes = 0;\n let generateCount = 0;\n let streamCount = 0;\n\n async function walkDir(dir: string): Promise<void> {\n try {\n const entries = await readdir(dir, { withFileTypes: true });\n await Promise.all(\n entries.map(async (entry) => {\n const fullPath = join(dir, entry.name);\n if (entry.isDirectory()) {\n await walkDir(fullPath);\n } else if (entry.name.endsWith(\".json\")) {\n totalFiles++;\n const fileStat = await stat(fullPath);\n totalSizeBytes += fileStat.size;\n\n try {\n const content = JSON.parse(\n await readFile(fullPath, \"utf-8\")\n ) as CachedResult;\n if (content.type === \"generate\") {\n generateCount++;\n } else if (content.type === \"stream\") {\n streamCount++;\n }\n } catch {\n // Skip malformed\n }\n }\n })\n );\n } catch {\n // Directory doesn't exist\n }\n }\n\n await walkDir(resolvedDir);\n return { totalFiles, totalSizeBytes, generateCount, streamCount };\n}\n","/**\n * @license\n * Copyright (c) 2021-present, FriendliAI Inc. All rights reserved.\n */\n\nimport type {\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\n/**\n * All code below is forked from the following link:\n * https://github.com/vercel/ai/blob/v5/packages/ai/core/middleware/extract-reasoning-middleware.ts\n */\n\n/**\n * Returns the index of the start of the searchedText in the text, or null if it\n * is not found.\n */\nexport function getPotentialStartIndex(\n text: string,\n searchedText: string\n): number | null {\n // Return null immediately if searchedText is empty.\n if (searchedText.length === 0) {\n return null;\n }\n\n // Check if the searchedText exists as a direct substring of text.\n const directIndex = text.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n\n // Otherwise, look for the largest suffix of \"text\" that matches\n // a prefix of \"searchedText\". 
We go from the end of text inward.\n for (let i = text.length - 1; i >= 0; i -= 1) {\n const suffix = text.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n\n return null;\n}\n\n/**\n * Extract an XML-tagged reasoning section from the generated text and exposes it\n * as a `reasoning` property on the result.\n *\n * @param openingTag - The opening XML tag to extract reasoning from.\n * @param closingTag - The closing XML tag to extract reasoning from.\n * @param separator - The separator to use between reasoning and text sections.\n * @param startWithReasoning - Whether to start with reasoning tokens.\n */\nexport function extractReasoningMiddleware({\n openingTag,\n closingTag,\n separator = \"\\n\",\n startWithReasoning = false,\n}: {\n openingTag: string;\n closingTag: string;\n separator?: string;\n startWithReasoning?: boolean;\n}): LanguageModelV3Middleware {\n function processTextPart(\n text: string,\n transformedContent: LanguageModelV3Content[]\n ) {\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n return;\n }\n\n const reasoningText = matches.map((match) => match[1]).join(separator);\n\n let textWithoutReasoning = text;\n for (let i = matches.length - 1; i >= 0; i -= 1) {\n const match = matches[i];\n\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const matchIndex = match.index ?? 0;\n const afterMatch = textWithoutReasoning.slice(\n matchIndex + match[0].length\n );\n\n textWithoutReasoning =\n beforeMatch +\n (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : \"\") +\n afterMatch;\n }\n\n transformedContent.push({\n type: \"reasoning\",\n text: reasoningText,\n });\n\n transformedContent.push({\n type: \"text\",\n text: textWithoutReasoning,\n });\n }\n\n return {\n specificationVersion: \"v3\",\n wrapGenerate: async ({ doGenerate }) => {\n const { content, ...rest } = await doGenerate();\n\n const transformedContent: LanguageModelV3Content[] = [];\n for (const part of content) {\n if (part.type !== \"text\") {\n transformedContent.push(part);\n continue;\n }\n\n const text = startWithReasoning ? openingTag + part.text : part.text;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n transformedContent.push(part);\n continue;\n }\n\n processTextPart(text, transformedContent);\n }\n\n return { content: transformedContent, ...rest };\n },\n\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n\n interface ExtractionState {\n isFirstReasoning: boolean;\n isFirstText: boolean;\n afterSwitch: boolean;\n isReasoning: boolean;\n buffer: string;\n idCounter: number;\n textId: string;\n }\n\n const reasoningExtractions: Record<string, ExtractionState> = {};\n\n function createPublisher(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n return (text: string) => {\n if (text.length === 0) {\n return;\n }\n\n const prefix = getPrefix(activeExtraction);\n enqueueReasoningStart(activeExtraction, controller);\n enqueueDelta(activeExtraction, controller, prefix, text);\n updateExtractionState(activeExtraction);\n };\n }\n\n function getPrefix(activeExtraction: ExtractionState): string {\n return activeExtraction.afterSwitch &&\n (activeExtraction.isReasoning\n ? 
!activeExtraction.isFirstReasoning\n : !activeExtraction.isFirstText)\n ? separator\n : \"\";\n }\n\n function enqueueReasoningStart(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n if (\n (activeExtraction.afterSwitch && activeExtraction.isReasoning) ||\n activeExtraction.isFirstReasoning\n ) {\n controller.enqueue({\n type: \"reasoning-start\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n }\n }\n\n function enqueueDelta(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n prefix: string,\n text: string\n ) {\n controller.enqueue(\n activeExtraction.isReasoning\n ? {\n type: \"reasoning-delta\",\n delta: prefix + text,\n id: `reasoning-${activeExtraction.idCounter}`,\n }\n : {\n type: \"text-delta\",\n delta: prefix + text,\n id: activeExtraction.textId,\n }\n );\n }\n\n function updateExtractionState(activeExtraction: ExtractionState) {\n activeExtraction.afterSwitch = false;\n if (activeExtraction.isReasoning) {\n activeExtraction.isFirstReasoning = false;\n } else {\n activeExtraction.isFirstText = false;\n }\n }\n\n function handleFullMatch(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n startIndex: number,\n nextTag: string\n ) {\n activeExtraction.buffer = activeExtraction.buffer.slice(\n startIndex + nextTag.length\n );\n\n if (activeExtraction.isReasoning) {\n controller.enqueue({\n type: \"reasoning-end\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n activeExtraction.idCounter += 1;\n }\n\n activeExtraction.isReasoning = !activeExtraction.isReasoning;\n activeExtraction.afterSwitch = true;\n }\n\n function processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n }: {\n activeExtraction: ExtractionState;\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>;\n publish: (text: string) => void;\n startIndex: number;\n nextTag: string;\n }): boolean {\n publish(activeExtraction.buffer.slice(0, startIndex));\n\n const foundFullMatch =\n startIndex + nextTag.length <= activeExtraction.buffer.length;\n\n if (foundFullMatch) {\n handleFullMatch(activeExtraction, controller, startIndex, nextTag);\n return true;\n }\n\n activeExtraction.buffer = activeExtraction.buffer.slice(startIndex);\n return false;\n }\n\n function processBuffer(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n const publish = createPublisher(activeExtraction, controller);\n let continueProcessing = true;\n\n while (continueProcessing) {\n const nextTag = activeExtraction.isReasoning\n ? 
closingTag\n : openingTag;\n const startIndex = getPotentialStartIndex(\n activeExtraction.buffer,\n nextTag\n );\n\n if (startIndex == null) {\n publish(activeExtraction.buffer);\n activeExtraction.buffer = \"\";\n break;\n }\n\n continueProcessing = processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n });\n }\n }\n\n return {\n stream: stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n\n if (reasoningExtractions[chunk.id] == null) {\n reasoningExtractions[chunk.id] = {\n isFirstReasoning: true,\n isFirstText: true,\n afterSwitch: false,\n isReasoning: startWithReasoning,\n buffer: \"\",\n idCounter: 0,\n textId: chunk.id,\n };\n }\n\n const activeExtraction = reasoningExtractions[chunk.id];\n activeExtraction.buffer += chunk.delta;\n processBuffer(activeExtraction, controller);\n },\n })\n ),\n ...rest,\n };\n },\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,kBAAkB,SAAsC;AAC/D,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAC3B,QAAI,WAAW,MAAM;AACnB;AAAA,IACF;AACA,WAAO,OAAO,OAAO;AAAA,EACvB;AAEA,QAAM,QAAS,QAAqC,IAAI,CAAC,SAAS;AA1BpE;AA2BI,SAAI,6BAAM,UAAS,UAAU,UAAU,MAAM;AAC3C,aAAO,QAAO,UAAK,SAAL,YAAa,EAAE;AAAA,IAC/B;AAEA,WAAO,KAAK,UAAU,IAAI;AAAA,EAC5B,CAAC;AAED,QAAM,YAAY,MAAM,OAAO,CAAC,UAAU,MAAM,SAAS,CAAC;AAC1D,MAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,EACF;AAEA,SAAO,UAAU,KAAK,IAAI;AAC5B;AAEA,SAAS,mBAAmB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AACF,GAIW;AACT,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,SAAO,cAAc,UACjB,GAAG,QAAQ;AAAA;AAAA,EAAO,IAAI,KACtB,GAAG,IAAI;AAAA;AAAA,EAAO,QAAQ;AAC5B;AAEA,SAAS,kBACP,QACuB;AACvB,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,CAAC,GAAG,MAAM;AACnB;AAEO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA,YAAY;AACd,GAAoE;AAClE,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,iBAAiB,CAAC,EAAE,OAAO,MAAM;AAC/B,YAAM,SAAS,kBAAkB,OAAO,MAAM;AAC9C,YAAM,cAAc,OAAO;AAAA,QACzB,CAAC,YAAY,QAAQ,SAAS;AAAA,MAChC;AAEA,UAAI,gBAAgB,IAAI;AACtB,cAAM,mBACJ,cAAc,UACT;AAAA,UACC;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,GAAG;AAAA,QACL,IACC;AAAA,UACC,GAAG;AAAA,UACH;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAEN,cAAMA,cAAyC;AAAA,UAC7C,GAAG;AAAA,UACH,QAAQ;AAAA,QACV;AAEA,eAAO,QAAQ,QAAoCA,WAAU;AAAA,MAC/D;AAEA,YAAM,gBAAgB,OAAO,WAAW;AACxC,YAAM,WAAW,kBAAkB,cAAc,OAAO;AACxD,YAAM,gBAAgB,mBAAmB;AAAA,QACvC,MAAM;AAAA,QACN,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAED,YAAM,gBAAgB,OAAO;AAAA,QAAI,CAAC,SAAS,UACzC,UAAU,cACN;AAAA,UACE,GAAG;AAAA,UACH,SAAS;AAAA,QACX,IACA;AAAA,MACN;AAEA,YAAM,aAAyC;AAAA,QAC7C,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAEA,aAAO,QAAQ,QAAoC,UAAU;AAAA,IAC/D;AAAA,EACF;AACF;;;ACzIA,yBAA2B;AAC3B,sBAOO;AACP,uBAAuC;AAoCvC,SAAS,mBAAmB,SAAiB,QAAyB;AACpE,QAAM,aAAa,KAAK;AAAA,IACtB,EAAE,SAAS,SAAqB,SAAS,OAAO;AAAA,IAChD,CAAC,MAAM,UAAU;AACf,UAAI,OAAO,UAAU,YAAY;AAC/B,eAAO;AAAA,MACT;AACA,UAAI,iBAAiB,QAAQ;AAC3B,eAAO,MAAM,SAAS;AAAA,MACxB;AACA,aAAO;AAAA,IACT;AAAA,EACF;AACA,aAAO,+BAAW,QAAQ,EAAE,OAAO,UAAU,EAAE,OAAO,KAAK;AAC7D;AAEA,SAAS,aAAa,UAAkB,KAAqB;AAC3D,aAAO,uBAAK,UAAU,IAAI,MAAM,GAAG,CAAC,GAAG,GAAG,GAAG,OAAO;AACtD;AAEA,eAAe,UAAU,WAAiD;AACxE,MAAI;AACF,UAAM,UAAU,UAAM,0BAAS,WAAW,OAAO;AACjD,UAAM,SAAS,KAAK,MAAM,OAAO;AACjC,QAAI,OAAO,YAAY,OAAO,OAAO,aAAa,UAAU;AAC1D,YAAM,OAAO,OAAO;AACpB,UAAI,OAAO,KAAK,cAAc,UAAU;AACtC,aAAK,YAAY,IAAI,KAAK,KAAK,SAAS;AAAA,MAC1C;AAAA,IACF;AACA,WAAO;AAAA,EACT,SAAQ;AACN,WAAO;AAAA,EACT;AACF;AAEA
,eAAe,WACb,WACA,QACe;AACf,MAAI;AACF,cAAM,2BAAM,0BAAQ,SAAS,GAAG,EAAE,WAAW,KAAK,CAAC;AACnD,cAAM,2BAAU,WAAW,KAAK,UAAU,MAAM,GAAG,OAAO;AAAA,EAC5D,SAAQ;AAAA,EAER;AACF;AAEA,SAAS,sBACP,OAC2C;AAC3C,MAAI,QAAQ;AACZ,SAAO,IAAI,eAAe;AAAA,IACxB,KAAK,YAAY;AACf,UAAI,QAAQ,MAAM,QAAQ;AACxB,mBAAW,QAAQ,MAAM,OAAO,CAAC;AAAA,MACnC,OAAO;AACL,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAIA,SAAS,oBAAoB,cAAyC;AACpE,MAAI,CAAC,cAAc;AACjB,WAAO;AAAA,EACT;AACA,QAAM,UACJ,OAAO,iBAAiB,WAAW,eAAe,aAAa;AACjE,SAAO,YAAY,WAAW,YAAY;AAC5C;AAEO,SAAS,0BACd,UAAsC,CAAC,GACZ;AAzH7B;AA0HE,QAAM,eAAc,aAAQ,gBAAR,YAAuB;AAC3C,QAAM,uBAAmB,2BAAQ,aAAQ,aAAR,YAAoB,WAAW;AAEhE,QAAM,aAAa,QAAQ,IAAI;AAC/B,QAAM,UACJ,eAAe,SACX,WAAW,YAAY,MAAM,UAAU,eAAe,OACrD,aAAQ,YAAR,YAAmB;AAE1B,QAAM,WAAW,QAAQ,IAAI;AAC7B,QAAM,QACJ,aAAa,SACT,SAAS,YAAY,MAAM,UAAU,aAAa,OACjD,aAAQ,UAAR,YAAiB;AAExB,QAAM,kBAAkB,QAAQ,IAAI;AACpC,QAAM,eACJ,oBAAoB,SAChB,gBAAgB,YAAY,MAAM,UAAU,oBAAoB,OAC/D,aAAQ,iBAAR,YAAwB;AAE/B,QAAM,MAAM,QACR,CAAC,KAAa,SACZ,QAAQ,IAAI,cAAc,GAAG,IAAI,sBAAQ,EAAE,IAC7C,MAAM;AAEV,MAAI,CAAC,SAAS;AACZ,WAAO,EAAE,sBAAsB,KAAK;AAAA,EACtC;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IAEtB,cAAc,OAAO,EAAE,YAAY,QAAQ,MAAM,MAAM;AACrD,YAAM,WAAW,YAAY,MAAM,SAAS,MAAM;AAClD,YAAM,YAAY,aAAa,kBAAkB,QAAQ;AAEzD,UAAI,CAAC,cAAc;AACjB,cAAM,SAAS,MAAM,UAAU,SAAS;AACxC,aAAI,iCAAQ,UAAS,YAAY;AAC/B,cAAI,gBAAgB,SAAS,MAAM,GAAG,CAAC,CAAC;AACxC,iBAAO;AAAA,YACL,SAAS,OAAO;AAAA,YAChB,cAAc,OAAO;AAAA,YACrB,OAAO,OAAO;AAAA,YACd,UAAU,OAAO;AAAA,YACjB,UAAU,OAAO;AAAA,YACjB,kBAAkB,OAAO;AAAA,YACzB,SAAS,OAAO;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAEA;AAAA,QACE,eAAe,qBAAqB;AAAA,QACpC,SAAS,MAAM,GAAG,CAAC;AAAA,MACrB;AACA,YAAM,SAAS,MAAM,WAAW;AAEhC,UAAI,oBAAoB,OAAO,YAAY,GAAG;AAC5C,YAAI,+BAA+B,OAAO,YAAY;AAAA,MACxD,OAAO;AACL,cAAM,WAAW,WAAW;AAAA,UAC1B,MAAM;AAAA,UACN,SAAS,OAAO;AAAA,UAChB,cAAc,OAAO;AAAA,UACrB,OAAO,OAAO;AAAA,UACd,UAAU,OAAO;AAAA,UACjB,UAAU,OAAO;AAAA,UACjB,kBAAkB,OAAO;AAAA,UACzB,SAAS,OAAO;AAAA,QAClB,CAAC;AAAA,MACH;AAEA,aAAO;AAAA,IACT;AAAA,IAEA,YAAY,OAAO,EAAE,UAAU,QAAQ,MAAM,MAAM;AACjD,YAAM,WAAW,YAAY,MAAM,SAAS,MAAM;AAClD,YAAM,YAAY,aAAa,kBAAkB,QAAQ;AAEzD,UAAI,CAAC,cAAc;AACjB,cAAM,SAAS,MAAM,UAAU,SAAS;AACxC,aAAI,iCAAQ,UAAS,UAAU;AAC7B,cAAI,cAAc;AAAA,YAChB,KAAK,SAAS,MAAM,GAAG,CAAC;AAAA,YACxB,OAAO,OAAO,MAAM;AAAA,UACtB,CAAC;AACD,iBAAO;AAAA,YACL,QAAQ,sBAAsB,OAAO,KAAK;AAAA,YAC1C,UAAU,OAAO;AAAA,YACjB,SAAS,OAAO;AAAA,UAClB;AAAA,QACF;AAAA,MACF;AAEA;AAAA,QACE,eAAe,mBAAmB;AAAA,QAClC,SAAS,MAAM,GAAG,CAAC;AAAA,MACrB;AACA,YAAM,SAAS,MAAM,SAAS;AAE9B,YAAM,iBAA8C,CAAC;AAErD,YAAM,eAAe,OAAO,OAAO;AAAA,QACjC,IAAI,gBAGF;AAAA,UACA,UAAU,OAAO,YAAY;AAC3B,2BAAe,KAAK,KAAK;AACzB,uBAAW,QAAQ,KAAK;AAAA,UAC1B;AAAA,UACA,QAAQ;AACN,kBAAM,aAAa,eAAe,KAAK,CAAC,MAAM,EAAE,SAAS,QAAQ;AACjE,gBAAI,cAAc,oBAAoB,WAAW,YAAY,GAAG;AAC9D;AAAA,YACF;AAEA,uBAAW,WAAW;AAAA,cACpB,MAAM;AAAA,cACN,OAAO;AAAA,cACP,UAAU,OAAO;AAAA,cACjB,SAAS,OAAO;AAAA,YAClB,CAAC;AAAA,UACH;AAAA,QACF,CAAC;AAAA,MACH;AAEA,aAAO,EAAE,GAAG,QAAQ,QAAQ,aAAa;AAAA,IAC3C;AAAA,EACF;AACF;AAEA,eAAsB,eAAe,WAAW,aAA4B;AAC1E,MAAI;AACF,cAAM,wBAAG,0BAAQ,QAAQ,GAAG,EAAE,WAAW,MAAM,OAAO,KAAK,CAAC;AAAA,EAC9D,SAAQ;AAAA,EAER;AACF;AAEA,eAAsB,cAAc,WAAW,aAK5C;AACD,QAAM,kBAAc,0BAAQ,QAAQ;AACpC,MAAI,aAAa;AACjB,MAAI,iBAAiB;AACrB,MAAI,gBAAgB;AACpB,MAAI,cAAc;AAElB,iBAAe,QAAQ,KAA4B;AACjD,QAAI;AACF,YAAM,UAAU,UAAM,yBAAQ,KAAK,EAAE,eAAe,KAAK,CAAC;AAC1D,YAAM,QAAQ;AAAA,QACZ,QAAQ,IAAI,OAAO,UAAU;AAC3B,gBAAM,eAAW,uBAAK,KAAK,MAAM,IAAI;AACrC,cAAI,MAAM,YAAY,GAAG;AACvB,kBAAM,QAAQ,QAAQ;AAAA,UACxB,WAAW,MAAM,KAAK,SAAS,OAAO,GAAG;AACvC;AACA,kBAAM,WAAW,UAAM,sBAAK,QAAQ;AACpC,8BAAkB,SAAS;AAE3B,gBAAI;AACF,oBAAM,UAAU,KAAK;AAAA,gBACnB,UAAM,0BAAS,UAAU,OAAO;AAAA,cAClC;AACA,kBAAI,QAA
Q,SAAS,YAAY;AAC/B;AAAA,cACF,WAAW,QAAQ,SAAS,UAAU;AACpC;AAAA,cACF;AAAA,YACF,SAAQ;AAAA,YAER;AAAA,UACF;AAAA,QACF,CAAC;AAAA,MACH;AAAA,IACF,SAAQ;AAAA,IAER;AAAA,EACF;AAEA,QAAM,QAAQ,WAAW;AACzB,SAAO,EAAE,YAAY,gBAAgB,eAAe,YAAY;AAClE;;;ACnSO,SAAS,uBACd,MACA,cACe;AAEf,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,KAAK,QAAQ,YAAY;AAC7C,MAAI,gBAAgB,IAAI;AACtB,WAAO;AAAA,EACT;AAIA,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC5C,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,aAAa,WAAW,MAAM,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,qBAAqB;AACvB,GAK8B;AAC5B,WAAS,gBACP,MACA,oBACA;AAtEJ;AAuEI,UAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,UAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,QAAI,CAAC,QAAQ,QAAQ;AACnB;AAAA,IACF;AAEA,UAAM,gBAAgB,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK,SAAS;AAErE,QAAI,uBAAuB;AAC3B,aAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC/C,YAAM,QAAQ,QAAQ,CAAC;AAEvB,YAAM,cAAc,qBAAqB,MAAM,GAAG,MAAM,KAAK;AAC7D,YAAM,cAAa,WAAM,UAAN,YAAe;AAClC,YAAM,aAAa,qBAAqB;AAAA,QACtC,aAAa,MAAM,CAAC,EAAE;AAAA,MACxB;AAEA,6BACE,eACC,YAAY,SAAS,KAAK,WAAW,SAAS,IAAI,YAAY,MAC/D;AAAA,IACJ;AAEA,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAED,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,cAAc,OAAO,EAAE,WAAW,MAAM;AACtC,YAAM,EAAE,SAAS,GAAG,KAAK,IAAI,MAAM,WAAW;AAE9C,YAAM,qBAA+C,CAAC;AACtD,iBAAW,QAAQ,SAAS;AAC1B,YAAI,KAAK,SAAS,QAAQ;AACxB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,cAAM,OAAO,qBAAqB,aAAa,KAAK,OAAO,KAAK;AAChE,cAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,cAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,YAAI,CAAC,QAAQ,QAAQ;AACnB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,wBAAgB,MAAM,kBAAkB;AAAA,MAC1C;AAEA,aAAO,EAAE,SAAS,oBAAoB,GAAG,KAAK;AAAA,IAChD;AAAA,IAEA,YAAY,OAAO,EAAE,SAAS,MAAM;AAClC,YAAM,EAAE,QAAQ,GAAG,KAAK,IAAI,MAAM,SAAS;AAY3C,YAAM,uBAAwD,CAAC;AAE/D,eAAS,gBACP,kBACA,YACA;AACA,eAAO,CAAC,SAAiB;AACvB,cAAI,KAAK,WAAW,GAAG;AACrB;AAAA,UACF;AAEA,gBAAM,SAAS,UAAU,gBAAgB;AACzC,gCAAsB,kBAAkB,UAAU;AAClD,uBAAa,kBAAkB,YAAY,QAAQ,IAAI;AACvD,gCAAsB,gBAAgB;AAAA,QACxC;AAAA,MACF;AAEA,eAAS,UAAU,kBAA2C;AAC5D,eAAO,iBAAiB,gBACrB,iBAAiB,cACd,CAAC,iBAAiB,mBAClB,CAAC,iBAAiB,eACpB,YACA;AAAA,MACN;AAEA,eAAS,sBACP,kBACA,YACA;AACA,YACG,iBAAiB,eAAe,iBAAiB,eAClD,iBAAiB,kBACjB;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,eAAS,aACP,kBACA,YACA,QACA,MACA;AACA,mBAAW;AAAA,UACT,iBAAiB,cACb;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,IACA;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,iBAAiB;AAAA,UACvB;AAAA,QACN;AAAA,MACF;AAEA,eAAS,sBAAsB,kBAAmC;AAChE,yBAAiB,cAAc;AAC/B,YAAI,iBAAiB,aAAa;AAChC,2BAAiB,mBAAmB;AAAA,QACtC,OAAO;AACL,2BAAiB,cAAc;AAAA,QACjC;AAAA,MACF;AAEA,eAAS,gBACP,kBACA,YACA,YACA,SACA;AACA,yBAAiB,SAAS,iBAAiB,OAAO;AAAA,UAChD,aAAa,QAAQ;AAAA,QACvB;AAEA,YAAI,iBAAiB,aAAa;AAChC,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AACD,2BAAiB,aAAa;AAAA,QAChC;AAEA,yBAAiB,cAAc,CAAC,iBAAiB;AACjD,yBAAiB,cAAc;AAAA,MACjC;AAEA,eAAS,gBAAgB;AAAA,QACvB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,GAMY;AACV,gBAAQ,iBAAiB,OAAO,MAAM,GAAG,UAAU,CAAC;AAEpD,cAAM,iBACJ,aAAa,QAAQ,UAAU,iBAAiB,OAAO;AAEzD,YAAI,gBAAgB;AAClB,0BAAgB,kBAAkB,YAAY,YAAY,OAAO;AACjE,iBAAO;AAAA,QACT;AAEA,yBAAiB,SAAS,iBAAiB,OAAO,MAAM,UAAU;AAClE,eAAO;AAAA,MACT;AAEA,eAAS,cACP,kBACA,YACA;AACA,cAAM,UAAU,gBAAgB,kBAAkB,UAAU;AAC5D,YAAI,qBAAqB;AAEzB,eAAO,oBAAoB;AACzB,gBAAM,UAAU,iBAAiB,cAC7B,aACA;AACJ,gBAAM,aAAa;AAAA,YACjB,iBAAiB;AA
AA,YACjB;AAAA,UACF;AAEA,cAAI,cAAc,MAAM;AACtB,oBAAQ,iBAAiB,MAAM;AAC/B,6BAAiB,SAAS;AAC1B;AAAA,UACF;AAEA,+BAAqB,gBAAgB;AAAA,YACnC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ,OAAO;AAAA,UACb,IAAI,gBAGF;AAAA,YACA,WAAW,CAAC,OAAO,eAAe;AAChC,kBAAI,MAAM,SAAS,cAAc;AAC/B,2BAAW,QAAQ,KAAK;AACxB;AAAA,cACF;AAEA,kBAAI,qBAAqB,MAAM,EAAE,KAAK,MAAM;AAC1C,qCAAqB,MAAM,EAAE,IAAI;AAAA,kBAC/B,kBAAkB;AAAA,kBAClB,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR,WAAW;AAAA,kBACX,QAAQ,MAAM;AAAA,gBAChB;AAAA,cACF;AAEA,oBAAM,mBAAmB,qBAAqB,MAAM,EAAE;AACtD,+BAAiB,UAAU,MAAM;AACjC,4BAAc,kBAAkB,UAAU;AAAA,YAC5C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;","names":["nextParams"]}
package/dist/index.d.cts CHANGED
@@ -1,4 +1,5 @@
1
1
  import { LanguageModelV3Middleware } from '@ai-sdk/provider';
2
+ export { DiskCacheMiddlewareOptions, clearDiskCache, createDiskCacheMiddleware, getCacheStats } from './disk-cache.cjs';
2
3
  export { extractReasoningMiddleware, getPotentialStartIndex } from './reasoning-parser.cjs';
3
4
 
4
5
  type SystemPromptPlacement = "first" | "last";
package/dist/index.d.ts CHANGED
@@ -1,4 +1,5 @@
1
1
  import { LanguageModelV3Middleware } from '@ai-sdk/provider';
2
+ export { DiskCacheMiddlewareOptions, clearDiskCache, createDiskCacheMiddleware, getCacheStats } from './disk-cache.js';
2
3
  export { extractReasoningMiddleware, getPotentialStartIndex } from './reasoning-parser.js';
3
4
 
4
5
  type SystemPromptPlacement = "first" | "last";
package/dist/index.js CHANGED
@@ -1,7 +1,12 @@
1
1
  import {
2
2
  extractReasoningMiddleware,
3
3
  getPotentialStartIndex
4
- } from "./chunk-Q2TVVB35.js";
4
+ } from "./chunk-R4PZN7IW.js";
5
+ import {
6
+ clearDiskCache,
7
+ createDiskCacheMiddleware,
8
+ getCacheStats
9
+ } from "./chunk-XJIXXAOA.js";
5
10
 
6
11
  // src/default-system-prompt.ts
7
12
  function extractSystemText(content) {
@@ -103,8 +108,11 @@ function defaultSystemPromptMiddleware({
103
108
  };
104
109
  }
105
110
  export {
111
+ clearDiskCache,
112
+ createDiskCacheMiddleware,
106
113
  defaultSystemPromptMiddleware,
107
114
  extractReasoningMiddleware,
115
+ getCacheStats,
108
116
  getPotentialStartIndex
109
117
  };
110
118
  //# sourceMappingURL=index.js.map
package/dist/index.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/default-system-prompt.ts"],"sourcesContent":["import type {\n LanguageModelV3CallOptions,\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3Prompt,\n} from \"@ai-sdk/provider\";\n\ntype SystemPromptPlacement = \"first\" | \"last\";\n\ninterface DefaultSystemPromptMiddlewareOptions {\n systemPrompt: string;\n placement?: SystemPromptPlacement;\n}\n\nfunction extractSystemText(content: unknown): string | undefined {\n if (typeof content === \"string\") {\n return content;\n }\n\n if (!Array.isArray(content)) {\n if (content == null) {\n return;\n }\n return String(content);\n }\n\n const parts = (content as LanguageModelV3Content[]).map((part) => {\n if (part?.type === \"text\" && \"text\" in part) {\n return String(part.text ?? \"\");\n }\n\n return JSON.stringify(part);\n });\n\n const textParts = parts.filter((value) => value.length > 0);\n if (textParts.length === 0) {\n return;\n }\n\n return textParts.join(\"\\n\");\n}\n\nfunction mergeSystemPrompts({\n base,\n addition,\n placement,\n}: {\n base?: string;\n addition: string;\n placement: SystemPromptPlacement;\n}): string {\n if (!base) {\n return addition;\n }\n\n if (addition.length === 0) {\n return base;\n }\n\n return placement === \"first\"\n ? `${addition}\\n\\n${base}`\n : `${base}\\n\\n${addition}`;\n}\n\nfunction ensurePromptArray(\n prompt?: LanguageModelV3Prompt\n): LanguageModelV3Prompt {\n if (!prompt) {\n return [];\n }\n\n return [...prompt];\n}\n\nexport function defaultSystemPromptMiddleware({\n systemPrompt,\n placement = \"first\",\n}: DefaultSystemPromptMiddlewareOptions): LanguageModelV3Middleware {\n return {\n specificationVersion: \"v3\",\n transformParams: ({ params }) => {\n const prompt = ensurePromptArray(params.prompt);\n const systemIndex = prompt.findIndex(\n (message) => message.role === \"system\"\n );\n\n if (systemIndex === -1) {\n const promptWithSystem =\n placement === \"first\"\n ? ([\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ...prompt,\n ] as LanguageModelV3Prompt)\n : ([\n ...prompt,\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ] as LanguageModelV3Prompt);\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: promptWithSystem,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n }\n\n const systemMessage = prompt[systemIndex];\n const baseText = extractSystemText(systemMessage.content);\n const mergedContent = mergeSystemPrompts({\n base: baseText,\n addition: systemPrompt,\n placement,\n });\n\n const updatedPrompt = prompt.map((message, index) =>\n index === systemIndex\n ? 
{\n ...message,\n content: mergedContent,\n }\n : message\n ) as LanguageModelV3Prompt;\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: updatedPrompt,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n },\n };\n}\n"],"mappings":";;;;;;AAcA,SAAS,kBAAkB,SAAsC;AAC/D,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAC3B,QAAI,WAAW,MAAM;AACnB;AAAA,IACF;AACA,WAAO,OAAO,OAAO;AAAA,EACvB;AAEA,QAAM,QAAS,QAAqC,IAAI,CAAC,SAAS;AA1BpE;AA2BI,SAAI,6BAAM,UAAS,UAAU,UAAU,MAAM;AAC3C,aAAO,QAAO,UAAK,SAAL,YAAa,EAAE;AAAA,IAC/B;AAEA,WAAO,KAAK,UAAU,IAAI;AAAA,EAC5B,CAAC;AAED,QAAM,YAAY,MAAM,OAAO,CAAC,UAAU,MAAM,SAAS,CAAC;AAC1D,MAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,EACF;AAEA,SAAO,UAAU,KAAK,IAAI;AAC5B;AAEA,SAAS,mBAAmB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AACF,GAIW;AACT,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,SAAO,cAAc,UACjB,GAAG,QAAQ;AAAA;AAAA,EAAO,IAAI,KACtB,GAAG,IAAI;AAAA;AAAA,EAAO,QAAQ;AAC5B;AAEA,SAAS,kBACP,QACuB;AACvB,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,CAAC,GAAG,MAAM;AACnB;AAEO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA,YAAY;AACd,GAAoE;AAClE,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,iBAAiB,CAAC,EAAE,OAAO,MAAM;AAC/B,YAAM,SAAS,kBAAkB,OAAO,MAAM;AAC9C,YAAM,cAAc,OAAO;AAAA,QACzB,CAAC,YAAY,QAAQ,SAAS;AAAA,MAChC;AAEA,UAAI,gBAAgB,IAAI;AACtB,cAAM,mBACJ,cAAc,UACT;AAAA,UACC;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,GAAG;AAAA,QACL,IACC;AAAA,UACC,GAAG;AAAA,UACH;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAEN,cAAMA,cAAyC;AAAA,UAC7C,GAAG;AAAA,UACH,QAAQ;AAAA,QACV;AAEA,eAAO,QAAQ,QAAoCA,WAAU;AAAA,MAC/D;AAEA,YAAM,gBAAgB,OAAO,WAAW;AACxC,YAAM,WAAW,kBAAkB,cAAc,OAAO;AACxD,YAAM,gBAAgB,mBAAmB;AAAA,QACvC,MAAM;AAAA,QACN,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAED,YAAM,gBAAgB,OAAO;AAAA,QAAI,CAAC,SAAS,UACzC,UAAU,cACN;AAAA,UACE,GAAG;AAAA,UACH,SAAS;AAAA,QACX,IACA;AAAA,MACN;AAEA,YAAM,aAAyC;AAAA,QAC7C,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAEA,aAAO,QAAQ,QAAoC,UAAU;AAAA,IAC/D;AAAA,EACF;AACF;","names":["nextParams"]}
1
+ {"version":3,"sources":["../src/default-system-prompt.ts"],"sourcesContent":["import type {\n LanguageModelV3CallOptions,\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3Prompt,\n} from \"@ai-sdk/provider\";\n\ntype SystemPromptPlacement = \"first\" | \"last\";\n\ninterface DefaultSystemPromptMiddlewareOptions {\n systemPrompt: string;\n placement?: SystemPromptPlacement;\n}\n\nfunction extractSystemText(content: unknown): string | undefined {\n if (typeof content === \"string\") {\n return content;\n }\n\n if (!Array.isArray(content)) {\n if (content == null) {\n return;\n }\n return String(content);\n }\n\n const parts = (content as LanguageModelV3Content[]).map((part) => {\n if (part?.type === \"text\" && \"text\" in part) {\n return String(part.text ?? \"\");\n }\n\n return JSON.stringify(part);\n });\n\n const textParts = parts.filter((value) => value.length > 0);\n if (textParts.length === 0) {\n return;\n }\n\n return textParts.join(\"\\n\");\n}\n\nfunction mergeSystemPrompts({\n base,\n addition,\n placement,\n}: {\n base?: string;\n addition: string;\n placement: SystemPromptPlacement;\n}): string {\n if (!base) {\n return addition;\n }\n\n if (addition.length === 0) {\n return base;\n }\n\n return placement === \"first\"\n ? `${addition}\\n\\n${base}`\n : `${base}\\n\\n${addition}`;\n}\n\nfunction ensurePromptArray(\n prompt?: LanguageModelV3Prompt\n): LanguageModelV3Prompt {\n if (!prompt) {\n return [];\n }\n\n return [...prompt];\n}\n\nexport function defaultSystemPromptMiddleware({\n systemPrompt,\n placement = \"first\",\n}: DefaultSystemPromptMiddlewareOptions): LanguageModelV3Middleware {\n return {\n specificationVersion: \"v3\",\n transformParams: ({ params }) => {\n const prompt = ensurePromptArray(params.prompt);\n const systemIndex = prompt.findIndex(\n (message) => message.role === \"system\"\n );\n\n if (systemIndex === -1) {\n const promptWithSystem =\n placement === \"first\"\n ? ([\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ...prompt,\n ] as LanguageModelV3Prompt)\n : ([\n ...prompt,\n {\n role: \"system\" as const,\n content: systemPrompt,\n },\n ] as LanguageModelV3Prompt);\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: promptWithSystem,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n }\n\n const systemMessage = prompt[systemIndex];\n const baseText = extractSystemText(systemMessage.content);\n const mergedContent = mergeSystemPrompts({\n base: baseText,\n addition: systemPrompt,\n placement,\n });\n\n const updatedPrompt = prompt.map((message, index) =>\n index === systemIndex\n ? 
{\n ...message,\n content: mergedContent,\n }\n : message\n ) as LanguageModelV3Prompt;\n\n const nextParams: LanguageModelV3CallOptions = {\n ...params,\n prompt: updatedPrompt,\n };\n\n return Promise.resolve<LanguageModelV3CallOptions>(nextParams);\n },\n };\n}\n"],"mappings":";;;;;;;;;;;AAcA,SAAS,kBAAkB,SAAsC;AAC/D,MAAI,OAAO,YAAY,UAAU;AAC/B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,MAAM,QAAQ,OAAO,GAAG;AAC3B,QAAI,WAAW,MAAM;AACnB;AAAA,IACF;AACA,WAAO,OAAO,OAAO;AAAA,EACvB;AAEA,QAAM,QAAS,QAAqC,IAAI,CAAC,SAAS;AA1BpE;AA2BI,SAAI,6BAAM,UAAS,UAAU,UAAU,MAAM;AAC3C,aAAO,QAAO,UAAK,SAAL,YAAa,EAAE;AAAA,IAC/B;AAEA,WAAO,KAAK,UAAU,IAAI;AAAA,EAC5B,CAAC;AAED,QAAM,YAAY,MAAM,OAAO,CAAC,UAAU,MAAM,SAAS,CAAC;AAC1D,MAAI,UAAU,WAAW,GAAG;AAC1B;AAAA,EACF;AAEA,SAAO,UAAU,KAAK,IAAI;AAC5B;AAEA,SAAS,mBAAmB;AAAA,EAC1B;AAAA,EACA;AAAA,EACA;AACF,GAIW;AACT,MAAI,CAAC,MAAM;AACT,WAAO;AAAA,EACT;AAEA,MAAI,SAAS,WAAW,GAAG;AACzB,WAAO;AAAA,EACT;AAEA,SAAO,cAAc,UACjB,GAAG,QAAQ;AAAA;AAAA,EAAO,IAAI,KACtB,GAAG,IAAI;AAAA;AAAA,EAAO,QAAQ;AAC5B;AAEA,SAAS,kBACP,QACuB;AACvB,MAAI,CAAC,QAAQ;AACX,WAAO,CAAC;AAAA,EACV;AAEA,SAAO,CAAC,GAAG,MAAM;AACnB;AAEO,SAAS,8BAA8B;AAAA,EAC5C;AAAA,EACA,YAAY;AACd,GAAoE;AAClE,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,iBAAiB,CAAC,EAAE,OAAO,MAAM;AAC/B,YAAM,SAAS,kBAAkB,OAAO,MAAM;AAC9C,YAAM,cAAc,OAAO;AAAA,QACzB,CAAC,YAAY,QAAQ,SAAS;AAAA,MAChC;AAEA,UAAI,gBAAgB,IAAI;AACtB,cAAM,mBACJ,cAAc,UACT;AAAA,UACC;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,UACA,GAAG;AAAA,QACL,IACC;AAAA,UACC,GAAG;AAAA,UACH;AAAA,YACE,MAAM;AAAA,YACN,SAAS;AAAA,UACX;AAAA,QACF;AAEN,cAAMA,cAAyC;AAAA,UAC7C,GAAG;AAAA,UACH,QAAQ;AAAA,QACV;AAEA,eAAO,QAAQ,QAAoCA,WAAU;AAAA,MAC/D;AAEA,YAAM,gBAAgB,OAAO,WAAW;AACxC,YAAM,WAAW,kBAAkB,cAAc,OAAO;AACxD,YAAM,gBAAgB,mBAAmB;AAAA,QACvC,MAAM;AAAA,QACN,UAAU;AAAA,QACV;AAAA,MACF,CAAC;AAED,YAAM,gBAAgB,OAAO;AAAA,QAAI,CAAC,SAAS,UACzC,UAAU,cACN;AAAA,UACE,GAAG;AAAA,UACH,SAAS;AAAA,QACX,IACA;AAAA,MACN;AAEA,YAAM,aAAyC;AAAA,QAC7C,GAAG;AAAA,QACH,QAAQ;AAAA,MACV;AAEA,aAAO,QAAQ,QAAoC,UAAU;AAAA,IAC/D;AAAA,EACF;AACF;","names":["nextParams"]}
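The sourcesContent embedded in the index.js.map diff above carries the full `defaultSystemPromptMiddleware` source, whose options are `{ systemPrompt: string; placement?: "first" | "last" }`. A minimal usage sketch based on that signature — passing the result to the AI SDK's `wrapLanguageModel` is an assumption about the consuming setup, not something shown in this diff:

```ts
import { defaultSystemPromptMiddleware } from "@ai-sdk-tool/middleware";

// Prepends (placement: "first") or appends (placement: "last") a default
// system prompt, merging it with any system message already in the prompt.
const systemPromptMiddleware = defaultSystemPromptMiddleware({
  systemPrompt: "You are a concise assistant.",
  placement: "first",
});

// The returned object is a LanguageModelV3Middleware
// ({ specificationVersion: "v3", transformParams }) and can be passed wherever
// the AI SDK accepts language-model middleware, e.g.
// wrapLanguageModel({ model, middleware: systemPromptMiddleware }).
```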
@@ -17,7 +17,7 @@ var __copyProps = (to, from, except, desc) => {
17
17
  };
18
18
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
19
 
20
- // src/reasoning-parser/index.ts
20
+ // src/reasoning-parser.ts
21
21
  var reasoning_parser_exports = {};
22
22
  __export(reasoning_parser_exports, {
23
23
  extractReasoningMiddleware: () => extractReasoningMiddleware,
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/reasoning-parser/index.ts"],"sourcesContent":["/**\n * @license\n * Copyright (c) 2021-present, FriendliAI Inc. All rights reserved.\n */\n\nimport type {\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\n/**\n * All code below is forked from the following link:\n * https://github.com/vercel/ai/blob/v5/packages/ai/core/middleware/extract-reasoning-middleware.ts\n */\n\n/**\n * Returns the index of the start of the searchedText in the text, or null if it\n * is not found.\n */\nexport function getPotentialStartIndex(\n text: string,\n searchedText: string\n): number | null {\n // Return null immediately if searchedText is empty.\n if (searchedText.length === 0) {\n return null;\n }\n\n // Check if the searchedText exists as a direct substring of text.\n const directIndex = text.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n\n // Otherwise, look for the largest suffix of \"text\" that matches\n // a prefix of \"searchedText\". We go from the end of text inward.\n for (let i = text.length - 1; i >= 0; i -= 1) {\n const suffix = text.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n\n return null;\n}\n\n/**\n * Extract an XML-tagged reasoning section from the generated text and exposes it\n * as a `reasoning` property on the result.\n *\n * @param openingTag - The opening XML tag to extract reasoning from.\n * @param closingTag - The closing XML tag to extract reasoning from.\n * @param separator - The separator to use between reasoning and text sections.\n * @param startWithReasoning - Whether to start with reasoning tokens.\n */\nexport function extractReasoningMiddleware({\n openingTag,\n closingTag,\n separator = \"\\n\",\n startWithReasoning = false,\n}: {\n openingTag: string;\n closingTag: string;\n separator?: string;\n startWithReasoning?: boolean;\n}): LanguageModelV3Middleware {\n function processTextPart(\n text: string,\n transformedContent: LanguageModelV3Content[]\n ) {\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n return;\n }\n\n const reasoningText = matches.map((match) => match[1]).join(separator);\n\n let textWithoutReasoning = text;\n for (let i = matches.length - 1; i >= 0; i -= 1) {\n const match = matches[i];\n\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const matchIndex = match.index ?? 0;\n const afterMatch = textWithoutReasoning.slice(\n matchIndex + match[0].length\n );\n\n textWithoutReasoning =\n beforeMatch +\n (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : \"\") +\n afterMatch;\n }\n\n transformedContent.push({\n type: \"reasoning\",\n text: reasoningText,\n });\n\n transformedContent.push({\n type: \"text\",\n text: textWithoutReasoning,\n });\n }\n\n return {\n specificationVersion: \"v3\",\n wrapGenerate: async ({ doGenerate }) => {\n const { content, ...rest } = await doGenerate();\n\n const transformedContent: LanguageModelV3Content[] = [];\n for (const part of content) {\n if (part.type !== \"text\") {\n transformedContent.push(part);\n continue;\n }\n\n const text = startWithReasoning ? 
openingTag + part.text : part.text;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n transformedContent.push(part);\n continue;\n }\n\n processTextPart(text, transformedContent);\n }\n\n return { content: transformedContent, ...rest };\n },\n\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n\n interface ExtractionState {\n isFirstReasoning: boolean;\n isFirstText: boolean;\n afterSwitch: boolean;\n isReasoning: boolean;\n buffer: string;\n idCounter: number;\n textId: string;\n }\n\n const reasoningExtractions: Record<string, ExtractionState> = {};\n\n function createPublisher(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n return (text: string) => {\n if (text.length === 0) {\n return;\n }\n\n const prefix = getPrefix(activeExtraction);\n enqueueReasoningStart(activeExtraction, controller);\n enqueueDelta(activeExtraction, controller, prefix, text);\n updateExtractionState(activeExtraction);\n };\n }\n\n function getPrefix(activeExtraction: ExtractionState): string {\n return activeExtraction.afterSwitch &&\n (activeExtraction.isReasoning\n ? !activeExtraction.isFirstReasoning\n : !activeExtraction.isFirstText)\n ? separator\n : \"\";\n }\n\n function enqueueReasoningStart(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n if (\n (activeExtraction.afterSwitch && activeExtraction.isReasoning) ||\n activeExtraction.isFirstReasoning\n ) {\n controller.enqueue({\n type: \"reasoning-start\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n }\n }\n\n function enqueueDelta(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n prefix: string,\n text: string\n ) {\n controller.enqueue(\n activeExtraction.isReasoning\n ? 
{\n type: \"reasoning-delta\",\n delta: prefix + text,\n id: `reasoning-${activeExtraction.idCounter}`,\n }\n : {\n type: \"text-delta\",\n delta: prefix + text,\n id: activeExtraction.textId,\n }\n );\n }\n\n function updateExtractionState(activeExtraction: ExtractionState) {\n activeExtraction.afterSwitch = false;\n if (activeExtraction.isReasoning) {\n activeExtraction.isFirstReasoning = false;\n } else {\n activeExtraction.isFirstText = false;\n }\n }\n\n function handleFullMatch(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n startIndex: number,\n nextTag: string\n ) {\n activeExtraction.buffer = activeExtraction.buffer.slice(\n startIndex + nextTag.length\n );\n\n if (activeExtraction.isReasoning) {\n controller.enqueue({\n type: \"reasoning-end\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n activeExtraction.idCounter += 1;\n }\n\n activeExtraction.isReasoning = !activeExtraction.isReasoning;\n activeExtraction.afterSwitch = true;\n }\n\n function processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n }: {\n activeExtraction: ExtractionState;\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>;\n publish: (text: string) => void;\n startIndex: number;\n nextTag: string;\n }): boolean {\n publish(activeExtraction.buffer.slice(0, startIndex));\n\n const foundFullMatch =\n startIndex + nextTag.length <= activeExtraction.buffer.length;\n\n if (foundFullMatch) {\n handleFullMatch(activeExtraction, controller, startIndex, nextTag);\n return true;\n }\n\n activeExtraction.buffer = activeExtraction.buffer.slice(startIndex);\n return false;\n }\n\n function processBuffer(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n const publish = createPublisher(activeExtraction, controller);\n let continueProcessing = true;\n\n while (continueProcessing) {\n const nextTag = activeExtraction.isReasoning\n ? 
closingTag\n : openingTag;\n const startIndex = getPotentialStartIndex(\n activeExtraction.buffer,\n nextTag\n );\n\n if (startIndex == null) {\n publish(activeExtraction.buffer);\n activeExtraction.buffer = \"\";\n break;\n }\n\n continueProcessing = processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n });\n }\n }\n\n return {\n stream: stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n\n if (reasoningExtractions[chunk.id] == null) {\n reasoningExtractions[chunk.id] = {\n isFirstReasoning: true,\n isFirstText: true,\n afterSwitch: false,\n isReasoning: startWithReasoning,\n buffer: \"\",\n idCounter: 0,\n textId: chunk.id,\n };\n }\n\n const activeExtraction = reasoningExtractions[chunk.id];\n activeExtraction.buffer += chunk.delta;\n processBuffer(activeExtraction, controller);\n },\n })\n ),\n ...rest,\n };\n },\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBO,SAAS,uBACd,MACA,cACe;AAEf,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,KAAK,QAAQ,YAAY;AAC7C,MAAI,gBAAgB,IAAI;AACtB,WAAO;AAAA,EACT;AAIA,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC5C,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,aAAa,WAAW,MAAM,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,qBAAqB;AACvB,GAK8B;AAC5B,WAAS,gBACP,MACA,oBACA;AAtEJ;AAuEI,UAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,UAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,QAAI,CAAC,QAAQ,QAAQ;AACnB;AAAA,IACF;AAEA,UAAM,gBAAgB,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK,SAAS;AAErE,QAAI,uBAAuB;AAC3B,aAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC/C,YAAM,QAAQ,QAAQ,CAAC;AAEvB,YAAM,cAAc,qBAAqB,MAAM,GAAG,MAAM,KAAK;AAC7D,YAAM,cAAa,WAAM,UAAN,YAAe;AAClC,YAAM,aAAa,qBAAqB;AAAA,QACtC,aAAa,MAAM,CAAC,EAAE;AAAA,MACxB;AAEA,6BACE,eACC,YAAY,SAAS,KAAK,WAAW,SAAS,IAAI,YAAY,MAC/D;AAAA,IACJ;AAEA,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAED,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,cAAc,OAAO,EAAE,WAAW,MAAM;AACtC,YAAM,EAAE,SAAS,GAAG,KAAK,IAAI,MAAM,WAAW;AAE9C,YAAM,qBAA+C,CAAC;AACtD,iBAAW,QAAQ,SAAS;AAC1B,YAAI,KAAK,SAAS,QAAQ;AACxB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,cAAM,OAAO,qBAAqB,aAAa,KAAK,OAAO,KAAK;AAChE,cAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,cAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,YAAI,CAAC,QAAQ,QAAQ;AACnB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,wBAAgB,MAAM,kBAAkB;AAAA,MAC1C;AAEA,aAAO,EAAE,SAAS,oBAAoB,GAAG,KAAK;AAAA,IAChD;AAAA,IAEA,YAAY,OAAO,EAAE,SAAS,MAAM;AAClC,YAAM,EAAE,QAAQ,GAAG,KAAK,IAAI,MAAM,SAAS;AAY3C,YAAM,uBAAwD,CAAC;AAE/D,eAAS,gBACP,kBACA,YACA;AACA,eAAO,CAAC,SAAiB;AACvB,cAAI,KAAK,WAAW,GAAG;AACrB;AAAA,UACF;AAEA,gBAAM,SAAS,UAAU,gBAAgB;AACzC,gCAAsB,kBAAkB,UAAU;AAClD,uBAAa,kBAAkB,YAAY,QAAQ,IAAI;AACvD,gCAAsB,gBAAgB;AAAA,QACxC;AAAA,MACF;AAEA,eAAS,UAAU,kBAA2C;AAC5D,eAAO,iBAAiB,gBACrB,iBAAiB,cACd,CAAC,iBAAiB,mBAClB,CAAC,iBAAiB,eACpB,YACA;AAAA,MACN;AAEA,eAAS,sBACP,kBACA,YACA;AACA,YACG,iBAAiB,eAAe,iBAAiB,eAClD,iBAAiB,kBACjB;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,eAAS,aACP,kBACA,YACA,QACA,MACA;AACA,mBAAW;AAAA,UACT,iBAAiB,cACb;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,IACA;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,iBAAiB;AAAA,UACvB;AAAA,QACN;AAAA,MACF;AAEA,eAA
S,sBAAsB,kBAAmC;AAChE,yBAAiB,cAAc;AAC/B,YAAI,iBAAiB,aAAa;AAChC,2BAAiB,mBAAmB;AAAA,QACtC,OAAO;AACL,2BAAiB,cAAc;AAAA,QACjC;AAAA,MACF;AAEA,eAAS,gBACP,kBACA,YACA,YACA,SACA;AACA,yBAAiB,SAAS,iBAAiB,OAAO;AAAA,UAChD,aAAa,QAAQ;AAAA,QACvB;AAEA,YAAI,iBAAiB,aAAa;AAChC,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AACD,2BAAiB,aAAa;AAAA,QAChC;AAEA,yBAAiB,cAAc,CAAC,iBAAiB;AACjD,yBAAiB,cAAc;AAAA,MACjC;AAEA,eAAS,gBAAgB;AAAA,QACvB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,GAMY;AACV,gBAAQ,iBAAiB,OAAO,MAAM,GAAG,UAAU,CAAC;AAEpD,cAAM,iBACJ,aAAa,QAAQ,UAAU,iBAAiB,OAAO;AAEzD,YAAI,gBAAgB;AAClB,0BAAgB,kBAAkB,YAAY,YAAY,OAAO;AACjE,iBAAO;AAAA,QACT;AAEA,yBAAiB,SAAS,iBAAiB,OAAO,MAAM,UAAU;AAClE,eAAO;AAAA,MACT;AAEA,eAAS,cACP,kBACA,YACA;AACA,cAAM,UAAU,gBAAgB,kBAAkB,UAAU;AAC5D,YAAI,qBAAqB;AAEzB,eAAO,oBAAoB;AACzB,gBAAM,UAAU,iBAAiB,cAC7B,aACA;AACJ,gBAAM,aAAa;AAAA,YACjB,iBAAiB;AAAA,YACjB;AAAA,UACF;AAEA,cAAI,cAAc,MAAM;AACtB,oBAAQ,iBAAiB,MAAM;AAC/B,6BAAiB,SAAS;AAC1B;AAAA,UACF;AAEA,+BAAqB,gBAAgB;AAAA,YACnC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ,OAAO;AAAA,UACb,IAAI,gBAGF;AAAA,YACA,WAAW,CAAC,OAAO,eAAe;AAChC,kBAAI,MAAM,SAAS,cAAc;AAC/B,2BAAW,QAAQ,KAAK;AACxB;AAAA,cACF;AAEA,kBAAI,qBAAqB,MAAM,EAAE,KAAK,MAAM;AAC1C,qCAAqB,MAAM,EAAE,IAAI;AAAA,kBAC/B,kBAAkB;AAAA,kBAClB,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR,WAAW;AAAA,kBACX,QAAQ,MAAM;AAAA,gBAChB;AAAA,cACF;AAEA,oBAAM,mBAAmB,qBAAqB,MAAM,EAAE;AACtD,+BAAiB,UAAU,MAAM;AACjC,4BAAc,kBAAkB,UAAU;AAAA,YAC5C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
1
+ {"version":3,"sources":["../src/reasoning-parser.ts"],"sourcesContent":["/**\n * @license\n * Copyright (c) 2021-present, FriendliAI Inc. All rights reserved.\n */\n\nimport type {\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\n/**\n * All code below is forked from the following link:\n * https://github.com/vercel/ai/blob/v5/packages/ai/core/middleware/extract-reasoning-middleware.ts\n */\n\n/**\n * Returns the index of the start of the searchedText in the text, or null if it\n * is not found.\n */\nexport function getPotentialStartIndex(\n text: string,\n searchedText: string\n): number | null {\n // Return null immediately if searchedText is empty.\n if (searchedText.length === 0) {\n return null;\n }\n\n // Check if the searchedText exists as a direct substring of text.\n const directIndex = text.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n\n // Otherwise, look for the largest suffix of \"text\" that matches\n // a prefix of \"searchedText\". We go from the end of text inward.\n for (let i = text.length - 1; i >= 0; i -= 1) {\n const suffix = text.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n\n return null;\n}\n\n/**\n * Extract an XML-tagged reasoning section from the generated text and exposes it\n * as a `reasoning` property on the result.\n *\n * @param openingTag - The opening XML tag to extract reasoning from.\n * @param closingTag - The closing XML tag to extract reasoning from.\n * @param separator - The separator to use between reasoning and text sections.\n * @param startWithReasoning - Whether to start with reasoning tokens.\n */\nexport function extractReasoningMiddleware({\n openingTag,\n closingTag,\n separator = \"\\n\",\n startWithReasoning = false,\n}: {\n openingTag: string;\n closingTag: string;\n separator?: string;\n startWithReasoning?: boolean;\n}): LanguageModelV3Middleware {\n function processTextPart(\n text: string,\n transformedContent: LanguageModelV3Content[]\n ) {\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n return;\n }\n\n const reasoningText = matches.map((match) => match[1]).join(separator);\n\n let textWithoutReasoning = text;\n for (let i = matches.length - 1; i >= 0; i -= 1) {\n const match = matches[i];\n\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const matchIndex = match.index ?? 0;\n const afterMatch = textWithoutReasoning.slice(\n matchIndex + match[0].length\n );\n\n textWithoutReasoning =\n beforeMatch +\n (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : \"\") +\n afterMatch;\n }\n\n transformedContent.push({\n type: \"reasoning\",\n text: reasoningText,\n });\n\n transformedContent.push({\n type: \"text\",\n text: textWithoutReasoning,\n });\n }\n\n return {\n specificationVersion: \"v3\",\n wrapGenerate: async ({ doGenerate }) => {\n const { content, ...rest } = await doGenerate();\n\n const transformedContent: LanguageModelV3Content[] = [];\n for (const part of content) {\n if (part.type !== \"text\") {\n transformedContent.push(part);\n continue;\n }\n\n const text = startWithReasoning ? 
openingTag + part.text : part.text;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n transformedContent.push(part);\n continue;\n }\n\n processTextPart(text, transformedContent);\n }\n\n return { content: transformedContent, ...rest };\n },\n\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n\n interface ExtractionState {\n isFirstReasoning: boolean;\n isFirstText: boolean;\n afterSwitch: boolean;\n isReasoning: boolean;\n buffer: string;\n idCounter: number;\n textId: string;\n }\n\n const reasoningExtractions: Record<string, ExtractionState> = {};\n\n function createPublisher(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n return (text: string) => {\n if (text.length === 0) {\n return;\n }\n\n const prefix = getPrefix(activeExtraction);\n enqueueReasoningStart(activeExtraction, controller);\n enqueueDelta(activeExtraction, controller, prefix, text);\n updateExtractionState(activeExtraction);\n };\n }\n\n function getPrefix(activeExtraction: ExtractionState): string {\n return activeExtraction.afterSwitch &&\n (activeExtraction.isReasoning\n ? !activeExtraction.isFirstReasoning\n : !activeExtraction.isFirstText)\n ? separator\n : \"\";\n }\n\n function enqueueReasoningStart(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n if (\n (activeExtraction.afterSwitch && activeExtraction.isReasoning) ||\n activeExtraction.isFirstReasoning\n ) {\n controller.enqueue({\n type: \"reasoning-start\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n }\n }\n\n function enqueueDelta(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n prefix: string,\n text: string\n ) {\n controller.enqueue(\n activeExtraction.isReasoning\n ? 
{\n type: \"reasoning-delta\",\n delta: prefix + text,\n id: `reasoning-${activeExtraction.idCounter}`,\n }\n : {\n type: \"text-delta\",\n delta: prefix + text,\n id: activeExtraction.textId,\n }\n );\n }\n\n function updateExtractionState(activeExtraction: ExtractionState) {\n activeExtraction.afterSwitch = false;\n if (activeExtraction.isReasoning) {\n activeExtraction.isFirstReasoning = false;\n } else {\n activeExtraction.isFirstText = false;\n }\n }\n\n function handleFullMatch(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n startIndex: number,\n nextTag: string\n ) {\n activeExtraction.buffer = activeExtraction.buffer.slice(\n startIndex + nextTag.length\n );\n\n if (activeExtraction.isReasoning) {\n controller.enqueue({\n type: \"reasoning-end\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n activeExtraction.idCounter += 1;\n }\n\n activeExtraction.isReasoning = !activeExtraction.isReasoning;\n activeExtraction.afterSwitch = true;\n }\n\n function processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n }: {\n activeExtraction: ExtractionState;\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>;\n publish: (text: string) => void;\n startIndex: number;\n nextTag: string;\n }): boolean {\n publish(activeExtraction.buffer.slice(0, startIndex));\n\n const foundFullMatch =\n startIndex + nextTag.length <= activeExtraction.buffer.length;\n\n if (foundFullMatch) {\n handleFullMatch(activeExtraction, controller, startIndex, nextTag);\n return true;\n }\n\n activeExtraction.buffer = activeExtraction.buffer.slice(startIndex);\n return false;\n }\n\n function processBuffer(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n const publish = createPublisher(activeExtraction, controller);\n let continueProcessing = true;\n\n while (continueProcessing) {\n const nextTag = activeExtraction.isReasoning\n ? 
closingTag\n : openingTag;\n const startIndex = getPotentialStartIndex(\n activeExtraction.buffer,\n nextTag\n );\n\n if (startIndex == null) {\n publish(activeExtraction.buffer);\n activeExtraction.buffer = \"\";\n break;\n }\n\n continueProcessing = processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n });\n }\n }\n\n return {\n stream: stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n\n if (reasoningExtractions[chunk.id] == null) {\n reasoningExtractions[chunk.id] = {\n isFirstReasoning: true,\n isFirstText: true,\n afterSwitch: false,\n isReasoning: startWithReasoning,\n buffer: \"\",\n idCounter: 0,\n textId: chunk.id,\n };\n }\n\n const activeExtraction = reasoningExtractions[chunk.id];\n activeExtraction.buffer += chunk.delta;\n processBuffer(activeExtraction, controller);\n },\n })\n ),\n ...rest,\n };\n },\n };\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAoBO,SAAS,uBACd,MACA,cACe;AAEf,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,KAAK,QAAQ,YAAY;AAC7C,MAAI,gBAAgB,IAAI;AACtB,WAAO;AAAA,EACT;AAIA,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC5C,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,aAAa,WAAW,MAAM,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,qBAAqB;AACvB,GAK8B;AAC5B,WAAS,gBACP,MACA,oBACA;AAtEJ;AAuEI,UAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,UAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,QAAI,CAAC,QAAQ,QAAQ;AACnB;AAAA,IACF;AAEA,UAAM,gBAAgB,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK,SAAS;AAErE,QAAI,uBAAuB;AAC3B,aAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC/C,YAAM,QAAQ,QAAQ,CAAC;AAEvB,YAAM,cAAc,qBAAqB,MAAM,GAAG,MAAM,KAAK;AAC7D,YAAM,cAAa,WAAM,UAAN,YAAe;AAClC,YAAM,aAAa,qBAAqB;AAAA,QACtC,aAAa,MAAM,CAAC,EAAE;AAAA,MACxB;AAEA,6BACE,eACC,YAAY,SAAS,KAAK,WAAW,SAAS,IAAI,YAAY,MAC/D;AAAA,IACJ;AAEA,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAED,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,cAAc,OAAO,EAAE,WAAW,MAAM;AACtC,YAAM,EAAE,SAAS,GAAG,KAAK,IAAI,MAAM,WAAW;AAE9C,YAAM,qBAA+C,CAAC;AACtD,iBAAW,QAAQ,SAAS;AAC1B,YAAI,KAAK,SAAS,QAAQ;AACxB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,cAAM,OAAO,qBAAqB,aAAa,KAAK,OAAO,KAAK;AAChE,cAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,cAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,YAAI,CAAC,QAAQ,QAAQ;AACnB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,wBAAgB,MAAM,kBAAkB;AAAA,MAC1C;AAEA,aAAO,EAAE,SAAS,oBAAoB,GAAG,KAAK;AAAA,IAChD;AAAA,IAEA,YAAY,OAAO,EAAE,SAAS,MAAM;AAClC,YAAM,EAAE,QAAQ,GAAG,KAAK,IAAI,MAAM,SAAS;AAY3C,YAAM,uBAAwD,CAAC;AAE/D,eAAS,gBACP,kBACA,YACA;AACA,eAAO,CAAC,SAAiB;AACvB,cAAI,KAAK,WAAW,GAAG;AACrB;AAAA,UACF;AAEA,gBAAM,SAAS,UAAU,gBAAgB;AACzC,gCAAsB,kBAAkB,UAAU;AAClD,uBAAa,kBAAkB,YAAY,QAAQ,IAAI;AACvD,gCAAsB,gBAAgB;AAAA,QACxC;AAAA,MACF;AAEA,eAAS,UAAU,kBAA2C;AAC5D,eAAO,iBAAiB,gBACrB,iBAAiB,cACd,CAAC,iBAAiB,mBAClB,CAAC,iBAAiB,eACpB,YACA;AAAA,MACN;AAEA,eAAS,sBACP,kBACA,YACA;AACA,YACG,iBAAiB,eAAe,iBAAiB,eAClD,iBAAiB,kBACjB;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,eAAS,aACP,kBACA,YACA,QACA,MACA;AACA,mBAAW;AAAA,UACT,iBAAiB,cACb;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,IACA;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,iBAAiB;AAAA,UACvB;AAAA,QACN;AAAA,MACF;AAEA,eAA
S,sBAAsB,kBAAmC;AAChE,yBAAiB,cAAc;AAC/B,YAAI,iBAAiB,aAAa;AAChC,2BAAiB,mBAAmB;AAAA,QACtC,OAAO;AACL,2BAAiB,cAAc;AAAA,QACjC;AAAA,MACF;AAEA,eAAS,gBACP,kBACA,YACA,YACA,SACA;AACA,yBAAiB,SAAS,iBAAiB,OAAO;AAAA,UAChD,aAAa,QAAQ;AAAA,QACvB;AAEA,YAAI,iBAAiB,aAAa;AAChC,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AACD,2BAAiB,aAAa;AAAA,QAChC;AAEA,yBAAiB,cAAc,CAAC,iBAAiB;AACjD,yBAAiB,cAAc;AAAA,MACjC;AAEA,eAAS,gBAAgB;AAAA,QACvB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,GAMY;AACV,gBAAQ,iBAAiB,OAAO,MAAM,GAAG,UAAU,CAAC;AAEpD,cAAM,iBACJ,aAAa,QAAQ,UAAU,iBAAiB,OAAO;AAEzD,YAAI,gBAAgB;AAClB,0BAAgB,kBAAkB,YAAY,YAAY,OAAO;AACjE,iBAAO;AAAA,QACT;AAEA,yBAAiB,SAAS,iBAAiB,OAAO,MAAM,UAAU;AAClE,eAAO;AAAA,MACT;AAEA,eAAS,cACP,kBACA,YACA;AACA,cAAM,UAAU,gBAAgB,kBAAkB,UAAU;AAC5D,YAAI,qBAAqB;AAEzB,eAAO,oBAAoB;AACzB,gBAAM,UAAU,iBAAiB,cAC7B,aACA;AACJ,gBAAM,aAAa;AAAA,YACjB,iBAAiB;AAAA,YACjB;AAAA,UACF;AAEA,cAAI,cAAc,MAAM;AACtB,oBAAQ,iBAAiB,MAAM;AAC/B,6BAAiB,SAAS;AAC1B;AAAA,UACF;AAEA,+BAAqB,gBAAgB;AAAA,YACnC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ,OAAO;AAAA,UACb,IAAI,gBAGF;AAAA,YACA,WAAW,CAAC,OAAO,eAAe;AAChC,kBAAI,MAAM,SAAS,cAAc;AAC/B,2BAAW,QAAQ,KAAK;AACxB;AAAA,cACF;AAEA,kBAAI,qBAAqB,MAAM,EAAE,KAAK,MAAM;AAC1C,qCAAqB,MAAM,EAAE,IAAI;AAAA,kBAC/B,kBAAkB;AAAA,kBAClB,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR,WAAW;AAAA,kBACX,QAAQ,MAAM;AAAA,gBAChB;AAAA,cACF;AAEA,oBAAM,mBAAmB,qBAAqB,MAAM,EAAE;AACtD,+BAAiB,UAAU,MAAM;AACjC,4BAAc,kBAAkB,UAAU;AAAA,YAC5C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;","names":[]}
@@ -1,7 +1,7 @@
1
1
  import {
2
2
  extractReasoningMiddleware,
3
3
  getPotentialStartIndex
4
- } from "./chunk-Q2TVVB35.js";
4
+ } from "./chunk-R4PZN7IW.js";
5
5
  export {
6
6
  extractReasoningMiddleware,
7
7
  getPotentialStartIndex
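For reference, the reasoning parser re-exported above takes `openingTag`/`closingTag` (interpolated directly into a RegExp, so they should be regex-safe strings), plus optional `separator` and `startWithReasoning`, per the sourcesContent shown earlier in this diff. A short sketch of constructing it — the `<think>` tags are an illustrative choice, not mandated by the package:

```ts
import { extractReasoningMiddleware } from "@ai-sdk-tool/middleware";

// Splits <think>…</think> spans out of text parts into `reasoning` content,
// both for wrapGenerate results and for streamed text deltas in wrapStream.
const reasoningMiddleware = extractReasoningMiddleware({
  openingTag: "<think>",
  closingTag: "</think>",
  separator: "\n",           // default
  startWithReasoning: false, // set true if the model omits the opening tag
});
```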
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@ai-sdk-tool/middleware",
3
- "version": "0.0.0",
3
+ "version": "0.0.1",
4
4
  "description": "Collection of reusable AI SDK middlewares",
5
5
  "type": "module",
6
6
  "repository": {
@@ -21,6 +21,11 @@
21
21
  "types": "./dist/reasoning-parser.d.ts",
22
22
  "import": "./dist/reasoning-parser.js",
23
23
  "require": "./dist/reasoning-parser.cjs"
24
+ },
25
+ "./disk-cache": {
26
+ "types": "./dist/disk-cache.d.ts",
27
+ "import": "./dist/disk-cache.js",
28
+ "require": "./dist/disk-cache.cjs"
24
29
  }
25
30
  },
26
31
  "files": [
@@ -29,20 +34,13 @@
29
34
  "publishConfig": {
30
35
  "access": "public"
31
36
  },
32
- "scripts": {
33
- "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
34
- "build:watch": "pnpm clean && tsup --watch --tsconfig tsconfig.build.json",
35
- "clean": "rm -rf dist *.tsbuildinfo",
36
- "typecheck": "tsc --noEmit",
37
- "test": "vitest run"
38
- },
39
37
  "dependencies": {
40
- "@ai-sdk/provider": "3.0.0"
38
+ "@ai-sdk/provider": "3.0.1"
41
39
  },
42
40
  "devDependencies": {
43
- "@ai-sdkx/tsconfig": "workspace:*",
44
41
  "@types/node": "^25.0.3",
45
- "tsup": "^8.5.1"
42
+ "tsup": "^8.5.1",
43
+ "@ai-sdkx/tsconfig": "0.0.1"
46
44
  },
47
45
  "keywords": [
48
46
  "ai",
@@ -50,5 +48,12 @@
50
48
  "middleware"
51
49
  ],
52
50
  "author": "",
53
- "license": "Apache-2.0"
54
- }
51
+ "license": "Apache-2.0",
52
+ "scripts": {
53
+ "build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
54
+ "build:watch": "pnpm clean && tsup --watch --tsconfig tsconfig.build.json",
55
+ "clean": "rm -rf dist *.tsbuildinfo",
56
+ "typecheck": "tsc --noEmit",
57
+ "test": "vitest run"
58
+ }
59
+ }
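The new `"./disk-cache"` entry in `exports` above makes the cache helpers importable from a subpath as well as from the package root. A hedged sketch of consuming them, assuming `createDiskCacheMiddleware` can be called without arguments — the shape of `DiskCacheMiddlewareOptions` and the return types of the helpers are not shown in this diff:

```ts
// Subpath import enabled by the new "./disk-cache" exports entry;
// the same names are also re-exported from the package root.
import {
  clearDiskCache,
  createDiskCacheMiddleware,
  getCacheStats,
  type DiskCacheMiddlewareOptions,
} from "@ai-sdk-tool/middleware/disk-cache";

// Assumption: options are optional; pass a DiskCacheMiddlewareOptions object
// if your setup requires one.
const diskCacheMiddleware = createDiskCacheMiddleware();

// Hypothetical maintenance calls using the other new exports; their exact
// signatures and return values are not part of this diff.
const stats = getCacheStats();
clearDiskCache();
```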
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/reasoning-parser/index.ts"],"sourcesContent":["/**\n * @license\n * Copyright (c) 2021-present, FriendliAI Inc. All rights reserved.\n */\n\nimport type {\n LanguageModelV3Content,\n LanguageModelV3Middleware,\n LanguageModelV3StreamPart,\n} from \"@ai-sdk/provider\";\n\n/**\n * All code below is forked from the following link:\n * https://github.com/vercel/ai/blob/v5/packages/ai/core/middleware/extract-reasoning-middleware.ts\n */\n\n/**\n * Returns the index of the start of the searchedText in the text, or null if it\n * is not found.\n */\nexport function getPotentialStartIndex(\n text: string,\n searchedText: string\n): number | null {\n // Return null immediately if searchedText is empty.\n if (searchedText.length === 0) {\n return null;\n }\n\n // Check if the searchedText exists as a direct substring of text.\n const directIndex = text.indexOf(searchedText);\n if (directIndex !== -1) {\n return directIndex;\n }\n\n // Otherwise, look for the largest suffix of \"text\" that matches\n // a prefix of \"searchedText\". We go from the end of text inward.\n for (let i = text.length - 1; i >= 0; i -= 1) {\n const suffix = text.substring(i);\n if (searchedText.startsWith(suffix)) {\n return i;\n }\n }\n\n return null;\n}\n\n/**\n * Extract an XML-tagged reasoning section from the generated text and exposes it\n * as a `reasoning` property on the result.\n *\n * @param openingTag - The opening XML tag to extract reasoning from.\n * @param closingTag - The closing XML tag to extract reasoning from.\n * @param separator - The separator to use between reasoning and text sections.\n * @param startWithReasoning - Whether to start with reasoning tokens.\n */\nexport function extractReasoningMiddleware({\n openingTag,\n closingTag,\n separator = \"\\n\",\n startWithReasoning = false,\n}: {\n openingTag: string;\n closingTag: string;\n separator?: string;\n startWithReasoning?: boolean;\n}): LanguageModelV3Middleware {\n function processTextPart(\n text: string,\n transformedContent: LanguageModelV3Content[]\n ) {\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n return;\n }\n\n const reasoningText = matches.map((match) => match[1]).join(separator);\n\n let textWithoutReasoning = text;\n for (let i = matches.length - 1; i >= 0; i -= 1) {\n const match = matches[i];\n\n const beforeMatch = textWithoutReasoning.slice(0, match.index);\n const matchIndex = match.index ?? 0;\n const afterMatch = textWithoutReasoning.slice(\n matchIndex + match[0].length\n );\n\n textWithoutReasoning =\n beforeMatch +\n (beforeMatch.length > 0 && afterMatch.length > 0 ? separator : \"\") +\n afterMatch;\n }\n\n transformedContent.push({\n type: \"reasoning\",\n text: reasoningText,\n });\n\n transformedContent.push({\n type: \"text\",\n text: textWithoutReasoning,\n });\n }\n\n return {\n specificationVersion: \"v3\",\n wrapGenerate: async ({ doGenerate }) => {\n const { content, ...rest } = await doGenerate();\n\n const transformedContent: LanguageModelV3Content[] = [];\n for (const part of content) {\n if (part.type !== \"text\") {\n transformedContent.push(part);\n continue;\n }\n\n const text = startWithReasoning ? 
openingTag + part.text : part.text;\n const regexp = new RegExp(`${openingTag}(.*?)${closingTag}`, \"gs\");\n const matches = Array.from(text.matchAll(regexp));\n\n if (!matches.length) {\n transformedContent.push(part);\n continue;\n }\n\n processTextPart(text, transformedContent);\n }\n\n return { content: transformedContent, ...rest };\n },\n\n wrapStream: async ({ doStream }) => {\n const { stream, ...rest } = await doStream();\n\n interface ExtractionState {\n isFirstReasoning: boolean;\n isFirstText: boolean;\n afterSwitch: boolean;\n isReasoning: boolean;\n buffer: string;\n idCounter: number;\n textId: string;\n }\n\n const reasoningExtractions: Record<string, ExtractionState> = {};\n\n function createPublisher(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n return (text: string) => {\n if (text.length === 0) {\n return;\n }\n\n const prefix = getPrefix(activeExtraction);\n enqueueReasoningStart(activeExtraction, controller);\n enqueueDelta(activeExtraction, controller, prefix, text);\n updateExtractionState(activeExtraction);\n };\n }\n\n function getPrefix(activeExtraction: ExtractionState): string {\n return activeExtraction.afterSwitch &&\n (activeExtraction.isReasoning\n ? !activeExtraction.isFirstReasoning\n : !activeExtraction.isFirstText)\n ? separator\n : \"\";\n }\n\n function enqueueReasoningStart(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n if (\n (activeExtraction.afterSwitch && activeExtraction.isReasoning) ||\n activeExtraction.isFirstReasoning\n ) {\n controller.enqueue({\n type: \"reasoning-start\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n }\n }\n\n function enqueueDelta(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n prefix: string,\n text: string\n ) {\n controller.enqueue(\n activeExtraction.isReasoning\n ? 
{\n type: \"reasoning-delta\",\n delta: prefix + text,\n id: `reasoning-${activeExtraction.idCounter}`,\n }\n : {\n type: \"text-delta\",\n delta: prefix + text,\n id: activeExtraction.textId,\n }\n );\n }\n\n function updateExtractionState(activeExtraction: ExtractionState) {\n activeExtraction.afterSwitch = false;\n if (activeExtraction.isReasoning) {\n activeExtraction.isFirstReasoning = false;\n } else {\n activeExtraction.isFirstText = false;\n }\n }\n\n function handleFullMatch(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>,\n startIndex: number,\n nextTag: string\n ) {\n activeExtraction.buffer = activeExtraction.buffer.slice(\n startIndex + nextTag.length\n );\n\n if (activeExtraction.isReasoning) {\n controller.enqueue({\n type: \"reasoning-end\",\n id: `reasoning-${activeExtraction.idCounter}`,\n });\n activeExtraction.idCounter += 1;\n }\n\n activeExtraction.isReasoning = !activeExtraction.isReasoning;\n activeExtraction.afterSwitch = true;\n }\n\n function processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n }: {\n activeExtraction: ExtractionState;\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>;\n publish: (text: string) => void;\n startIndex: number;\n nextTag: string;\n }): boolean {\n publish(activeExtraction.buffer.slice(0, startIndex));\n\n const foundFullMatch =\n startIndex + nextTag.length <= activeExtraction.buffer.length;\n\n if (foundFullMatch) {\n handleFullMatch(activeExtraction, controller, startIndex, nextTag);\n return true;\n }\n\n activeExtraction.buffer = activeExtraction.buffer.slice(startIndex);\n return false;\n }\n\n function processBuffer(\n activeExtraction: ExtractionState,\n controller: TransformStreamDefaultController<LanguageModelV3StreamPart>\n ) {\n const publish = createPublisher(activeExtraction, controller);\n let continueProcessing = true;\n\n while (continueProcessing) {\n const nextTag = activeExtraction.isReasoning\n ? 
closingTag\n : openingTag;\n const startIndex = getPotentialStartIndex(\n activeExtraction.buffer,\n nextTag\n );\n\n if (startIndex == null) {\n publish(activeExtraction.buffer);\n activeExtraction.buffer = \"\";\n break;\n }\n\n continueProcessing = processTagMatch({\n activeExtraction,\n controller,\n publish,\n startIndex,\n nextTag,\n });\n }\n }\n\n return {\n stream: stream.pipeThrough(\n new TransformStream<\n LanguageModelV3StreamPart,\n LanguageModelV3StreamPart\n >({\n transform: (chunk, controller) => {\n if (chunk.type !== \"text-delta\") {\n controller.enqueue(chunk);\n return;\n }\n\n if (reasoningExtractions[chunk.id] == null) {\n reasoningExtractions[chunk.id] = {\n isFirstReasoning: true,\n isFirstText: true,\n afterSwitch: false,\n isReasoning: startWithReasoning,\n buffer: \"\",\n idCounter: 0,\n textId: chunk.id,\n };\n }\n\n const activeExtraction = reasoningExtractions[chunk.id];\n activeExtraction.buffer += chunk.delta;\n processBuffer(activeExtraction, controller);\n },\n })\n ),\n ...rest,\n };\n },\n };\n}\n"],"mappings":";AAoBO,SAAS,uBACd,MACA,cACe;AAEf,MAAI,aAAa,WAAW,GAAG;AAC7B,WAAO;AAAA,EACT;AAGA,QAAM,cAAc,KAAK,QAAQ,YAAY;AAC7C,MAAI,gBAAgB,IAAI;AACtB,WAAO;AAAA,EACT;AAIA,WAAS,IAAI,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC5C,UAAM,SAAS,KAAK,UAAU,CAAC;AAC/B,QAAI,aAAa,WAAW,MAAM,GAAG;AACnC,aAAO;AAAA,IACT;AAAA,EACF;AAEA,SAAO;AACT;AAWO,SAAS,2BAA2B;AAAA,EACzC;AAAA,EACA;AAAA,EACA,YAAY;AAAA,EACZ,qBAAqB;AACvB,GAK8B;AAC5B,WAAS,gBACP,MACA,oBACA;AAtEJ;AAuEI,UAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,UAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,QAAI,CAAC,QAAQ,QAAQ;AACnB;AAAA,IACF;AAEA,UAAM,gBAAgB,QAAQ,IAAI,CAAC,UAAU,MAAM,CAAC,CAAC,EAAE,KAAK,SAAS;AAErE,QAAI,uBAAuB;AAC3B,aAAS,IAAI,QAAQ,SAAS,GAAG,KAAK,GAAG,KAAK,GAAG;AAC/C,YAAM,QAAQ,QAAQ,CAAC;AAEvB,YAAM,cAAc,qBAAqB,MAAM,GAAG,MAAM,KAAK;AAC7D,YAAM,cAAa,WAAM,UAAN,YAAe;AAClC,YAAM,aAAa,qBAAqB;AAAA,QACtC,aAAa,MAAM,CAAC,EAAE;AAAA,MACxB;AAEA,6BACE,eACC,YAAY,SAAS,KAAK,WAAW,SAAS,IAAI,YAAY,MAC/D;AAAA,IACJ;AAEA,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAED,uBAAmB,KAAK;AAAA,MACtB,MAAM;AAAA,MACN,MAAM;AAAA,IACR,CAAC;AAAA,EACH;AAEA,SAAO;AAAA,IACL,sBAAsB;AAAA,IACtB,cAAc,OAAO,EAAE,WAAW,MAAM;AACtC,YAAM,EAAE,SAAS,GAAG,KAAK,IAAI,MAAM,WAAW;AAE9C,YAAM,qBAA+C,CAAC;AACtD,iBAAW,QAAQ,SAAS;AAC1B,YAAI,KAAK,SAAS,QAAQ;AACxB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,cAAM,OAAO,qBAAqB,aAAa,KAAK,OAAO,KAAK;AAChE,cAAM,SAAS,IAAI,OAAO,GAAG,UAAU,QAAQ,UAAU,IAAI,IAAI;AACjE,cAAM,UAAU,MAAM,KAAK,KAAK,SAAS,MAAM,CAAC;AAEhD,YAAI,CAAC,QAAQ,QAAQ;AACnB,6BAAmB,KAAK,IAAI;AAC5B;AAAA,QACF;AAEA,wBAAgB,MAAM,kBAAkB;AAAA,MAC1C;AAEA,aAAO,EAAE,SAAS,oBAAoB,GAAG,KAAK;AAAA,IAChD;AAAA,IAEA,YAAY,OAAO,EAAE,SAAS,MAAM;AAClC,YAAM,EAAE,QAAQ,GAAG,KAAK,IAAI,MAAM,SAAS;AAY3C,YAAM,uBAAwD,CAAC;AAE/D,eAAS,gBACP,kBACA,YACA;AACA,eAAO,CAAC,SAAiB;AACvB,cAAI,KAAK,WAAW,GAAG;AACrB;AAAA,UACF;AAEA,gBAAM,SAAS,UAAU,gBAAgB;AACzC,gCAAsB,kBAAkB,UAAU;AAClD,uBAAa,kBAAkB,YAAY,QAAQ,IAAI;AACvD,gCAAsB,gBAAgB;AAAA,QACxC;AAAA,MACF;AAEA,eAAS,UAAU,kBAA2C;AAC5D,eAAO,iBAAiB,gBACrB,iBAAiB,cACd,CAAC,iBAAiB,mBAClB,CAAC,iBAAiB,eACpB,YACA;AAAA,MACN;AAEA,eAAS,sBACP,kBACA,YACA;AACA,YACG,iBAAiB,eAAe,iBAAiB,eAClD,iBAAiB,kBACjB;AACA,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AAAA,QACH;AAAA,MACF;AAEA,eAAS,aACP,kBACA,YACA,QACA,MACA;AACA,mBAAW;AAAA,UACT,iBAAiB,cACb;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,IACA;AAAA,YACE,MAAM;AAAA,YACN,OAAO,SAAS;AAAA,YAChB,IAAI,iBAAiB;AAAA,UACvB;AAAA,QACN;AAAA,MACF;AAEA,eAAS,sBAAsB,kBAAmC;AAChE,yBAAiB,cAAc;AAC/B,YAAI,iBAA
iB,aAAa;AAChC,2BAAiB,mBAAmB;AAAA,QACtC,OAAO;AACL,2BAAiB,cAAc;AAAA,QACjC;AAAA,MACF;AAEA,eAAS,gBACP,kBACA,YACA,YACA,SACA;AACA,yBAAiB,SAAS,iBAAiB,OAAO;AAAA,UAChD,aAAa,QAAQ;AAAA,QACvB;AAEA,YAAI,iBAAiB,aAAa;AAChC,qBAAW,QAAQ;AAAA,YACjB,MAAM;AAAA,YACN,IAAI,aAAa,iBAAiB,SAAS;AAAA,UAC7C,CAAC;AACD,2BAAiB,aAAa;AAAA,QAChC;AAEA,yBAAiB,cAAc,CAAC,iBAAiB;AACjD,yBAAiB,cAAc;AAAA,MACjC;AAEA,eAAS,gBAAgB;AAAA,QACvB;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,MACF,GAMY;AACV,gBAAQ,iBAAiB,OAAO,MAAM,GAAG,UAAU,CAAC;AAEpD,cAAM,iBACJ,aAAa,QAAQ,UAAU,iBAAiB,OAAO;AAEzD,YAAI,gBAAgB;AAClB,0BAAgB,kBAAkB,YAAY,YAAY,OAAO;AACjE,iBAAO;AAAA,QACT;AAEA,yBAAiB,SAAS,iBAAiB,OAAO,MAAM,UAAU;AAClE,eAAO;AAAA,MACT;AAEA,eAAS,cACP,kBACA,YACA;AACA,cAAM,UAAU,gBAAgB,kBAAkB,UAAU;AAC5D,YAAI,qBAAqB;AAEzB,eAAO,oBAAoB;AACzB,gBAAM,UAAU,iBAAiB,cAC7B,aACA;AACJ,gBAAM,aAAa;AAAA,YACjB,iBAAiB;AAAA,YACjB;AAAA,UACF;AAEA,cAAI,cAAc,MAAM;AACtB,oBAAQ,iBAAiB,MAAM;AAC/B,6BAAiB,SAAS;AAC1B;AAAA,UACF;AAEA,+BAAqB,gBAAgB;AAAA,YACnC;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,YACA;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AAEA,aAAO;AAAA,QACL,QAAQ,OAAO;AAAA,UACb,IAAI,gBAGF;AAAA,YACA,WAAW,CAAC,OAAO,eAAe;AAChC,kBAAI,MAAM,SAAS,cAAc;AAC/B,2BAAW,QAAQ,KAAK;AACxB;AAAA,cACF;AAEA,kBAAI,qBAAqB,MAAM,EAAE,KAAK,MAAM;AAC1C,qCAAqB,MAAM,EAAE,IAAI;AAAA,kBAC/B,kBAAkB;AAAA,kBAClB,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,aAAa;AAAA,kBACb,QAAQ;AAAA,kBACR,WAAW;AAAA,kBACX,QAAQ,MAAM;AAAA,gBAChB;AAAA,cACF;AAEA,oBAAM,mBAAmB,qBAAqB,MAAM,EAAE;AACtD,+BAAiB,UAAU,MAAM;AACjC,4BAAc,kBAAkB,UAAU;AAAA,YAC5C;AAAA,UACF,CAAC;AAAA,QACH;AAAA,QACA,GAAG;AAAA,MACL;AAAA,IACF;AAAA,EACF;AACF;","names":[]}