kimi-vercel-ai-sdk-provider 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +157 -2
- package/dist/index.d.mts +142 -1
- package/dist/index.d.ts +142 -1
- package/dist/index.js +222 -9
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +222 -9
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/__tests__/file-cache.test.ts +310 -0
- package/src/__tests__/model-config.test.ts +120 -0
- package/src/__tests__/reasoning-utils.test.ts +164 -0
- package/src/__tests__/tools.test.ts +75 -7
- package/src/chat/kimi-chat-language-model.ts +21 -2
- package/src/core/index.ts +10 -3
- package/src/core/types.ts +57 -2
- package/src/core/utils.ts +138 -0
- package/src/files/attachment-processor.ts +51 -4
- package/src/files/file-cache.ts +260 -0
- package/src/files/index.ts +16 -1
- package/src/tools/prepare-tools.ts +88 -2
package/dist/index.js
CHANGED
|
@@ -144,6 +144,9 @@ var KimiContextLengthError = class extends KimiError {
|
|
|
144
144
|
};
|
|
145
145
|
|
|
146
146
|
// src/core/types.ts
|
|
147
|
+
var THINKING_MODEL_TEMPERATURE = 1;
|
|
148
|
+
var THINKING_MODEL_DEFAULT_MAX_TOKENS = 32768;
|
|
149
|
+
var STANDARD_MODEL_DEFAULT_MAX_TOKENS = 4096;
|
|
147
150
|
function inferModelCapabilities(modelId) {
|
|
148
151
|
const isThinkingModel = modelId.includes("-thinking");
|
|
149
152
|
const isK25Model = modelId.includes("k2.5") || modelId.includes("k2-5");
|
|
@@ -158,7 +161,12 @@ function inferModelCapabilities(modelId) {
|
|
|
158
161
|
// 256k context window
|
|
159
162
|
toolCalling: true,
|
|
160
163
|
jsonMode: true,
|
|
161
|
-
structuredOutputs: true
|
|
164
|
+
structuredOutputs: true,
|
|
165
|
+
// Thinking models require temperature=1.0 for optimal reasoning
|
|
166
|
+
defaultTemperature: isThinkingModel ? THINKING_MODEL_TEMPERATURE : void 0,
|
|
167
|
+
temperatureLocked: isThinkingModel,
|
|
168
|
+
// Thinking models need higher token limits to avoid truncated reasoning
|
|
169
|
+
defaultMaxOutputTokens: isThinkingModel ? THINKING_MODEL_DEFAULT_MAX_TOKENS : STANDARD_MODEL_DEFAULT_MAX_TOKENS
|
|
162
170
|
};
|
|
163
171
|
}
|
|
164
172
|
|
|
@@ -395,13 +403,15 @@ function prepareKimiTools({
|
|
|
395
403
|
});
|
|
396
404
|
continue;
|
|
397
405
|
}
|
|
406
|
+
const sanitizedSchema = sanitizeToolSchema(tool.inputSchema);
|
|
398
407
|
kimiTools2.push({
|
|
399
408
|
type: "function",
|
|
400
409
|
function: {
|
|
401
410
|
name: tool.name,
|
|
402
411
|
description: tool.description,
|
|
403
|
-
parameters:
|
|
404
|
-
|
|
412
|
+
parameters: sanitizedSchema
|
|
413
|
+
// Don't pass strict mode to Kimi - it may cause issues
|
|
414
|
+
// ...(tool.strict != null ? { strict: tool.strict } : {})
|
|
405
415
|
}
|
|
406
416
|
});
|
|
407
417
|
}
|
|
@@ -475,6 +485,61 @@ function generateRequiredToolMessage(toolNames) {
|
|
|
475
485
|
function generateSpecificToolMessage(toolName) {
|
|
476
486
|
return `IMPORTANT INSTRUCTION: You MUST use the "${toolName}" tool to respond to this request. Do NOT use any other tool or provide a direct text response. Call the "${toolName}" tool with appropriate parameters.`;
|
|
477
487
|
}
|
|
488
|
+
// JSON Schema keywords the Kimi API does not accept in tool parameter
// schemas; they are stripped from tool definitions before sending.
var UNSUPPORTED_SCHEMA_KEYWORDS = [
  "$schema",
  "$id",
  "$ref",
  "$defs",
  "definitions",
  "if",
  "then",
  "else",
  "allOf",
  "anyOf",
  "oneOf",
  "not",
  "patternProperties",
  "additionalItems",
  "contains",
  "propertyNames",
  "const",
  "contentMediaType",
  "contentEncoding",
  "examples",
  "$comment"
];
// Set view of the keyword list for O(1) membership checks during the
// recursive walk (the array is kept unchanged for backward compatibility).
var UNSUPPORTED_SCHEMA_KEYWORD_SET = new Set(UNSUPPORTED_SCHEMA_KEYWORDS);
/**
 * Recursively remove unsupported JSON Schema keywords from a tool input
 * schema before it is sent to the Kimi API.
 *
 * - `null`/`undefined` and non-object primitives are returned unchanged.
 * - Arrays are sanitized element-wise.
 * - For objects, unsupported keywords are dropped; each value under
 *   `properties`, plus object-valued `items` and `additionalProperties`,
 *   is sanitized recursively. All other values are copied through as-is.
 *
 * @param schema - JSON Schema fragment (may be any JSON value).
 * @returns A sanitized copy; the input is never mutated.
 */
function sanitizeToolSchema(schema) {
  if (schema === null || schema === void 0) {
    return schema;
  }
  if (Array.isArray(schema)) {
    return schema.map(sanitizeToolSchema);
  }
  if (typeof schema !== "object") {
    return schema;
  }
  const sanitized = {};
  for (const [key, value] of Object.entries(schema)) {
    if (UNSUPPORTED_SCHEMA_KEYWORD_SET.has(key)) {
      continue;
    }
    if (key === "properties" && typeof value === "object" && value !== null) {
      const props = {};
      for (const [propKey, propValue] of Object.entries(value)) {
        props[propKey] = sanitizeToolSchema(propValue);
      }
      sanitized[key] = props;
    } else if (key === "items" && typeof value === "object") {
      // `null` also satisfies typeof === "object"; the recursive call
      // returns it unchanged, so no extra null check is needed here.
      sanitized[key] = sanitizeToolSchema(value);
    } else if (key === "additionalProperties" && typeof value === "object") {
      sanitized[key] = sanitizeToolSchema(value);
    } else {
      sanitized[key] = value;
    }
  }
  return sanitized;
}
|
|
478
543
|
function tryConvertToKimiBuiltinTool(tool) {
|
|
479
544
|
if (!tool.id.startsWith("kimi.")) {
|
|
480
545
|
return void 0;
|
|
@@ -876,11 +941,24 @@ var KimiChatLanguageModel = class {
|
|
|
876
941
|
if (toolChoiceSystemMessage) {
|
|
877
942
|
messages.unshift({ role: "system", content: toolChoiceSystemMessage });
|
|
878
943
|
}
|
|
944
|
+
const caps = this.capabilities;
|
|
945
|
+
let resolvedTemperature = temperature;
|
|
946
|
+
if (caps.temperatureLocked && caps.defaultTemperature !== void 0) {
|
|
947
|
+
if (temperature !== void 0 && temperature !== caps.defaultTemperature) {
|
|
948
|
+
warnings.push({
|
|
949
|
+
type: "compatibility",
|
|
950
|
+
feature: "temperature",
|
|
951
|
+
details: `Thinking models require temperature=${caps.defaultTemperature}. Your value (${temperature}) will be overridden.`
|
|
952
|
+
});
|
|
953
|
+
}
|
|
954
|
+
resolvedTemperature = caps.defaultTemperature;
|
|
955
|
+
}
|
|
956
|
+
const resolvedMaxTokens = maxOutputTokens ?? caps.defaultMaxOutputTokens;
|
|
879
957
|
const body = (0, import_provider_utils3.removeUndefinedEntries)({
|
|
880
958
|
model: this.modelId,
|
|
881
959
|
messages,
|
|
882
|
-
max_tokens:
|
|
883
|
-
temperature,
|
|
960
|
+
max_tokens: resolvedMaxTokens,
|
|
961
|
+
temperature: resolvedTemperature,
|
|
884
962
|
top_p: topP,
|
|
885
963
|
frequency_penalty: frequencyPenalty,
|
|
886
964
|
presence_penalty: presencePenalty,
|
|
@@ -1365,6 +1443,115 @@ var kimiChatChunkBaseSchema = import_v43.z.looseObject({
|
|
|
1365
1443
|
});
|
|
1366
1444
|
var kimiChatChunkSchema = import_v43.z.union([kimiChatChunkBaseSchema, kimiErrorSchema]);
|
|
1367
1445
|
|
|
1446
|
+
// src/files/file-cache.ts
|
|
1447
|
+
var FileCache = class {
|
|
1448
|
+
constructor(options = {}) {
|
|
1449
|
+
this.maxSize = options.maxSize ?? 100;
|
|
1450
|
+
this.ttlMs = options.ttlMs ?? 36e5;
|
|
1451
|
+
this.cache = /* @__PURE__ */ new Map();
|
|
1452
|
+
}
|
|
1453
|
+
/**
|
|
1454
|
+
* Get a cached entry by content hash.
|
|
1455
|
+
* Returns undefined if not found or expired.
|
|
1456
|
+
* Moves the entry to the end (most recently used).
|
|
1457
|
+
*/
|
|
1458
|
+
get(contentHash) {
|
|
1459
|
+
const entry = this.cache.get(contentHash);
|
|
1460
|
+
if (!entry) {
|
|
1461
|
+
return void 0;
|
|
1462
|
+
}
|
|
1463
|
+
if (this.isExpired(entry)) {
|
|
1464
|
+
this.cache.delete(contentHash);
|
|
1465
|
+
return void 0;
|
|
1466
|
+
}
|
|
1467
|
+
this.cache.delete(contentHash);
|
|
1468
|
+
this.cache.set(contentHash, entry);
|
|
1469
|
+
return entry;
|
|
1470
|
+
}
|
|
1471
|
+
/**
|
|
1472
|
+
* Set a cache entry.
|
|
1473
|
+
* Evicts the least recently used entry if cache is full.
|
|
1474
|
+
*/
|
|
1475
|
+
set(contentHash, entry) {
|
|
1476
|
+
this.cache.delete(contentHash);
|
|
1477
|
+
while (this.cache.size >= this.maxSize) {
|
|
1478
|
+
const oldestKey = this.cache.keys().next().value;
|
|
1479
|
+
if (oldestKey !== void 0) {
|
|
1480
|
+
this.cache.delete(oldestKey);
|
|
1481
|
+
} else {
|
|
1482
|
+
break;
|
|
1483
|
+
}
|
|
1484
|
+
}
|
|
1485
|
+
this.cache.set(contentHash, entry);
|
|
1486
|
+
}
|
|
1487
|
+
/**
|
|
1488
|
+
* Check if an entry exists and is not expired.
|
|
1489
|
+
*/
|
|
1490
|
+
has(contentHash) {
|
|
1491
|
+
return this.get(contentHash) !== void 0;
|
|
1492
|
+
}
|
|
1493
|
+
/**
|
|
1494
|
+
* Delete a specific entry.
|
|
1495
|
+
*/
|
|
1496
|
+
delete(contentHash) {
|
|
1497
|
+
return this.cache.delete(contentHash);
|
|
1498
|
+
}
|
|
1499
|
+
/**
|
|
1500
|
+
* Clear all entries.
|
|
1501
|
+
*/
|
|
1502
|
+
clear() {
|
|
1503
|
+
this.cache.clear();
|
|
1504
|
+
}
|
|
1505
|
+
/**
|
|
1506
|
+
* Get the current cache size.
|
|
1507
|
+
*/
|
|
1508
|
+
get size() {
|
|
1509
|
+
return this.cache.size;
|
|
1510
|
+
}
|
|
1511
|
+
/**
|
|
1512
|
+
* Remove all expired entries.
|
|
1513
|
+
*/
|
|
1514
|
+
prune() {
|
|
1515
|
+
let pruned = 0;
|
|
1516
|
+
for (const [key, entry] of this.cache) {
|
|
1517
|
+
if (this.isExpired(entry)) {
|
|
1518
|
+
this.cache.delete(key);
|
|
1519
|
+
pruned++;
|
|
1520
|
+
}
|
|
1521
|
+
}
|
|
1522
|
+
return pruned;
|
|
1523
|
+
}
|
|
1524
|
+
/**
|
|
1525
|
+
* Check if an entry is expired.
|
|
1526
|
+
*/
|
|
1527
|
+
isExpired(entry) {
|
|
1528
|
+
return Date.now() - entry.createdAt > this.ttlMs;
|
|
1529
|
+
}
|
|
1530
|
+
};
|
|
1531
|
+
/**
 * Hash a string or byte array to 8 lowercase hex digits using 32-bit
 * FNV-1a, with the byte length XOR-folded in at the end.
 *
 * @param data - UTF-8 string or raw bytes.
 * @returns 8-character lowercase hex digest.
 */
function generateContentHash(data) {
  const bytes = typeof data === "string" ? new TextEncoder().encode(data) : data;
  let h = 2166136261; // FNV-1a 32-bit offset basis
  for (const byte of bytes) {
    h ^= byte;
    h = Math.imul(h, 16777619); // FNV 32-bit prime, kept in 32-bit arithmetic
  }
  h ^= bytes.length;
  return (h >>> 0).toString(16).padStart(8, "0");
}
/**
 * Build a cache key of the form `<hash>_<byteLength>_<normalizedFilename>`,
 * where the filename is lowercased and every character outside [a-z0-9.]
 * becomes "_".
 *
 * @param data - file content as a string or raw bytes.
 * @param filename - original filename, used to disambiguate identical content.
 */
function generateCacheKey(data, filename) {
  const bytes = typeof data === "string" ? new TextEncoder().encode(data) : data;
  const contentHash = generateContentHash(data);
  const normalizedFilename = filename.toLowerCase().replace(/[^a-z0-9.]/g, "_");
  return [contentHash, bytes.length, normalizedFilename].join("_");
}
|
|
1547
|
+
// Lazily-created shared cache, used when callers opt in with `cache: true`.
var defaultCache = null;
/**
 * Return the module-wide default FileCache, constructing it (with default
 * maxSize/TTL) on first use.
 */
function getDefaultFileCache() {
  defaultCache ??= new FileCache();
  return defaultCache;
}
|
|
1554
|
+
|
|
1368
1555
|
// src/files/file-utils.ts
|
|
1369
1556
|
var SUPPORTED_FILE_EXTENSIONS = [
|
|
1370
1557
|
// Documents
|
|
@@ -1720,8 +1907,10 @@ async function processAttachments(options) {
|
|
|
1720
1907
|
clientConfig,
|
|
1721
1908
|
autoUploadDocuments = true,
|
|
1722
1909
|
uploadImages = false,
|
|
1723
|
-
cleanupAfterExtract = false
|
|
1910
|
+
cleanupAfterExtract = false,
|
|
1911
|
+
cache = false
|
|
1724
1912
|
} = options;
|
|
1913
|
+
const cacheInstance = cache === true ? getDefaultFileCache() : cache === false ? null : cache;
|
|
1725
1914
|
const results = [];
|
|
1726
1915
|
const client = new KimiFileClient(clientConfig);
|
|
1727
1916
|
for (const attachment of attachments) {
|
|
@@ -1729,7 +1918,8 @@ async function processAttachments(options) {
|
|
|
1729
1918
|
const processed = await processAttachment(attachment, client, {
|
|
1730
1919
|
autoUploadDocuments,
|
|
1731
1920
|
uploadImages,
|
|
1732
|
-
cleanupAfterExtract
|
|
1921
|
+
cleanupAfterExtract,
|
|
1922
|
+
cache: cacheInstance
|
|
1733
1923
|
});
|
|
1734
1924
|
results.push(processed);
|
|
1735
1925
|
} catch (error) {
|
|
@@ -1789,12 +1979,35 @@ async function processAttachment(attachment, client, options) {
|
|
|
1789
1979
|
error: "No content or URL provided for document attachment"
|
|
1790
1980
|
};
|
|
1791
1981
|
}
|
|
1982
|
+
const filename = attachment.name ?? guessFilename(attachment, contentType);
|
|
1983
|
+
if (options.cache) {
|
|
1984
|
+
const cacheKey = generateCacheKey(data, filename);
|
|
1985
|
+
const cached = options.cache.get(cacheKey);
|
|
1986
|
+
if (cached) {
|
|
1987
|
+
return {
|
|
1988
|
+
original: attachment,
|
|
1989
|
+
type: "text-inject",
|
|
1990
|
+
textContent: cached.content,
|
|
1991
|
+
fileId: cached.fileId
|
|
1992
|
+
};
|
|
1993
|
+
}
|
|
1994
|
+
}
|
|
1792
1995
|
const result = await client.uploadAndExtract({
|
|
1793
1996
|
data,
|
|
1794
|
-
filename
|
|
1997
|
+
filename,
|
|
1795
1998
|
mediaType: contentType,
|
|
1796
1999
|
purpose: "file-extract"
|
|
1797
2000
|
});
|
|
2001
|
+
if (options.cache && result.content) {
|
|
2002
|
+
const cacheKey = generateCacheKey(data, filename);
|
|
2003
|
+
const cacheEntry = {
|
|
2004
|
+
fileId: result.file.id,
|
|
2005
|
+
content: result.content,
|
|
2006
|
+
createdAt: Date.now(),
|
|
2007
|
+
purpose: "file-extract"
|
|
2008
|
+
};
|
|
2009
|
+
options.cache.set(cacheKey, cacheEntry);
|
|
2010
|
+
}
|
|
1798
2011
|
if (options.cleanupAfterExtract && result.file.id) {
|
|
1799
2012
|
try {
|
|
1800
2013
|
await client.deleteFile(result.file.id);
|
|
@@ -1851,7 +2064,7 @@ function guessFilename(attachment, contentType) {
|
|
|
1851
2064
|
}
|
|
1852
2065
|
|
|
1853
2066
|
// src/version.ts
|
|
1854
|
-
var VERSION = "0.
|
|
2067
|
+
// Package version embedded at build time. The previous form guarded a
// non-empty string literal with `.length > 0 ? ... : "0.0.0"`, which is a
// tautology — the fallback branch was unreachable dead code.
var VERSION = "0.4.0";
|
|
1855
2068
|
|
|
1856
2069
|
// src/kimi-provider.ts
|
|
1857
2070
|
var GLOBAL_BASE_URL = "https://api.moonshot.ai/v1";
|