kimi-vercel-ai-sdk-provider 0.3.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +157 -2
- package/dist/index.d.mts +142 -1
- package/dist/index.d.ts +142 -1
- package/dist/index.js +222 -9
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +222 -9
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/__tests__/file-cache.test.ts +310 -0
- package/src/__tests__/model-config.test.ts +120 -0
- package/src/__tests__/reasoning-utils.test.ts +164 -0
- package/src/__tests__/tools.test.ts +75 -7
- package/src/chat/kimi-chat-language-model.ts +21 -2
- package/src/core/index.ts +10 -3
- package/src/core/types.ts +57 -2
- package/src/core/utils.ts +138 -0
- package/src/files/attachment-processor.ts +51 -4
- package/src/files/file-cache.ts +260 -0
- package/src/files/index.ts +16 -1
- package/src/tools/prepare-tools.ts +88 -2
package/dist/index.mjs
CHANGED
@@ -98,6 +98,9 @@ var KimiContextLengthError = class extends KimiError {
 };
 
 // src/core/types.ts
+var THINKING_MODEL_TEMPERATURE = 1;
+var THINKING_MODEL_DEFAULT_MAX_TOKENS = 32768;
+var STANDARD_MODEL_DEFAULT_MAX_TOKENS = 4096;
 function inferModelCapabilities(modelId) {
   const isThinkingModel = modelId.includes("-thinking");
   const isK25Model = modelId.includes("k2.5") || modelId.includes("k2-5");
@@ -112,7 +115,12 @@ function inferModelCapabilities(modelId) {
     // 256k context window
     toolCalling: true,
     jsonMode: true,
-    structuredOutputs: true
+    structuredOutputs: true,
+    // Thinking models require temperature=1.0 for optimal reasoning
+    defaultTemperature: isThinkingModel ? THINKING_MODEL_TEMPERATURE : void 0,
+    temperatureLocked: isThinkingModel,
+    // Thinking models need higher token limits to avoid truncated reasoning
+    defaultMaxOutputTokens: isThinkingModel ? THINKING_MODEL_DEFAULT_MAX_TOKENS : STANDARD_MODEL_DEFAULT_MAX_TOKENS
   };
 }
 
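The hunk above locks thinking models to temperature 1 and raises their default output-token budget. As a standalone TypeScript sketch of that selection rule (the helper name below is ours for illustration, not the package's export):

```ts
// Sketch of the default-selection rule from the diff above; the constants match
// the values added in 0.4.0, but this helper itself is illustrative.
const THINKING_MODEL_TEMPERATURE = 1;
const THINKING_MODEL_DEFAULT_MAX_TOKENS = 32768;
const STANDARD_MODEL_DEFAULT_MAX_TOKENS = 4096;

function thinkingDefaults(modelId: string) {
  const isThinkingModel = modelId.includes("-thinking");
  return {
    defaultTemperature: isThinkingModel ? THINKING_MODEL_TEMPERATURE : undefined,
    temperatureLocked: isThinkingModel,
    defaultMaxOutputTokens: isThinkingModel
      ? THINKING_MODEL_DEFAULT_MAX_TOKENS
      : STANDARD_MODEL_DEFAULT_MAX_TOKENS,
  };
}

// thinkingDefaults("kimi-k2.5-thinking") -> { defaultTemperature: 1, temperatureLocked: true,  defaultMaxOutputTokens: 32768 }
// thinkingDefaults("kimi-k2.5")          -> { defaultTemperature: undefined, temperatureLocked: false, defaultMaxOutputTokens: 4096 }
```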
@@ -351,13 +359,15 @@ function prepareKimiTools({
       });
       continue;
     }
+    const sanitizedSchema = sanitizeToolSchema(tool.inputSchema);
     kimiTools2.push({
       type: "function",
       function: {
         name: tool.name,
         description: tool.description,
-        parameters:
-
+        parameters: sanitizedSchema
+        // Don't pass strict mode to Kimi - it may cause issues
+        // ...(tool.strict != null ? { strict: tool.strict } : {})
       }
     });
   }
@@ -431,6 +441,61 @@ function generateRequiredToolMessage(toolNames) {
 function generateSpecificToolMessage(toolName) {
   return `IMPORTANT INSTRUCTION: You MUST use the "${toolName}" tool to respond to this request. Do NOT use any other tool or provide a direct text response. Call the "${toolName}" tool with appropriate parameters.`;
 }
+var UNSUPPORTED_SCHEMA_KEYWORDS = [
+  "$schema",
+  "$id",
+  "$ref",
+  "$defs",
+  "definitions",
+  "if",
+  "then",
+  "else",
+  "allOf",
+  "anyOf",
+  "oneOf",
+  "not",
+  "patternProperties",
+  "additionalItems",
+  "contains",
+  "propertyNames",
+  "const",
+  "contentMediaType",
+  "contentEncoding",
+  "examples",
+  "$comment"
+];
+function sanitizeToolSchema(schema) {
+  if (schema === null || schema === void 0) {
+    return schema;
+  }
+  if (Array.isArray(schema)) {
+    return schema.map(sanitizeToolSchema);
+  }
+  if (typeof schema !== "object") {
+    return schema;
+  }
+  const sanitized = {};
+  const schemaObj = schema;
+  for (const [key, value] of Object.entries(schemaObj)) {
+    if (UNSUPPORTED_SCHEMA_KEYWORDS.includes(key)) {
+      continue;
+    }
+    if (key === "properties" && typeof value === "object" && value !== null) {
+      const props = {};
+      for (const [propKey, propValue] of Object.entries(value)) {
+        props[propKey] = sanitizeToolSchema(propValue);
+      }
+      sanitized[key] = props;
+    } else if (key === "items" && typeof value === "object") {
+      sanitized[key] = sanitizeToolSchema(value);
+    } else if (key === "additionalProperties" && typeof value === "object") {
+      sanitized[key] = sanitizeToolSchema(value);
+    } else {
+      sanitized[key] = value;
+    }
+  }
+  return sanitized;
+}
 function tryConvertToKimiBuiltinTool(tool) {
   if (!tool.id.startsWith("kimi.")) {
     return void 0;
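The new sanitizeToolSchema pass strips JSON Schema keywords the Kimi API does not accept and recurses into properties, items, and object-valued additionalProperties. Roughly, on an illustrative tool schema (the function is part of the bundle above; whether it is re-exported is not shown in this diff):

```ts
// Illustrative input/output for the sanitization pass above.
const rawSchema = {
  $schema: "http://json-schema.org/draft-07/schema#", // dropped: "$schema" is unsupported
  type: "object",
  properties: {
    city: { type: "string", examples: ["Berlin"] }, // "examples" dropped, rest kept
    units: { type: "string" },
  },
  required: ["city"],
  additionalProperties: false, // kept: only object-valued additionalProperties is recursed
};

// What the schema sent to Kimi would look like after sanitizeToolSchema(rawSchema):
const expected = {
  type: "object",
  properties: {
    city: { type: "string" },
    units: { type: "string" },
  },
  required: ["city"],
  additionalProperties: false,
};
```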
@@ -834,11 +899,24 @@ var KimiChatLanguageModel = class {
     if (toolChoiceSystemMessage) {
       messages.unshift({ role: "system", content: toolChoiceSystemMessage });
     }
+    const caps = this.capabilities;
+    let resolvedTemperature = temperature;
+    if (caps.temperatureLocked && caps.defaultTemperature !== void 0) {
+      if (temperature !== void 0 && temperature !== caps.defaultTemperature) {
+        warnings.push({
+          type: "compatibility",
+          feature: "temperature",
+          details: `Thinking models require temperature=${caps.defaultTemperature}. Your value (${temperature}) will be overridden.`
+        });
+      }
+      resolvedTemperature = caps.defaultTemperature;
+    }
+    const resolvedMaxTokens = maxOutputTokens ?? caps.defaultMaxOutputTokens;
     const body = removeUndefinedEntries({
       model: this.modelId,
       messages,
-      max_tokens:
-      temperature,
+      max_tokens: resolvedMaxTokens,
+      temperature: resolvedTemperature,
       top_p: topP,
       frequency_penalty: frequencyPenalty,
       presence_penalty: presencePenalty,
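On the consumer side, an explicit temperature on a thinking model is now overridden rather than passed through, with a compatibility warning surfaced. A hedged usage sketch with the Vercel AI SDK; the factory name createKimi, its config shape, and the model id are assumptions, so check the package README for the actual exports:

```ts
import { generateText } from "ai";
import { createKimi } from "kimi-vercel-ai-sdk-provider"; // export name assumed

const kimi = createKimi({ apiKey: process.env.KIMI_API_KEY }); // config shape assumed

const result = await generateText({
  model: kimi("kimi-k2.5-thinking"), // illustrative thinking-model id
  prompt: "Outline the pros and cons of LRU caching.",
  temperature: 0.2, // locked models override this to 1 per the diff above
});

// Inspect result.warnings for the "compatibility" entry that explains the
// requested temperature was replaced with the thinking-model default.
```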
@@ -1323,6 +1401,115 @@ var kimiChatChunkBaseSchema = z3.looseObject({
 });
 var kimiChatChunkSchema = z3.union([kimiChatChunkBaseSchema, kimiErrorSchema]);
 
+// src/files/file-cache.ts
+var FileCache = class {
+  constructor(options = {}) {
+    this.maxSize = options.maxSize ?? 100;
+    this.ttlMs = options.ttlMs ?? 36e5;
+    this.cache = /* @__PURE__ */ new Map();
+  }
+  /**
+   * Get a cached entry by content hash.
+   * Returns undefined if not found or expired.
+   * Moves the entry to the end (most recently used).
+   */
+  get(contentHash) {
+    const entry = this.cache.get(contentHash);
+    if (!entry) {
+      return void 0;
+    }
+    if (this.isExpired(entry)) {
+      this.cache.delete(contentHash);
+      return void 0;
+    }
+    this.cache.delete(contentHash);
+    this.cache.set(contentHash, entry);
+    return entry;
+  }
+  /**
+   * Set a cache entry.
+   * Evicts the least recently used entry if cache is full.
+   */
+  set(contentHash, entry) {
+    this.cache.delete(contentHash);
+    while (this.cache.size >= this.maxSize) {
+      const oldestKey = this.cache.keys().next().value;
+      if (oldestKey !== void 0) {
+        this.cache.delete(oldestKey);
+      } else {
+        break;
+      }
+    }
+    this.cache.set(contentHash, entry);
+  }
+  /**
+   * Check if an entry exists and is not expired.
+   */
+  has(contentHash) {
+    return this.get(contentHash) !== void 0;
+  }
+  /**
+   * Delete a specific entry.
+   */
+  delete(contentHash) {
+    return this.cache.delete(contentHash);
+  }
+  /**
+   * Clear all entries.
+   */
+  clear() {
+    this.cache.clear();
+  }
+  /**
+   * Get the current cache size.
+   */
+  get size() {
+    return this.cache.size;
+  }
+  /**
+   * Remove all expired entries.
+   */
+  prune() {
+    let pruned = 0;
+    for (const [key, entry] of this.cache) {
+      if (this.isExpired(entry)) {
+        this.cache.delete(key);
+        pruned++;
+      }
+    }
+    return pruned;
+  }
+  /**
+   * Check if an entry is expired.
+   */
+  isExpired(entry) {
+    return Date.now() - entry.createdAt > this.ttlMs;
+  }
+};
+function generateContentHash(data) {
+  const bytes = typeof data === "string" ? new TextEncoder().encode(data) : data;
+  let hash = 2166136261;
+  for (let i = 0; i < bytes.length; i++) {
+    hash ^= bytes[i];
+    hash = Math.imul(hash, 16777619);
+  }
+  hash ^= bytes.length;
+  return (hash >>> 0).toString(16).padStart(8, "0");
+}
+function generateCacheKey(data, filename) {
+  const bytes = typeof data === "string" ? new TextEncoder().encode(data) : data;
+  const contentHash = generateContentHash(data);
+  const normalizedFilename = filename.toLowerCase().replace(/[^a-z0-9.]/g, "_");
+  return `${contentHash}_${bytes.length}_${normalizedFilename}`;
+}
+var defaultCache = null;
+function getDefaultFileCache() {
+  if (!defaultCache) {
+    defaultCache = new FileCache();
+  }
+  return defaultCache;
+}
+
 // src/files/file-utils.ts
 var SUPPORTED_FILE_EXTENSIONS = [
   // Documents
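FileCache is a small LRU cache with a TTL, keyed by an FNV-1a-style content hash plus byte length and normalized filename. A sketch of direct use, assuming the class and generateCacheKey are re-exported (the src/files/index.ts change suggests new exports, but their names are not shown in this hunk):

```ts
const cache = new FileCache({ maxSize: 50, ttlMs: 15 * 60 * 1000 }); // 50 entries, 15-minute TTL

const bytes = new TextEncoder().encode("quarterly report contents ...");
const key = generateCacheKey(bytes, "Report Q3.pdf"); // "<hash>_<byteLength>_report_q3.pdf"

cache.set(key, {
  fileId: "file_abc123", // illustrative id from the Kimi files API
  content: "extracted text ...",
  createdAt: Date.now(),
  purpose: "file-extract",
});

cache.get(key); // hit: entry is returned and bumped to most recently used
cache.prune();  // removes expired entries, returns the count removed
```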
@@ -1678,8 +1865,10 @@ async function processAttachments(options) {
     clientConfig,
     autoUploadDocuments = true,
     uploadImages = false,
-    cleanupAfterExtract = false
+    cleanupAfterExtract = false,
+    cache = false
   } = options;
+  const cacheInstance = cache === true ? getDefaultFileCache() : cache === false ? null : cache;
   const results = [];
   const client = new KimiFileClient(clientConfig);
   for (const attachment of attachments) {
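processAttachments now accepts a cache option: true selects the shared default cache, false (the default) keeps the 0.3.0 behavior, and a FileCache instance can be supplied directly. A sketch, with the import path assumed and the unchanged attachment/client config shapes elided:

```ts
import { processAttachments, FileCache } from "kimi-vercel-ai-sdk-provider"; // import path assumed

// Attachments and client config are built exactly as in 0.3.0; only `cache` is new.
declare const attachments: Parameters<typeof processAttachments>[0]["attachments"];
declare const clientConfig: Parameters<typeof processAttachments>[0]["clientConfig"];

const results = await processAttachments({
  attachments,
  clientConfig,
  cleanupAfterExtract: true,
  cache: new FileCache({ maxSize: 200 }), // or `cache: true` for the shared default cache
});
```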
@@ -1687,7 +1876,8 @@ async function processAttachments(options) {
       const processed = await processAttachment(attachment, client, {
         autoUploadDocuments,
         uploadImages,
-        cleanupAfterExtract
+        cleanupAfterExtract,
+        cache: cacheInstance
       });
       results.push(processed);
     } catch (error) {
@@ -1747,12 +1937,35 @@ async function processAttachment(attachment, client, options) {
       error: "No content or URL provided for document attachment"
     };
   }
+  const filename = attachment.name ?? guessFilename(attachment, contentType);
+  if (options.cache) {
+    const cacheKey = generateCacheKey(data, filename);
+    const cached = options.cache.get(cacheKey);
+    if (cached) {
+      return {
+        original: attachment,
+        type: "text-inject",
+        textContent: cached.content,
+        fileId: cached.fileId
+      };
+    }
+  }
   const result = await client.uploadAndExtract({
     data,
-    filename
+    filename,
     mediaType: contentType,
     purpose: "file-extract"
   });
+  if (options.cache && result.content) {
+    const cacheKey = generateCacheKey(data, filename);
+    const cacheEntry = {
+      fileId: result.file.id,
+      content: result.content,
+      createdAt: Date.now(),
+      purpose: "file-extract"
+    };
+    options.cache.set(cacheKey, cacheEntry);
+  }
   if (options.cleanupAfterExtract && result.file.id) {
     try {
       await client.deleteFile(result.file.id);
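With the hit path above, only the first call for a given (content, filename) pair reaches uploadAndExtract; repeats are answered from the cached extraction. A sketch under the same import assumption as the previous example:

```ts
import { processAttachments } from "kimi-vercel-ai-sdk-provider"; // import path assumed

declare const attachments: Parameters<typeof processAttachments>[0]["attachments"];
declare const clientConfig: Parameters<typeof processAttachments>[0]["clientConfig"];

const first = await processAttachments({ attachments, clientConfig, cache: true });  // uploads + extracts
const second = await processAttachments({ attachments, clientConfig, cache: true }); // served from the cache
// Cached entries expire after ttlMs (1 hour by default), and the default cache keeps
// at most 100 entries, evicting the least recently used.
```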
@@ -1809,7 +2022,7 @@ function guessFilename(attachment, contentType) {
 }
 
 // src/version.ts
-var VERSION = "0.
+var VERSION = "0.4.0".length > 0 ? "0.4.0" : "0.0.0";
 
 // src/kimi-provider.ts
 var GLOBAL_BASE_URL = "https://api.moonshot.ai/v1";