kimi-vercel-ai-sdk-provider 0.3.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +567 -17
- package/dist/index.d.mts +1750 -3
- package/dist/index.d.ts +1750 -3
- package/dist/index.js +2317 -161
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2292 -160
- package/dist/index.mjs.map +1 -1
- package/package.json +1 -1
- package/src/__tests__/auto-detect.test.ts +140 -0
- package/src/__tests__/code-validation.test.ts +267 -0
- package/src/__tests__/ensemble.test.ts +242 -0
- package/src/__tests__/file-cache.test.ts +310 -0
- package/src/__tests__/model-config.test.ts +120 -0
- package/src/__tests__/multi-agent.test.ts +201 -0
- package/src/__tests__/project-tools.test.ts +181 -0
- package/src/__tests__/reasoning-utils.test.ts +164 -0
- package/src/__tests__/tools.test.ts +76 -8
- package/src/chat/kimi-chat-language-model.ts +21 -2
- package/src/chat/kimi-chat-settings.ts +15 -1
- package/src/code-validation/detector.ts +319 -0
- package/src/code-validation/index.ts +31 -0
- package/src/code-validation/types.ts +291 -0
- package/src/code-validation/validator.ts +547 -0
- package/src/core/errors.ts +91 -0
- package/src/core/index.ts +15 -3
- package/src/core/types.ts +57 -2
- package/src/core/utils.ts +138 -0
- package/src/ensemble/index.ts +17 -0
- package/src/ensemble/multi-sampler.ts +433 -0
- package/src/ensemble/types.ts +279 -0
- package/src/files/attachment-processor.ts +51 -4
- package/src/files/file-cache.ts +260 -0
- package/src/files/index.ts +16 -1
- package/src/index.ts +102 -3
- package/src/kimi-provider.ts +354 -1
- package/src/multi-agent/index.ts +21 -0
- package/src/multi-agent/types.ts +312 -0
- package/src/multi-agent/workflows.ts +539 -0
- package/src/project-tools/index.ts +16 -0
- package/src/project-tools/scaffolder.ts +494 -0
- package/src/project-tools/types.ts +244 -0
- package/src/tools/auto-detect.ts +276 -0
- package/src/tools/index.ts +6 -2
- package/src/tools/prepare-tools.ts +179 -4
package/src/ensemble/types.ts
ADDED
@@ -0,0 +1,279 @@
+/**
+ * Types for ensemble/multi-sampling functionality.
+ * @module
+ */
+
+import type { LanguageModelV3ToolCall, LanguageModelV3ToolResult } from '@ai-sdk/provider';
+
+/**
+ * Simple usage type compatible with common AI SDK patterns.
+ * This is independent from the provider-specific V3Usage type.
+ */
+export interface LanguageModelUsage {
+  promptTokens: number;
+  completionTokens: number;
+  totalTokens: number;
+}
+
+/**
+ * Re-export types for convenience.
+ */
+export type ToolCall = LanguageModelV3ToolCall;
+export type ToolResult = LanguageModelV3ToolResult;
+
+// ============================================================================
+// Configuration Types
+// ============================================================================
+
+/**
+ * Selection strategy for choosing the best response from multiple samples.
+ */
+export type SelectionStrategy = 'first' | 'vote' | 'best' | 'all';
+
+/**
+ * Scoring heuristic for the 'best' selection strategy.
+ */
+export type ScoringHeuristic = 'length' | 'confidence' | 'code' | 'custom';
+
+/**
+ * Configuration for ensemble/multi-sampling.
+ */
+export interface EnsembleConfig {
+  /**
+   * Number of parallel samples to generate.
+   * @default 3
+   */
+  n: number;
+
+  /**
+   * Strategy for selecting the best result.
+   * - 'first': Return the first successful response
+   * - 'vote': Return the most common answer (majority voting)
+   * - 'best': Use heuristic scoring to pick the best
+   * - 'all': Return all responses (for manual selection)
+   * @default 'best'
+   */
+  selectionStrategy?: SelectionStrategy;
+
+  /**
+   * Temperature variation for diversity.
+   * Each sample gets temperature = baseTemp + (i * variance)
+   * @default 0.1
+   */
+  temperatureVariance?: number;
+
+  /**
+   * For 'best' strategy, the scoring heuristic.
+   * - 'length': Prefer shorter responses
+   * - 'confidence': Prefer responses with higher token count
+   * - 'code': Prefer responses with fewer error patterns
+   * - 'custom': Use custom scorer function
+   * @default 'confidence'
+   */
+  scoringHeuristic?: ScoringHeuristic;
+
+  /**
+   * Custom scoring function for 'custom' heuristic.
+   * Higher scores are better.
+   */
+  customScorer?: (response: EnsembleResponse) => number;
+
+  /**
+   * Maximum time to wait for all samples (ms).
+   * @default 60000
+   */
+  timeoutMs?: number;
+
+  /**
+   * Whether to continue if some samples fail.
+   * @default true
+   */
+  allowPartialFailure?: boolean;
+
+  /**
+   * Minimum number of successful samples required.
+   * Only relevant when allowPartialFailure is true.
+   * @default 1
+   */
+  minSuccessfulSamples?: number;
+}
+
+// ============================================================================
+// Response Types
+// ============================================================================
+
+/**
+ * A single response from the ensemble.
+ */
+export interface EnsembleResponse {
+  /**
+   * The generated text.
+   */
+  text: string;
+
+  /**
+   * Reasoning content (for thinking models).
+   */
+  reasoning?: string;
+
+  /**
+   * Tool calls made during generation.
+   */
+  toolCalls?: ToolCall[];
+
+  /**
+   * Results of tool executions.
+   */
+  toolResults?: ToolResult[];
+
+  /**
+   * Token usage for this response.
+   */
+  usage?: LanguageModelUsage;
+
+  /**
+   * Score assigned to this response (if scoring was applied).
+   */
+  score?: number;
+
+  /**
+   * Index of this sample in the ensemble.
+   */
+  sampleIndex: number;
+
+  /**
+   * Temperature used for this sample.
+   */
+  temperature: number;
+
+  /**
+   * Reason the generation finished.
+   */
+  finishReason: string;
+
+  /**
+   * Whether this sample completed successfully.
+   */
+  success: boolean;
+
+  /**
+   * Error message if the sample failed.
+   */
+  error?: string;
+
+  /**
+   * Time taken to generate this response (ms).
+   */
+  durationMs?: number;
+}
+
+/**
+ * Metadata about the ensemble execution.
+ */
+export interface EnsembleMetadata {
+  /**
+   * Number of samples requested.
+   */
+  nRequested: number;
+
+  /**
+   * Number of samples that completed successfully.
+   */
+  nCompleted: number;
+
+  /**
+   * Number of samples that failed.
+   */
+  nFailed: number;
+
+  /**
+   * Selection strategy used.
+   */
+  selectionStrategy: SelectionStrategy;
+
+  /**
+   * Index of the winning sample.
+   */
+  winningIndex: number;
+
+  /**
+   * Scores of all samples (if scoring was applied).
+   */
+  scores?: number[];
+
+  /**
+   * Total time for ensemble execution (ms).
+   */
+  durationMs: number;
+
+  /**
+   * Model ID used.
+   */
+  modelId: string;
+
+  /**
+   * Aggregated token usage across all samples.
+   */
+  totalUsage: LanguageModelUsage;
+}
+
+/**
+ * Result of an ensemble generation.
+ */
+export interface EnsembleResult {
+  /**
+   * The selected best response text.
+   */
+  text: string;
+
+  /**
+   * Reasoning content from the best response.
+   */
+  reasoning?: string;
+
+  /**
+   * Tool calls from the best response.
+   */
+  toolCalls?: ToolCall[];
+
+  /**
+   * Tool results from the best response.
+   */
+  toolResults?: ToolResult[];
+
+  /**
+   * Token usage from the best response.
+   */
+  usage: LanguageModelUsage;
+
+  /**
+   * All generated responses (populated for 'all' strategy).
+   */
+  alternatives?: EnsembleResponse[];
+
+  /**
+   * Metadata about the ensemble execution.
+   */
+  metadata: EnsembleMetadata;
+}
+
+// ============================================================================
+// Utility Types
+// ============================================================================
+
+/**
+ * Internal state for tracking ensemble progress.
+ */
+export interface EnsembleState {
+  responses: EnsembleResponse[];
+  startTime: number;
+  completed: boolean;
+}
+
+/**
+ * Options for scoring a response.
+ */
+export interface ScoringOptions {
+  heuristic: ScoringHeuristic;
+  customScorer?: (response: EnsembleResponse) => number;
+}
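
For context on how these types fit together, here is a minimal sketch of the scoring step a 'best'-strategy consumer might run. It is illustrative only: the import path and the pickBestIndex helper are assumptions, and the package's actual selection logic lives in package/src/ensemble/multi-sampler.ts and may differ.

// Illustrative sketch only -- not the package's implementation (see multi-sampler.ts).
// Assumes the types above are importable from './types' within the ensemble module.
import type { EnsembleResponse, ScoringOptions } from './types';

// Score every sample and return the index of the winner (higher score wins),
// following the heuristic semantics documented on EnsembleConfig.
function pickBestIndex(responses: EnsembleResponse[], options: ScoringOptions): number {
  const scores = responses.map((response) => {
    if (!response.success) return Number.NEGATIVE_INFINITY; // failed samples never win
    switch (options.heuristic) {
      case 'length':
        return -response.text.length; // prefer shorter responses
      case 'confidence':
        return response.usage?.completionTokens ?? 0; // prefer higher token count
      case 'code':
        return -(response.text.match(/\b(error|exception|todo)\b/gi)?.length ?? 0);
      case 'custom':
        return options.customScorer?.(response) ?? 0;
      default:
        return 0;
    }
  });
  return scores.indexOf(Math.max(...scores));
}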
package/src/files/attachment-processor.ts
CHANGED
@@ -4,6 +4,7 @@
  * @module
  */

+import { type FileCache, type FileCacheEntry, generateCacheKey, getDefaultFileCache } from './file-cache';
 import {
   getExtensionFromPath,
   getMediaTypeFromExtension,
@@ -64,6 +65,13 @@ export interface ProcessAttachmentsOptions {
   uploadImages?: boolean;
   /** Whether to delete files after extraction (cleanup) */
   cleanupAfterExtract?: boolean;
+  /**
+   * Enable caching of uploaded files.
+   * When true, uses the default global cache.
+   * When a FileCache instance, uses that cache.
+   * @default false
+   */
+  cache?: boolean | FileCache;
 }

 // ============================================================================
@@ -101,9 +109,13 @@ export async function processAttachments(options: ProcessAttachmentsOptions): Pr
     clientConfig,
     autoUploadDocuments = true,
     uploadImages = false,
-    cleanupAfterExtract = false
+    cleanupAfterExtract = false,
+    cache = false
   } = options;

+  // Resolve cache instance
+  const cacheInstance = cache === true ? getDefaultFileCache() : cache === false ? null : cache;
+
   const results: ProcessedAttachment[] = [];
   const client = new KimiFileClient(clientConfig);

@@ -112,7 +124,8 @@ export async function processAttachments(options: ProcessAttachmentsOptions): Pr
       const processed = await processAttachment(attachment, client, {
         autoUploadDocuments,
         uploadImages,
-        cleanupAfterExtract
+        cleanupAfterExtract,
+        cache: cacheInstance
       });
       results.push(processed);
     } catch (error) {
@@ -134,7 +147,12 @@ export async function processAttachments(options: ProcessAttachmentsOptions): Pr
 async function processAttachment(
   attachment: Attachment,
   client: KimiFileClient,
-  options: {
+  options: {
+    autoUploadDocuments: boolean;
+    uploadImages: boolean;
+    cleanupAfterExtract: boolean;
+    cache: FileCache | null;
+  }
 ): Promise<ProcessedAttachment> {
   // Determine content type
   const contentType = resolveContentType(attachment);
@@ -196,14 +214,43 @@ async function processAttachment(
     };
   }

+  const filename = attachment.name ?? guessFilename(attachment, contentType);
+
+  // Check cache if enabled
+  if (options.cache) {
+    const cacheKey = generateCacheKey(data, filename);
+    const cached = options.cache.get(cacheKey);
+
+    if (cached) {
+      return {
+        original: attachment,
+        type: 'text-inject',
+        textContent: cached.content,
+        fileId: cached.fileId
+      };
+    }
+  }
+
   // Upload and extract content
   const result = await client.uploadAndExtract({
     data,
-    filename
+    filename,
     mediaType: contentType,
     purpose: 'file-extract'
   });

+  // Store in cache if enabled (before cleanup)
+  if (options.cache && result.content) {
+    const cacheKey = generateCacheKey(data, filename);
+    const cacheEntry: FileCacheEntry = {
+      fileId: result.file.id,
+      content: result.content,
+      createdAt: Date.now(),
+      purpose: 'file-extract'
+    };
+    options.cache.set(cacheKey, cacheEntry);
+  }
+
   // Cleanup if requested
   if (options.cleanupAfterExtract && result.file.id) {
     try {
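
A hedged usage sketch of the new cache option from the caller's side follows. The attachments field and the attachment shape shown here are assumptions for illustration, as is the import path; ProcessAttachmentsOptions above is the authoritative reference.

// Illustrative sketch only. `attachments` and the attachment shape are assumed;
// the cache semantics (true = shared default cache, FileCache instance = scoped
// cache, false = no caching) come from the diff above.
import { FileCache, processAttachments } from 'kimi-vercel-ai-sdk-provider';

const scopedCache = new FileCache({ maxSize: 20, ttlMs: 10 * 60 * 1000 });

async function extractReport(pdfBytes: Uint8Array) {
  // The first call uploads and extracts; a later call with identical bytes and
  // filename should reuse the cached extraction instead of re-uploading.
  return processAttachments({
    attachments: [{ name: 'report.pdf', data: pdfBytes }], // hypothetical attachment shape
    cache: scopedCache // or `cache: true` to use the global default cache
  });
}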
package/src/files/file-cache.ts
ADDED
@@ -0,0 +1,260 @@
+/**
+ * File content caching for efficient re-use of uploaded files.
+ * @module
+ */
+
+// ============================================================================
+// Types
+// ============================================================================
+
+/**
+ * Entry in the file cache.
+ */
+export interface FileCacheEntry {
+  /** The Kimi file ID */
+  fileId: string;
+  /** Extracted text content (for documents) */
+  content?: string;
+  /** Unix timestamp of creation */
+  createdAt: number;
+  /** File purpose */
+  purpose: 'file-extract' | 'image' | 'video';
+}
+
+/**
+ * Options for configuring the file cache.
+ */
+export interface FileCacheOptions {
+  /**
+   * Maximum number of entries in the cache.
+   * When exceeded, least recently used entries are evicted.
+   * @default 100
+   */
+  maxSize?: number;
+
+  /**
+   * Time-to-live for cache entries in milliseconds.
+   * Entries older than this are considered stale.
+   * @default 3600000 (1 hour)
+   */
+  ttlMs?: number;
+}
+
+// ============================================================================
+// LRU Cache Implementation
+// ============================================================================
+
+/**
+ * A simple LRU (Least Recently Used) cache for file content.
+ *
+ * This cache helps avoid re-uploading the same files multiple times
+ * by storing the mapping between content hashes and Kimi file IDs.
+ *
+ * @example
+ * ```ts
+ * const cache = new FileCache({ maxSize: 50, ttlMs: 30 * 60 * 1000 });
+ *
+ * // Check if we have this file cached
+ * const cached = cache.get(contentHash);
+ * if (cached) {
+ *   console.log('Using cached file:', cached.fileId);
+ * }
+ *
+ * // Store a new file
+ * cache.set(contentHash, {
+ *   fileId: 'file_abc123',
+ *   content: 'extracted text...',
+ *   purpose: 'file-extract',
+ *   createdAt: Date.now()
+ * });
+ * ```
+ */
+export class FileCache {
+  private readonly maxSize: number;
+  private readonly ttlMs: number;
+  private readonly cache: Map<string, FileCacheEntry>;
+
+  constructor(options: FileCacheOptions = {}) {
+    this.maxSize = options.maxSize ?? 100;
+    this.ttlMs = options.ttlMs ?? 3600000; // 1 hour
+    this.cache = new Map();
+  }
+
+  /**
+   * Get a cached entry by content hash.
+   * Returns undefined if not found or expired.
+   * Moves the entry to the end (most recently used).
+   */
+  get(contentHash: string): FileCacheEntry | undefined {
+    const entry = this.cache.get(contentHash);
+
+    if (!entry) {
+      return undefined;
+    }
+
+    // Check if entry has expired
+    if (this.isExpired(entry)) {
+      this.cache.delete(contentHash);
+      return undefined;
+    }
+
+    // Move to end (most recently used)
+    this.cache.delete(contentHash);
+    this.cache.set(contentHash, entry);
+
+    return entry;
+  }
+
+  /**
+   * Set a cache entry.
+   * Evicts the least recently used entry if cache is full.
+   */
+  set(contentHash: string, entry: FileCacheEntry): void {
+    // Delete existing entry to update position
+    this.cache.delete(contentHash);
+
+    // Evict oldest entries if at capacity
+    while (this.cache.size >= this.maxSize) {
+      const oldestKey = this.cache.keys().next().value;
+      if (oldestKey !== undefined) {
+        this.cache.delete(oldestKey);
+      } else {
+        break;
+      }
+    }
+
+    this.cache.set(contentHash, entry);
+  }
+
+  /**
+   * Check if an entry exists and is not expired.
+   */
+  has(contentHash: string): boolean {
+    return this.get(contentHash) !== undefined;
+  }
+
+  /**
+   * Delete a specific entry.
+   */
+  delete(contentHash: string): boolean {
+    return this.cache.delete(contentHash);
+  }
+
+  /**
+   * Clear all entries.
+   */
+  clear(): void {
+    this.cache.clear();
+  }
+
+  /**
+   * Get the current cache size.
+   */
+  get size(): number {
+    return this.cache.size;
+  }
+
+  /**
+   * Remove all expired entries.
+   */
+  prune(): number {
+    let pruned = 0;
+    for (const [key, entry] of this.cache) {
+      if (this.isExpired(entry)) {
+        this.cache.delete(key);
+        pruned++;
+      }
+    }
+    return pruned;
+  }
+
+  /**
+   * Check if an entry is expired.
+   */
+  private isExpired(entry: FileCacheEntry): boolean {
+    return Date.now() - entry.createdAt > this.ttlMs;
+  }
+}
+
+// ============================================================================
+// Hash Utilities
+// ============================================================================
+
+/**
+ * Generate a hash from file content for cache lookups.
+ * Uses a simple but fast hash algorithm suitable for deduplication.
+ *
+ * @param data - The file content as Uint8Array or string
+ * @returns A hex string hash
+ */
+export function generateContentHash(data: Uint8Array | string): string {
+  const bytes = typeof data === 'string' ? new TextEncoder().encode(data) : data;
+
+  // Simple FNV-1a hash (fast and good distribution for deduplication)
+  let hash = 2166136261; // FNV offset basis
+
+  for (let i = 0; i < bytes.length; i++) {
+    hash ^= bytes[i];
+    hash = Math.imul(hash, 16777619); // FNV prime
+  }
+
+  // Include length to differentiate files with same content hash but different lengths
+  hash ^= bytes.length;
+
+  // Convert to hex string
+  return (hash >>> 0).toString(16).padStart(8, '0');
+}
+
+/**
+ * Generate a more unique cache key that includes filename and size.
+ * This helps differentiate files that might have similar beginnings.
+ *
+ * @param data - The file content
+ * @param filename - The filename
+ * @returns A cache key string
+ */
+export function generateCacheKey(data: Uint8Array | string, filename: string): string {
+  const bytes = typeof data === 'string' ? new TextEncoder().encode(data) : data;
+  const contentHash = generateContentHash(data);
+  const normalizedFilename = filename.toLowerCase().replace(/[^a-z0-9.]/g, '_');
+
+  return `${contentHash}_${bytes.length}_${normalizedFilename}`;
+}
+
+// ============================================================================
+// Global Cache Instance
+// ============================================================================
+
+/**
+ * Default global file cache instance.
+ * This is used by the attachment processor when caching is enabled.
+ */
+let defaultCache: FileCache | null = null;
+
+/**
+ * Get the default global file cache.
+ * Creates one if it doesn't exist.
+ */
+export function getDefaultFileCache(): FileCache {
+  if (!defaultCache) {
+    defaultCache = new FileCache();
+  }
+  return defaultCache;
+}
+
+/**
+ * Set a custom default file cache.
+ * Useful for testing or custom configurations.
+ */
+export function setDefaultFileCache(cache: FileCache | null): void {
+  defaultCache = cache;
+}
+
+/**
+ * Clear the default file cache.
+ */
+export function clearDefaultFileCache(): void {
+  if (defaultCache) {
+    defaultCache.clear();
+  }
+}
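
To make the key format concrete, here is a small sketch of the deduplication flow using only the exports defined above: identical bytes plus the same filename always map to the same key, so a later lookup hits the cached entry. The file ID, text, and example hash shown are placeholders.

// Sketch of cache-key deduplication using the exports above; fileId and text
// values are placeholders, and the example hash is illustrative.
import { FileCache, generateCacheKey } from './file-cache';

const cache = new FileCache({ maxSize: 10 });
const bytes = new TextEncoder().encode('quarterly numbers...');

// Key format: <8-hex FNV-1a hash>_<byte length>_<normalized filename>,
// e.g. something like "a1b2c3d4_20_report.pdf".
const key = generateCacheKey(bytes, 'Report.PDF');

cache.set(key, {
  fileId: 'file_abc123', // placeholder ID
  content: 'extracted text...',
  createdAt: Date.now(),
  purpose: 'file-extract'
});

// Re-deriving the key from the same bytes and filename hits the cache.
console.log(cache.get(generateCacheKey(bytes, 'Report.PDF'))?.fileId); // "file_abc123"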
package/src/files/index.ts
CHANGED
@@ -4,7 +4,22 @@
  * @module
  */

-export {
+export {
+  type Attachment,
+  type ProcessAttachmentsOptions,
+  type ProcessedAttachment,
+  processAttachments
+} from './attachment-processor';
+export {
+  FileCache,
+  type FileCacheEntry,
+  type FileCacheOptions,
+  clearDefaultFileCache,
+  generateCacheKey,
+  generateContentHash,
+  getDefaultFileCache,
+  setDefaultFileCache
+} from './file-cache';
 export {
   SUPPORTED_FILE_EXTENSIONS,
   SUPPORTED_MIME_TYPES,