llm-fns 1.0.12 → 1.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -14,12 +14,23 @@ export interface CreateFetcherDependencies {
|
|
|
14
14
|
prefix?: string;
|
|
15
15
|
/** Time-to-live for cache entries, in milliseconds. */
|
|
16
16
|
ttl?: number;
|
|
17
|
-
/** Request timeout in milliseconds. If not provided, no timeout is applied
|
|
17
|
+
/** Request timeout in milliseconds. If not provided, no timeout is applied.
|
|
18
|
+
|
|
19
|
+
 *
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
*/
|
|
18
23
|
timeout?: number;
|
|
19
24
|
/** User-Agent string for requests. */
|
|
20
25
|
userAgent?: string;
|
|
21
26
|
/** Optional custom fetch implementation. Defaults to global fetch. */
|
|
22
27
|
fetch?: (url: string | URL | Request, init?: RequestInit) => Promise<Response>;
|
|
28
|
+
/**
|
|
29
|
+
* Optional callback to determine if a response should be cached.
|
|
30
|
+
* It receives a cloned response that can be read (e.g. .json()).
|
|
31
|
+
* If it returns false, the response is not cached.
|
|
32
|
+
*/
|
|
33
|
+
shouldCache?: (response: Response) => Promise<boolean> | boolean;
|
|
23
34
|
}
|
|
24
35
|
export declare class CachedResponse extends Response {
|
|
25
36
|
#private;
|
|
@@ -27,7 +27,7 @@ exports.CachedResponse = CachedResponse;
|
|
|
27
27
|
* @returns A function with the same signature as native `fetch`.
|
|
28
28
|
*/
|
|
29
29
|
function createCachedFetcher(deps) {
|
|
30
|
-
const { cache, prefix = 'http-cache', ttl, timeout, userAgent, fetch: customFetch } = deps;
|
|
30
|
+
const { cache, prefix = 'http-cache', ttl, timeout, userAgent, fetch: customFetch, shouldCache } = deps;
|
|
31
31
|
const fetchImpl = customFetch ?? fetch;
|
|
32
32
|
const fetchWithTimeout = async (url, options) => {
|
|
33
33
|
// Correctly merge headers using Headers API to handle various input formats (plain object, Headers instance, array)
|
|
@@ -123,19 +123,49 @@ function createCachedFetcher(deps) {
|
|
|
123
123
|
const response = await fetchWithTimeout(url, options);
|
|
124
124
|
// 3. Store in cache on success
|
|
125
125
|
if (response.ok) {
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
}
|
|
137
|
-
|
|
138
|
-
|
|
126
|
+
let isCacheable = true;
|
|
127
|
+
if (shouldCache) {
|
|
128
|
+
const checkClone = response.clone();
|
|
129
|
+
try {
|
|
130
|
+
isCacheable = await shouldCache(checkClone);
|
|
131
|
+
}
|
|
132
|
+
catch (e) {
|
|
133
|
+
console.warn('[Cache Check Error] shouldCache threw an error, skipping cache', e);
|
|
134
|
+
isCacheable = false;
|
|
135
|
+
}
|
|
136
|
+
}
|
|
137
|
+
else {
|
|
138
|
+
// Default behavior: check for .error in JSON responses
|
|
139
|
+
const contentType = response.headers.get('content-type');
|
|
140
|
+
if (contentType && contentType.includes('application/json')) {
|
|
141
|
+
const checkClone = response.clone();
|
|
142
|
+
try {
|
|
143
|
+
const body = await checkClone.json();
|
|
144
|
+
if (body && typeof body === 'object' && 'error' in body) {
|
|
145
|
+
console.log(`[Cache SKIP] JSON response contains .error property for: ${urlString}`);
|
|
146
|
+
isCacheable = false;
|
|
147
|
+
}
|
|
148
|
+
}
|
|
149
|
+
catch (e) {
|
|
150
|
+
// Ignore JSON parse errors, assume cacheable if status is OK
|
|
151
|
+
}
|
|
152
|
+
}
|
|
153
|
+
}
|
|
154
|
+
if (isCacheable) {
|
|
155
|
+
const responseClone = response.clone();
|
|
156
|
+
const bodyBuffer = await responseClone.arrayBuffer();
|
|
157
|
+
// Convert ArrayBuffer to a base64 string for safe JSON serialization.
|
|
158
|
+
const bodyBase64 = Buffer.from(bodyBuffer).toString('base64');
|
|
159
|
+
const headers = Object.fromEntries(response.headers.entries());
|
|
160
|
+
const itemToCache = {
|
|
161
|
+
bodyBase64,
|
|
162
|
+
headers,
|
|
163
|
+
status: response.status,
|
|
164
|
+
finalUrl: response.url,
|
|
165
|
+
};
|
|
166
|
+
await cache.set(cacheKey, itemToCache, options?.ttl ?? ttl);
|
|
167
|
+
console.log(`[Cache SET] for: ${cacheKey}`);
|
|
168
|
+
}
|
|
139
169
|
}
|
|
140
170
|
// 4. Return the original response
|
|
141
171
|
return response;
|
package/dist/createLlmClient.js
CHANGED
|
@@ -206,6 +206,11 @@ function createLlmClient(params) {
|
|
|
206
206
|
const task = () => (0, retryUtils_js_1.executeWithRetry)(async () => {
|
|
207
207
|
return openai.chat.completions.create(completionParams);
|
|
208
208
|
}, async (completion) => {
|
|
209
|
+
if (completion.error) {
|
|
210
|
+
return {
|
|
211
|
+
isValid: false,
|
|
212
|
+
};
|
|
213
|
+
}
|
|
209
214
|
return { isValid: true, data: completion };
|
|
210
215
|
}, retries ?? 3, undefined, (error) => {
|
|
211
216
|
// Do not retry if the API key is invalid (401) or if the error code explicitly states it.
|