@promptbook/openai 0.102.0-6 → 0.102.0-8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +52 -57
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/book-components/Chat/Chat/ChatMessageItem.d.ts +5 -1
- package/esm/typings/src/book-components/Chat/Chat/ChatProps.d.ts +6 -0
- package/esm/typings/src/book-components/Chat/LlmChat/LlmChatProps.d.ts +5 -0
- package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +0 -8
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +52 -57
- package/umd/index.umd.js.map +1 -1
package/esm/index.es.js
CHANGED
@@ -19,7 +19,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
 * @generated
 * @see https://github.com/webgptorg/promptbook
 */
-const PROMPTBOOK_ENGINE_VERSION = '0.102.0-6';
+const PROMPTBOOK_ENGINE_VERSION = '0.102.0-8';
 /**
 * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
 * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2004,6 +2004,7 @@ function isUnsupportedParameterError(error) {
 * @public exported from `@promptbook/openai`
 */
 class OpenAiCompatibleExecutionTools {
+// Removed retriedUnsupportedParameters and attemptHistory instance fields
 /**
 * Creates OpenAI compatible Execution Tools.
 *
@@ -2015,14 +2016,6 @@ class OpenAiCompatibleExecutionTools {
 * OpenAI API client.
 */
 this.client = null;
-/**
-* Tracks models and parameters that have already been retried to prevent infinite loops
-*/
-this.retriedUnsupportedParameters = new Set();
-/**
-* Tracks the history of attempts for error reporting
-*/
-this.attemptHistory = [];
 // TODO: Allow configuring rate limits via options
 this.limiter = new Bottleneck({
 minTime: 60000 / (this.options.maxRequestsPerMinute || DEFAULT_MAX_REQUESTS_PER_MINUTE),
@@ -2084,12 +2077,16 @@ class OpenAiCompatibleExecutionTools {
 * Calls OpenAI compatible API to use a chat model.
 */
 async callChatModel(prompt) {
-
+// Deep clone prompt and modelRequirements to avoid mutation across calls
+const clonedPrompt = JSON.parse(JSON.stringify(prompt));
+// Use local Set for retried parameters to ensure independence and thread safety
+const retriedUnsupportedParameters = new Set();
+return this.callChatModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
 }
 /**
 * Internal method that handles parameter retry for chat model calls
 */
-async callChatModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
+async callChatModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
 var _a;
 if (this.options.isVerbose) {
 console.info(`💬 ${this.title} callChatModel call`, { prompt, currentModelRequirements });
@@ -2199,10 +2196,12 @@
 // If we have attemptStack, include it in the error message
 if (attemptStack.length > 0) {
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 throw error;
@@ -2217,7 +2216,7 @@
 }
 // Create a unique key for this model + parameter combination to prevent infinite loops
 const retryKey = `${modelName}-${unsupportedParameter}`;
-if (
+if (retriedUnsupportedParameters.has(retryKey)) {
 // Already retried this parameter, throw the error with attemptStack
 attemptStack.push({
 modelName,
@@ -2226,14 +2225,16 @@
 stripped: true,
 });
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 // Mark this parameter as retried
-
+retriedUnsupportedParameters.add(retryKey);
 // Log warning in verbose mode
 if (this.options.isVerbose) {
 console.warn(colors.bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
@@ -2247,19 +2248,22 @@
 });
 // Remove the unsupported parameter and retry
 const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
-return this.callChatModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
+return this.callChatModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
 }
 }
 /**
 * Calls OpenAI API to use a complete model.
 */
 async callCompletionModel(prompt) {
-
+// Deep clone prompt and modelRequirements to avoid mutation across calls
+const clonedPrompt = JSON.parse(JSON.stringify(prompt));
+const retriedUnsupportedParameters = new Set();
+return this.callCompletionModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
 }
 /**
 * Internal method that handles parameter retry for completion model calls
 */
-async callCompletionModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
+async callCompletionModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
 var _a;
 if (this.options.isVerbose) {
 console.info(`🖋 ${this.title} callCompletionModel call`, { prompt, currentModelRequirements });
@@ -2275,8 +2279,6 @@
 model: modelName,
 max_tokens: currentModelRequirements.maxTokens,
 temperature: currentModelRequirements.temperature,
-// <- TODO: [🈁] Use `seed` here AND/OR use is `isDeterministic` for entire execution tools
-// <- Note: [🧆]
 };
 const rawPromptContent = templateParameters(content, { ...parameters, modelName });
 const rawRequest = {
@@ -2306,7 +2308,6 @@
 throw new PipelineExecutionError(`No choises from ${this.title}`);
 }
 if (rawResponse.choices.length > 1) {
-// TODO: This should be maybe only warning
 throw new PipelineExecutionError(`More than one choise from ${this.title}`);
 }
 const resultContent = rawResponse.choices[0].text;
@@ -2326,25 +2327,24 @@
 rawPromptContent,
 rawRequest,
 rawResponse,
-// <- [🗯]
 },
 });
 }
 catch (error) {
 assertsError(error);
-// Check if this is an unsupported parameter error
 if (!isUnsupportedParameterError(error)) {
 if (attemptStack.length > 0) {
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 throw error;
 }
-// Parse which parameter is unsupported
 const unsupportedParameter = parseUnsupportedParameterError(error.message);
 if (!unsupportedParameter) {
 if (this.options.isVerbose) {
@@ -2352,9 +2352,8 @@
 }
 throw error;
 }
-// Create a unique key for this model + parameter combination to prevent infinite loops
 const retryKey = `${modelName}-${unsupportedParameter}`;
-if (
+if (retriedUnsupportedParameters.has(retryKey)) {
 attemptStack.push({
 modelName,
 unsupportedParameter,
@@ -2362,15 +2361,15 @@
 stripped: true,
 });
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
-
-this.retriedUnsupportedParameters.add(retryKey);
-// Log warning in verbose mode
+retriedUnsupportedParameters.add(retryKey);
 if (this.options.isVerbose) {
 console.warn(colors.bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
 }
@@ -2380,27 +2379,28 @@
 errorMessage: error.message,
 stripped: true,
 });
-// Remove the unsupported parameter and retry
 const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
-return this.callCompletionModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
+return this.callCompletionModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
 }
 }
 /**
 * Calls OpenAI compatible API to use a embedding model
 */
 async callEmbeddingModel(prompt) {
-
+// Deep clone prompt and modelRequirements to avoid mutation across calls
+const clonedPrompt = JSON.parse(JSON.stringify(prompt));
+const retriedUnsupportedParameters = new Set();
+return this.callEmbeddingModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
 }
 /**
 * Internal method that handles parameter retry for embedding model calls
 */
-async callEmbeddingModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
+async callEmbeddingModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
 if (this.options.isVerbose) {
 console.info(`🖋 ${this.title} embedding call`, { prompt, currentModelRequirements });
 }
 const { content, parameters } = prompt;
 const client = await this.getClient();
-// TODO: [☂] Use here more modelRequirements
 if (currentModelRequirements.modelVariant !== 'EMBEDDING') {
 throw new PipelineExecutionError('Use embed only for EMBEDDING variant');
 }
@@ -2432,9 +2432,7 @@
 throw new PipelineExecutionError(`Expected exactly 1 data item in response, got ${rawResponse.data.length}`);
 }
 const resultContent = rawResponse.data[0].embedding;
-const usage = this.computeUsage(content || '', '',
-// <- Note: Embedding does not have result content
-rawResponse);
+const usage = this.computeUsage(content || '', '', rawResponse);
 return exportJson({
 name: 'promptResult',
 message: `Result of \`OpenAiCompatibleExecutionTools.callEmbeddingModel\``,
@@ -2450,25 +2448,24 @@
 rawPromptContent,
 rawRequest,
 rawResponse,
-// <- [🗯]
 },
 });
 }
 catch (error) {
 assertsError(error);
-// Check if this is an unsupported parameter error
 if (!isUnsupportedParameterError(error)) {
 if (attemptStack.length > 0) {
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 throw error;
 }
-// Parse which parameter is unsupported
 const unsupportedParameter = parseUnsupportedParameterError(error.message);
 if (!unsupportedParameter) {
 if (this.options.isVerbose) {
@@ -2476,9 +2473,8 @@
 }
 throw error;
 }
-// Create a unique key for this model + parameter combination to prevent infinite loops
 const retryKey = `${modelName}-${unsupportedParameter}`;
-if (
+if (retriedUnsupportedParameters.has(retryKey)) {
 attemptStack.push({
 modelName,
 unsupportedParameter,
@@ -2486,15 +2482,15 @@
 stripped: true,
 });
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
-
-this.retriedUnsupportedParameters.add(retryKey);
-// Log warning in verbose mode
+retriedUnsupportedParameters.add(retryKey);
 if (this.options.isVerbose) {
 console.warn(colors.bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
 }
@@ -2504,9 +2500,8 @@
 errorMessage: error.message,
 stripped: true,
 });
-// Remove the unsupported parameter and retry
 const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
-return this.callEmbeddingModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
+return this.callEmbeddingModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
 }
 }
 // <- Note: [🤖] callXxxModel
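For orientation, the change above replaces the instance-level retriedUnsupportedParameters and attemptHistory fields with per-call state: each public callXxxModel now deep-clones the prompt and threads a local Set plus an attemptStack through callXxxModelWithRetry, stripping each unsupported parameter at most once per model before giving up with the collected attempt history. Below is a minimal sketch of that pattern with simplified types; callApi and findUnsupportedParameter are hypothetical stand-ins, not the package's actual helpers.

// Minimal sketch of the per-call retry pattern (not the published implementation).

type ModelRequirements = { modelName: string; temperature?: number; seed?: number };
type Prompt = { content: string; modelRequirements: ModelRequirements };
type Attempt = { modelName: string; unsupportedParameter?: string; errorMessage: string; stripped: boolean };

// Stand-in for the real API call; assumed to reject when a parameter is unsupported.
async function callApi(prompt: Prompt, requirements: ModelRequirements): Promise<string> {
    if (requirements.seed !== undefined) {
        throw new Error(`Unsupported parameter: 'seed' is not supported with this model.`);
    }
    return `echo: ${prompt.content}`;
}

// Stand-in for parseUnsupportedParameterError(); the error format here is an assumption.
function findUnsupportedParameter(errorMessage: string): string | null {
    const match = /Unsupported parameter: '([^']+)'/.exec(errorMessage);
    return match ? match[1] : null;
}

async function callWithRetry(
    prompt: Prompt,
    requirements: ModelRequirements,
    attemptStack: Attempt[] = [],
    retried: Set<string> = new Set(), // local to this call chain, not an instance field
): Promise<string> {
    try {
        return await callApi(prompt, requirements);
    } catch (error) {
        const errorMessage = (error as Error).message;
        const unsupportedParameter = findUnsupportedParameter(errorMessage);
        const retryKey = `${requirements.modelName}-${unsupportedParameter}`;

        // Give up if the error is not about a parameter, or this one was already stripped.
        if (unsupportedParameter === null || retried.has(retryKey)) {
            attemptStack.push({ modelName: requirements.modelName, errorMessage, stripped: false });
            throw new Error(
                `All attempts failed. Attempt history:\n` +
                    attemptStack
                        .map((a, i) => ` ${i + 1}. Model: ${a.modelName}, Error: ${a.errorMessage}`)
                        .join('\n'),
            );
        }

        // Record the attempt, strip the offending parameter, and retry with the same local state.
        retried.add(retryKey);
        attemptStack.push({ modelName: requirements.modelName, unsupportedParameter, errorMessage, stripped: true });
        const stripped = { ...requirements } as Record<string, unknown>;
        delete stripped[unsupportedParameter];
        return callWithRetry(prompt, stripped as ModelRequirements, attemptStack, retried);
    }
}

async function callChatModel(prompt: Prompt): Promise<string> {
    // Deep clone so retries never mutate the caller's prompt object.
    const clonedPrompt: Prompt = JSON.parse(JSON.stringify(prompt));
    return callWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], new Set());
}

The published code clones with JSON.parse(JSON.stringify(...)) as shown in the diff; structuredClone would be an alternative where available, but the released build does not use it.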