@promptbook/ollama 0.102.0-6 → 0.102.0-8
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/esm/index.es.js +52 -57
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/book-components/Chat/Chat/ChatMessageItem.d.ts +5 -1
- package/esm/typings/src/book-components/Chat/Chat/ChatProps.d.ts +6 -0
- package/esm/typings/src/book-components/Chat/LlmChat/LlmChatProps.d.ts +5 -0
- package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +0 -8
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +52 -57
- package/umd/index.umd.js.map +1 -1
package/esm/index.es.js
CHANGED
@@ -18,7 +18,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
 * @generated
 * @see https://github.com/webgptorg/promptbook
 */
-const PROMPTBOOK_ENGINE_VERSION = '0.102.0-6';
+const PROMPTBOOK_ENGINE_VERSION = '0.102.0-8';
 /**
 * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
 * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1974,6 +1974,7 @@ function isUnsupportedParameterError(error) {
 * @public exported from `@promptbook/openai`
 */
 class OpenAiCompatibleExecutionTools {
+// Removed retriedUnsupportedParameters and attemptHistory instance fields
 /**
 * Creates OpenAI compatible Execution Tools.
 *
@@ -1985,14 +1986,6 @@ class OpenAiCompatibleExecutionTools {
 * OpenAI API client.
 */
 this.client = null;
-/**
-* Tracks models and parameters that have already been retried to prevent infinite loops
-*/
-this.retriedUnsupportedParameters = new Set();
-/**
-* Tracks the history of attempts for error reporting
-*/
-this.attemptHistory = [];
 // TODO: Allow configuring rate limits via options
 this.limiter = new Bottleneck({
 minTime: 60000 / (this.options.maxRequestsPerMinute || DEFAULT_MAX_REQUESTS_PER_MINUTE),
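The two hunks above drop the instance-level `retriedUnsupportedParameters` and `attemptHistory` fields in favour of per-call state. A minimal TypeScript sketch of why that matters, using invented names (`SharedStateTools`, `PerCallStateTools`, the model name `'gpt-x'`) rather than the package's actual API: with an instance-level Set, the "already retried" memory persists across unrelated calls, so a later call can refuse to retry a parameter it never stripped itself.

```ts
// Hypothetical illustration, not Promptbook's real API.
// An instance-level Set remembers retries across *all* calls on the instance:
class SharedStateTools {
    private retried = new Set<string>(); // lives as long as the instance

    call(modelName: string, unsupportedParameter: string): string {
        const retryKey = `${modelName}-${unsupportedParameter}`;
        if (this.retried.has(retryKey)) {
            return 'give up'; // a later, unrelated call also lands here
        }
        this.retried.add(retryKey);
        return 'strip parameter and retry';
    }
}

// Per-call state (the approach in this version): each call gets a fresh Set,
// so retry bookkeeping from one prompt cannot affect another.
class PerCallStateTools {
    call(modelName: string, unsupportedParameter: string): string {
        const retried = new Set<string>(); // scoped to this call only
        const retryKey = `${modelName}-${unsupportedParameter}`;
        if (retried.has(retryKey)) {
            return 'give up';
        }
        retried.add(retryKey);
        return 'strip parameter and retry';
    }
}

const shared = new SharedStateTools();
console.log(shared.call('gpt-x', 'temperature')); // 'strip parameter and retry'
console.log(shared.call('gpt-x', 'temperature')); // 'give up' (state leaked across calls)

const perCall = new PerCallStateTools();
console.log(perCall.call('gpt-x', 'temperature')); // 'strip parameter and retry'
console.log(perCall.call('gpt-x', 'temperature')); // 'strip parameter and retry' (fresh state each time)
```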
@@ -2054,12 +2047,16 @@ class OpenAiCompatibleExecutionTools {
 * Calls OpenAI compatible API to use a chat model.
 */
 async callChatModel(prompt) {
-
+// Deep clone prompt and modelRequirements to avoid mutation across calls
+const clonedPrompt = JSON.parse(JSON.stringify(prompt));
+// Use local Set for retried parameters to ensure independence and thread safety
+const retriedUnsupportedParameters = new Set();
+return this.callChatModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
 }
 /**
 * Internal method that handles parameter retry for chat model calls
 */
-async callChatModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
+async callChatModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
 var _a;
 if (this.options.isVerbose) {
 console.info(`💬 ${this.title} callChatModel call`, { prompt, currentModelRequirements });
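`callChatModel` now clones the prompt via a JSON round-trip before handing it to the retry helper, so stripping parameters during retries cannot mutate the caller's object. A short sketch of what that clone does and does not preserve, assuming the prompt is plain serializable data (the `PromptLike` type below is invented for illustration): nested plain-data fields copy fine, while functions, `undefined` values, and `Date` instances do not survive the round-trip.

```ts
// Sketch of the JSON round-trip clone used above (assumes plain-data input).
type ModelRequirements = { modelVariant: string; temperature?: number };
type PromptLike = {
    content: string;
    parameters: Record<string, string>;
    modelRequirements: ModelRequirements;
};

const prompt: PromptLike = {
    content: 'Hello {name}',
    parameters: { name: 'world' },
    modelRequirements: { modelVariant: 'CHAT', temperature: 0.7 },
};

const cloned: PromptLike = JSON.parse(JSON.stringify(prompt));

// Mutating the clone's nested objects leaves the original untouched:
delete cloned.modelRequirements.temperature;
console.log(prompt.modelRequirements.temperature); // 0.7, original unchanged
console.log(cloned.modelRequirements.temperature); // undefined

// Caveats of the technique: functions and undefined are dropped, Dates become strings.
console.log(JSON.parse(JSON.stringify({ when: new Date(), fn: () => 1, missing: undefined })));
// -> { when: '...ISO date string...' }; fn and missing are dropped
```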
@@ -2169,10 +2166,12 @@ class OpenAiCompatibleExecutionTools {
 // If we have attemptStack, include it in the error message
 if (attemptStack.length > 0) {
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 throw error;
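The retry-history formatting above is reshaped onto separate `.map(...)` and `.join('\n')` lines. To show what the resulting message looks like, here is a standalone sketch that applies the same expression to a made-up attempt history (the model name and error text are invented for illustration):

```ts
// Same formatting expression as in the diff, applied to invented data.
type Attempt = {
    modelName: string;
    unsupportedParameter?: string;
    errorMessage: string;
    stripped: boolean;
};

const attemptStack: Attempt[] = [
    {
        modelName: 'example-model',
        unsupportedParameter: 'temperature',
        errorMessage: "Unsupported parameter: 'temperature'",
        stripped: true,
    },
    { modelName: 'example-model', errorMessage: 'Rate limit exceeded', stripped: false },
];

const finalErrorMessage = 'Rate limit exceeded';

const message =
    `All attempts failed. Attempt history:\n` +
    attemptStack
        .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
            (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
            `, Error: ${a.errorMessage}` +
            (a.stripped ? ' (stripped and retried)' : ''))
        .join('\n') +
    `\nFinal error: ${finalErrorMessage}`;

console.log(message);
// All attempts failed. Attempt history:
//  1. Model: example-model, Stripped: temperature, Error: Unsupported parameter: 'temperature' (stripped and retried)
//  2. Model: example-model, Error: Rate limit exceeded
// Final error: Rate limit exceeded
```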
@@ -2187,7 +2186,7 @@ class OpenAiCompatibleExecutionTools {
 }
 // Create a unique key for this model + parameter combination to prevent infinite loops
 const retryKey = `${modelName}-${unsupportedParameter}`;
-if (
+if (retriedUnsupportedParameters.has(retryKey)) {
 // Already retried this parameter, throw the error with attemptStack
 attemptStack.push({
 modelName,
@@ -2196,14 +2195,16 @@ class OpenAiCompatibleExecutionTools {
 stripped: true,
 });
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 // Mark this parameter as retried
-
+retriedUnsupportedParameters.add(retryKey);
 // Log warning in verbose mode
 if (this.options.isVerbose) {
 console.warn(colors.bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
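The guard above keys each strip-and-retry on `${modelName}-${unsupportedParameter}`, so the same parameter is never stripped twice for the same model within one call. A compact sketch of that loop-prevention pattern, with a hypothetical `stripAndRetry` function standing in for the real retry helper:

```ts
// Hypothetical loop-prevention sketch (not the package's actual helper).
function stripAndRetry(
    modelName: string,
    unsupportedParameter: string,
    retried: Set<string>,
): 'retry' | 'give-up' {
    const retryKey = `${modelName}-${unsupportedParameter}`;
    if (retried.has(retryKey)) {
        // The parameter was already stripped once for this model in this call;
        // retrying again could loop forever, so surface the error instead.
        return 'give-up';
    }
    retried.add(retryKey);
    return 'retry';
}

const retried = new Set<string>();
console.log(stripAndRetry('example-model', 'temperature', retried)); // 'retry'
console.log(stripAndRetry('example-model', 'temperature', retried)); // 'give-up'
console.log(stripAndRetry('example-model', 'seed', retried));        // 'retry' (different parameter)
```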
@@ -2217,19 +2218,22 @@ class OpenAiCompatibleExecutionTools {
 });
 // Remove the unsupported parameter and retry
 const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
-return this.callChatModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
+return this.callChatModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
 }
 }
 /**
 * Calls OpenAI API to use a complete model.
 */
 async callCompletionModel(prompt) {
-
+// Deep clone prompt and modelRequirements to avoid mutation across calls
+const clonedPrompt = JSON.parse(JSON.stringify(prompt));
+const retriedUnsupportedParameters = new Set();
+return this.callCompletionModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
 }
 /**
 * Internal method that handles parameter retry for completion model calls
 */
-async callCompletionModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
+async callCompletionModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
 var _a;
 if (this.options.isVerbose) {
 console.info(`🖋 ${this.title} callCompletionModel call`, { prompt, currentModelRequirements });
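Each `...WithRetry` helper now threads both accumulators (`attemptStack` and `retriedUnsupportedParameters`) through its recursive call, with defaults so the first invocation starts from empty state. A small recursive sketch of that pattern, using an invented `callWithRetry` and a fake failing operation rather than the package's real methods:

```ts
// Invented sketch of threading accumulators through a recursive retry helper.
type Attempt = { parameter: string; errorMessage: string };

// Fake operation: fails while any of these "unsupported" parameters is still present.
function fakeCall(parameters: string[]): string {
    const unsupported = parameters.find((p) => p === 'temperature' || p === 'seed');
    if (unsupported) {
        throw new Error(`Unsupported parameter: '${unsupported}'`);
    }
    return 'ok';
}

function callWithRetry(
    parameters: string[],
    attemptStack: Attempt[] = [],     // defaults mean the public entry point
    retried: Set<string> = new Set(), // starts from fresh, empty state
): string {
    try {
        return fakeCall(parameters);
    } catch (error) {
        const match = /Unsupported parameter: '(\w+)'/.exec((error as Error).message);
        if (!match || retried.has(match[1])) {
            throw new Error(`All attempts failed (${attemptStack.length} prior retries)`);
        }
        retried.add(match[1]);
        attemptStack.push({ parameter: match[1], errorMessage: (error as Error).message });
        // Recurse with the *same* accumulators so history and dedup survive the retry.
        return callWithRetry(parameters.filter((p) => p !== match[1]), attemptStack, retried);
    }
}

console.log(callWithRetry(['model', 'temperature', 'seed'])); // 'ok' after stripping two parameters
```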
@@ -2245,8 +2249,6 @@ class OpenAiCompatibleExecutionTools {
 model: modelName,
 max_tokens: currentModelRequirements.maxTokens,
 temperature: currentModelRequirements.temperature,
-// <- TODO: [🈁] Use `seed` here AND/OR use is `isDeterministic` for entire execution tools
-// <- Note: [🧆]
 };
 const rawPromptContent = templateParameters(content, { ...parameters, modelName });
 const rawRequest = {
@@ -2276,7 +2278,6 @@ class OpenAiCompatibleExecutionTools {
 throw new PipelineExecutionError(`No choises from ${this.title}`);
 }
 if (rawResponse.choices.length > 1) {
-// TODO: This should be maybe only warning
 throw new PipelineExecutionError(`More than one choise from ${this.title}`);
 }
 const resultContent = rawResponse.choices[0].text;
@@ -2296,25 +2297,24 @@ class OpenAiCompatibleExecutionTools {
 rawPromptContent,
 rawRequest,
 rawResponse,
-// <- [🗯]
 },
 });
 }
 catch (error) {
 assertsError(error);
-// Check if this is an unsupported parameter error
 if (!isUnsupportedParameterError(error)) {
 if (attemptStack.length > 0) {
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 throw error;
 }
-// Parse which parameter is unsupported
 const unsupportedParameter = parseUnsupportedParameterError(error.message);
 if (!unsupportedParameter) {
 if (this.options.isVerbose) {
@@ -2322,9 +2322,8 @@ class OpenAiCompatibleExecutionTools {
 }
 throw error;
 }
-// Create a unique key for this model + parameter combination to prevent infinite loops
 const retryKey = `${modelName}-${unsupportedParameter}`;
-if (
+if (retriedUnsupportedParameters.has(retryKey)) {
 attemptStack.push({
 modelName,
 unsupportedParameter,
@@ -2332,15 +2331,15 @@ class OpenAiCompatibleExecutionTools {
 stripped: true,
 });
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
-
-this.retriedUnsupportedParameters.add(retryKey);
-// Log warning in verbose mode
+retriedUnsupportedParameters.add(retryKey);
 if (this.options.isVerbose) {
 console.warn(colors.bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
 }
@@ -2350,27 +2349,28 @@ class OpenAiCompatibleExecutionTools {
 errorMessage: error.message,
 stripped: true,
 });
-// Remove the unsupported parameter and retry
 const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
-return this.callCompletionModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
+return this.callCompletionModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
 }
 }
 /**
 * Calls OpenAI compatible API to use a embedding model
 */
 async callEmbeddingModel(prompt) {
-
+// Deep clone prompt and modelRequirements to avoid mutation across calls
+const clonedPrompt = JSON.parse(JSON.stringify(prompt));
+const retriedUnsupportedParameters = new Set();
+return this.callEmbeddingModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
 }
 /**
 * Internal method that handles parameter retry for embedding model calls
 */
-async callEmbeddingModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
+async callEmbeddingModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
 if (this.options.isVerbose) {
 console.info(`🖋 ${this.title} embedding call`, { prompt, currentModelRequirements });
 }
 const { content, parameters } = prompt;
 const client = await this.getClient();
-// TODO: [☂] Use here more modelRequirements
 if (currentModelRequirements.modelVariant !== 'EMBEDDING') {
 throw new PipelineExecutionError('Use embed only for EMBEDDING variant');
 }
@@ -2402,9 +2402,7 @@ class OpenAiCompatibleExecutionTools {
 throw new PipelineExecutionError(`Expected exactly 1 data item in response, got ${rawResponse.data.length}`);
 }
 const resultContent = rawResponse.data[0].embedding;
-const usage = this.computeUsage(content || '', '',
-// <- Note: Embedding does not have result content
-rawResponse);
+const usage = this.computeUsage(content || '', '', rawResponse);
 return exportJson({
 name: 'promptResult',
 message: `Result of \`OpenAiCompatibleExecutionTools.callEmbeddingModel\``,
@@ -2420,25 +2418,24 @@ class OpenAiCompatibleExecutionTools {
 rawPromptContent,
 rawRequest,
 rawResponse,
-// <- [🗯]
 },
 });
 }
 catch (error) {
 assertsError(error);
-// Check if this is an unsupported parameter error
 if (!isUnsupportedParameterError(error)) {
 if (attemptStack.length > 0) {
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
 throw error;
 }
-// Parse which parameter is unsupported
 const unsupportedParameter = parseUnsupportedParameterError(error.message);
 if (!unsupportedParameter) {
 if (this.options.isVerbose) {
@@ -2446,9 +2443,8 @@ class OpenAiCompatibleExecutionTools {
 }
 throw error;
 }
-// Create a unique key for this model + parameter combination to prevent infinite loops
 const retryKey = `${modelName}-${unsupportedParameter}`;
-if (
+if (retriedUnsupportedParameters.has(retryKey)) {
 attemptStack.push({
 modelName,
 unsupportedParameter,
@@ -2456,15 +2452,15 @@ class OpenAiCompatibleExecutionTools {
 stripped: true,
 });
 throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
-attemptStack
+attemptStack
+.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
 (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
 `, Error: ${a.errorMessage}` +
-(a.stripped ? ' (stripped and retried)' : ''))
+(a.stripped ? ' (stripped and retried)' : ''))
+.join('\n') +
 `\nFinal error: ${error.message}`);
 }
-
-this.retriedUnsupportedParameters.add(retryKey);
-// Log warning in verbose mode
+retriedUnsupportedParameters.add(retryKey);
 if (this.options.isVerbose) {
 console.warn(colors.bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
 }
@@ -2474,9 +2470,8 @@ class OpenAiCompatibleExecutionTools {
 errorMessage: error.message,
 stripped: true,
 });
-// Remove the unsupported parameter and retry
 const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
-return this.callEmbeddingModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
+return this.callEmbeddingModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
 }
 }
 // <- Note: [🤖] callXxxModel