@promptbook/ollama 0.102.0-6 → 0.102.0-7

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in the respective public registry.
package/umd/index.umd.js CHANGED
@@ -25,7 +25,7 @@
25
25
  * @generated
26
26
  * @see https://github.com/webgptorg/promptbook
27
27
  */
28
- const PROMPTBOOK_ENGINE_VERSION = '0.102.0-6';
28
+ const PROMPTBOOK_ENGINE_VERSION = '0.102.0-7';
29
29
  /**
30
30
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
31
31
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1981,6 +1981,7 @@
1981
1981
  * @public exported from `@promptbook/openai`
1982
1982
  */
1983
1983
  class OpenAiCompatibleExecutionTools {
1984
+ // Removed retriedUnsupportedParameters and attemptHistory instance fields
1984
1985
  /**
1985
1986
  * Creates OpenAI compatible Execution Tools.
1986
1987
  *
@@ -1992,14 +1993,6 @@
1992
1993
  * OpenAI API client.
1993
1994
  */
1994
1995
  this.client = null;
1995
- /**
1996
- * Tracks models and parameters that have already been retried to prevent infinite loops
1997
- */
1998
- this.retriedUnsupportedParameters = new Set();
1999
- /**
2000
- * Tracks the history of attempts for error reporting
2001
- */
2002
- this.attemptHistory = [];
2003
1996
  // TODO: Allow configuring rate limits via options
2004
1997
  this.limiter = new Bottleneck__default["default"]({
2005
1998
  minTime: 60000 / (this.options.maxRequestsPerMinute || DEFAULT_MAX_REQUESTS_PER_MINUTE),
@@ -2061,12 +2054,16 @@
2061
2054
  * Calls OpenAI compatible API to use a chat model.
2062
2055
  */
2063
2056
  async callChatModel(prompt) {
2064
- return this.callChatModelWithRetry(prompt, prompt.modelRequirements);
2057
+ // Deep clone prompt and modelRequirements to avoid mutation across calls
2058
+ const clonedPrompt = JSON.parse(JSON.stringify(prompt));
2059
+ // Use local Set for retried parameters to ensure independence and thread safety
2060
+ const retriedUnsupportedParameters = new Set();
2061
+ return this.callChatModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
2065
2062
  }
2066
2063
  /**
2067
2064
  * Internal method that handles parameter retry for chat model calls
2068
2065
  */
2069
- async callChatModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
2066
+ async callChatModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
2070
2067
  var _a;
2071
2068
  if (this.options.isVerbose) {
2072
2069
  console.info(`💬 ${this.title} callChatModel call`, { prompt, currentModelRequirements });
@@ -2176,10 +2173,12 @@
2176
2173
  // If we have attemptStack, include it in the error message
2177
2174
  if (attemptStack.length > 0) {
2178
2175
  throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
2179
- attemptStack.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2176
+ attemptStack
2177
+ .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2180
2178
  (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
2181
2179
  `, Error: ${a.errorMessage}` +
2182
- (a.stripped ? ' (stripped and retried)' : '')).join('\n') +
2180
+ (a.stripped ? ' (stripped and retried)' : ''))
2181
+ .join('\n') +
2183
2182
  `\nFinal error: ${error.message}`);
2184
2183
  }
2185
2184
  throw error;
@@ -2194,7 +2193,7 @@
2194
2193
  }
2195
2194
  // Create a unique key for this model + parameter combination to prevent infinite loops
2196
2195
  const retryKey = `${modelName}-${unsupportedParameter}`;
2197
- if (this.retriedUnsupportedParameters.has(retryKey)) {
2196
+ if (retriedUnsupportedParameters.has(retryKey)) {
2198
2197
  // Already retried this parameter, throw the error with attemptStack
2199
2198
  attemptStack.push({
2200
2199
  modelName,
@@ -2203,14 +2202,16 @@
2203
2202
  stripped: true,
2204
2203
  });
2205
2204
  throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
2206
- attemptStack.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2205
+ attemptStack
2206
+ .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2207
2207
  (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
2208
2208
  `, Error: ${a.errorMessage}` +
2209
- (a.stripped ? ' (stripped and retried)' : '')).join('\n') +
2209
+ (a.stripped ? ' (stripped and retried)' : ''))
2210
+ .join('\n') +
2210
2211
  `\nFinal error: ${error.message}`);
2211
2212
  }
2212
2213
  // Mark this parameter as retried
2213
- this.retriedUnsupportedParameters.add(retryKey);
2214
+ retriedUnsupportedParameters.add(retryKey);
2214
2215
  // Log warning in verbose mode
2215
2216
  if (this.options.isVerbose) {
2216
2217
  console.warn(colors__default["default"].bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
@@ -2224,19 +2225,22 @@
2224
2225
  });
2225
2226
  // Remove the unsupported parameter and retry
2226
2227
  const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
2227
- return this.callChatModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
2228
+ return this.callChatModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
2228
2229
  }
2229
2230
  }
2230
2231
  /**
2231
2232
  * Calls OpenAI API to use a complete model.
2232
2233
  */
2233
2234
  async callCompletionModel(prompt) {
2234
- return this.callCompletionModelWithRetry(prompt, prompt.modelRequirements);
2235
+ // Deep clone prompt and modelRequirements to avoid mutation across calls
2236
+ const clonedPrompt = JSON.parse(JSON.stringify(prompt));
2237
+ const retriedUnsupportedParameters = new Set();
2238
+ return this.callCompletionModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
2235
2239
  }
2236
2240
  /**
2237
2241
  * Internal method that handles parameter retry for completion model calls
2238
2242
  */
2239
- async callCompletionModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
2243
+ async callCompletionModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
2240
2244
  var _a;
2241
2245
  if (this.options.isVerbose) {
2242
2246
  console.info(`🖋 ${this.title} callCompletionModel call`, { prompt, currentModelRequirements });
@@ -2252,8 +2256,6 @@
2252
2256
  model: modelName,
2253
2257
  max_tokens: currentModelRequirements.maxTokens,
2254
2258
  temperature: currentModelRequirements.temperature,
2255
- // <- TODO: [🈁] Use `seed` here AND/OR use is `isDeterministic` for entire execution tools
2256
- // <- Note: [🧆]
2257
2259
  };
2258
2260
  const rawPromptContent = templateParameters(content, { ...parameters, modelName });
2259
2261
  const rawRequest = {
@@ -2283,7 +2285,6 @@
2283
2285
  throw new PipelineExecutionError(`No choises from ${this.title}`);
2284
2286
  }
2285
2287
  if (rawResponse.choices.length > 1) {
2286
- // TODO: This should be maybe only warning
2287
2288
  throw new PipelineExecutionError(`More than one choise from ${this.title}`);
2288
2289
  }
2289
2290
  const resultContent = rawResponse.choices[0].text;
@@ -2303,25 +2304,24 @@
2303
2304
  rawPromptContent,
2304
2305
  rawRequest,
2305
2306
  rawResponse,
2306
- // <- [🗯]
2307
2307
  },
2308
2308
  });
2309
2309
  }
2310
2310
  catch (error) {
2311
2311
  assertsError(error);
2312
- // Check if this is an unsupported parameter error
2313
2312
  if (!isUnsupportedParameterError(error)) {
2314
2313
  if (attemptStack.length > 0) {
2315
2314
  throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
2316
- attemptStack.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2315
+ attemptStack
2316
+ .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2317
2317
  (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
2318
2318
  `, Error: ${a.errorMessage}` +
2319
- (a.stripped ? ' (stripped and retried)' : '')).join('\n') +
2319
+ (a.stripped ? ' (stripped and retried)' : ''))
2320
+ .join('\n') +
2320
2321
  `\nFinal error: ${error.message}`);
2321
2322
  }
2322
2323
  throw error;
2323
2324
  }
2324
- // Parse which parameter is unsupported
2325
2325
  const unsupportedParameter = parseUnsupportedParameterError(error.message);
2326
2326
  if (!unsupportedParameter) {
2327
2327
  if (this.options.isVerbose) {
@@ -2329,9 +2329,8 @@
2329
2329
  }
2330
2330
  throw error;
2331
2331
  }
2332
- // Create a unique key for this model + parameter combination to prevent infinite loops
2333
2332
  const retryKey = `${modelName}-${unsupportedParameter}`;
2334
- if (this.retriedUnsupportedParameters.has(retryKey)) {
2333
+ if (retriedUnsupportedParameters.has(retryKey)) {
2335
2334
  attemptStack.push({
2336
2335
  modelName,
2337
2336
  unsupportedParameter,
@@ -2339,15 +2338,15 @@
2339
2338
  stripped: true,
2340
2339
  });
2341
2340
  throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
2342
- attemptStack.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2341
+ attemptStack
2342
+ .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2343
2343
  (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
2344
2344
  `, Error: ${a.errorMessage}` +
2345
- (a.stripped ? ' (stripped and retried)' : '')).join('\n') +
2345
+ (a.stripped ? ' (stripped and retried)' : ''))
2346
+ .join('\n') +
2346
2347
  `\nFinal error: ${error.message}`);
2347
2348
  }
2348
- // Mark this parameter as retried
2349
- this.retriedUnsupportedParameters.add(retryKey);
2350
- // Log warning in verbose mode
2349
+ retriedUnsupportedParameters.add(retryKey);
2351
2350
  if (this.options.isVerbose) {
2352
2351
  console.warn(colors__default["default"].bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
2353
2352
  }
@@ -2357,27 +2356,28 @@
2357
2356
  errorMessage: error.message,
2358
2357
  stripped: true,
2359
2358
  });
2360
- // Remove the unsupported parameter and retry
2361
2359
  const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
2362
- return this.callCompletionModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
2360
+ return this.callCompletionModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
2363
2361
  }
2364
2362
  }
2365
2363
  /**
2366
2364
  * Calls OpenAI compatible API to use a embedding model
2367
2365
  */
2368
2366
  async callEmbeddingModel(prompt) {
2369
- return this.callEmbeddingModelWithRetry(prompt, prompt.modelRequirements);
2367
+ // Deep clone prompt and modelRequirements to avoid mutation across calls
2368
+ const clonedPrompt = JSON.parse(JSON.stringify(prompt));
2369
+ const retriedUnsupportedParameters = new Set();
2370
+ return this.callEmbeddingModelWithRetry(clonedPrompt, clonedPrompt.modelRequirements, [], retriedUnsupportedParameters);
2370
2371
  }
2371
2372
  /**
2372
2373
  * Internal method that handles parameter retry for embedding model calls
2373
2374
  */
2374
- async callEmbeddingModelWithRetry(prompt, currentModelRequirements, attemptStack = []) {
2375
+ async callEmbeddingModelWithRetry(prompt, currentModelRequirements, attemptStack = [], retriedUnsupportedParameters = new Set()) {
2375
2376
  if (this.options.isVerbose) {
2376
2377
  console.info(`🖋 ${this.title} embedding call`, { prompt, currentModelRequirements });
2377
2378
  }
2378
2379
  const { content, parameters } = prompt;
2379
2380
  const client = await this.getClient();
2380
- // TODO: [☂] Use here more modelRequirements
2381
2381
  if (currentModelRequirements.modelVariant !== 'EMBEDDING') {
2382
2382
  throw new PipelineExecutionError('Use embed only for EMBEDDING variant');
2383
2383
  }
@@ -2409,9 +2409,7 @@
2409
2409
  throw new PipelineExecutionError(`Expected exactly 1 data item in response, got ${rawResponse.data.length}`);
2410
2410
  }
2411
2411
  const resultContent = rawResponse.data[0].embedding;
2412
- const usage = this.computeUsage(content || '', '',
2413
- // <- Note: Embedding does not have result content
2414
- rawResponse);
2412
+ const usage = this.computeUsage(content || '', '', rawResponse);
2415
2413
  return exportJson({
2416
2414
  name: 'promptResult',
2417
2415
  message: `Result of \`OpenAiCompatibleExecutionTools.callEmbeddingModel\``,
@@ -2427,25 +2425,24 @@
2427
2425
  rawPromptContent,
2428
2426
  rawRequest,
2429
2427
  rawResponse,
2430
- // <- [🗯]
2431
2428
  },
2432
2429
  });
2433
2430
  }
2434
2431
  catch (error) {
2435
2432
  assertsError(error);
2436
- // Check if this is an unsupported parameter error
2437
2433
  if (!isUnsupportedParameterError(error)) {
2438
2434
  if (attemptStack.length > 0) {
2439
2435
  throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
2440
- attemptStack.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2436
+ attemptStack
2437
+ .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2441
2438
  (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
2442
2439
  `, Error: ${a.errorMessage}` +
2443
- (a.stripped ? ' (stripped and retried)' : '')).join('\n') +
2440
+ (a.stripped ? ' (stripped and retried)' : ''))
2441
+ .join('\n') +
2444
2442
  `\nFinal error: ${error.message}`);
2445
2443
  }
2446
2444
  throw error;
2447
2445
  }
2448
- // Parse which parameter is unsupported
2449
2446
  const unsupportedParameter = parseUnsupportedParameterError(error.message);
2450
2447
  if (!unsupportedParameter) {
2451
2448
  if (this.options.isVerbose) {
@@ -2453,9 +2450,8 @@
2453
2450
  }
2454
2451
  throw error;
2455
2452
  }
2456
- // Create a unique key for this model + parameter combination to prevent infinite loops
2457
2453
  const retryKey = `${modelName}-${unsupportedParameter}`;
2458
- if (this.retriedUnsupportedParameters.has(retryKey)) {
2454
+ if (retriedUnsupportedParameters.has(retryKey)) {
2459
2455
  attemptStack.push({
2460
2456
  modelName,
2461
2457
  unsupportedParameter,
@@ -2463,15 +2459,15 @@
2463
2459
  stripped: true,
2464
2460
  });
2465
2461
  throw new PipelineExecutionError(`All attempts failed. Attempt history:\n` +
2466
- attemptStack.map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2462
+ attemptStack
2463
+ .map((a, i) => ` ${i + 1}. Model: ${a.modelName}` +
2467
2464
  (a.unsupportedParameter ? `, Stripped: ${a.unsupportedParameter}` : '') +
2468
2465
  `, Error: ${a.errorMessage}` +
2469
- (a.stripped ? ' (stripped and retried)' : '')).join('\n') +
2466
+ (a.stripped ? ' (stripped and retried)' : ''))
2467
+ .join('\n') +
2470
2468
  `\nFinal error: ${error.message}`);
2471
2469
  }
2472
- // Mark this parameter as retried
2473
- this.retriedUnsupportedParameters.add(retryKey);
2474
- // Log warning in verbose mode
2470
+ retriedUnsupportedParameters.add(retryKey);
2475
2471
  if (this.options.isVerbose) {
2476
2472
  console.warn(colors__default["default"].bgYellow('Warning'), `Removing unsupported parameter '${unsupportedParameter}' for model '${modelName}' and retrying request`);
2477
2473
  }
@@ -2481,9 +2477,8 @@
2481
2477
  errorMessage: error.message,
2482
2478
  stripped: true,
2483
2479
  });
2484
- // Remove the unsupported parameter and retry
2485
2480
  const modifiedModelRequirements = removeUnsupportedModelRequirement(currentModelRequirements, unsupportedParameter);
2486
- return this.callEmbeddingModelWithRetry(prompt, modifiedModelRequirements, attemptStack);
2481
+ return this.callEmbeddingModelWithRetry(prompt, modifiedModelRequirements, attemptStack, retriedUnsupportedParameters);
2487
2482
  }
2488
2483
  }
2489
2484
  // <- Note: [🤖] callXxxModel