@promptbook/node 0.69.0-12 → 0.69.0-13

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -15,7 +15,7 @@ import * as dotenv from 'dotenv';
  /**
  * The version of the Promptbook library
  */
- var PROMPTBOOK_VERSION = '0.69.0-11';
+ var PROMPTBOOK_VERSION = '0.69.0-12';
  // TODO: !!!! List here all the versions and annotate + put into script

  /*! *****************************************************************************
@@ -427,14 +427,12 @@ var RESERVED_PARAMETER_RESTRICTED = 'RESTRICTED-' + REPLACING_NONCE;
  *
  * @public exported from `@promptbook/core`
  */
- var DEFAULT_CSV_SETTINGS = {
- header: true,
+ var DEFAULT_CSV_SETTINGS = Object.freeze({
  delimiter: ',',
  quoteChar: '"',
  newline: '\n',
  skipEmptyLines: true,
- // encoding: 'utf8'
- };
+ });
  /**
  * @@@
  *
@@ -898,7 +896,7 @@ function forEachAsync(array, options, callbackfunction) {
  });
  }
 
- var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.69.0-11",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.69.0-11",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.69.0-11",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.69.0-11",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",content:"You 
are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];
+ var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.69.0-12",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.69.0-12",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.69.0-12",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.69.0-12",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",content:"You 
are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];
 
  /**
  * This error indicates that the promptbook in a markdown format cannot be parsed into a valid promptbook object
@@ -2337,9 +2335,7 @@ var MultipleLlmExecutionTools = /** @class */ (function () {
  throw new PipelineExecutionError("You have not provided any `LlmExecutionTools`");
  }
  else {
- throw new PipelineExecutionError(spaceTrim$1(function (block) { return "\n You have not provided any `LlmExecutionTools` that support model variant \"".concat(prompt.modelRequirements.modelVariant, "\n\n Available `LlmExecutionTools`:\n ").concat(block(_this.llmExecutionTools
- .map(function (tools) { return "- ".concat(tools.title, " ").concat(tools.description || ''); })
- .join('\n')), "\n\n "); }));
+ throw new PipelineExecutionError(spaceTrim$1(function (block) { return "\n You have not provided any `LlmExecutionTools` that support model variant \"".concat(prompt.modelRequirements.modelVariant, "\n\n Available `LlmExecutionTools`:\n ").concat(block(_this.llmExecutionTools.map(function (tools) { return "- ".concat(tools.title); }).join('\n')), "\n\n "); }));
  }
  }
  });
@@ -2645,6 +2641,16 @@ function TODO_USE() {
  }
  }

+ /**
+ * @@@
+ *
+ * @public exported from `@promptbook/core`
+ */
+ var MANDATORY_CSV_SETTINGS = Object.freeze({
+ header: true,
+ // encoding: 'utf8',
+ });
+
  /**
  * Definition for CSV spreadsheet
  *
@@ -2683,7 +2689,7 @@ var CsvFormatDefinition = {
  return __generator(this, function (_a) {
  switch (_a.label) {
  case 0:
- csv = parse(value, settings);
+ csv = parse(value, __assign(__assign({}, settings), MANDATORY_CSV_SETTINGS));
  if (csv.errors.length !== 0) {
  throw new ParseError(// <- TODO: !!!!!! Split PipelineParseError and FormatParseError -> CsvParseError
  spaceTrim$1(function (block) { return "\n CSV parsing error\n\n ".concat(block(csv.errors.map(function (error) { return error.message; }).join('\n\n')), "\n "); }));
@@ -2708,7 +2714,7 @@ var CsvFormatDefinition = {
  }); }))];
  case 1:
  mappedData = _a.sent();
- return [2 /*return*/, unparse(mappedData, settings)];
+ return [2 /*return*/, unparse(mappedData, __assign(__assign({}, settings), MANDATORY_CSV_SETTINGS))];
  }
  });
  });
@@ -2723,7 +2729,7 @@ var CsvFormatDefinition = {
  return __generator(this, function (_a) {
  switch (_a.label) {
  case 0:
- csv = parse(value, settings);
+ csv = parse(value, __assign(__assign({}, settings), MANDATORY_CSV_SETTINGS));
  if (csv.errors.length !== 0) {
  throw new ParseError(// <- TODO: !!!!!! Split PipelineParseError and FormatParseError -> CsvParseError
  spaceTrim$1(function (block) { return "\n CSV parsing error\n\n ".concat(block(csv.errors.map(function (error) { return error.message; }).join('\n\n')), "\n "); }));
@@ -2746,7 +2752,7 @@ var CsvFormatDefinition = {
  }); }))];
  case 1:
  mappedData = _a.sent();
- return [2 /*return*/, unparse(mappedData, settings)];
+ return [2 /*return*/, unparse(mappedData, __assign(__assign({}, settings), MANDATORY_CSV_SETTINGS))];
  }
  });
  });
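Taken together, the CSV hunks above remove `header: true` from the now-frozen `DEFAULT_CSV_SETTINGS` and re-apply it through the new `MANDATORY_CSV_SETTINGS`, which is spread last into every `parse`/`unparse` call. The following sketch is not part of the package; it only illustrates, assuming plain object-spread semantics (what the emitted `__assign` helper compiles down to), why caller-supplied settings can no longer override the mandatory keys.

// Illustrative sketch only (not from the package): the emitted
// __assign(__assign({}, settings), MANDATORY_CSV_SETTINGS) is equivalent to
// { ...settings, ...MANDATORY_CSV_SETTINGS }, so the mandatory keys always win.
const MANDATORY_CSV_SETTINGS = Object.freeze({
    header: true,
    // encoding: 'utf8',
});

const DEFAULT_CSV_SETTINGS = Object.freeze({
    delimiter: ',',
    quoteChar: '"',
    newline: '\n',
    skipEmptyLines: true,
});

// Hypothetical caller settings that try to disable the header row:
const callerSettings = { ...DEFAULT_CSV_SETTINGS, header: false, delimiter: ';' };

const effectiveSettings = { ...callerSettings, ...MANDATORY_CSV_SETTINGS };
console.log(effectiveSettings.header);    // true  <- mandatory value wins
console.log(effectiveSettings.delimiter); // ';'   <- non-mandatory keys are still honored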
@@ -3367,10 +3373,10 @@ function executeAttempts(options) {
  $scriptPipelineExecutionErrors: [],
  };
  _loop_1 = function (attempt) {
- var isJokerAttempt, jokerParameterName, _b, modelRequirements, _c, _d, _e, _f, _g, _h, scriptTools, _j, error_1, e_1_1, _k, _l, _m, functionName, postprocessingError, _o, _p, scriptTools, _q, error_2, e_2_1, e_3_1, error_3;
- var e_1, _r, e_3, _s, e_2, _t;
- return __generator(this, function (_u) {
- switch (_u.label) {
+ var isJokerAttempt, jokerParameterName, _b, modelRequirements, _c, _d, _e, _f, _g, scriptTools, _h, error_1, e_1_1, _j, _k, _l, functionName, postprocessingError, _m, _o, scriptTools, _p, error_2, e_2_1, e_3_1, error_3;
+ var e_1, _q, e_3, _r, e_2, _s;
+ return __generator(this, function (_t) {
+ switch (_t.label) {
  case 0:
  isJokerAttempt = attempt < 0;
  jokerParameterName = jokerParameterNames[jokerParameterNames.length + attempt];
@@ -3390,21 +3396,21 @@ function executeAttempts(options) {
  $ongoingTemplateResult.$resultString = parameters[jokerParameterName];
  }
  }
- _u.label = 1;
+ _t.label = 1;
  case 1:
- _u.trys.push([1, 44, 45, 46]);
- if (!!isJokerAttempt) return [3 /*break*/, 26];
+ _t.trys.push([1, 43, 44, 45]);
+ if (!!isJokerAttempt) return [3 /*break*/, 25];
  _b = template.templateType;
  switch (_b) {
  case 'SIMPLE_TEMPLATE': return [3 /*break*/, 2];
  case 'PROMPT_TEMPLATE': return [3 /*break*/, 3];
- case 'SCRIPT_TEMPLATE': return [3 /*break*/, 12];
- case 'DIALOG_TEMPLATE': return [3 /*break*/, 23];
+ case 'SCRIPT_TEMPLATE': return [3 /*break*/, 11];
+ case 'DIALOG_TEMPLATE': return [3 /*break*/, 22];
  }
- return [3 /*break*/, 25];
+ return [3 /*break*/, 24];
  case 2:
  $ongoingTemplateResult.$resultString = replaceParameters(preparedContent, parameters);
- return [3 /*break*/, 26];
+ return [3 /*break*/, 25];
  case 3:
  modelRequirements = __assign(__assign({ modelVariant: 'CHAT' }, (preparedPipeline.defaultModelRequirements || {})), (template.modelRequirements || {}));
  $ongoingTemplateResult.$prompt = {
@@ -3429,67 +3435,57 @@ function executeAttempts(options) {
  case 'COMPLETION': return [3 /*break*/, 6];
  case 'EMBEDDING': return [3 /*break*/, 8];
  }
- return [3 /*break*/, 10];
+ return [3 /*break*/, 9];
  case 4:
  _d = $ongoingTemplateResult;
  return [4 /*yield*/, llmTools.callChatModel($deepFreeze($ongoingTemplateResult.$prompt))];
  case 5:
- _d.$chatResult = _u.sent();
+ _d.$chatResult = _t.sent();
  // TODO: [🍬] Destroy chatThread
  $ongoingTemplateResult.$result = $ongoingTemplateResult.$chatResult;
  $ongoingTemplateResult.$resultString = $ongoingTemplateResult.$chatResult.content;
- return [3 /*break*/, 11];
+ return [3 /*break*/, 10];
  case 6:
  _e = $ongoingTemplateResult;
  return [4 /*yield*/, llmTools.callCompletionModel($deepFreeze($ongoingTemplateResult.$prompt))];
  case 7:
- _e.$completionResult = _u.sent();
+ _e.$completionResult = _t.sent();
  $ongoingTemplateResult.$result = $ongoingTemplateResult.$completionResult;
  $ongoingTemplateResult.$resultString =
  $ongoingTemplateResult.$completionResult.content;
- return [3 /*break*/, 11];
- case 8:
- // TODO: [🧠] This is weird, embedding model can not be used such a way in the pipeline
- _f = $ongoingTemplateResult;
- return [4 /*yield*/, llmTools.callEmbeddingModel($deepFreeze($ongoingTemplateResult.$prompt))];
- case 9:
- // TODO: [🧠] This is weird, embedding model can not be used such a way in the pipeline
- _f.$embeddingResult = _u.sent();
- $ongoingTemplateResult.$result = $ongoingTemplateResult.$embeddingResult;
- $ongoingTemplateResult.$resultString =
- $ongoingTemplateResult.$embeddingResult.content.join(',');
- return [3 /*break*/, 11];
- case 10: throw new PipelineExecutionError(spaceTrim(function (block) { return "\n Unknown model variant \"".concat(template.modelRequirements.modelVariant, "\"\n\n ").concat(block(pipelineIdentification), "\n\n "); }));
- case 11: return [3 /*break*/, 26];
- case 12:
+ return [3 /*break*/, 10];
+ case 8: throw new PipelineExecutionError(spaceTrim(function (block) { return "\n Embedding model can not be used in pipeline\n\n This should be catched during parsing\n\n ".concat(block(pipelineIdentification), "\n\n "); }));
+ case 9: throw new PipelineExecutionError(spaceTrim(function (block) { return "\n Unknown model variant \"".concat(template.modelRequirements.modelVariant, "\"\n\n ").concat(block(pipelineIdentification), "\n\n "); }));
+ case 10: return [3 /*break*/, 25];
+ case 11:
  if (arrayableToArray(tools.script).length === 0) {
  throw new PipelineExecutionError(spaceTrim(function (block) { return "\n No script execution tools are available\n\n ".concat(block(pipelineIdentification), "\n "); }));
  }
  if (!template.contentLanguage) {
  throw new PipelineExecutionError(spaceTrim(function (block) { return "\n Script language is not defined for SCRIPT TEMPLATE \"".concat(template.name, "\"\n\n ").concat(block(pipelineIdentification), "\n "); }));
  }
- _u.label = 13;
+ _t.label = 12;
+ case 12:
+ _t.trys.push([12, 19, 20, 21]);
+ _f = (e_1 = void 0, __values(arrayableToArray(tools.script))), _g = _f.next();
+ _t.label = 13;
  case 13:
- _u.trys.push([13, 20, 21, 22]);
- _g = (e_1 = void 0, __values(arrayableToArray(tools.script))), _h = _g.next();
- _u.label = 14;
+ if (!!_g.done) return [3 /*break*/, 18];
+ scriptTools = _g.value;
+ _t.label = 14;
  case 14:
- if (!!_h.done) return [3 /*break*/, 19];
- scriptTools = _h.value;
- _u.label = 15;
- case 15:
- _u.trys.push([15, 17, , 18]);
- _j = $ongoingTemplateResult;
+ _t.trys.push([14, 16, , 17]);
+ _h = $ongoingTemplateResult;
  return [4 /*yield*/, scriptTools.execute($deepFreeze({
  scriptLanguage: template.contentLanguage,
  script: preparedContent,
  parameters: parameters,
  }))];
+ case 15:
+ _h.$resultString = _t.sent();
+ return [3 /*break*/, 18];
  case 16:
- _j.$resultString = _u.sent();
- return [3 /*break*/, 19];
- case 17:
- error_1 = _u.sent();
+ error_1 = _t.sent();
  if (!(error_1 instanceof Error)) {
  throw error_1;
  }
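In the hunk above the regenerated state machine drops the EMBEDDING branch that previously awaited `llmTools.callEmbeddingModel` and joined the returned vector into a string; that variant now fails fast. The following is a hedged, de-transpiled reading of the new dispatch, not the library's actual source; `callByModelVariant` is a hypothetical wrapper, while the method and error names mirror the diff.

// Hedged sketch of what the regenerated switch amounts to after this diff
// (illustration only, not the package's source):
class PipelineExecutionError extends Error {}

async function callByModelVariant(llmTools, prompt) {
    switch (prompt.modelRequirements.modelVariant) {
        case 'CHAT':
            return llmTools.callChatModel(prompt);
        case 'COMPLETION':
            return llmTools.callCompletionModel(prompt);
        case 'EMBEDDING':
            // Previously: awaited llmTools.callEmbeddingModel(prompt) and joined the vector with ','
            throw new PipelineExecutionError('Embedding model can not be used in pipeline');
        default:
            throw new PipelineExecutionError(
                'Unknown model variant "' + prompt.modelRequirements.modelVariant + '"',
            );
    }
}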
@@ -3497,24 +3493,24 @@ function executeAttempts(options) {
  throw error_1;
  }
  $ongoingTemplateResult.$scriptPipelineExecutionErrors.push(error_1);
- return [3 /*break*/, 18];
- case 18:
- _h = _g.next();
- return [3 /*break*/, 14];
- case 19: return [3 /*break*/, 22];
- case 20:
- e_1_1 = _u.sent();
+ return [3 /*break*/, 17];
+ case 17:
+ _g = _f.next();
+ return [3 /*break*/, 13];
+ case 18: return [3 /*break*/, 21];
+ case 19:
+ e_1_1 = _t.sent();
  e_1 = { error: e_1_1 };
- return [3 /*break*/, 22];
- case 21:
+ return [3 /*break*/, 21];
+ case 20:
  try {
- if (_h && !_h.done && (_r = _g.return)) _r.call(_g);
+ if (_g && !_g.done && (_q = _f.return)) _q.call(_f);
  }
  finally { if (e_1) throw e_1.error; }
  return [7 /*endfinally*/];
- case 22:
+ case 21:
  if ($ongoingTemplateResult.$resultString !== null) {
- return [3 /*break*/, 26];
+ return [3 /*break*/, 25];
  }
  if ($ongoingTemplateResult.$scriptPipelineExecutionErrors.length === 1) {
  throw $ongoingTemplateResult.$scriptPipelineExecutionErrors[0];
@@ -3524,12 +3520,12 @@ function executeAttempts(options) {
  .map(function (error) { return '- ' + error.message; })
  .join('\n\n')), "\n "); }));
  }
- case 23:
+ case 22:
  if (tools.userInterface === undefined) {
  throw new PipelineExecutionError(spaceTrim(function (block) { return "\n User interface tools are not available\n\n ".concat(block(pipelineIdentification), "\n "); }));
  }
  // TODO: [🌹] When making next attempt for `DIALOG TEMPLATE`, preserve the previous user input
- _k = $ongoingTemplateResult;
+ _j = $ongoingTemplateResult;
  return [4 /*yield*/, tools.userInterface.promptDialog($deepFreeze({
  promptTitle: template.title,
  promptMessage: replaceParameters(template.description || '', parameters),
@@ -3538,34 +3534,34 @@ function executeAttempts(options) {
  placeholder: undefined,
  priority: priority,
  }))];
- case 24:
+ case 23:
  // TODO: [🌹] When making next attempt for `DIALOG TEMPLATE`, preserve the previous user input
- _k.$resultString = _u.sent();
- return [3 /*break*/, 26];
- case 25: throw new PipelineExecutionError(spaceTrim(function (block) { return "\n Unknown execution type \"".concat(template.templateType, "\"\n\n ").concat(block(pipelineIdentification), "\n "); }));
+ _j.$resultString = _t.sent();
+ return [3 /*break*/, 25];
+ case 24: throw new PipelineExecutionError(spaceTrim(function (block) { return "\n Unknown execution type \"".concat(template.templateType, "\"\n\n ").concat(block(pipelineIdentification), "\n "); }));
+ case 25:
+ if (!(!isJokerAttempt && template.postprocessingFunctionNames)) return [3 /*break*/, 42];
+ _t.label = 26;
  case 26:
- if (!(!isJokerAttempt && template.postprocessingFunctionNames)) return [3 /*break*/, 43];
- _u.label = 27;
+ _t.trys.push([26, 40, 41, 42]);
+ _k = (e_3 = void 0, __values(template.postprocessingFunctionNames)), _l = _k.next();
+ _t.label = 27;
  case 27:
- _u.trys.push([27, 41, 42, 43]);
- _l = (e_3 = void 0, __values(template.postprocessingFunctionNames)), _m = _l.next();
- _u.label = 28;
- case 28:
- if (!!_m.done) return [3 /*break*/, 40];
- functionName = _m.value;
+ if (!!_l.done) return [3 /*break*/, 39];
+ functionName = _l.value;
  postprocessingError = null;
- _u.label = 29;
+ _t.label = 28;
+ case 28:
+ _t.trys.push([28, 35, 36, 37]);
+ _m = (e_2 = void 0, __values(arrayableToArray(tools.script))), _o = _m.next();
+ _t.label = 29;
  case 29:
- _u.trys.push([29, 36, 37, 38]);
- _o = (e_2 = void 0, __values(arrayableToArray(tools.script))), _p = _o.next();
- _u.label = 30;
+ if (!!_o.done) return [3 /*break*/, 34];
+ scriptTools = _o.value;
+ _t.label = 30;
  case 30:
- if (!!_p.done) return [3 /*break*/, 35];
- scriptTools = _p.value;
- _u.label = 31;
- case 31:
- _u.trys.push([31, 33, , 34]);
- _q = $ongoingTemplateResult;
+ _t.trys.push([30, 32, , 33]);
+ _p = $ongoingTemplateResult;
  return [4 /*yield*/, scriptTools.execute({
  scriptLanguage: "javascript" /* <- TODO: Try it in each languages; In future allow postprocessing with arbitrary combination of languages to combine */,
  script: "".concat(functionName, "(resultString)"),
@@ -3574,12 +3570,12 @@ function executeAttempts(options) {
  // Note: No ...parametersForTemplate, because working with result only
  },
  })];
- case 32:
- _q.$resultString = _u.sent();
+ case 31:
+ _p.$resultString = _t.sent();
  postprocessingError = null;
- return [3 /*break*/, 35];
- case 33:
- error_2 = _u.sent();
+ return [3 /*break*/, 34];
+ case 32:
+ error_2 = _t.sent();
  if (!(error_2 instanceof Error)) {
  throw error_2;
  }
@@ -3588,41 +3584,41 @@ function executeAttempts(options) {
  }
  postprocessingError = error_2;
  $ongoingTemplateResult.$scriptPipelineExecutionErrors.push(error_2);
- return [3 /*break*/, 34];
- case 34:
- _p = _o.next();
- return [3 /*break*/, 30];
- case 35: return [3 /*break*/, 38];
- case 36:
- e_2_1 = _u.sent();
+ return [3 /*break*/, 33];
+ case 33:
+ _o = _m.next();
+ return [3 /*break*/, 29];
+ case 34: return [3 /*break*/, 37];
+ case 35:
+ e_2_1 = _t.sent();
  e_2 = { error: e_2_1 };
- return [3 /*break*/, 38];
- case 37:
+ return [3 /*break*/, 37];
+ case 36:
  try {
- if (_p && !_p.done && (_t = _o.return)) _t.call(_o);
+ if (_o && !_o.done && (_s = _m.return)) _s.call(_m);
  }
  finally { if (e_2) throw e_2.error; }
  return [7 /*endfinally*/];
- case 38:
+ case 37:
  if (postprocessingError) {
  throw postprocessingError;
  }
- _u.label = 39;
- case 39:
- _m = _l.next();
- return [3 /*break*/, 28];
- case 40: return [3 /*break*/, 43];
- case 41:
- e_3_1 = _u.sent();
+ _t.label = 38;
+ case 38:
+ _l = _k.next();
+ return [3 /*break*/, 27];
+ case 39: return [3 /*break*/, 42];
+ case 40:
+ e_3_1 = _t.sent();
  e_3 = { error: e_3_1 };
- return [3 /*break*/, 43];
- case 42:
+ return [3 /*break*/, 42];
+ case 41:
  try {
- if (_m && !_m.done && (_s = _l.return)) _s.call(_l);
+ if (_l && !_l.done && (_r = _k.return)) _r.call(_k);
  }
  finally { if (e_3) throw e_3.error; }
  return [7 /*endfinally*/];
- case 43:
+ case 42:
  // TODO: [💝] Unite object for expecting amount and format
  if (template.format) {
  if (template.format === 'JSON') {
@@ -3647,14 +3643,14 @@ function executeAttempts(options) {
  checkExpectations(template.expectations, $ongoingTemplateResult.$resultString || '');
  }
  return [2 /*return*/, "break-attempts"];
- case 44:
- error_3 = _u.sent();
+ case 43:
+ error_3 = _t.sent();
  if (!(error_3 instanceof ExpectError)) {
  throw error_3;
  }
  $ongoingTemplateResult.$expectError = error_3;
- return [3 /*break*/, 46];
- case 45:
+ return [3 /*break*/, 45];
+ case 44:
  if (!isJokerAttempt &&
  template.templateType === 'PROMPT_TEMPLATE' &&
  $ongoingTemplateResult.$prompt
@@ -3671,7 +3667,7 @@ function executeAttempts(options) {
  });
  }
  return [7 /*endfinally*/];
- case 46:
+ case 45:
  if ($ongoingTemplateResult.$expectError !== null && attempt === maxAttempts - 1) {
  throw new PipelineExecutionError(spaceTrim(function (block) {
  var _a, _b, _c;
@@ -3770,6 +3766,8 @@ function executeFormatCells(options) {
  return __generator(this, function (_a) {
  switch (_a.label) {
  case 0:
+ // TODO: !!!!!!! Limit to N concurrent executions
+ // TODO: !!!!!!! Report progress
  try {
  mappedParameters = mapAvailableToExpectedParameters({
  expectedParameters: Object.fromEntries(template.foreach.subparameterNames.map(function (subparameterName) { return [subparameterName, null]; })),
@@ -4553,8 +4551,13 @@ function prepareKnowledgeFromMarkdown(knowledgeContent /* <- TODO: [🖖] (?mayb
  case 6: return [3 /*break*/, 8];
  case 7:
  error_1 = _c.sent();
+ // Note: Here is expected error:
+ // > PipelineExecutionError: You have not provided any `LlmExecutionTools` that support model variant "EMBEDDING
+ if (!(error_1 instanceof PipelineExecutionError)) {
+ throw error_1;
+ }
  // TODO: [🟥] Detect browser / node and make it colorfull
- console.error(error_1);
+ console.error(error_1, "<- Note: This error is not critical to prepare the pipeline, just knowledge pieces won't have embeddings");
  return [3 /*break*/, 8];
  case 8: return [2 /*return*/, {
  name: name,
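The hunk above narrows the catch in `prepareKnowledgeFromMarkdown`: only the expected `PipelineExecutionError` (raised because no `LlmExecutionTools` supports the EMBEDDING variant) is logged and swallowed, and anything else is rethrown. A minimal sketch of that guard pattern follows, with placeholder names; it is an illustration, not the package's source.

// Illustrative guard pattern only (not from the package): swallow one expected
// error type, rethrow everything else so real failures still surface.
class PipelineExecutionError extends Error {}

async function withOptionalEmbeddings(computeEmbeddings) {
    try {
        return await computeEmbeddings();
    } catch (error) {
        if (!(error instanceof PipelineExecutionError)) {
            throw error; // unexpected failure -> propagate
        }
        // expected: no EMBEDDING-capable tools configured; continue without embeddings
        console.error(error, "<- Note: knowledge pieces just won't have embeddings");
        return null;
    }
}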
@@ -6057,6 +6060,9 @@ var modelCommandParser = {
  */
  parse: function (input) {
  var args = input.args, normalized = input.normalized;
+ var availableVariantsMessage = spaceTrim$1(function (block) { return "\n Available variants are:\n ".concat(block(MODEL_VARIANTS.map(function (variantName) {
+ return "- ".concat(variantName).concat(variantName !== 'EMBEDDING' ? '' : ' (Not available in pipeline)');
+ }).join('\n')), "\n "); });
  // TODO: Make this more elegant and dynamically
  if (normalized.startsWith('MODEL_VARIANT')) {
  if (normalized === 'MODEL_VARIANT_CHAT') {
@@ -6072,17 +6078,13 @@ var modelCommandParser = {
  key: 'modelVariant',
  value: 'COMPLETION',
  };
+ // <- Note: [🤖]
  }
  else if (normalized.startsWith('MODEL_VARIANT_EMBED')) {
- return {
- type: 'MODEL',
- key: 'modelVariant',
- value: 'EMBEDDING',
- };
- // <- Note: [🤖]
+ spaceTrim$1(function (block) { return "\n Embedding model can not be used in pipeline\n\n ".concat(block(availableVariantsMessage), "\n "); });
  }
  else {
- throw new ParseError(spaceTrim$1(function (block) { return "\n Unknown model variant in command:\n\n Supported variants are:\n ".concat(block(MODEL_VARIANTS.map(function (variantName) { return "- ".concat(variantName); }).join('\n')), "\n "); }));
+ throw new ParseError(spaceTrim$1(function (block) { return "\n Unknown model variant in command:\n\n ".concat(block(availableVariantsMessage), "\n "); }));
  }
  }
  if (normalized.startsWith('MODEL_NAME')) {
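This last hunk removes the `EMBEDDING` result from the `MODEL VARIANT` command parser and reuses a single `availableVariantsMessage` in the unknown-variant error; note that in this build the `MODEL_VARIANT_EMBED` branch only composes the message without returning or throwing it. The sketch below is a hedged illustration of the outcomes the parser now produces; `parseModelVariant` is a hypothetical wrapper, `MODEL_VARIANT_CHAT` and `MODEL_VARIANT_EMBED` are taken from the diff, and the completion command name is an assumption.

// Hedged sketch (not the package's source) of the MODEL VARIANT outcomes after this diff.
function parseModelVariant(normalized) {
    if (normalized === 'MODEL_VARIANT_CHAT') {
        return { type: 'MODEL', key: 'modelVariant', value: 'CHAT' };
    }
    if (normalized === 'MODEL_VARIANT_COMPLETION') { // <- assumed name, not shown in the diff
        return { type: 'MODEL', key: 'modelVariant', value: 'COMPLETION' };
    }
    // EMBEDDING is no longer accepted as a pipeline model variant;
    // the real parser only composes an explanatory message in that branch.
    throw new Error('Unknown model variant in command: ' + normalized);
}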