@promptbook/core 0.67.7 → 0.67.9

This diff shows the changes between publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -1,6 +1,7 @@
1
1
  import spaceTrim, { spaceTrim as spaceTrim$1 } from 'spacetrim';
2
2
  import { format } from 'prettier';
3
3
  import parserHtml from 'prettier/parser-html';
4
+ import { forTime } from 'waitasecond';
4
5
  import hexEncoder from 'crypto-js/enc-hex';
5
6
  import sha256 from 'crypto-js/sha256';
6
7
  import moment from 'moment';
@@ -9,7 +10,7 @@ import moment from 'moment';
9
10
  /**
10
11
  * The version of the Promptbook library
11
12
  */
12
- var PROMPTBOOK_VERSION = '0.67.6';
13
+ var PROMPTBOOK_VERSION = '0.67.8';
13
14
  // TODO: !!!! List here all the versions and annotate + put into script
14
15
 
15
16
  /*! *****************************************************************************
@@ -640,6 +641,12 @@ var CLAIM = "Supercharge LLM models with Promptbook";
640
641
  * @private within the repository - too low-level in comparison with other `MAX_...`
641
642
  */
642
643
  var LOOP_LIMIT = 1000;
644
+ /**
645
+ * Short time interval to prevent race conditions in milliseconds
646
+ *
647
+ * @private within the repository - too low-level in comparison with other `MAX_...`
648
+ */
649
+ var IMMEDIATE_TIME = 10;
643
650
  /**
644
651
  * The maximum number of (LLM) tasks running in parallel
645
652
  *
@@ -1779,7 +1786,7 @@ function forEachAsync(array, options, callbackfunction) {
1779
1786
  });
1780
1787
  }
1781
1788
 
1782
- var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.67.6",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",dependentParameterNames:["knowledgeContent"],resultingParameterName:"knowledgePieces"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.67.6",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> 
{knowledgePieceContent}",dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"keywords"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.67.6",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"title"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.67.6",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI 
engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. 
For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",expectFormat:"JSON",dependentParameterNames:["availableModelNames","personaDescription"],resultingParameterName:"modelRequirements"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];
1789
+ var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.67.8",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",dependentParameterNames:["knowledgeContent"],resultingParameterName:"knowledgePieces"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.67.8",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> 
{knowledgePieceContent}",dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"keywords"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.67.8",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"title"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.67.8",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",modelRequirements:{modelVariant:"CHAT"},content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI 
engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. 
For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",expectFormat:"JSON",dependentParameterNames:["availableModelNames","personaDescription"],resultingParameterName:"modelRequirements"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];
1783
1790
 
1784
1791
  var defaultDiacriticsRemovalMap = [
1785
1792
  {
@@ -3372,7 +3379,7 @@ function createPipelineExecutor(options) {
3372
3379
  }
3373
3380
  function executeSingleTemplate(currentTemplate) {
3374
3381
  return __awaiter(this, void 0, void 0, function () {
3375
- var name, title, priority, usedParameterNames, dependentParameterNames, definedParameters, _a, _b, _c, definedParameterNames, parameters, _loop_4, _d, _e, parameterName, prompt, chatResult, completionResult, embeddingResult, result, resultString, expectError, scriptPipelineExecutionErrors, maxAttempts, jokerParameterNames, preparedContent, _loop_5, attempt, state_2;
3382
+ var name, title, priority, progress_1, usedParameterNames, dependentParameterNames, definedParameters, _a, _b, _c, definedParameterNames, parameters, _loop_4, _d, _e, parameterName, prompt, chatResult, completionResult, embeddingResult, result, resultString, expectError, scriptPipelineExecutionErrors, maxAttempts, jokerParameterNames, preparedContent, _loop_5, attempt, state_2, progress_2;
3376
3383
  var e_4, _f, _g;
3377
3384
  return __generator(this, function (_h) {
3378
3385
  switch (_h.label) {
@@ -3380,17 +3387,24 @@ function createPipelineExecutor(options) {
3380
3387
  name = "pipeline-executor-frame-".concat(currentTemplate.name);
3381
3388
  title = currentTemplate.title;
3382
3389
  priority = preparedPipeline.promptTemplates.length - preparedPipeline.promptTemplates.indexOf(currentTemplate);
3383
- if (!onProgress /* <- [3] */) return [3 /*break*/, 2]; /* <- [3] */
3384
- return [4 /*yield*/, onProgress({
3385
- name: name,
3386
- title: title,
3387
- isStarted: false,
3388
- isDone: false,
3389
- blockType: currentTemplate.blockType,
3390
- parameterName: currentTemplate.resultingParameterName,
3391
- parameterValue: null,
3392
- // <- [3]
3393
- })];
3390
+ if (!(onProgress !== undefined) /* <- [3] */) return [3 /*break*/, 2]; /* <- [3] */
3391
+ progress_1 = {
3392
+ name: name,
3393
+ title: title,
3394
+ isStarted: false,
3395
+ isDone: false,
3396
+ blockType: currentTemplate.blockType,
3397
+ parameterName: currentTemplate.resultingParameterName,
3398
+ parameterValue: null,
3399
+ // <- [3]
3400
+ };
3401
+ if (isReturned) {
3402
+ throw new UnexpectedError(spaceTrim$1(function (block) { return "\n Can not call `onProgress` after pipeline execution is finished \uD83C\uDF4F\n\n ".concat(block(pipelineIdentification), "\n\n ").concat(block(JSON.stringify(progress_1, null, 4)
3403
+ .split('\n')
3404
+ .map(function (line) { return "> ".concat(line); })
3405
+ .join('\n')), "\n "); }));
3406
+ }
3407
+ return [4 /*yield*/, onProgress(progress_1)];
3394
3408
  case 1:
3395
3409
  _h.sent();
3396
3410
  _h.label = 2;
@@ -3776,18 +3790,28 @@ function createPipelineExecutor(options) {
3776
3790
  if (resultString === null) {
3777
3791
  throw new UnexpectedError(spaceTrim$1(function (block) { return "\n Something went wrong and prompt result is null\n\n ".concat(block(pipelineIdentification), "\n "); }));
3778
3792
  }
3779
- if (onProgress /* <- [3] */) {
3780
- onProgress({
3781
- name: name,
3782
- title: title,
3783
- isStarted: true,
3784
- isDone: true,
3785
- blockType: currentTemplate.blockType,
3786
- parameterName: currentTemplate.resultingParameterName,
3787
- parameterValue: resultString,
3788
- // <- [3]
3789
- });
3793
+ if (!(onProgress !== undefined) /* <- [3] */) return [3 /*break*/, 9]; /* <- [3] */
3794
+ progress_2 = {
3795
+ name: name,
3796
+ title: title,
3797
+ isStarted: true,
3798
+ isDone: true,
3799
+ blockType: currentTemplate.blockType,
3800
+ parameterName: currentTemplate.resultingParameterName,
3801
+ parameterValue: resultString,
3802
+ // <- [3]
3803
+ };
3804
+ if (isReturned) {
3805
+ throw new UnexpectedError(spaceTrim$1(function (block) { return "\n Can not call `onProgress` after pipeline execution is finished \uD83C\uDF4E\n\n ".concat(block(pipelineIdentification), "\n\n ").concat(block(JSON.stringify(progress_2, null, 4)
3806
+ .split('\n')
3807
+ .map(function (line) { return "> ".concat(line); })
3808
+ .join('\n')), "\n\n "); }));
3790
3809
  }
3810
+ return [4 /*yield*/, onProgress(progress_2)];
3811
+ case 8:
3812
+ _h.sent();
3813
+ _h.label = 9;
3814
+ case 9:
3791
3815
  parametersToPass = Object.freeze(__assign(__assign({}, parametersToPass), (_g = {}, _g[currentTemplate.resultingParameterName] = resultString /* <- Note: Not need to detect parameter collision here because pipeline checks logic consistency during construction */, _g)));
3792
3816
  return [2 /*return*/];
3793
3817
  }
@@ -3824,7 +3848,7 @@ function createPipelineExecutor(options) {
3824
3848
  }
3825
3849
  return outputParameters;
3826
3850
  }
3827
- var errors, warnings, executionReport, _a, _b, parameter, _loop_1, _c, _d, parameterName, state_1, parametersToPass, resovedParameterNames_1, unresovedTemplates_1, resolving_1, loopLimit, _loop_2, error_1, usage_1, outputParameters_1, usage, outputParameters;
3851
+ var errors, warnings, executionReport, isReturned, _a, _b, parameter, e_1_1, _loop_1, _c, _d, parameterName, state_1, e_2_1, parametersToPass, resovedParameterNames_1, unresovedTemplates_1, resolving_1, loopLimit, _loop_2, error_1, usage_1, outputParameters_1, usage, outputParameters;
3828
3852
  var e_1, _e, e_2, _f;
3829
3853
  return __generator(this, function (_g) {
3830
3854
  switch (_g.label) {
@@ -3849,77 +3873,123 @@ function createPipelineExecutor(options) {
3849
3873
  description: preparedPipeline.description,
3850
3874
  promptExecutions: [],
3851
3875
  };
3876
+ isReturned = false;
3877
+ _g.label = 3;
3878
+ case 3:
3879
+ _g.trys.push([3, 9, 10, 11]);
3880
+ _a = __values(preparedPipeline.parameters.filter(function (_a) {
3881
+ var isInput = _a.isInput;
3882
+ return isInput;
3883
+ })), _b = _a.next();
3884
+ _g.label = 4;
3885
+ case 4:
3886
+ if (!!_b.done) return [3 /*break*/, 8];
3887
+ parameter = _b.value;
3888
+ if (!(inputParameters[parameter.name] === undefined)) return [3 /*break*/, 7];
3889
+ isReturned = true;
3890
+ if (!(onProgress !== undefined)) return [3 /*break*/, 6];
3891
+ // Note: Wait a short time to prevent race conditions
3892
+ return [4 /*yield*/, forTime(IMMEDIATE_TIME)];
3893
+ case 5:
3894
+ // Note: Wait a short time to prevent race conditions
3895
+ _g.sent();
3896
+ _g.label = 6;
3897
+ case 6: return [2 /*return*/, $asDeeplyFrozenSerializableJson("Unuccessful PipelineExecutorResult (with missing parameter {".concat(parameter.name, "}) PipelineExecutorResult"), {
3898
+ isSuccessful: false,
3899
+ errors: __spreadArray([
3900
+ new PipelineExecutionError("Parameter {".concat(parameter.name, "} is required as an input parameter"))
3901
+ ], __read(errors), false).map(serializeError),
3902
+ warnings: [],
3903
+ executionReport: executionReport,
3904
+ outputParameters: {},
3905
+ usage: ZERO_USAGE,
3906
+ preparedPipeline: preparedPipeline,
3907
+ })];
3908
+ case 7:
3909
+ _b = _a.next();
3910
+ return [3 /*break*/, 4];
3911
+ case 8: return [3 /*break*/, 11];
3912
+ case 9:
3913
+ e_1_1 = _g.sent();
3914
+ e_1 = { error: e_1_1 };
3915
+ return [3 /*break*/, 11];
3916
+ case 10:
3852
3917
  try {
3853
- // Note: Check that all input input parameters are defined
3854
- for (_a = __values(preparedPipeline.parameters.filter(function (_a) {
3855
- var isInput = _a.isInput;
3856
- return isInput;
3857
- })), _b = _a.next(); !_b.done; _b = _a.next()) {
3858
- parameter = _b.value;
3859
- if (inputParameters[parameter.name] === undefined) {
3860
- return [2 /*return*/, $asDeeplyFrozenSerializableJson("Unuccessful PipelineExecutorResult (with missing parameter {".concat(parameter.name, "}) PipelineExecutorResult"), {
3861
- isSuccessful: false,
3862
- errors: __spreadArray([
3863
- new PipelineExecutionError("Parameter {".concat(parameter.name, "} is required as an input parameter"))
3864
- ], __read(errors), false).map(serializeError),
3865
- warnings: [],
3866
- executionReport: executionReport,
3867
- outputParameters: {},
3868
- usage: ZERO_USAGE,
3869
- preparedPipeline: preparedPipeline,
3870
- })];
3871
- }
3872
- }
3873
- }
3874
- catch (e_1_1) { e_1 = { error: e_1_1 }; }
3875
- finally {
3876
- try {
3877
- if (_b && !_b.done && (_e = _a.return)) _e.call(_a);
3878
- }
3879
- finally { if (e_1) throw e_1.error; }
3918
+ if (_b && !_b.done && (_e = _a.return)) _e.call(_a);
3880
3919
  }
3920
+ finally { if (e_1) throw e_1.error; }
3921
+ return [7 /*endfinally*/];
3922
+ case 11:
3881
3923
  _loop_1 = function (parameterName) {
3882
- var parameter = preparedPipeline.parameters.find(function (_a) {
3883
- var name = _a.name;
3884
- return name === parameterName;
3924
+ var parameter;
3925
+ return __generator(this, function (_h) {
3926
+ switch (_h.label) {
3927
+ case 0:
3928
+ parameter = preparedPipeline.parameters.find(function (_a) {
3929
+ var name = _a.name;
3930
+ return name === parameterName;
3931
+ });
3932
+ if (!(parameter === undefined)) return [3 /*break*/, 1];
3933
+ warnings.push(new PipelineExecutionError(spaceTrim$1(function (block) { return "\n Extra parameter {".concat(parameterName, "} is being passed which is not part of the pipeline.\n\n ").concat(block(pipelineIdentification), "\n "); })));
3934
+ return [3 /*break*/, 4];
3935
+ case 1:
3936
+ if (!(parameter.isInput === false)) return [3 /*break*/, 4];
3937
+ isReturned = true;
3938
+ if (!(onProgress !== undefined)) return [3 /*break*/, 3];
3939
+ // Note: Wait a short time to prevent race conditions
3940
+ return [4 /*yield*/, forTime(IMMEDIATE_TIME)];
3941
+ case 2:
3942
+ // Note: Wait a short time to prevent race conditions
3943
+ _h.sent();
3944
+ _h.label = 3;
3945
+ case 3: return [2 /*return*/, { value: $asDeeplyFrozenSerializableJson(spaceTrim$1(function (block) { return "\n Unuccessful PipelineExecutorResult (with extra parameter {".concat(parameter.name, "}) PipelineExecutorResult\n\n ").concat(block(pipelineIdentification), "\n "); }), {
3946
+ isSuccessful: false,
3947
+ errors: __spreadArray([
3948
+ new PipelineExecutionError(spaceTrim$1(function (block) { return "\n Parameter {".concat(parameter.name, "} is passed as input parameter but it is not input\n\n ").concat(block(pipelineIdentification), "\n "); }))
3949
+ ], __read(errors), false).map(serializeError),
3950
+ warnings: warnings.map(serializeError),
3951
+ executionReport: executionReport,
3952
+ outputParameters: {},
3953
+ usage: ZERO_USAGE,
3954
+ preparedPipeline: preparedPipeline,
3955
+ }) }];
3956
+ case 4: return [2 /*return*/];
3957
+ }
3885
3958
  });
3886
- if (parameter === undefined) {
3887
- warnings.push(new PipelineExecutionError(spaceTrim$1(function (block) { return "\n Extra parameter {".concat(parameterName, "} is being passed which is not part of the pipeline.\n\n ").concat(block(pipelineIdentification), "\n "); })));
3888
- }
3889
- else if (parameter.isInput === false) {
3890
- return { value: $asDeeplyFrozenSerializableJson(spaceTrim$1(function (block) { return "\n Unuccessful PipelineExecutorResult (with extra parameter {".concat(parameter.name, "}) PipelineExecutorResult\n\n ").concat(block(pipelineIdentification), "\n "); }), {
3891
- isSuccessful: false,
3892
- errors: __spreadArray([
3893
- new PipelineExecutionError(spaceTrim$1(function (block) { return "\n Parameter {".concat(parameter.name, "} is passed as input parameter but it is not input\n\n ").concat(block(pipelineIdentification), "\n "); }))
3894
- ], __read(errors), false).map(serializeError),
3895
- warnings: warnings.map(serializeError),
3896
- executionReport: executionReport,
3897
- outputParameters: {},
3898
- usage: ZERO_USAGE,
3899
- preparedPipeline: preparedPipeline,
3900
- }) };
3901
- }
3902
3959
  };
3960
+ _g.label = 12;
3961
+ case 12:
3962
+ _g.trys.push([12, 17, 18, 19]);
3963
+ _c = __values(Object.keys(inputParameters)), _d = _c.next();
3964
+ _g.label = 13;
3965
+ case 13:
3966
+ if (!!_d.done) return [3 /*break*/, 16];
3967
+ parameterName = _d.value;
3968
+ return [5 /*yield**/, _loop_1(parameterName)];
3969
+ case 14:
3970
+ state_1 = _g.sent();
3971
+ if (typeof state_1 === "object")
3972
+ return [2 /*return*/, state_1.value];
3973
+ _g.label = 15;
3974
+ case 15:
3975
+ _d = _c.next();
3976
+ return [3 /*break*/, 13];
3977
+ case 16: return [3 /*break*/, 19];
3978
+ case 17:
3979
+ e_2_1 = _g.sent();
3980
+ e_2 = { error: e_2_1 };
3981
+ return [3 /*break*/, 19];
3982
+ case 18:
3903
3983
  try {
3904
- // Note: Check that no extra input parameters are passed
3905
- for (_c = __values(Object.keys(inputParameters)), _d = _c.next(); !_d.done; _d = _c.next()) {
3906
- parameterName = _d.value;
3907
- state_1 = _loop_1(parameterName);
3908
- if (typeof state_1 === "object")
3909
- return [2 /*return*/, state_1.value];
3910
- }
3911
- }
3912
- catch (e_2_1) { e_2 = { error: e_2_1 }; }
3913
- finally {
3914
- try {
3915
- if (_d && !_d.done && (_f = _c.return)) _f.call(_c);
3916
- }
3917
- finally { if (e_2) throw e_2.error; }
3984
+ if (_d && !_d.done && (_f = _c.return)) _f.call(_c);
3918
3985
  }
3986
+ finally { if (e_2) throw e_2.error; }
3987
+ return [7 /*endfinally*/];
3988
+ case 19:
3919
3989
  parametersToPass = inputParameters;
3920
- _g.label = 3;
3921
- case 3:
3922
- _g.trys.push([3, 8, , 9]);
3990
+ _g.label = 20;
3991
+ case 20:
3992
+ _g.trys.push([20, 25, , 28]);
3923
3993
  resovedParameterNames_1 = preparedPipeline.parameters
3924
3994
  .filter(function (_a) {
3925
3995
  var isInput = _a.isInput;
@@ -3934,8 +4004,8 @@ function createPipelineExecutor(options) {
3934
4004
  loopLimit = LOOP_LIMIT;
3935
4005
  _loop_2 = function () {
3936
4006
  var currentTemplate, work_1;
3937
- return __generator(this, function (_h) {
3938
- switch (_h.label) {
4007
+ return __generator(this, function (_j) {
4008
+ switch (_j.label) {
3939
4009
  case 0:
3940
4010
  if (loopLimit-- < 0) {
3941
4011
  // Note: Really UnexpectedError not LimitReachedError - this should be catched during validatePipeline
@@ -3961,7 +4031,7 @@ function createPipelineExecutor(options) {
3961
4031
  if (!!currentTemplate) return [3 /*break*/, 3];
3962
4032
  /* [5] */ return [4 /*yield*/, Promise.race(resolving_1)];
3963
4033
  case 2:
3964
- /* [5] */ _h.sent();
4034
+ /* [5] */ _j.sent();
3965
4035
  return [3 /*break*/, 4];
3966
4036
  case 3:
3967
4037
  unresovedTemplates_1 = unresovedTemplates_1.filter(function (template) { return template !== currentTemplate; });
@@ -3973,23 +4043,23 @@ function createPipelineExecutor(options) {
3973
4043
  resolving_1 = resolving_1.filter(function (w) { return w !== work_1; });
3974
4044
  });
3975
4045
  resolving_1.push(work_1);
3976
- _h.label = 4;
4046
+ _j.label = 4;
3977
4047
  case 4: return [2 /*return*/];
3978
4048
  }
3979
4049
  });
3980
4050
  };
3981
- _g.label = 4;
3982
- case 4:
3983
- if (!(unresovedTemplates_1.length > 0)) return [3 /*break*/, 6];
4051
+ _g.label = 21;
4052
+ case 21:
4053
+ if (!(unresovedTemplates_1.length > 0)) return [3 /*break*/, 23];
3984
4054
  return [5 /*yield**/, _loop_2()];
3985
- case 5:
4055
+ case 22:
3986
4056
  _g.sent();
3987
- return [3 /*break*/, 4];
3988
- case 6: return [4 /*yield*/, Promise.all(resolving_1)];
3989
- case 7:
4057
+ return [3 /*break*/, 21];
4058
+ case 23: return [4 /*yield*/, Promise.all(resolving_1)];
4059
+ case 24:
3990
4060
  _g.sent();
3991
- return [3 /*break*/, 9];
3992
- case 8:
4061
+ return [3 /*break*/, 28];
4062
+ case 25:
3993
4063
  error_1 = _g.sent();
3994
4064
  if (!(error_1 instanceof Error)) {
3995
4065
  throw error_1;
@@ -3999,30 +4069,46 @@ function createPipelineExecutor(options) {
3999
4069
  return (result === null || result === void 0 ? void 0 : result.usage) || ZERO_USAGE;
4000
4070
  })), false));
4001
4071
  outputParameters_1 = filterJustOutputParameters();
4002
- return [2 /*return*/, $asDeeplyFrozenSerializableJson('Unuccessful PipelineExecutorResult (with misc errors) PipelineExecutorResult', {
4003
- isSuccessful: false,
4004
- errors: __spreadArray([error_1], __read(errors), false).map(serializeError),
4005
- warnings: warnings.map(serializeError),
4006
- usage: usage_1,
4007
- executionReport: executionReport,
4008
- outputParameters: outputParameters_1,
4009
- preparedPipeline: preparedPipeline,
4010
- })];
4011
- case 9:
4072
+ isReturned = true;
4073
+ if (!(onProgress !== undefined)) return [3 /*break*/, 27];
4074
+ // Note: Wait a short time to prevent race conditions
4075
+ return [4 /*yield*/, forTime(IMMEDIATE_TIME)];
4076
+ case 26:
4077
+ // Note: Wait a short time to prevent race conditions
4078
+ _g.sent();
4079
+ _g.label = 27;
4080
+ case 27: return [2 /*return*/, $asDeeplyFrozenSerializableJson('Unuccessful PipelineExecutorResult (with misc errors) PipelineExecutorResult', {
4081
+ isSuccessful: false,
4082
+ errors: __spreadArray([error_1], __read(errors), false).map(serializeError),
4083
+ warnings: warnings.map(serializeError),
4084
+ usage: usage_1,
4085
+ executionReport: executionReport,
4086
+ outputParameters: outputParameters_1,
4087
+ preparedPipeline: preparedPipeline,
4088
+ })];
4089
+ case 28:
4012
4090
  usage = addUsage.apply(void 0, __spreadArray([], __read(executionReport.promptExecutions.map(function (_a) {
4013
4091
  var result = _a.result;
4014
4092
  return (result === null || result === void 0 ? void 0 : result.usage) || ZERO_USAGE;
4015
4093
  })), false));
4016
4094
  outputParameters = filterJustOutputParameters();
4017
- return [2 /*return*/, $asDeeplyFrozenSerializableJson('Successful PipelineExecutorResult', {
4018
- isSuccessful: true,
4019
- errors: errors.map(serializeError),
4020
- warnings: warnings.map(serializeError),
4021
- usage: usage,
4022
- executionReport: executionReport,
4023
- outputParameters: outputParameters,
4024
- preparedPipeline: preparedPipeline,
4025
- })];
4095
+ isReturned = true;
4096
+ if (!(onProgress !== undefined)) return [3 /*break*/, 30];
4097
+ // Note: Wait a short time to prevent race conditions
4098
+ return [4 /*yield*/, forTime(IMMEDIATE_TIME)];
4099
+ case 29:
4100
+ // Note: Wait a short time to prevent race conditions
4101
+ _g.sent();
4102
+ _g.label = 30;
4103
+ case 30: return [2 /*return*/, $asDeeplyFrozenSerializableJson('Successful PipelineExecutorResult', {
4104
+ isSuccessful: true,
4105
+ errors: errors.map(serializeError),
4106
+ warnings: warnings.map(serializeError),
4107
+ usage: usage,
4108
+ executionReport: executionReport,
4109
+ outputParameters: outputParameters,
4110
+ preparedPipeline: preparedPipeline,
4111
+ })];
4026
4112
  }
4027
4113
  });
4028
4114
  }); };