@promptbook/node 0.68.3 → 0.68.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (31)
  1. package/README.md +4 -0
  2. package/esm/index.es.js +29 -28
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/promptbook-collection/index.d.ts +0 -3
  5. package/esm/typings/src/cli/cli-commands/make.d.ts +1 -1
  6. package/esm/typings/src/collection/constructors/createCollectionFromUrl.d.ts +1 -1
  7. package/esm/typings/src/config.d.ts +2 -2
  8. package/esm/typings/src/conversion/pipelineStringToJsonSync.d.ts +1 -1
  9. package/esm/typings/src/conversion/validation/validatePipeline.d.ts +5 -5
  10. package/esm/typings/src/execution/createPipelineExecutor.d.ts +1 -1
  11. package/esm/typings/src/knowledge/prepare-knowledge/markdown/prepareKnowledgeFromMarkdown.d.ts +1 -1
  12. package/esm/typings/src/knowledge/prepare-knowledge/pdf/prepareKnowledgeFromPdf.d.ts +1 -1
  13. package/esm/typings/src/llm-providers/_common/utils/cache/CacheItem.d.ts +1 -1
  14. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +2 -1
  15. package/esm/typings/src/llm-providers/anthropic-claude/anthropic-claude-models.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/anthropic-claude/createAnthropicClaudeExecutionTools.d.ts +2 -2
  17. package/esm/typings/src/llm-providers/anthropic-claude/playground/playground.d.ts +2 -2
  18. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts +2 -1
  19. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +2 -1
  20. package/esm/typings/src/llm-providers/openai/playground/playground.d.ts +1 -1
  21. package/esm/typings/src/personas/preparePersona.d.ts +1 -1
  22. package/esm/typings/src/prepare/isPipelinePrepared.d.ts +1 -1
  23. package/esm/typings/src/prepare/prepareTemplates.d.ts +1 -1
  24. package/esm/typings/src/types/PipelineJson/PipelineJson.d.ts +1 -1
  25. package/esm/typings/src/types/typeAliases.d.ts +1 -1
  26. package/esm/typings/src/utils/serialization/checkSerializableAsJson.d.ts +1 -1
  27. package/esm/typings/src/utils/serialization/isSerializableAsJson.d.ts +1 -1
  28. package/package.json +2 -2
  29. package/umd/index.umd.js +29 -28
  30. package/umd/index.umd.js.map +1 -1
  31. package/esm/typings/src/personas/preparePersona.test.d.ts +0 -1
package/README.md CHANGED
@@ -42,6 +42,10 @@ Core of the library for Node.js runtime, it contains the main logic for promptbo

  Rest of the documentation is common for **entire promptbook ecosystem**:

+ # ✨ New Features
+
+ - ✨ **Support [OpenAI o1 model](https://openai.com/o1/)**
+
  ## 🤍 The Promptbook Whitepaper


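The only user-facing note in this release is the o1 line above. For orientation, the bundled `prepare-persona` pipeline (see the `PipelineCollection` hunk in `index.es.js` below) describes model requirements as a plain object with `modelName`, `systemMessage` and `temperature` keys, so selecting the new model presumably reduces to something like the sketch below; the `'o1'` identifier is an assumption, since this diff only links to the announcement.

```js
// Hedged illustration only: the key names come from the prepare-persona template bundled
// in this package; the 'o1' identifier itself is an assumption, not confirmed by this diff.
var modelRequirements = {
    modelName: 'o1', // <- assumed identifier for the OpenAI o1 model
    systemMessage: 'You are an experienced AI engineer and helpful assistant.',
    temperature: 1,
};
```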
package/esm/index.es.js CHANGED
@@ -14,8 +14,8 @@ import * as dotenv from 'dotenv';
  /**
  * The version of the Promptbook library
  */
- var PROMPTBOOK_VERSION = '0.68.2';
- // TODO: !!!! List here all the versions and annotate + put into script
+ var PROMPTBOOK_VERSION = '0.68.4';
+ // TODO:[main] !!!! List here all the versions and annotate + put into script

  /*! *****************************************************************************
  Copyright (c) Microsoft Corporation.
@@ -325,7 +325,7 @@ function checkSerializableAsJson(name, value) {
  }
  /**
  * TODO: [🧠][🛣] More elegant way to tracking than passing `name`
- * TODO: [🧠] !!! In-memory cache of same values to prevent multiple checks
+ * TODO: [🧠][main] !!! In-memory cache of same values to prevent multiple checks
  * Note: [🐠] This is how `checkSerializableAsJson` + `isSerializableAsJson` together can just retun true/false or rich error message
  */

@@ -494,7 +494,7 @@ function pipelineJsonToString(pipelineJson) {
  commands.push("PIPELINE URL ".concat(pipelineUrl));
  }
  commands.push("PROMPTBOOK VERSION ".concat(promptbookVersion));
- // TODO: !!! This increase size of the bundle and is probbably not necessary
+ // TODO:[main] !!! This increase size of the bundle and is probbably not necessary
  pipelineString = prettifyMarkdown(pipelineString);
  try {
  for (var _g = __values(parameters.filter(function (_a) {
@@ -642,12 +642,12 @@ function pipelineJsonToString(pipelineJson) {
  pipelineString += '```' + contentLanguage;
  pipelineString += '\n';
  pipelineString += spaceTrim$1(content);
- // <- TODO: !!! Escape
+ // <- TODO:[main] !!! Escape
  // <- TODO: [🧠] Some clear strategy how to spaceTrim the blocks
  pipelineString += '\n';
  pipelineString += '```';
  pipelineString += '\n\n';
- pipelineString += "`-> {".concat(resultingParameterName, "}`"); // <- TODO: !!! If the parameter here has description, add it and use templateParameterJsonToString
+ pipelineString += "`-> {".concat(resultingParameterName, "}`"); // <- TODO:[main] !!! If the parameter here has description, add it and use templateParameterJsonToString
  }
  }
  catch (e_3_1) { e_3 = { error: e_3_1 }; }
@@ -874,7 +874,7 @@ function forEachAsync(array, options, callbackfunction) {
  });
  }

- var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.68.2",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.68.2",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.68.2",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.68.2",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",content:"You are 
experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];
+ var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],templates:[{templateType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": 
\"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];

  /**
  * This error indicates that the promptbook in a markdown format cannot be parsed into a valid promptbook object
@@ -949,7 +949,7 @@ function isValidPromptbookVersion(version) {
  if ( /* version === '1.0.0' || */version === '2.0.0' || version === '3.0.0') {
  return false;
  }
- // <- TODO: !!! Check isValidPromptbookVersion against PROMPTBOOK_VERSIONS
+ // <- TODO:[main] !!! Check isValidPromptbookVersion against PROMPTBOOK_VERSIONS
  return true;
  }

@@ -1098,7 +1098,7 @@ function validatePipeline(pipeline) {
  // <- Note: [🚲]
  throw new PipelineLogicError(spaceTrim(function (block) { return "\n Invalid promptbook URL \"".concat(pipeline.pipelineUrl, "\"\n\n ").concat(block(pipelineIdentification), "\n "); }));
  }
- if (!isValidPromptbookVersion(pipeline.promptbookVersion)) {
+ if (pipeline.promptbookVersion !== undefined && !isValidPromptbookVersion(pipeline.promptbookVersion)) {
  // <- Note: [🚲]
  throw new PipelineLogicError(spaceTrim(function (block) { return "\n Invalid Promptbook Version \"".concat(pipeline.promptbookVersion, "\"\n\n ").concat(block(pipelineIdentification), "\n "); }));
  }
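In plain terms, this hunk makes `promptbookVersion` optional during validation: the version check only runs when the field is actually present in the pipeline JSON. A condensed restatement of the new guard (identifiers taken from the hunk above, error wording shortened):

```js
// Condensed from the hunk above: skip the version check entirely when `promptbookVersion`
// is absent; otherwise keep rejecting versions that isValidPromptbookVersion refuses.
if (pipeline.promptbookVersion !== undefined && !isValidPromptbookVersion(pipeline.promptbookVersion)) {
    throw new PipelineLogicError('Invalid Promptbook Version "' + pipeline.promptbookVersion + '"');
}
```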
@@ -1293,11 +1293,11 @@ function validatePipeline(pipeline) {
  * > ex port function validatePipeline(promptbook: really_unknown): asserts promptbook is PipelineJson {
  */
  /**
- * TODO: [🐣] !!!! Validate that all samples match expectations
- * TODO: [🐣][🐝] !!!! Validate that knowledge is valid (non-void)
- * TODO: [🐣] !!!! Validate that persona can be used only with CHAT variant
- * TODO: [🐣] !!!! Validate that parameter with reserved name not used RESERVED_PARAMETER_NAMES
- * TODO: [🐣] !!!! Validate that reserved parameter is not used as joker
+ * TODO: [🐣][main] !!!! Validate that all samples match expectations
+ * TODO: [🐣][🐝][main] !!!! Validate that knowledge is valid (non-void)
+ * TODO: [🐣][main] !!!! Validate that persona can be used only with CHAT variant
+ * TODO: [🐣][main] !!!! Validate that parameter with reserved name not used RESERVED_PARAMETER_NAMES
+ * TODO: [🐣][main] !!!! Validate that reserved parameter is not used as joker
  * TODO: [🧠] Validation not only logic itself but imports around - files and websites and rerefenced pipelines exists
  * TODO: [🛠] Actions, instruments (and maybe knowledge) => Functions and tools
  */
@@ -2598,7 +2598,7 @@ function isPipelinePrepared(pipeline) {
  return true;
  }
  /**
- * TODO: [🔃] !!!!! If the pipeline was prepared with different version or different set of models, prepare it once again
+ * TODO: [🔃][main] !!!!! If the pipeline was prepared with different version or different set of models, prepare it once again
  * TODO: [🐠] Maybe base this on `makeValidator`
  * TODO: [🧊] Pipeline can be partially prepared, this should return true ONLY if fully prepared
  * TODO: [🧿] Maybe do same process with same granularity and subfinctions as `preparePipeline`
@@ -2977,7 +2977,7 @@ function createPipelineExecutor(options) {
  console.warn(spaceTrim(function (block) { return "\n Pipeline is not prepared\n\n ".concat(block(pipelineIdentification), "\n\n It will be prepared ad-hoc before the first execution and **returned as `preparedPipeline` in `PipelineExecutorResult`**\n But it is recommended to prepare the pipeline during collection preparation\n\n @see more at https://ptbk.io/prepare-pipeline\n "); }));
  }
  var pipelineExecutor = function (inputParameters, onProgress) { return __awaiter(_this, void 0, void 0, function () {
- // TODO: !!! Extract to separate functions and files - ALL FUNCTIONS BELOW
+ // TODO:[main] !!! Extract to separate functions and files - ALL FUNCTIONS BELOW
  function getContextForTemplate(template) {
  return __awaiter(this, void 0, void 0, function () {
  return __generator(this, function (_a) {
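The warning above points callers toward preparing the pipeline before building an executor. A sketch of that flow follows; the option shapes are assumptions rather than facts from this diff (only the names `isPipelinePrepared`, `preparePipeline` and `createPipelineExecutor` are confirmed by the surrounding hunks):

```js
// Sketch only; the option shapes below are assumed, not taken from this diff.
async function runPrepared(pipeline, tools, inputParameters) {
    if (!isPipelinePrepared(pipeline)) {
        // assumed call shape for preparePipeline (the TODOs below mention passing isVerbose / maxParallelCount to it)
        pipeline = await preparePipeline(pipeline, { llmTools: tools, isVerbose: false });
    }
    var pipelineExecutor = createPipelineExecutor({ pipeline: pipeline, tools: tools }); // <- assumed options shape
    return await pipelineExecutor(inputParameters);
}
```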
@@ -3796,7 +3796,7 @@ function createPipelineExecutor(options) {
  return pipelineExecutor;
  }
  /**
- * TODO: !!! Identify not only pipeline BUT exact template ${block(pipelineIdentification)}
+ * TODO:[main] !!! Identify not only pipeline BUT exact template ${block(pipelineIdentification)}
  * TODO: Use isVerbose here (not only pass to `preparePipeline`)
  * TODO: [🧠][🌳] Use here `countTotalUsage` and put preparation and prepared pipiline to report
  * TODO: [🪂] Use maxParallelCount here (not only pass to `preparePipeline`)
@@ -3859,7 +3859,7 @@ function prepareKnowledgeFromMarkdown(knowledgeContent /* <- TODO: [🖖] (?mayb
  outputParameters = result.outputParameters;
  knowledgePiecesRaw = outputParameters.knowledgePieces;
  knowledgeTextPieces = (knowledgePiecesRaw || '').split('\n---\n');
- // <- TODO: !!!!! Smarter split and filter out empty pieces
+ // <- TODO:[main] !!!!! Smarter split and filter out empty pieces
  if (isVerbose) {
  console.info('knowledgeTextPieces:', knowledgeTextPieces);
  }
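The `Smarter split` TODO above concerns the plain `'\n---\n'` split, which keeps empty or whitespace-only pieces. A hypothetical refinement, purely illustrative and not part of the released bundle:

```js
// Hypothetical illustration of the "Smarter split and filter out empty pieces" TODO —
// the released code still does a plain split on '\n---\n'.
var knowledgeTextPieces = (knowledgePiecesRaw || '')
    .split(/\n\s*---+\s*\n/) // tolerate extra whitespace and longer horizontal rules
    .map(function (piece) { return piece.trim(); })
    .filter(function (piece) { return piece !== ''; }); // drop empty pieces
```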
@@ -3939,7 +3939,7 @@ function prepareKnowledgeFromMarkdown(knowledgeContent /* <- TODO: [🖖] (?mayb
  });
  }
  /**
- * TODO: [🐝][🔼] !!! Export via `@promptbook/markdown`
+ * TODO: [🐝][🔼][main] !!! Export via `@promptbook/markdown`
  * TODO: [🪂] Do it in parallel 11:11
  * Note: No need to aggregate usage here, it is done by intercepting the llmTools
  */
@@ -3963,7 +3963,7 @@ function prepareKnowledgePieces(knowledgeSources, options) {
  var partialPieces, pieces;
  return __generator(this, function (_a) {
  switch (_a.label) {
- case 0: return [4 /*yield*/, prepareKnowledgeFromMarkdown(knowledgeSource.sourceContent, // <- TODO: [🐝] !!! Unhardcode markdown, detect which type it is - BE AWARE of big package size
+ case 0: return [4 /*yield*/, prepareKnowledgeFromMarkdown(knowledgeSource.sourceContent, // <- TODO: [🐝][main] !!! Unhardcode markdown, detect which type it is - BE AWARE of big package size
  options)];
  case 1:
  partialPieces = _a.sent();
@@ -4155,7 +4155,7 @@ function preparePersona(personaDescription, options) {
  });
  }
  /**
- * TODO: [🔃] !!!!! If the persona was prepared with different version or different set of models, prepare it once again
+ * TODO: [🔃][main] !!!!! If the persona was prepared with different version or different set of models, prepare it once again
  * TODO: [🏢] !! Check validity of `modelName` in pipeline
  * TODO: [🏢] !! Check validity of `systemMessage` in pipeline
  * TODO: [🏢] !! Check validity of `temperature` in pipeline
@@ -4204,7 +4204,7 @@ function prepareTemplates(pipeline, options) {
  case 0:
  _a = options.maxParallelCount, maxParallelCount = _a === void 0 ? MAX_PARALLEL_COUNT : _a;
  templates = pipeline.templates, parameters = pipeline.parameters, knowledgePiecesCount = pipeline.knowledgePiecesCount;
- // TODO: !!!!! Apply samples to each template (if missing and is for the template defined)
+ // TODO:[main] !!!!! Apply samples to each template (if missing and is for the template defined)
  TODO_USE(parameters);
  templatesPrepared = new Array(
  // <- TODO: [🧱] Implement in a functional (not new Class) way
@@ -4236,7 +4236,7 @@ function prepareTemplates(pipeline, options) {
  /**
  * TODO: [🧠] Add context to each template (if missing)
  * TODO: [🧠] What is better name `prepareTemplate` or `prepareTemplateAndParameters`
- * TODO: [♨] !!! Prepare index the samples and maybe templates
+ * TODO: [♨][main] !!! Prepare index the samples and maybe templates
  * TODO: Write tests for `preparePipeline`
  * TODO: [🏏] Leverage the batch API and build queues @see https://platform.openai.com/docs/guides/batch
  * TODO: [🧊] In future one preparation can take data from previous preparation and save tokens and time
@@ -4408,7 +4408,7 @@ var knowledgeCommandParser = {
  if (sourceContent === '') {
  throw new ParseError("Source is not defined");
  }
- // TODO: !!!! Following checks should be applied every link in the `sourceContent`
+ // TODO:[main] !!!! Following checks should be applied every link in the `sourceContent`
  if (sourceContent.startsWith('http://')) {
  throw new ParseError("Source is not secure");
  }
@@ -4611,7 +4611,7 @@ var templateCommandParser = {
  if (command.templateType === 'KNOWLEDGE') {
  knowledgeCommandParser.$applyToPipelineJson({
  type: 'KNOWLEDGE',
- sourceContent: $templateJson.content, // <- TODO: [🐝] !!! Work with KNOWLEDGE which not referring to the source file or website, but its content itself
+ sourceContent: $templateJson.content, // <- TODO: [🐝][main] !!! Work with KNOWLEDGE which not referring to the source file or website, but its content itself
  }, $pipelineJson);
  $templateJson.isTemplate = false;
  return;
@@ -5653,6 +5653,7 @@ var promptbookVersionCommandParser = {
  * Note: `$` is used to indicate that this function mutates given `pipelineJson`
  */
  $applyToPipelineJson: function (command, $pipelineJson) {
+ // TODO: Warn if the version is overridden
  $pipelineJson.promptbookVersion = command.promptbookVersion;
  },
  /**
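The added `// TODO: Warn if the version is overridden` comment hints at a guard that the released parser does not yet contain; it currently just assigns the value. A hypothetical sketch of what that warning could look like (wording assumed):

```js
// Hypothetical only — the released parser just assigns the value; this sketch shows
// what the "Warn if the version is overridden" TODO above could mean in practice.
function applyPromptbookVersionCommand(command, $pipelineJson) {
    if ($pipelineJson.promptbookVersion !== undefined) {
        // assumed wording; no such warning exists in 0.68.5
        console.warn('PROMPTBOOK VERSION is set more than once; the last value wins');
    }
    $pipelineJson.promptbookVersion = command.promptbookVersion;
}
```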
@@ -6542,7 +6543,7 @@ function pipelineStringToJsonSync(pipelineString) {
  var $pipelineJson = {
  title: undefined /* <- Note: [🍙] Putting here placeholder to keep `title` on top at final JSON */,
  pipelineUrl: undefined /* <- Note: Putting here placeholder to keep `pipelineUrl` on top at final JSON */,
- promptbookVersion: PROMPTBOOK_VERSION,
+ promptbookVersion: undefined /* <- Note: By default no explicit version */,
  description: undefined /* <- Note: [🍙] Putting here placeholder to keep `description` on top at final JSON */,
  parameters: [],
  templates: [],
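Read together with the `promptbookVersionCommandParser` hunk above, the skeleton that `pipelineStringToJsonSync` starts from now leaves the version unset unless a `PROMPTBOOK VERSION` command in the pipeline source fills it in. For reference, the same skeleton with the placeholder comments trimmed (remaining fields omitted):

```js
// Initial $pipelineJson skeleton after this change, fields copied from the hunk above;
// `promptbookVersion` is no longer pre-filled with PROMPTBOOK_VERSION.
var $pipelineJson = {
    title: undefined,
    pipelineUrl: undefined,
    promptbookVersion: undefined, // <- stays undefined unless the pipeline source sets it
    description: undefined,
    parameters: [],
    templates: [],
};
```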
@@ -6833,7 +6834,7 @@ function pipelineStringToJsonSync(pipelineString) {
  return $asDeeplyFrozenSerializableJson('pipelineJson', $pipelineJson);
  }
  /**
- * TODO: !!!! Warn if used only sync version
+ * TODO:[main] !!!! Warn if used only sync version
  * TODO: [🚞] Report here line/column of error
  * TODO: Use spaceTrim more effectively
  * TODO: [🧠] Parameter flags - isInput, isOutput, isInternal
@@ -7616,7 +7617,7 @@ function isSerializableAsJson(value) {
  }
  }
  /**
- * TODO: [🧠] !!! In-memory cache of same values to prevent multiple checks
+ * TODO: [🧠][main] !!! In-memory cache of same values to prevent multiple checks
  * TODO: [🧠][💺] Can be done this on type-level?
  */