@promptbook/node 0.66.0-8 → 0.66.0-9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -13,7 +13,7 @@ import * as dotenv from 'dotenv';
  /**
  * The version of the Promptbook library
  */
- var PROMPTBOOK_VERSION = '0.66.0-8';
+ var PROMPTBOOK_VERSION = '0.66.0-9';
  // TODO: !!!! List here all the versions and annotate + put into script

  /*! *****************************************************************************
@@ -265,6 +265,13 @@ var RESERVED_PARAMETER_MISSING_VALUE = 'MISSING-' + REPLACING_NONCE;
  * @private within the repository
  */
  var RESERVED_PARAMETER_RESTRICTED = 'RESTRICTED-' + REPLACING_NONCE;
+ // <- TODO: [🧜‍♂️]
+ /**
+ * @@@
+ *
+ * @public exported from `@promptbook/core`
+ */
+ var IS_VERBOSE = false;
  /**
  * TODO: [🧠][🧜‍♂️] Maybe join remoteUrl and path into single value
  */
@@ -712,7 +719,7 @@ function forEachAsync(array, options, callbackfunction) {
  });
  }

- var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.66.0-8",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT",modelName:"claude-3-opus-20240229"},content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",dependentParameterNames:["knowledgeContent"],resultingParameterName:"knowledgePieces"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.66.0-8",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT",modelName:"claude-3-opus-20240229"},content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"keywords"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.66.0-8",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT",modelName:"claude-3-opus-20240229"},content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"title"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.66.0-8",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the 
persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",modelRequirements:{modelVariant:"CHAT",modelName:"gpt-4-turbo"},content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n### Option `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Option `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Option `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",expectFormat:"JSON",dependentParameterNames:["availableModelNames","personaDescription"],resultingParameterName:"modelRequirements"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];
+ var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.ptbk.md",promptbookVersion:"0.66.0-9",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT",modelName:"claude-3-opus-20240229"},content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",dependentParameterNames:["knowledgeContent"],resultingParameterName:"knowledgePieces"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-from-markdown.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.ptbk.md",promptbookVersion:"0.66.0-9",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT",modelName:"claude-3-opus-20240229"},content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"keywords"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-keywords.ptbk.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.ptbk.md",promptbookVersion:"0.66.0-9",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"knowledge",title:"Knowledge",modelRequirements:{modelVariant:"CHAT",modelName:"claude-3-opus-20240229"},content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"],resultingParameterName:"title"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-knowledge-title.ptbk.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.ptbk.md",promptbookVersion:"0.66.0-9",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the 
persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],promptTemplates:[{blockType:"PROMPT_TEMPLATE",name:"make-model-requirements",title:"Make modelRequirements",modelRequirements:{modelVariant:"CHAT",modelName:"gpt-4-turbo"},content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Sample\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n### Option `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Option `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Option `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",expectFormat:"JSON",dependentParameterNames:["availableModelNames","personaDescription"],resultingParameterName:"modelRequirements"}],knowledgeSources:[],knowledgePieces:[],personas:[],preparations:[],sourceFile:"./promptbook-collection/prepare-persona.ptbk.md"}];

  /**
  * This error indicates that the promptbook in a markdown format cannot be parsed into a valid promptbook object
@@ -2569,7 +2576,7 @@ function checkExpectations(expectations, value) {
  function createPipelineExecutor(options) {
  var _this = this;
  var pipeline = options.pipeline, tools = options.tools, _a = options.settings, settings = _a === void 0 ? {} : _a;
- var _b = settings.maxExecutionAttempts, maxExecutionAttempts = _b === void 0 ? MAX_EXECUTION_ATTEMPTS : _b, _c = settings.maxParallelCount, maxParallelCount = _c === void 0 ? MAX_PARALLEL_COUNT : _c, _d = settings.isVerbose, isVerbose = _d === void 0 ? false : _d, _e = settings.isNotPreparedWarningSupressed, isNotPreparedWarningSupressed = _e === void 0 ? false : _e;
+ var _b = settings.maxExecutionAttempts, maxExecutionAttempts = _b === void 0 ? MAX_EXECUTION_ATTEMPTS : _b, _c = settings.maxParallelCount, maxParallelCount = _c === void 0 ? MAX_PARALLEL_COUNT : _c, _d = settings.isVerbose, isVerbose = _d === void 0 ? IS_VERBOSE : _d, _e = settings.isNotPreparedWarningSupressed, isNotPreparedWarningSupressed = _e === void 0 ? false : _e;
  validatePipeline(pipeline);
  var llmTools = joinLlmExecutionTools.apply(void 0, __spreadArray([], __read(arrayableToArray(tools.llm)), false));
  var preparedPipeline;
@@ -3364,7 +3371,7 @@ function prepareKnowledgeFromMarkdown(knowledgeContent /* <- TODO: [🖖] (?mayb
  return __generator(this, function (_j) {
  switch (_j.label) {
  case 0:
- llmTools = options.llmTools, _a = options.maxParallelCount, maxParallelCount = _a === void 0 ? MAX_PARALLEL_COUNT : _a, _b = options.isVerbose, isVerbose = _b === void 0 ? false : _b;
+ llmTools = options.llmTools, _a = options.maxParallelCount, maxParallelCount = _a === void 0 ? MAX_PARALLEL_COUNT : _a, _b = options.isVerbose, isVerbose = _b === void 0 ? IS_VERBOSE : _b;
  TODO_USE(maxParallelCount); // <- [🪂]
  collection = createCollectionFromJson.apply(void 0, __spreadArray([], __read(PipelineCollection), false));
  _c = createPipelineExecutor;
@@ -3651,7 +3658,7 @@ function preparePersona(personaDescription, options) {
  return __generator(this, function (_d) {
  switch (_d.label) {
  case 0:
- llmTools = options.llmTools, _a = options.isVerbose, isVerbose = _a === void 0 ? false : _a;
+ llmTools = options.llmTools, _a = options.isVerbose, isVerbose = _a === void 0 ? IS_VERBOSE : _a;
  collection = createCollectionFromJson.apply(void 0, __spreadArray([], __read(PipelineCollection), false));
  _b = createPipelineExecutor;
  _c = {};
@@ -3808,7 +3815,7 @@ function preparePipeline(pipeline, options) {
  if (isPipelinePrepared(pipeline)) {
  return [2 /*return*/, pipeline];
  }
- llmTools = options.llmTools, _a = options.maxParallelCount, maxParallelCount = _a === void 0 ? MAX_PARALLEL_COUNT : _a, _b = options.isVerbose, isVerbose = _b === void 0 ? false : _b;
+ llmTools = options.llmTools, _a = options.maxParallelCount, maxParallelCount = _a === void 0 ? MAX_PARALLEL_COUNT : _a, _b = options.isVerbose, isVerbose = _b === void 0 ? IS_VERBOSE : _b;
  parameters = pipeline.parameters, promptTemplates = pipeline.promptTemplates, knowledgeSources = pipeline.knowledgeSources, personas = pipeline.personas;
  llmToolsWithUsage = countTotalUsage(llmTools);
  currentPreparation = {
@@ -6173,7 +6180,7 @@ function createCollectionFromDirectory(path, options) {
  // TODO: !! Implement;
  // TODO: [🌗]
  }
- _a = options || {}, _b = _a.isRecursive, isRecursive = _b === void 0 ? true : _b, _c = _a.isVerbose, isVerbose = _c === void 0 ? false : _c, _d = _a.isLazyLoaded, isLazyLoaded = _d === void 0 ? false : _d, _e = _a.isCrashedOnError, isCrashedOnError = _e === void 0 ? true : _e;
+ _a = options || {}, _b = _a.isRecursive, isRecursive = _b === void 0 ? true : _b, _c = _a.isVerbose, isVerbose = _c === void 0 ? IS_VERBOSE : _c, _d = _a.isLazyLoaded, isLazyLoaded = _d === void 0 ? false : _d, _e = _a.isCrashedOnError, isCrashedOnError = _e === void 0 ? true : _e;
  collection = createCollectionFromPromise(function () { return __awaiter(_this, void 0, void 0, function () {
  var fileNames, collection, _loop_1, fileNames_1, fileNames_1_1, fileName, e_1_1;
  var e_1, _a;
@@ -6391,13 +6398,10 @@ var $Register = /** @class */ (function () {
  var packageName = registered.packageName, className = registered.className;
  var existingRegistrationIndex = this.storage.findIndex(function (item) { return item.packageName === packageName && item.className === className; });
  var existingRegistration = this.storage[existingRegistrationIndex];
- // TODO: !!!!!! Global IS_VERBOSE mode
  if (!existingRegistration) {
- console.warn("[\uD83D\uDCE6] Registering `".concat(packageName, ".").concat(className, "` to `").concat(this.storageName, "`"));
  this.storage.push(registered);
  }
  else {
- console.warn("[\uD83D\uDCE6] Re-registering `".concat(packageName, ".").concat(className, "` to `").concat(this.storageName, "`"));
  this.storage[existingRegistrationIndex] = registered;
  }
  };
@@ -6561,7 +6565,7 @@ function $registeredLlmToolsMessage() {
  */
  function createLlmToolsFromConfiguration(configuration, options) {
  if (options === void 0) { options = {}; }
- var _a = options.isVerbose, isVerbose = _a === void 0 ? false : _a;
+ var _a = options.isVerbose, isVerbose = _a === void 0 ? IS_VERBOSE : _a;
  var llmTools = configuration.map(function (llmConfiguration) {
  var registeredItem = $llmToolsRegister
  .list()