@promptbook/node 0.81.0-5 → 0.81.0-6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -26,7 +26,7 @@ var BOOK_LANGUAGE_VERSION = '1.0.0';
26
26
  *
27
27
  * @see https://github.com/webgptorg/promptbook
28
28
  */
29
- var PROMPTBOOK_ENGINE_VERSION = '0.81.0-4';
29
+ var PROMPTBOOK_ENGINE_VERSION = '0.81.0-5';
30
30
  /**
31
31
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
32
32
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -204,6 +204,26 @@ var DEFAULT_TITLE = "Untitled";
204
204
  * @private within the repository - too low-level in comparison with other `MAX_...`
205
205
  */
206
206
  var LOOP_LIMIT = 1000;
207
/**
 * Strings to represent various values in the context of parameter values
 *
 * These placeholders are substituted for values (empty string, null, NaN, ...)
 * that have no natural human-readable string form, so that LLM prompts never
 * receive a blank or misleading token.
 *
 * @public exported from `@promptbook/utils`
 */
var VALUE_STRINGS = {
    empty: '(nothing; empty string)',
    null: '(no value; null)',
    undefined: '(unknown value; undefined)',
    nan: '(not a number; NaN)',
    infinity: '(infinity; ∞)',
    negativeInfinity: '(negative infinity; -∞)',
    unserializable: '(unserializable value)',
};
221
/**
 * Small number limit
 *
 * Used as a relative-error tolerance, e.g. when `numberToString` decides how
 * many decimal places are needed to faithfully render a number.
 *
 * @public exported from `@promptbook/utils`
 */
var SMALL_NUMBER = 0.001;
207
227
  /**
208
228
  * Short time interval to prevent race conditions in milliseconds
209
229
  *
@@ -559,6 +579,7 @@ function exportJson(options) {
559
579
  * @public exported from `@promptbook/core`
560
580
  */
561
581
  var ORDER_OF_PIPELINE_JSON = [
582
+ // Note: [🍙] In this order will be pipeline serialized
562
583
  'title',
563
584
  'pipelineUrl',
564
585
  'bookVersion',
@@ -570,6 +591,7 @@ var ORDER_OF_PIPELINE_JSON = [
570
591
  'preparations',
571
592
  'knowledgeSources',
572
593
  'knowledgePieces',
594
+ 'sources', // <- TODO: [🧠] Where should the `sources` be
573
595
  ];
574
596
  /**
575
597
  * Nonce which is used for replacing things in strings
@@ -1228,7 +1250,7 @@ function joinLlmExecutionTools() {
1228
1250
  * TODO: [👷‍♂️] @@@ Manual about construction of llmTools
1229
1251
  */
1230
1252
 
1231
- var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-knowledge-from-markdown.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> 
{knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-knowledge-keywords.book.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-knowledge-title.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.book.md",formfactorName:"GENERIC",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"make-model-requirements",title:"Make modelRequirements",content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Example\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON 
object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. 
For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-persona.book.md"}];
1253
+ var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Knowledge from Markdown\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md`\n- INPUT PARAMETER `{knowledgeContent}` Markdown document content\n- OUTPUT PARAMETER `{knowledgePieces}` The knowledge JSON object\n\n## Knowledge\n\n<!-- TODO: [🍆] -FORMAT JSON -->\n\n```markdown\nYou are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}\n```\n\n`-> 
{knowledgePieces}`\n"}],sourceFile:"./books/prepare-knowledge-from-markdown.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Keywords\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-keywords.book.md`\n- INPUT PARAMETER `{knowledgePieceContent}` The content\n- OUTPUT PARAMETER `{keywords}` Keywords separated by comma\n\n## Knowledge\n\n<!-- TODO: [🍆] -FORMAT JSON -->\n\n```markdown\nYou are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}\n```\n\n`-> {keywords}`\n"}],sourceFile:"./books/prepare-knowledge-keywords.book.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just 
title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Title\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-title.book.md`\n- INPUT PARAMETER `{knowledgePieceContent}` The content\n- OUTPUT PARAMETER `{title}` The title of the document\n\n## Knowledge\n\n- EXPECT MIN 1 WORD\n- EXPECT MAX 8 WORDS\n\n```markdown\nYou are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}\n```\n\n`-> {title}`\n"}],sourceFile:"./books/prepare-knowledge-title.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.book.md",formfactorName:"GENERIC",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"make-model-requirements",title:"Make modelRequirements",content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Example\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to 
use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. 
For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Keywords\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-persona.book.md`\n- INPUT PARAMETER `{availableModelNames}` List of available model names separated by comma (,)\n- INPUT PARAMETER `{personaDescription}` Description of the persona\n- OUTPUT PARAMETER `{modelRequirements}` Specific requirements for the model\n\n## Make modelRequirements\n\n- FORMAT JSON\n\n```markdown\nYou are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Example\n\n\\`\\`\\`json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n\\`\\`\\`\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. 
It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}\n```\n\n`-> {modelRequirements}`\n"}],sourceFile:"./books/prepare-persona.book.md"}];
1232
1254
 
1233
1255
  /**
1234
1256
  * Prettify the html code
@@ -3126,9 +3148,87 @@ function arrayableToArray(input) {
3126
3148
  return [input];
3127
3149
  }
3128
3150
 
3151
/**
 * Format either small or big number as a human-readable string
 *
 * Finds the smallest number of decimal places (up to 15) that reproduces the
 * value with a relative error below `SMALL_NUMBER`, and formats with
 * `toFixed` at that precision; falls back to `toString` otherwise.
 * Special values (0, NaN, ±Infinity) get fixed representations.
 *
 * @param value number to format
 * @returns the formatted number, or one of the `VALUE_STRINGS` placeholders
 * @public exported from `@promptbook/utils`
 */
function numberToString(value) {
    if (value === 0) {
        return '0';
    }
    else if (Number.isNaN(value)) {
        return VALUE_STRINGS.nan;
    }
    else if (value === Infinity) {
        return VALUE_STRINGS.infinity;
    }
    else if (value === -Infinity) {
        return VALUE_STRINGS.negativeInfinity;
    }
    for (var exponent = 0; exponent < 15; exponent++) {
        var factor = Math.pow(10, exponent);
        var valueRounded = Math.round(value * factor) / factor;
        // Note: The relative error must be computed with absolute values.
        // Dividing by a negative `value` made the ratio negative and therefore
        // always below `SMALL_NUMBER`, so negative numbers were truncated at
        // the first iteration (e.g. -1.5 was rendered as "-1").
        if (Math.abs(value - valueRounded) / Math.abs(value) < SMALL_NUMBER) {
            return valueRounded.toFixed(exponent);
        }
    }
    return value.toString();
}
3178
+
3179
+ /**
3180
+ * Function `valueToString` will convert the given value to string
3181
+ * This is useful and used in the `templateParameters` function
3182
+ *
3183
+ * Note: This function is not just calling `toString` method
3184
+ * It's more complex and can handle this conversion specifically for LLM models
3185
+ * See `VALUE_STRINGS`
3186
+ *
3187
+ * Note: There are 2 similar functions
3188
+ * - `valueToString` converts value to string for LLM models as human-readable string
3189
+ * - `asSerializable` converts value to string to preserve full information to be able to convert it back
3190
+ *
3191
+ * @public exported from `@promptbook/utils`
3192
+ */
3193
/**
 * Function `valueToString` will convert the given value to string
 * This is useful and used in the `templateParameters` function
 *
 * Note: This function is not just calling `toString` method
 * It's more complex and can handle this conversion specifically for LLM models
 * See `VALUE_STRINGS`
 *
 * Note: There are 2 similar functions
 * - `valueToString` converts value to string for LLM models as human-readable string
 * - `asSerializable` converts value to string to preserve full information to be able to convert it back
 *
 * @param value arbitrary value to render
 * @returns always a string; unserializable values yield `VALUE_STRINGS.unserializable`
 * @public exported from `@promptbook/utils`
 */
function valueToString(value) {
    try {
        if (value === '') {
            return VALUE_STRINGS.empty;
        }
        else if (value === null) {
            return VALUE_STRINGS.null;
        }
        else if (value === undefined) {
            return VALUE_STRINGS.undefined;
        }
        else if (typeof value === 'string') {
            return value;
        }
        else if (typeof value === 'number') {
            return numberToString(value);
        }
        else if (value instanceof Date) {
            return value.toISOString();
        }
        else {
            var serialized = JSON.stringify(value);
            // Note: `JSON.stringify` returns `undefined` (not a string) for
            // functions and symbols; report those as unserializable instead of
            // leaking a non-string `undefined` to the caller.
            if (serialized === undefined) {
                return VALUE_STRINGS.unserializable;
            }
            return serialized;
        }
    }
    catch (error) {
        if (!(error instanceof Error)) {
            throw error;
        }
        // Note: Log the underlying reason (e.g. circular structure, BigInt)
        // but keep the contract of always returning a string.
        console.error(error);
        return VALUE_STRINGS.unserializable;
    }
}
3225
+
3129
3226
  /**
3130
3227
  * Replaces parameters in template with values from parameters object
3131
3228
  *
3229
+ * Note: This function does not simply place strings into a string,
3230
+ * It's more complex and can handle this operation specifically for LLM models
3231
+ *
3132
3232
  * @param template the template with parameters in {curly} braces
3133
3233
  * @param parameters the object with parameters
3134
3234
  * @returns the template with replaced parameters
@@ -3178,7 +3278,7 @@ function templateParameters(template, parameters) {
3178
3278
  if (parameterValue === undefined) {
3179
3279
  throw new PipelineExecutionError("Parameter `{".concat(parameterName, "}` is not defined"));
3180
3280
  }
3181
- parameterValue = parameterValue.toString();
3281
+ parameterValue = valueToString(parameterValue);
3182
3282
  if (parameterValue.includes('\n') && /^\s*\W{0,3}\s*$/.test(precol)) {
3183
3283
  parameterValue = parameterValue
3184
3284
  .split('\n')
@@ -8656,6 +8756,14 @@ function precompilePipeline(pipelineString) {
8656
8756
  knowledgePieces: [],
8657
8757
  personas: [],
8658
8758
  preparations: [],
8759
+ sources: [
8760
+ {
8761
+ type: 'BOOK',
8762
+ path: null,
8763
+ // <- TODO: !!!!!! Pass here path of the file
8764
+ content: pipelineString,
8765
+ },
8766
+ ],
8659
8767
  };
8660
8768
  function getPipelineIdentification() {
8661
8769
  // Note: This is a 😐 implementation of [🚞]
@@ -9063,7 +9171,6 @@ function precompilePipeline(pipelineString) {
9063
9171
  $pipelineJson.formfactorName = 'GENERIC';
9064
9172
  }
9065
9173
  // =============================================================
9066
- // TODO: [🍙] Maybe do reorder of `$pipelineJson` here
9067
9174
  return exportJson({
9068
9175
  name: 'pipelineJson',
9069
9176
  message: "Result of `precompilePipeline`",
@@ -10506,11 +10613,9 @@ function createCollectionFromDirectory(path, tools, options) {
10506
10613
  , ".json"));
10507
10614
  return [4 /*yield*/, isFileExisting(makedLibraryFilePath, tools.fs)];
10508
10615
  case 3:
10509
- if (!(_f.sent())) {
10510
- console.info(colors.yellow("Tip: Prebuild your pipeline collection (file with supposed prebuild ".concat(makedLibraryFilePath, " not found) with CLI util \"ptbk make\" to speed up the collection creation.")));
10511
- }
10616
+ if (!(_f.sent())) ;
10512
10617
  else {
10513
- colors.green("(In future, not implemented yet) Using your prebuild pipeline collection ".concat(makedLibraryFilePath));
10618
+ colors.green("(In future, not implemented yet) Using your compiled pipeline collection ".concat(makedLibraryFilePath));
10514
10619
  // TODO: !! Implement;
10515
10620
  // TODO: [🌗]
10516
10621
  }
@@ -10530,10 +10635,10 @@ function createCollectionFromDirectory(path, tools, options) {
10530
10635
  // Note: First load all .book.json and then .book.md files
10531
10636
  // .book.json can be prepared so it is faster to load
10532
10637
  fileNames.sort(function (a, b) {
10533
- if (a.endsWith('.book.json') && b.endsWith('.book.md')) {
10638
+ if (a.endsWith('.json') && b.endsWith('.md')) {
10534
10639
  return -1;
10535
10640
  }
10536
- if (a.endsWith('.book.md') && b.endsWith('.book.json')) {
10641
+ if (a.endsWith('.md') && b.endsWith('.json')) {
10537
10642
  return 1;
10538
10643
  }
10539
10644
  return 0;
@@ -10582,7 +10687,7 @@ function createCollectionFromDirectory(path, tools, options) {
10582
10687
  // TODO: [👠] DRY
10583
10688
  if (pipeline.pipelineUrl === undefined) {
10584
10689
  if (isVerbose) {
10585
- console.info(colors.red("Can not load pipeline from ".concat(fileName
10690
+ console.info(colors.yellow("Can not load pipeline from ".concat(fileName
10586
10691
  .split('\\')
10587
10692
  .join('/'), " because of missing URL")));
10588
10693
  }