@promptbook/markdown-utils 0.81.0-5 → 0.81.0-7

This diff shows the content changes between publicly released versions of the package as they appear in the supported public registries. It is provided for informational purposes only.
@@ -20,6 +20,11 @@ declare const _default: ({
  preparations: never[];
  knowledgeSources: never[];
  knowledgePieces: never[];
+ sources: {
+ type: string;
+ path: null;
+ content: string;
+ }[];
  sourceFile: string;
  } | {
  title: string;
@@ -49,6 +54,11 @@ declare const _default: ({
  preparations: never[];
  knowledgeSources: never[];
  knowledgePieces: never[];
+ sources: {
+ type: string;
+ path: null;
+ content: string;
+ }[];
  sourceFile: string;
  } | {
  title: string;
@@ -73,6 +83,11 @@ declare const _default: ({
  preparations: never[];
  knowledgeSources: never[];
  knowledgePieces: never[];
+ sources: {
+ type: string;
+ path: null;
+ content: string;
+ }[];
  sourceFile: string;
  })[];
  export default _default;
@@ -27,6 +27,7 @@ import { DEFAULT_CSV_SETTINGS } from '../config';
  import { DEFAULT_IS_VERBOSE } from '../config';
  import { SET_IS_VERBOSE } from '../config';
  import { DEFAULT_IS_AUTO_INSTALLED } from '../config';
+ import { DEFAULT_GET_PIPELINE_COLLECTION_FUNCTION_NAME } from '../config';
  import { ORDER_OF_PIPELINE_JSON } from '../constants';
  import { RESERVED_PARAMETER_NAMES } from '../constants';
  import { compilePipeline } from '../conversion/compilePipeline';
@@ -145,6 +146,7 @@ export { DEFAULT_CSV_SETTINGS };
  export { DEFAULT_IS_VERBOSE };
  export { SET_IS_VERBOSE };
  export { DEFAULT_IS_AUTO_INSTALLED };
+ export { DEFAULT_GET_PIPELINE_COLLECTION_FUNCTION_NAME };
  export { ORDER_OF_PIPELINE_JSON };
  export { RESERVED_PARAMETER_NAMES };
  export { compilePipeline };
@@ -1,4 +1,4 @@
  import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '../version';
- import { getBookTemplate } from '../utils/getBookTemplate';
+ import { getBookTemplate } from '../other/templates/getBookTemplate';
  export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
  export { getBookTemplate };
@@ -1,4 +1,6 @@
  import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '../version';
+ import { VALUE_STRINGS } from '../config';
+ import { SMALL_NUMBER } from '../config';
  import { renderPromptbookMermaid } from '../conversion/prettify/renderPipelineMermaidOptions';
  import { extractVariablesFromScript } from '../conversion/utils/extractVariablesFromScript';
  import { deserializeError } from '../errors/utils/deserializeError';
@@ -46,7 +48,9 @@ import { searchKeywords } from '../utils/normalization/searchKeywords';
  import { titleToName } from '../utils/normalization/titleToName';
  import { spaceTrim } from '../utils/organization/spaceTrim';
  import { extractParameterNames } from '../utils/parameters/extractParameterNames';
+ import { numberToString } from '../utils/parameters/numberToString';
  import { templateParameters } from '../utils/parameters/templateParameters';
+ import { valueToString } from '../utils/parameters/valueToString';
  import { parseNumber } from '../utils/parseNumber';
  import { $randomSeed } from '../utils/random/$randomSeed';
  import { removeEmojis } from '../utils/removeEmojis';
@@ -74,6 +78,8 @@ import { isValidPipelineUrl } from '../utils/validators/url/isValidPipelineUrl';
  import { isValidUrl } from '../utils/validators/url/isValidUrl';
  import { isValidUuid } from '../utils/validators/uuid/isValidUuid';
  export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
+ export { VALUE_STRINGS };
+ export { SMALL_NUMBER };
  export { renderPromptbookMermaid };
  export { extractVariablesFromScript };
  export { deserializeError };
@@ -121,7 +127,9 @@ export { searchKeywords };
  export { titleToName };
  export { spaceTrim };
  export { extractParameterNames };
+ export { numberToString };
  export { templateParameters };
+ export { valueToString };
  export { parseNumber };
  export { $randomSeed };
  export { removeEmojis };
@@ -77,6 +77,26 @@ export declare const LOOP_LIMIT = 1000;
  * @private within the repository - too low-level in comparison with other `MAX_...`
  */
  export declare const CHARACTER_LOOP_LIMIT = 100000;
+ /**
+ * Strings to represent various values in the context of parameter values
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ export declare const VALUE_STRINGS: {
+ readonly empty: "(nothing; empty string)";
+ readonly null: "(no value; null)";
+ readonly undefined: "(unknown value; undefined)";
+ readonly nan: "(not a number; NaN)";
+ readonly infinity: "(infinity; ∞)";
+ readonly negativeInfinity: "(negative infinity; -∞)";
+ readonly unserializable: "(unserializable value)";
+ };
+ /**
+ * Small number limit
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ export declare const SMALL_NUMBER = 0.001;
  /**
  * Timeout for the connections in milliseconds
  *
@@ -209,6 +229,12 @@ export declare function SET_IS_VERBOSE(isVerbose: boolean): void;
  * @public exported from `@promptbook/core`
  */
  export declare const DEFAULT_IS_AUTO_INSTALLED = false;
+ /**
+ * Function name for generated function via `ptbk make` to get the pipeline collection
+ *
+ * @public exported from `@promptbook/core`
+ */
+ export declare const DEFAULT_GET_PIPELINE_COLLECTION_FUNCTION_NAME = "getPipelineCollection";
  /**
  * @@@
  *
@@ -18,6 +18,11 @@ export declare const HIGH_LEVEL_ABSTRACTIONS: readonly [{
  readonly knowledgePieces: import("../_packages/types.index").KnowledgePiecePreparedJson[];
  readonly personas: (import("../_packages/types.index").PersonaJson | import("../_packages/types.index").PersonaPreparedJson)[];
  readonly preparations: import("../_packages/types.index").PreparationJson[];
+ readonly sources: readonly {
+ type: "BOOK";
+ path: string | null;
+ content: import("../pipeline/PipelineString").PipelineString;
+ }[];
  readonly formfactorName?: "CHATBOT" | "GENERATOR" | "GENERIC" | "EXPERIMENTAL_MATCHER" | "SHEETS" | "TRANSLATOR" | undefined;
  }>): void;
  }, {
@@ -35,6 +40,11 @@ export declare const HIGH_LEVEL_ABSTRACTIONS: readonly [{
  readonly knowledgePieces: import("../_packages/types.index").KnowledgePiecePreparedJson[];
  readonly personas: (import("../_packages/types.index").PersonaJson | import("../_packages/types.index").PersonaPreparedJson)[];
  readonly preparations: import("../_packages/types.index").PreparationJson[];
+ readonly sources: readonly {
+ type: "BOOK";
+ path: string | null;
+ content: import("../pipeline/PipelineString").PipelineString;
+ }[];
  readonly formfactorName?: "CHATBOT" | "GENERATOR" | "GENERIC" | "EXPERIMENTAL_MATCHER" | "SHEETS" | "TRANSLATOR" | undefined;
  }>): void;
  }];
@@ -0,0 +1,12 @@
+ import type { string_formfactor_name } from '../../formfactors/_common/string_formfactor_name';
+ import type { PipelineJson } from '../../pipeline/PipelineJson/PipelineJson';
+ /**
+ * Get template for new book
+ *
+ * @public exported from `@promptbook/templates`
+ */
+ export declare function getBookTemplate(formfactorName: string_formfactor_name): Promise<PipelineJson | null>;
+ /**
+ * TODO: [🧠] Which is the best place for this function
+ * TODO: !!!!!! `book string template notation
+ */
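A minimal usage sketch for the new `getBookTemplate` declaration above, not part of the package diff. It assumes the function is consumed via the `@promptbook/templates` entry point named in its `@public` tag; the `'GENERIC'` formfactor name is taken from the union type shown earlier in this diff.

```ts
// Sketch only, assumptions noted above.
import { getBookTemplate } from '@promptbook/templates';

async function main(): Promise<void> {
    const template = await getBookTemplate('GENERIC');

    if (template === null) {
        // The declared return type allows null when no template exists for the formfactor
        console.info('No book template for this formfactor');
        return;
    }

    // `template` is a PipelineJson, so the new `sources` backup is available on it
    console.info(template.title, template.sources);
}

main();
```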
@@ -0,0 +1,10 @@
+ import type { PipelineCollection } from '../../collection/PipelineCollection';
+ /**
+ * Get pipeline collection for Untitled Promptbook project
+ *
+ * ⚠️ WARNING: This code has been generated by `@promptbook/cli` so that any manual changes will be overwritten
+ *
+ * @generated
+ * @returns {PipelineCollection} Library of promptbooks for Untitled Promptbook project
+ */
+ export declare function getTemplatesPipelineCollection(): PipelineCollection;
@@ -3,7 +3,9 @@ import type { ModelRequirements } from '../../types/ModelRequirements';
  import type { string_filename } from '../../types/typeAliases';
  import type { string_markdown_text } from '../../types/typeAliases';
  import type { string_pipeline_url } from '../../types/typeAliases';
+ import type { string_relative_filename } from '../../types/typeAliases';
  import type { string_semantic_version } from '../../types/typeAliases';
+ import type { PipelineString } from '../PipelineString';
  import type { KnowledgePiecePreparedJson } from './KnowledgePieceJson';
  import type { KnowledgeSourceJson } from './KnowledgeSourceJson';
  import type { KnowledgeSourcePreparedJson } from './KnowledgeSourceJson';
@@ -91,6 +93,14 @@ export type PipelineJson = {
  * @see https://github.com/webgptorg/promptbook/discussions/78
  */
  readonly preparations: Array<PreparationJson>;
+ /**
+ * Backup of the original book source
+ */
+ readonly sources: ReadonlyArray<{
+ type: 'BOOK';
+ path: string_relative_filename | null;
+ content: PipelineString;
+ }>;
  };
  /**
  * TODO: [🛳] Default PERSONA for the pipeline `defaultPersonaName` (same as `defaultModelRequirements`)
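For orientation, this is roughly the shape the new `sources` field takes on a prepared pipeline, following the type added above and the regenerated `PipelineCollection` later in this diff. The literal values are illustrative, and a plain string stands in for the branded `PipelineString` type.

```ts
// Illustrative only; see the hedging note above.
const sources = [
    {
        type: 'BOOK' as const,
        path: null, // or a relative filename such as './books/prepare-knowledge-from-markdown.book.md'
        content: '# Prepare Knowledge from Markdown\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md`\n...',
    },
];
```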
@@ -1,8 +1,6 @@
  import type { ErrorJson } from '../../../errors/utils/ErrorJson';
  import type { PipelineJson } from '../../../pipeline/PipelineJson/PipelineJson';
  import type { string_date_iso8601 } from '../../../types/typeAliases';
- import type { string_pipeline_url } from '../../../types/typeAliases';
- import type { PipelineString } from '../../../pipeline/PipelineString';
  /**
  * Represents a single pipeline in PromptbookStudio
  *
@@ -22,19 +20,6 @@ export type PipelineEditableSerialized = PipelineJson & {
  * When was the pipeline last modified
  */
  readonly updatedAt: string_date_iso8601 | null;
- /**
- * Unique identifier of the pipeline
- *
- * Note: In PromptbookStudio it is required
- */
- readonly pipelineUrl: string_pipeline_url;
- /**
- * Backup of the pipeline string
- *
- * Note: This is present ONLY if pipelineString can not be automatically converted into json (i.e. compilePipeline throws an error)
- * In other words, this is just a BACKUP of pipeline which will be deleted whener pipeline is valid again
- */
- readonly pipelineString: PipelineString | string | null;
  /**
  * Known errors to transfer to new PipelineEditable
  */
@@ -0,0 +1,7 @@
+ import type { string_parameter_value } from '../../types/typeAliases';
+ /**
+ * Format either small or big number
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ export declare function numberToString(value: number): string_parameter_value;
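A usage sketch for the new `numberToString` helper, based on the declaration above and its UMD implementation further down in this diff: it rounds to the fewest decimal places whose relative error stays below `SMALL_NUMBER` (0.001), and falls back to the `VALUE_STRINGS` labels for NaN and infinities. The import path follows the `@public exported from @promptbook/utils` tag.

```ts
// Sketch only, assumptions noted above.
import { numberToString } from '@promptbook/utils';

console.info(numberToString(0)); // -> '0'
console.info(numberToString(1250)); // -> '1250'
console.info(numberToString(0.123456789)); // -> '0.1235' (first rounding within the SMALL_NUMBER tolerance)
console.info(numberToString(Number.NaN)); // -> '(not a number; NaN)', i.e. VALUE_STRINGS.nan
```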
@@ -1,12 +1,16 @@
- import type { Parameters } from '../../types/typeAliases';
+ import type { string_parameter_name } from '../../types/typeAliases';
  import type { string_template } from '../../types/typeAliases';
+ import type { really_unknown } from '../organization/really_unknown';
  /**
  * Replaces parameters in template with values from parameters object
  *
+ * Note: This function is not places strings into string,
+ * It's more complex and can handle this operation specifically for LLM models
+ *
  * @param template the template with parameters in {curly} braces
  * @param parameters the object with parameters
  * @returns the template with replaced parameters
  * @throws {PipelineExecutionError} if parameter is not defined, not closed, or not opened
  * @public exported from `@promptbook/utils`
  */
- export declare function templateParameters(template: string_template, parameters: Parameters): string;
+ export declare function templateParameters(template: string_template, parameters: Record<string_parameter_name, really_unknown>): string;
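The widened signature above means the parameters object is no longer restricted to string values; non-string values are converted through the new `valueToString` helper (see the UMD change below, where `parameterValue.toString()` becomes `valueToString(parameterValue)`). A sketch of how that reads for a caller; the template and parameter names here are made up.

```ts
// Sketch only; assumes the `@promptbook/utils` entry point.
import { templateParameters } from '@promptbook/utils';

const message = templateParameters('Hello {name}, you have {count} new messages', {
    name: 'Alice', // strings are passed through unchanged
    count: 3, // numbers are now formatted via numberToString instead of toString()
});

console.info(message); // -> 'Hello Alice, you have 3 new messages'
```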
@@ -0,0 +1,17 @@
+ import type { string_parameter_value } from '../../types/typeAliases';
+ import type { really_unknown } from '../organization/really_unknown';
+ /**
+ * Function `valueToString` will convert the given value to string
+ * This is useful and used in the `templateParameters` function
+ *
+ * Note: This function is not just calling `toString` method
+ * It's more complex and can handle this conversion specifically for LLM models
+ * See `VALUE_STRINGS`
+ *
+ * Note: There are 2 similar functions
+ * - `valueToString` converts value to string for LLM models as human-readable string
+ * - `asSerializable` converts value to string to preserve full information to be able to convert it back
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ export declare function valueToString(value: really_unknown): string_parameter_value;
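A sketch of the conversions `valueToString` performs, derived from the `VALUE_STRINGS` constant and the UMD implementation later in this diff; the import path follows the `@public` tag above.

```ts
// Sketch only, assumptions noted above.
import { valueToString } from '@promptbook/utils';

console.info(valueToString('hello')); // -> 'hello' (strings pass through)
console.info(valueToString('')); // -> '(nothing; empty string)', i.e. VALUE_STRINGS.empty
console.info(valueToString(null)); // -> '(no value; null)', i.e. VALUE_STRINGS.null
console.info(valueToString(0.5)); // -> '0.5' (numbers go through numberToString)
console.info(valueToString({ a: 1 })); // -> '{"a":1}' (other values are JSON.stringify-ed)
console.info(valueToString(new Date(0))); // -> '1970-01-01T00:00:00.000Z' (Dates use toISOString)
```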
@@ -6,6 +6,10 @@ import type { really_any } from '../organization/really_any';
  * For example:
  * - `Date` objects will be converted to string
  *
+ * Note: There are 2 similar functions
+ * - `valueToString` converts value to string for LLM models as human-readable string
+ * - `asSerializable` converts value to string to preserve full information to be able to convert it back
+ *
  * @private Internal helper function
  */
  export declare function asSerializable(value: really_any): really_any;
@@ -2,15 +2,22 @@ import type { string_semantic_version } from './types/typeAliases';
  /**
  * The version of the Book language
  *
+ * @generated
  * @see https://github.com/webgptorg/book
  */
  export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  /**
  * The version of the Promptbook engine
  *
+ * @generated
  * @see https://github.com/webgptorg/promptbook
  */
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
+ /**
+ * @@@
+ *
+ * @generated
+ */
  export type string_promptbook_version = string_semantic_version;
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/markdown-utils",
- "version": "0.81.0-5",
+ "version": "0.81.0-7",
  "description": "It's time for a paradigm shift. The future of software in plain English, French or Latin",
  "--note-0": " <- [🐊]",
  "private": false,
package/umd/index.umd.js CHANGED
@@ -14,15 +14,17 @@
  /**
  * The version of the Book language
  *
+ * @generated
  * @see https://github.com/webgptorg/book
  */
  var BOOK_LANGUAGE_VERSION = '1.0.0';
  /**
  * The version of the Promptbook engine
  *
+ * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- var PROMPTBOOK_ENGINE_VERSION = '0.81.0-4';
+ var PROMPTBOOK_ENGINE_VERSION = '0.81.0-6';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -358,7 +360,7 @@
  * TODO: [🏢] Make this logic part of `JsonFormatDefinition` or `isValidJsonString`
  */

- var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-knowledge-from-markdown.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-knowledge-keywords.book.md"},{title:"Prepare Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-knowledge-title.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.book.md",formfactorName:"GENERIC",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"make-model-requirements",title:"Make modelRequirements",content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## 
Example\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sourceFile:"./books/prepare-persona.book.md"}];
+ var PipelineCollection = [{title:"Prepare Knowledge from Markdown",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgeContent",description:"Markdown document content",isInput:true,isOutput:false},{name:"knowledgePieces",description:"The knowledge JSON object",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}",resultingParameterName:"knowledgePieces",dependentParameterNames:["knowledgeContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Knowledge from Markdown\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-from-markdown.book.md`\n- INPUT PARAMETER `{knowledgeContent}` Markdown document content\n- OUTPUT PARAMETER `{knowledgePieces}` The knowledge JSON object\n\n## Knowledge\n\n<!-- TODO: [🍆] -FORMAT JSON -->\n\n```markdown\nYou are experienced data researcher, extract the important knowledge from the document.\n\n# Rules\n\n- Make pieces of information concise, clear, and easy to understand\n- One piece of information should be approximately 1 paragraph\n- Divide the paragraphs by markdown horizontal lines ---\n- Omit irrelevant information\n- Group redundant information\n- Write just extracted information, nothing else\n\n# The document\n\nTake information from this document:\n\n> {knowledgeContent}\n```\n\n`-> {knowledgePieces}`\n"}],sourceFile:"./books/prepare-knowledge-from-markdown.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-keywords.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"keywords",description:"Keywords separated by comma",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}",resultingParameterName:"keywords",dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Keywords\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-keywords.book.md`\n- INPUT PARAMETER `{knowledgePieceContent}` The content\n- OUTPUT PARAMETER `{keywords}` Keywords separated by comma\n\n## Knowledge\n\n<!-- TODO: [🍆] -FORMAT JSON -->\n\n```markdown\nYou are experienced data researcher, detect the important keywords in the document.\n\n# Rules\n\n- Write just keywords separated by comma\n\n# The document\n\nTake information from this document:\n\n> {knowledgePieceContent}\n```\n\n`-> {keywords}`\n"}],sourceFile:"./books/prepare-knowledge-keywords.book.md"},{title:"Prepare 
Title",pipelineUrl:"https://promptbook.studio/promptbook/prepare-knowledge-title.book.md",formfactorName:"GENERIC",parameters:[{name:"knowledgePieceContent",description:"The content",isInput:true,isOutput:false},{name:"title",description:"The title of the document",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"knowledge",title:"Knowledge",content:"You are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}",resultingParameterName:"title",expectations:{words:{min:1,max:8}},dependentParameterNames:["knowledgePieceContent"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Title\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-knowledge-title.book.md`\n- INPUT PARAMETER `{knowledgePieceContent}` The content\n- OUTPUT PARAMETER `{title}` The title of the document\n\n## Knowledge\n\n- EXPECT MIN 1 WORD\n- EXPECT MAX 8 WORDS\n\n```markdown\nYou are experienced content creator, write best title for the document.\n\n# Rules\n\n- Write just title, nothing else\n- Title should be concise and clear\n- Write maximum 5 words for the title\n\n# The document\n\n> {knowledgePieceContent}\n```\n\n`-> {title}`\n"}],sourceFile:"./books/prepare-knowledge-title.book.md"},{title:"Prepare Keywords",pipelineUrl:"https://promptbook.studio/promptbook/prepare-persona.book.md",formfactorName:"GENERIC",parameters:[{name:"availableModelNames",description:"List of available model names separated by comma (,)",isInput:true,isOutput:false},{name:"personaDescription",description:"Description of the persona",isInput:true,isOutput:false},{name:"modelRequirements",description:"Specific requirements for the model",isInput:false,isOutput:true}],tasks:[{taskType:"PROMPT_TASK",name:"make-model-requirements",title:"Make modelRequirements",content:"You are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Example\n\n```json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n```\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. 
If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}",resultingParameterName:"modelRequirements",format:"JSON",dependentParameterNames:["availableModelNames","personaDescription"]}],personas:[],preparations:[],knowledgeSources:[],knowledgePieces:[],sources:[{type:"BOOK",path:null,content:"# Prepare Keywords\n\n- PIPELINE URL `https://promptbook.studio/promptbook/prepare-persona.book.md`\n- INPUT PARAMETER `{availableModelNames}` List of available model names separated by comma (,)\n- INPUT PARAMETER `{personaDescription}` Description of the persona\n- OUTPUT PARAMETER `{modelRequirements}` Specific requirements for the model\n\n## Make modelRequirements\n\n- FORMAT JSON\n\n```markdown\nYou are experienced AI engineer, you need to create virtual assistant.\nWrite\n\n## Example\n\n\\`\\`\\`json\n{\n\"modelName\": \"gpt-4o\",\n\"systemMessage\": \"You are experienced AI engineer and helpfull assistant.\",\n\"temperature\": 0.7\n}\n\\`\\`\\`\n\n## Instructions\n\n- Your output format is JSON object\n- Write just the JSON object, no other text should be present\n- It contains the following keys:\n - `modelName`: The name of the model to use\n - `systemMessage`: The system message to provide context to the model\n - `temperature`: The sampling temperature to use\n\n### Key `modelName`\n\nPick from the following models:\n\n- {availableModelNames}\n\n### Key `systemMessage`\n\nThe system message is used to communicate instructions or provide context to the model at the beginning of a conversation. It is displayed in a different format compared to user messages, helping the model understand its role in the conversation. The system message typically guides the model's behavior, sets the tone, or specifies desired output from the model. By utilizing the system message effectively, users can steer the model towards generating more accurate and relevant responses.\n\nFor example:\n\n> You are an experienced AI engineer and helpful assistant.\n\n> You are a friendly and knowledgeable chatbot.\n\n### Key `temperature`\n\nThe sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. If set to 0, the model will use log probability to automatically increase the temperature until certain thresholds are hit.\n\nYou can pick a value between 0 and 2. For example:\n\n- `0.1`: Low temperature, extremely conservative and deterministic\n- `0.5`: Medium temperature, balanced between conservative and creative\n- `1.0`: High temperature, creative and bit random\n- `1.5`: Very high temperature, extremely creative and often chaotic and unpredictable\n- `2.0`: Maximum temperature, completely random and unpredictable, for some extreme creative use cases\n\n# The assistant\n\nTake this description of the persona:\n\n> {personaDescription}\n```\n\n`-> {modelRequirements}`\n"}],sourceFile:"./books/prepare-persona.book.md"}];

  /**
  * Prettify the html code
@@ -660,6 +662,26 @@
  * @private within the repository - too low-level in comparison with other `MAX_...`
  */
  var LOOP_LIMIT = 1000;
+ /**
+ * Strings to represent various values in the context of parameter values
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ var VALUE_STRINGS = {
+ empty: '(nothing; empty string)',
+ null: '(no value; null)',
+ undefined: '(unknown value; undefined)',
+ nan: '(not a number; NaN)',
+ infinity: '(infinity; ∞)',
+ negativeInfinity: '(negative infinity; -∞)',
+ unserializable: '(unserializable value)',
+ };
+ /**
+ * Small number limit
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ var SMALL_NUMBER = 0.001;
  /**
  * Short time interval to prevent race conditions in milliseconds
  *
@@ -1003,6 +1025,7 @@
  * @public exported from `@promptbook/core`
  */
  var ORDER_OF_PIPELINE_JSON = [
+ // Note: [🍙] In this order will be pipeline serialized
  'title',
  'pipelineUrl',
  'bookVersion',
@@ -1014,6 +1037,7 @@
  'preparations',
  'knowledgeSources',
  'knowledgePieces',
+ 'sources', // <- TODO: [🧠] Where should the `sources` be
  ];
  /**
  * Nonce which is used for replacing things in strings
@@ -4397,9 +4421,87 @@
  return mappedParameters;
  }

+ /**
+ * Format either small or big number
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ function numberToString(value) {
+ if (value === 0) {
+ return '0';
+ }
+ else if (Number.isNaN(value)) {
+ return VALUE_STRINGS.nan;
+ }
+ else if (value === Infinity) {
+ return VALUE_STRINGS.infinity;
+ }
+ else if (value === -Infinity) {
+ return VALUE_STRINGS.negativeInfinity;
+ }
+ for (var exponent = 0; exponent < 15; exponent++) {
+ var factor = Math.pow(10, exponent);
+ var valueRounded = Math.round(value * factor) / factor;
+ if (Math.abs(value - valueRounded) / value < SMALL_NUMBER) {
+ return valueRounded.toFixed(exponent);
+ }
+ }
+ return value.toString();
+ }
+
+ /**
+ * Function `valueToString` will convert the given value to string
+ * This is useful and used in the `templateParameters` function
+ *
+ * Note: This function is not just calling `toString` method
+ * It's more complex and can handle this conversion specifically for LLM models
+ * See `VALUE_STRINGS`
+ *
+ * Note: There are 2 similar functions
+ * - `valueToString` converts value to string for LLM models as human-readable string
+ * - `asSerializable` converts value to string to preserve full information to be able to convert it back
+ *
+ * @public exported from `@promptbook/utils`
+ */
+ function valueToString(value) {
+ try {
+ if (value === '') {
+ return VALUE_STRINGS.empty;
+ }
+ else if (value === null) {
+ return VALUE_STRINGS.null;
+ }
+ else if (value === undefined) {
+ return VALUE_STRINGS.undefined;
+ }
+ else if (typeof value === 'string') {
+ return value;
+ }
+ else if (typeof value === 'number') {
+ return numberToString(value);
+ }
+ else if (value instanceof Date) {
+ return value.toISOString();
+ }
+ else {
+ return JSON.stringify(value);
+ }
+ }
+ catch (error) {
+ if (!(error instanceof Error)) {
+ throw error;
+ }
+ console.error(error);
+ return VALUE_STRINGS.unserializable;
+ }
+ }
+
  /**
  * Replaces parameters in template with values from parameters object
  *
+ * Note: This function is not places strings into string,
+ * It's more complex and can handle this operation specifically for LLM models
+ *
  * @param template the template with parameters in {curly} braces
  * @param parameters the object with parameters
  * @returns the template with replaced parameters
@@ -4449,7 +4551,7 @@
  if (parameterValue === undefined) {
  throw new PipelineExecutionError("Parameter `{".concat(parameterName, "}` is not defined"));
  }
- parameterValue = parameterValue.toString();
+ parameterValue = valueToString(parameterValue);
  if (parameterValue.includes('\n') && /^\s*\W{0,3}\s*$/.test(precol)) {
  parameterValue = parameterValue
  .split('\n')
@@ -6046,26 +6148,6 @@
  * TODO: [🏛] This can be part of markdown builder
  */

- /**
- * Format either small or big number
- *
- * @private within the repository
- */
- function formatNumber(value) {
- if (value === 0) {
- return '0';
- }
- for (var exponent = 0; exponent < 15; exponent++) {
- var factor = Math.pow(10, exponent);
- var valueRounded = Math.round(value * factor) / factor;
- if (Math.abs(value - valueRounded) / value <
- 0.001 /* <- TODO: Pass as option, pass to executionReportJsonToString as option */) {
- return valueRounded.toFixed(exponent);
- }
- }
- return value.toString();
- }
-
  /**
  * Create a markdown table from a 2D array of strings
  *
@@ -6125,7 +6207,7 @@
  }
  finally { if (e_1) throw e_1.error; }
  }
- var legend = "_Note: Each \u2588 represents ".concat(formatNumber(1 / scale), " ").concat(unitName, ", width of ").concat(valueHeader.toLowerCase(), " is ").concat(formatNumber(to - from), " ").concat(unitName, " = ").concat(width, " squares_");
+ var legend = "_Note: Each \u2588 represents ".concat(numberToString(1 / scale), " ").concat(unitName, ", width of ").concat(valueHeader.toLowerCase(), " is ").concat(numberToString(to - from), " ").concat(unitName, " = ").concat(width, " squares_");
  return createMarkdownTable(table) + '\n\n' + legend;
  }
  /**