@promptbook/markdown-utils 0.94.0-0 → 0.94.0-12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/README.md +1 -8
  2. package/esm/index.es.js +8 -6
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  5. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  6. package/esm/typings/src/_packages/ollama.index.d.ts +14 -0
  7. package/esm/typings/src/_packages/openai.index.d.ts +2 -0
  8. package/esm/typings/src/_packages/types.index.d.ts +2 -0
  9. package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
  10. package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
  11. package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
  12. package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
  13. package/esm/typings/src/llm-providers/{openai/computeUsage.d.ts → _common/utils/pricing.d.ts} +2 -2
  14. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
  15. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
  17. package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
  18. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +44 -0
  19. package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
  20. package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
  21. package/esm/typings/src/llm-providers/ollama/ollama-models.d.ts +14 -0
  22. package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
  23. package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
  24. package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
  25. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
  26. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +91 -0
  27. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -53
  28. package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
  29. package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
  30. package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
  31. package/esm/typings/src/version.d.ts +1 -1
  32. package/package.json +24 -1
  33. package/umd/index.umd.js +8 -6
  34. package/umd/index.umd.js.map +1 -1
  35. /package/esm/typings/src/llm-providers/{openai/computeUsage.test.d.ts → _common/utils/pricing.test.d.ts} +0 -0
package/README.md CHANGED
@@ -187,16 +187,8 @@ Join our growing community of developers and users:
187
187
 
188
188
  _A concise, Markdown-based DSL for crafting AI workflows and automations._
189
189
 
190
- ---
191
190
 
192
- ### 📑 Table of Contents
193
191
 
194
- - [Introduction](#introduction)
195
- - [Example](#example)
196
- - [1. What: Workflows, Tasks & Parameters](#1-what-workflows-tasks--parameters)
197
- - [2. Who: Personas](#2-who-personas)
198
- - [3. How: Knowledge, Instruments & Actions](#3-how-knowledge-instruments-and-actions)
199
- - [General Principles](#general-principles)
200
192
 
201
193
  ### Introduction
202
194
 
@@ -309,6 +301,7 @@ Or you can install them separately:
309
301
  - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
310
302
  - **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
311
303
  - **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
304
+ - **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
312
305
  - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
313
306
 
314
307
  - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
package/esm/index.es.js CHANGED
@@ -25,7 +25,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
25
25
  * @generated
26
26
  * @see https://github.com/webgptorg/promptbook
27
27
  */
28
- const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
28
+ const PROMPTBOOK_ENGINE_VERSION = '0.94.0-12';
29
29
  /**
30
30
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
31
31
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5504,11 +5504,6 @@ async function executeTask(options) {
5504
5504
  const jokerParameterNames = currentTask.jokerParameterNames || [];
5505
5505
  const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
5506
5506
  // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
5507
- await onProgress({
5508
- outputParameters: {
5509
- [currentTask.resultingParameterName]: '',
5510
- },
5511
- });
5512
5507
  const resultString = await executeFormatSubvalues({
5513
5508
  jokerParameterNames,
5514
5509
  priority,
@@ -5607,6 +5602,13 @@ async function executePipeline(options) {
5607
5602
  * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
5608
5603
  */
5609
5604
  let isReturned = false;
5605
+ // Note: Report all output parameters upfront as empty strings
5606
+ if (onProgress) {
5607
+ const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
5608
+ onProgress({
5609
+ outputParameters: emptyOutputParameters,
5610
+ });
5611
+ }
5610
5612
  // Note: Check that all input parameters are defined
5611
5613
  for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
5612
5614
  if (inputParameters[parameter.name] === undefined) {