@promptbook/legacy-documents 0.94.0-0 → 0.94.0-12

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (35)
  1. package/README.md +1 -8
  2. package/esm/index.es.js +8 -6
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  5. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  6. package/esm/typings/src/_packages/ollama.index.d.ts +14 -0
  7. package/esm/typings/src/_packages/openai.index.d.ts +2 -0
  8. package/esm/typings/src/_packages/types.index.d.ts +2 -0
  9. package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
  10. package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
  11. package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
  12. package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
  13. package/esm/typings/src/llm-providers/{openai/computeUsage.d.ts → _common/utils/pricing.d.ts} +2 -2
  14. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
  15. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
  17. package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
  18. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +44 -0
  19. package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
  20. package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
  21. package/esm/typings/src/llm-providers/ollama/ollama-models.d.ts +14 -0
  22. package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
  23. package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
  24. package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
  25. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
  26. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +91 -0
  27. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -53
  28. package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
  29. package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
  30. package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
  31. package/esm/typings/src/version.d.ts +1 -1
  32. package/package.json +25 -2
  33. package/umd/index.umd.js +8 -6
  34. package/umd/index.umd.js.map +1 -1
  35. /package/esm/typings/src/llm-providers/{openai/computeUsage.test.d.ts → _common/utils/pricing.test.d.ts} +0 -0
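The headline change in this range is a new Ollama LLM provider (items 18–24, re-exported via the new `ollama.index.d.ts` in item 6) and an `OpenAiCompatibleExecutionTools` base for the OpenAI provider (item 26); item 13 also shows the OpenAI-specific `computeUsage` helper being generalized into a shared `_common/utils/pricing` module. Based only on the exported names listed above, a minimal usage sketch could look like the following; the option name (`baseUrl`) and the `listModels()` call are assumptions drawn from the typings, not confirmed by this diff.

```ts
// Minimal sketch, assuming the shape of the new @promptbook/ollama exports.
// `createOllamaExecutionTools` appears in the typings above; the option
// names used here are hypothetical.
import { createOllamaExecutionTools } from '@promptbook/ollama';

async function main() {
    const ollamaTools = createOllamaExecutionTools({
        baseUrl: 'http://localhost:11434', // <- hypothetical option; Ollama's default local endpoint
    });

    // `AvailableModel` (item 10) suggests the tools can enumerate models;
    // the exact method name is assumed from the common LlmExecutionTools interface.
    const models = await ollamaTools.listModels();
    console.info(models);
}

main().catch(console.error);
```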
package/README.md CHANGED
@@ -189,16 +189,8 @@ Join our growing community of developers and users:
 
 _A concise, Markdown-based DSL for crafting AI workflows and automations._
 
----
 
-### 📑 Table of Contents
 
-- [Introduction](#introduction)
-- [Example](#example)
-- [1. What: Workflows, Tasks & Parameters](#1-what-workflows-tasks--parameters)
-- [2. Who: Personas](#2-who-personas)
-- [3. How: Knowledge, Instruments & Actions](#3-how-knowledge-instruments-and-actions)
-- [General Principles](#general-principles)
 
 ### Introduction
 
@@ -311,6 +303,7 @@ Or you can install them separately:
 - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
 - **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
 - **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
+- **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
 - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
 
 - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
package/esm/index.es.js CHANGED
@@ -28,7 +28,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
+const PROMPTBOOK_ENGINE_VERSION = '0.94.0-12';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5684,11 +5684,6 @@ async function executeTask(options) {
     const jokerParameterNames = currentTask.jokerParameterNames || [];
     const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
     // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
-    await onProgress({
-        outputParameters: {
-            [currentTask.resultingParameterName]: '',
-        },
-    });
     const resultString = await executeFormatSubvalues({
         jokerParameterNames,
         priority,
@@ -5787,6 +5782,13 @@ async function executePipeline(options) {
      * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
      */
     let isReturned = false;
+    // Note: Report all output parameters upfront as empty strings
+    if (onProgress) {
+        const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
+        onProgress({
+            outputParameters: emptyOutputParameters,
+        });
+    }
     // Note: Check that all input input parameters are defined
     for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
         if (inputParameters[parameter.name] === undefined) {
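Functionally, the last two hunks move the "empty result" progress report from per-task to a single upfront call: instead of each task emitting its own placeholder via `onProgress`, `executePipeline` now reports every non-input parameter as an empty string before any task runs. A standalone sketch of the same `Object.fromEntries` pattern (with made-up parameter names) shows what that first `onProgress` payload contains:

```ts
// Standalone illustration of the upfront progress report added in this diff;
// the parameter list here is invented for the example.
type PipelineParameter = { name: string; isInput: boolean };

const parameters: PipelineParameter[] = [
    { name: 'topic', isInput: true },
    { name: 'outline', isInput: false },
    { name: 'article', isInput: false },
];

// Same expression as in the diff: every non-input parameter starts as ''.
const emptyOutputParameters = Object.fromEntries(
    parameters.filter((param) => !param.isInput).map((param) => [param.name, '']),
);

console.log(emptyOutputParameters);
// -> { outline: '', article: '' }
```

Subscribers therefore see the full set of output parameters immediately, rather than discovering them one task at a time as each task starts.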