@promptbook/documents 0.94.0-0 → 0.94.0-3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -8
- package/esm/index.es.js +8 -6
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/cli.index.d.ts +4 -0
- package/esm/typings/src/_packages/core.index.d.ts +2 -0
- package/esm/typings/src/_packages/ollama.index.d.ts +10 -0
- package/esm/typings/src/_packages/types.index.d.ts +2 -0
- package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
- package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
- package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
- package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
- package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
- package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
- package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
- package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
- package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
- package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
- package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +8 -6
- package/umd/index.umd.js.map +1 -1
package/README.md
CHANGED

@@ -190,16 +190,8 @@ Join our growing community of developers and users:
 
 _A concise, Markdown-based DSL for crafting AI workflows and automations._
 
----
 
-### 📑 Table of Contents
 
-- [Introduction](#introduction)
-- [Example](#example)
-- [1. What: Workflows, Tasks & Parameters](#1-what-workflows-tasks--parameters)
-- [2. Who: Personas](#2-who-personas)
-- [3. How: Knowledge, Instruments & Actions](#3-how-knowledge-instruments-and-actions)
-- [General Principles](#general-principles)
 
 ### Introduction
 
@@ -312,6 +304,7 @@ Or you can install them separately:
 - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
 - **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
 - **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
+- **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
 - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
 
 - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
package/esm/index.es.js
CHANGED

@@ -28,7 +28,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
+const PROMPTBOOK_ENGINE_VERSION = '0.94.0-3';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5684,11 +5684,6 @@ async function executeTask(options) {
     const jokerParameterNames = currentTask.jokerParameterNames || [];
     const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
     // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
-    await onProgress({
-        outputParameters: {
-            [currentTask.resultingParameterName]: '',
-        },
-    });
     const resultString = await executeFormatSubvalues({
         jokerParameterNames,
         priority,
@@ -5787,6 +5782,13 @@ async function executePipeline(options) {
      * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
      */
     let isReturned = false;
+    // Note: Report all output parameters upfront as empty strings
+    if (onProgress) {
+        const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
+        onProgress({
+            outputParameters: emptyOutputParameters,
+        });
+    }
     // Note: Check that all input input parameters are defined
     for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
         if (inputParameters[parameter.name] === undefined) {