@promptbook/node 0.94.0-0 → 0.94.0-3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -8
- package/esm/index.es.js +8 -6
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/cli.index.d.ts +4 -0
- package/esm/typings/src/_packages/core.index.d.ts +2 -0
- package/esm/typings/src/_packages/ollama.index.d.ts +10 -0
- package/esm/typings/src/_packages/types.index.d.ts +2 -0
- package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
- package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
- package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
- package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
- package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
- package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
- package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
- package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
- package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
- package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
- package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +8 -6
- package/umd/index.umd.js.map +1 -1
package/README.md
CHANGED
```diff
@@ -191,16 +191,8 @@ Join our growing community of developers and users:
 
 _A concise, Markdown-based DSL for crafting AI workflows and automations._
 
----
 
-### 📑 Table of Contents
 
-- [Introduction](#introduction)
-- [Example](#example)
-- [1. What: Workflows, Tasks & Parameters](#1-what-workflows-tasks--parameters)
-- [2. Who: Personas](#2-who-personas)
-- [3. How: Knowledge, Instruments & Actions](#3-how-knowledge-instruments-and-actions)
-- [General Principles](#general-principles)
 
 ### Introduction
 
@@ -313,6 +305,7 @@ Or you can install them separately:
 - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
 - **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
 - **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
+- **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
 - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
 
 - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
```
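The README's only substantive change is the new `@promptbook/ollama` bullet, which matches the Ollama provider typings added in this release (`createOllamaExecutionTools`, `OllamaExecutionToolsOptions`, and the register files). Below is a minimal usage sketch, assuming the provider is constructed from an options object like the other providers; the option names are assumptions, not taken from this diff:

```typescript
// Hypothetical sketch — `baseUrl` and `defaultModelName` are assumed option
// names; check the published OllamaExecutionToolsOptions typings for the
// real shape before relying on this.
import { createOllamaExecutionTools } from '@promptbook/ollama';

const ollamaTools = createOllamaExecutionTools({
    baseUrl: 'http://localhost:11434', // assumed: default local Ollama endpoint
    defaultModelName: 'llama3', // assumed: any model already pulled into Ollama
});

// The returned object is expected to be usable wherever the other LLM
// execution tools (OpenAI, Anthropic, DeepSeek, ...) are accepted.
```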
package/esm/index.es.js
CHANGED
```diff
@@ -30,7 +30,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
+const PROMPTBOOK_ENGINE_VERSION = '0.94.0-3';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4447,11 +4447,6 @@ async function executeTask(options) {
     const jokerParameterNames = currentTask.jokerParameterNames || [];
     const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
     // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
-    await onProgress({
-        outputParameters: {
-            [currentTask.resultingParameterName]: '',
-        },
-    });
     const resultString = await executeFormatSubvalues({
         jokerParameterNames,
         priority,
@@ -4550,6 +4545,13 @@ async function executePipeline(options) {
      * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
      */
     let isReturned = false;
+    // Note: Report all output parameters upfront as empty strings
+    if (onProgress) {
+        const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
+        onProgress({
+            outputParameters: emptyOutputParameters,
+        });
+    }
     // Note: Check that all input input parameters are defined
     for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
         if (inputParameters[parameter.name] === undefined) {
```
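Taken together, the last two hunks move the "report empty outputs" behavior up a level: instead of each task emitting an empty string for its own resulting parameter, `executePipeline` now reports every non-input parameter as an empty string in a single `onProgress` call before execution starts. A self-contained sketch of that pattern follows, with a deliberately simplified parameter shape (the library's real pipeline types are richer):

```typescript
// Simplified illustration of the pattern used in the added code; the
// PipelineParameter shape here is a stand-in, not the library's real type.
type PipelineParameter = { name: string; isInput: boolean };

function buildEmptyOutputParameters(
    parameters: ReadonlyArray<PipelineParameter>,
): Record<string, string> {
    // Keep only non-input (output/intermediate) parameters and map each to ''
    return Object.fromEntries(parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
}

// Example: report all outputs upfront as empty strings
const exampleParameters: PipelineParameter[] = [
    { name: 'topic', isInput: true },
    { name: 'outline', isInput: false },
    { name: 'article', isInput: false },
];

console.log(buildEmptyOutputParameters(exampleParameters));
// -> { outline: '', article: '' }
```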
|