@promptbook/remote-server 0.94.0-0 → 0.94.0-3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -8
- package/esm/index.es.js +8 -6
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/cli.index.d.ts +4 -0
- package/esm/typings/src/_packages/core.index.d.ts +2 -0
- package/esm/typings/src/_packages/ollama.index.d.ts +10 -0
- package/esm/typings/src/_packages/types.index.d.ts +2 -0
- package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
- package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
- package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
- package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
- package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
- package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
- package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
- package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
- package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
- package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
- package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +8 -6
- package/umd/index.umd.js.map +1 -1
package/README.md
CHANGED
|
@@ -187,16 +187,8 @@ Join our growing community of developers and users:
|
|
|
187
187
|
|
|
188
188
|
_A concise, Markdown-based DSL for crafting AI workflows and automations._
|
|
189
189
|
|
|
190
|
-
---
|
|
191
190
|
|
|
192
|
-
### 📑 Table of Contents
|
|
193
191
|
|
|
194
|
-
- [Introduction](#introduction)
|
|
195
|
-
- [Example](#example)
|
|
196
|
-
- [1. What: Workflows, Tasks & Parameters](#1-what-workflows-tasks--parameters)
|
|
197
|
-
- [2. Who: Personas](#2-who-personas)
|
|
198
|
-
- [3. How: Knowledge, Instruments & Actions](#3-how-knowledge-instruments-and-actions)
|
|
199
|
-
- [General Principles](#general-principles)
|
|
200
192
|
|
|
201
193
|
### Introduction
|
|
202
194
|
|
|
@@ -309,6 +301,7 @@ Or you can install them separately:
|
|
|
309
301
|
- **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
|
|
310
302
|
- **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
|
|
311
303
|
- **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
|
|
304
|
+
- **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
|
|
312
305
|
- **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
|
|
313
306
|
|
|
314
307
|
- **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
|
package/esm/index.es.js
CHANGED
|
@@ -33,7 +33,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
|
|
|
33
33
|
* @generated
|
|
34
34
|
* @see https://github.com/webgptorg/promptbook
|
|
35
35
|
*/
|
|
36
|
-
const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
|
|
36
|
+
const PROMPTBOOK_ENGINE_VERSION = '0.94.0-3';
|
|
37
37
|
/**
|
|
38
38
|
* TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
|
|
39
39
|
* Note: [💞] Ignore a discrepancy between file name and entity name
|
|
@@ -5875,11 +5875,6 @@ async function executeTask(options) {
|
|
|
5875
5875
|
const jokerParameterNames = currentTask.jokerParameterNames || [];
|
|
5876
5876
|
const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
|
|
5877
5877
|
// <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
|
|
5878
|
-
await onProgress({
|
|
5879
|
-
outputParameters: {
|
|
5880
|
-
[currentTask.resultingParameterName]: '',
|
|
5881
|
-
},
|
|
5882
|
-
});
|
|
5883
5878
|
const resultString = await executeFormatSubvalues({
|
|
5884
5879
|
jokerParameterNames,
|
|
5885
5880
|
priority,
|
|
@@ -5978,6 +5973,13 @@ async function executePipeline(options) {
|
|
|
5978
5973
|
* Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
|
|
5979
5974
|
*/
|
|
5980
5975
|
let isReturned = false;
|
|
5976
|
+
// Note: Report all output parameters upfront as empty strings
|
|
5977
|
+
if (onProgress) {
|
|
5978
|
+
const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
|
|
5979
|
+
onProgress({
|
|
5980
|
+
outputParameters: emptyOutputParameters,
|
|
5981
|
+
});
|
|
5982
|
+
}
|
|
5981
5983
|
// Note: Check that all input input parameters are defined
|
|
5982
5984
|
for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
|
|
5983
5985
|
if (inputParameters[parameter.name] === undefined) {
|