@promptbook/cli 0.94.0-0 → 0.94.0-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -114,8 +114,6 @@ Rest of the documentation is common for **entire promptbook ecosystem**:
114
114
 
115
115
  During the computer revolution, we have seen [multiple generations of computer languages](https://github.com/webgptorg/promptbook/discussions/180), from the physical rewiring of the vacuum tubes through low-level machine code to the high-level languages like Python or JavaScript. And now, we're on the edge of the **next revolution**!
116
116
 
117
-
118
-
119
117
  It's a revolution in writing software in **plain human language** that is understandable and executable by both humans and machines – and it's going to change everything!
120
118
 
121
119
  The incredible growth in power of microprocessors and Moore's Law have been the driving force behind the ever-more powerful languages, and it's been an amazing journey! Similarly, the large language models (like GPT or Claude) are the next big thing in language technology, and they're set to transform the way we interact with computers.
@@ -301,8 +299,6 @@ Personas can have access to different knowledge, tools and actions. They can als
301
299
 
302
300
  - [PERSONA](https://github.com/webgptorg/promptbook/blob/main/documents/commands/PERSONA.md)
303
301
 
304
-
305
-
306
302
  ### **3. How:** Knowledge, Instruments and Actions
307
303
 
308
304
  The resources used by the personas are used to do the work.
@@ -363,6 +359,7 @@ Or you can install them separately:
363
359
  - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
364
360
  - **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
365
361
  - **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
362
+ - **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
366
363
  - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
367
364
 
368
365
  - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
@@ -401,8 +398,6 @@ The following glossary is used to clarify certain concepts:
401
398
 
402
399
  _Note: This section is not a complete dictionary, but rather a list of general AI / LLM terms that have a connection with Promptbook_
403
400
 
404
-
405
-
406
401
  ### 💯 Core concepts
407
402
 
408
403
  - [📚 Collection of pipelines](https://github.com/webgptorg/promptbook/discussions/65)
package/esm/index.es.js CHANGED
@@ -28,6 +28,7 @@ import swaggerUi from 'swagger-ui-express';
28
28
  import Anthropic from '@anthropic-ai/sdk';
29
29
  import Bottleneck from 'bottleneck';
30
30
  import { OpenAIClient, AzureKeyCredential } from '@azure/openai';
31
+ import fetch$1 from 'node-fetch';
31
32
  import OpenAI from 'openai';
32
33
  import { Readability } from '@mozilla/readability';
33
34
  import { JSDOM } from 'jsdom';
@@ -47,7 +48,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
47
48
  * @generated
48
49
  * @see https://github.com/webgptorg/promptbook
49
50
  */
50
- const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
51
+ const PROMPTBOOK_ENGINE_VERSION = '0.94.0-1';
51
52
  /**
52
53
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
53
54
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -6961,11 +6962,6 @@ async function executeTask(options) {
6961
6962
  const jokerParameterNames = currentTask.jokerParameterNames || [];
6962
6963
  const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
6963
6964
  // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
6964
- await onProgress({
6965
- outputParameters: {
6966
- [currentTask.resultingParameterName]: '',
6967
- },
6968
- });
6969
6965
  const resultString = await executeFormatSubvalues({
6970
6966
  jokerParameterNames,
6971
6967
  priority,
@@ -7064,6 +7060,13 @@ async function executePipeline(options) {
7064
7060
  * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
7065
7061
  */
7066
7062
  let isReturned = false;
7063
+ // Note: Report all output parameters upfront as empty strings
7064
+ if (onProgress) {
7065
+ const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
7066
+ onProgress({
7067
+ outputParameters: emptyOutputParameters,
7068
+ });
7069
+ }
7067
7070
  // Note: Check that all input input parameters are defined
7068
7071
  for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
7069
7072
  if (inputParameters[parameter.name] === undefined) {
@@ -16989,6 +16992,157 @@ const _GoogleRegistration = $llmToolsRegister.register(createGoogleExecutionTool
16989
16992
  * Note: [💞] Ignore a discrepancy between file name and entity name
16990
16993
  */
16991
16994
 
16995
+ /**
16996
+ * Registration of LLM provider metadata
16997
+ *
16998
+ * Warning: This is not useful for the end user, it is just a side effect of the mechanism that handles all available LLM tools
16999
+ *
17000
+ * @public exported from `@promptbook/core`
17001
+ * @public exported from `@promptbook/wizzard`
17002
+ * @public exported from `@promptbook/cli`
17003
+ */
17004
+ const _OllamaMetadataRegistration = $llmToolsMetadataRegister.register({
17005
+ title: 'Ollama',
17006
+ packageName: '@promptbook/ollama',
17007
+ className: 'OllamaExecutionTools',
17008
+ envVariables: ['OLLAMA_BASE_URL', 'OLLAMA_MODEL'],
17009
+ trustLevel: 'CLOSED_LOCAL',
17010
+ order: MODEL_ORDERS.NORMAL,
17011
+ getBoilerplateConfiguration() {
17012
+ return {
17013
+ title: 'Ollama',
17014
+ packageName: '@promptbook/ollama',
17015
+ className: 'OllamaExecutionTools',
17016
+ options: {
17017
+ baseUrl: 'http://localhost:11434',
17018
+ model: 'llama2',
17019
+ maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17020
+ },
17021
+ };
17022
+ },
17023
+ createConfigurationFromEnv(env) {
17024
+ if (typeof env.OLLAMA_BASE_URL === 'string') {
17025
+ return {
17026
+ title: 'Ollama (from env)',
17027
+ packageName: '@promptbook/ollama',
17028
+ className: 'OllamaExecutionTools',
17029
+ options: {
17030
+ baseUrl: env.OLLAMA_BASE_URL,
17031
+ model: env.OLLAMA_MODEL || 'llama2',
17032
+ maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17033
+ },
17034
+ };
17035
+ }
17036
+ return null;
17037
+ },
17038
+ });
17039
+ /**
17040
+ * Note: [💞] Ignore a discrepancy between file name and entity name
17041
+ */
17042
+
17043
+ /**
17044
+ * Execution Tools for calling a local Ollama model via HTTP API
17045
+ *
17046
+ * @public exported from `@promptbook/ollama`
17047
+ */
17048
+ class OllamaExecutionTools {
17049
+ constructor(options) {
17050
+ this.options = options;
17051
+ this.limiter = new Bottleneck({
17052
+ minTime: 60000 / (options.maxRequestsPerMinute || DEFAULT_MAX_REQUESTS_PER_MINUTE),
17053
+ });
17054
+ }
17055
+ get title() {
17056
+ return 'Ollama';
17057
+ }
17058
+ get description() {
17059
+ return 'Local Ollama LLM via HTTP';
17060
+ }
17061
+ async checkConfiguration() {
17062
+ const res = await fetch$1(`${this.options.baseUrl}/models`);
17063
+ if (!res.ok)
17064
+ throw new UnexpectedError(`Failed to reach Ollama API at ${this.options.baseUrl}`);
17065
+ }
17066
+ async listModels() {
17067
+ const res = await fetch$1(`${this.options.baseUrl}/models`);
17068
+ if (!res.ok)
17069
+ throw new UnexpectedError(`Error listing Ollama models: ${res.statusText}`);
17070
+ const data = (await res.json());
17071
+ return data.map((m) => ({ modelName: m.name, modelVariant: 'CHAT' }));
17072
+ }
17073
+ async callChatModel(prompt) {
17074
+ const { content, parameters, modelRequirements } = prompt;
17075
+ if (modelRequirements.modelVariant !== 'CHAT') {
17076
+ throw new PipelineExecutionError('Use callChatModel only for CHAT variant');
17077
+ }
17078
+ const modelName = modelRequirements.modelName || this.options.model;
17079
+ const body = {
17080
+ model: modelName,
17081
+ messages: [
17082
+ ...(modelRequirements.systemMessage
17083
+ ? [{ role: 'system', content: modelRequirements.systemMessage }]
17084
+ : []),
17085
+ { role: 'user', content: content },
17086
+ ],
17087
+ parameters: parameters,
17088
+ };
17089
+ const start = $getCurrentDate();
17090
+ const res = await this.limiter.schedule(() => fetch$1(`${this.options.baseUrl}/chat/completions`, {
17091
+ method: 'POST',
17092
+ headers: { 'Content-Type': 'application/json' },
17093
+ body: JSON.stringify(body),
17094
+ }));
17095
+ if (!res.ok)
17096
+ throw new PipelineExecutionError(`Ollama API error: ${res.statusText}`);
17097
+ const json = await res.json();
17098
+ const complete = $getCurrentDate();
17099
+ if (!json.choices || !json.choices[0]) {
17100
+ throw new PipelineExecutionError('No choices from Ollama');
17101
+ }
17102
+ const resultContent = json.choices[0].message.content;
17103
+ const usage = { price: { value: 0, isUncertain: true }, input: {}, output: {} }; /* <- !!! */
17104
+ return exportJson({
17105
+ name: 'promptResult',
17106
+ message: 'Result of Ollama',
17107
+ order: [],
17108
+ value: {
17109
+ content: resultContent,
17110
+ modelName,
17111
+ timing: { start, complete },
17112
+ usage,
17113
+ rawPromptContent: content,
17114
+ rawRequest: body,
17115
+ rawResponse: json,
17116
+ },
17117
+ });
17118
+ }
17119
+ }
17120
+
17121
+ /**
17122
+ * Execution Tools for calling Ollama API
17123
+ *
17124
+ * @public exported from `@promptbook/ollama`
17125
+ */
17126
+ const createOllamaExecutionTools = Object.assign((options) => new OllamaExecutionTools(options), {
17127
+ packageName: '@promptbook/ollama',
17128
+ className: 'OllamaExecutionTools',
17129
+ });
17130
+
17131
+ /**
17132
+ * Registration of LLM provider
17133
+ *
17134
+ * Warning: This is not useful for the end user, it is just a side effect of the mechanism that handles all available LLM tools
17135
+ *
17136
+ * @public exported from `@promptbook/ollama`
17137
+ * @public exported from `@promptbook/wizzard`
17138
+ * @public exported from `@promptbook/cli`
17139
+ */
17140
+ const _OllamaRegistration = $llmToolsRegister.register(createOllamaExecutionTools);
17141
+ /**
17142
+ * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
17143
+ * Note: [💞] Ignore a discrepancy between file name and entity name
17144
+ */
17145
+
16992
17146
  /**
16993
17147
  * Registration of LLM provider metadata
16994
17148
  *
@@ -18947,5 +19101,5 @@ const _WebsiteScraperRegistration = $scrapersRegister.register(createWebsiteScra
18947
19101
  * Note: [💞] Ignore a discrepancy between file name and entity name
18948
19102
  */
18949
19103
 
18950
- export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION, _AnthropicClaudeMetadataRegistration, _AnthropicClaudeRegistration, _AzureOpenAiMetadataRegistration, _AzureOpenAiRegistration, _BoilerplateScraperMetadataRegistration, _BoilerplateScraperRegistration, _CLI, _DeepseekMetadataRegistration, _DeepseekRegistration, _DocumentScraperMetadataRegistration, _DocumentScraperRegistration, _GoogleMetadataRegistration, _GoogleRegistration, _LegacyDocumentScraperMetadataRegistration, _LegacyDocumentScraperRegistration, _MarkdownScraperMetadataRegistration, _MarkdownScraperRegistration, _MarkitdownScraperMetadataRegistration, _MarkitdownScraperRegistration, _OpenAiAssistantMetadataRegistration, _OpenAiAssistantRegistration, _OpenAiMetadataRegistration, _OpenAiRegistration, _PdfScraperMetadataRegistration, _PdfScraperRegistration, _WebsiteScraperMetadataRegistration, _WebsiteScraperRegistration };
19104
+ export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION, _AnthropicClaudeMetadataRegistration, _AnthropicClaudeRegistration, _AzureOpenAiMetadataRegistration, _AzureOpenAiRegistration, _BoilerplateScraperMetadataRegistration, _BoilerplateScraperRegistration, _CLI, _DeepseekMetadataRegistration, _DeepseekRegistration, _DocumentScraperMetadataRegistration, _DocumentScraperRegistration, _GoogleMetadataRegistration, _GoogleRegistration, _LegacyDocumentScraperMetadataRegistration, _LegacyDocumentScraperRegistration, _MarkdownScraperMetadataRegistration, _MarkdownScraperRegistration, _MarkitdownScraperMetadataRegistration, _MarkitdownScraperRegistration, _OllamaMetadataRegistration, _OllamaRegistration, _OpenAiAssistantMetadataRegistration, _OpenAiAssistantRegistration, _OpenAiMetadataRegistration, _OpenAiRegistration, _PdfScraperMetadataRegistration, _PdfScraperRegistration, _WebsiteScraperMetadataRegistration, _WebsiteScraperRegistration };
18951
19105
  //# sourceMappingURL=index.es.js.map