@promptbook/cli 0.94.0-0 → 0.94.0-2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/README.md +1 -8
  2. package/esm/index.es.js +205 -93
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  5. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  6. package/esm/typings/src/_packages/ollama.index.d.ts +10 -0
  7. package/esm/typings/src/_packages/types.index.d.ts +2 -0
  8. package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
  9. package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
  10. package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
  11. package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
  12. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
  13. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
  14. package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
  15. package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
  17. package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
  18. package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
  19. package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
  20. package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
  21. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
  22. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +1 -1
  23. package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
  24. package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
  25. package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
  26. package/esm/typings/src/version.d.ts +1 -1
  27. package/package.json +1 -1
  28. package/umd/index.umd.js +206 -92
  29. package/umd/index.umd.js.map +1 -1
package/README.md CHANGED
@@ -241,16 +241,8 @@ Join our growing community of developers and users:
241
241
 
242
242
  _A concise, Markdown-based DSL for crafting AI workflows and automations._
243
243
 
244
- ---
245
244
 
246
- ### 📑 Table of Contents
247
245
 
248
- - [Introduction](#introduction)
249
- - [Example](#example)
250
- - [1. What: Workflows, Tasks & Parameters](#1-what-workflows-tasks--parameters)
251
- - [2. Who: Personas](#2-who-personas)
252
- - [3. How: Knowledge, Instruments & Actions](#3-how-knowledge-instruments-and-actions)
253
- - [General Principles](#general-principles)
254
246
 
255
247
  ### Introduction
256
248
 
@@ -363,6 +355,7 @@ Or you can install them separately:
363
355
  - **[@promptbook/vercel](https://www.npmjs.com/package/@promptbook/vercel)** - Adapter for Vercel functionalities
364
356
  - **[@promptbook/google](https://www.npmjs.com/package/@promptbook/google)** - Integration with Google's Gemini API
365
357
  - **[@promptbook/deepseek](https://www.npmjs.com/package/@promptbook/deepseek)** - Integration with [DeepSeek API](https://www.deepseek.com/)
358
+ - **[@promptbook/ollama](https://www.npmjs.com/package/@promptbook/ollama)** - Integration with [Ollama](https://ollama.com/) API
366
359
  - **[@promptbook/azure-openai](https://www.npmjs.com/package/@promptbook/azure-openai)** - Execution tools for Azure OpenAI API
367
360
 
368
361
  - **[@promptbook/fake-llm](https://www.npmjs.com/package/@promptbook/fake-llm)** - Mocked execution tools for testing the library and saving the tokens
package/esm/index.es.js CHANGED
@@ -47,7 +47,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
47
47
  * @generated
48
48
  * @see https://github.com/webgptorg/promptbook
49
49
  */
50
- const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
50
+ const PROMPTBOOK_ENGINE_VERSION = '0.94.0-2';
51
51
  /**
52
52
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
53
53
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -6961,11 +6961,6 @@ async function executeTask(options) {
6961
6961
  const jokerParameterNames = currentTask.jokerParameterNames || [];
6962
6962
  const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
6963
6963
  // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
6964
- await onProgress({
6965
- outputParameters: {
6966
- [currentTask.resultingParameterName]: '',
6967
- },
6968
- });
6969
6964
  const resultString = await executeFormatSubvalues({
6970
6965
  jokerParameterNames,
6971
6966
  priority,
@@ -7064,6 +7059,13 @@ async function executePipeline(options) {
7064
7059
  * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
7065
7060
  */
7066
7061
  let isReturned = false;
7062
+ // Note: Report all output parameters upfront as empty strings
7063
+ if (onProgress) {
7064
+ const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
7065
+ onProgress({
7066
+ outputParameters: emptyOutputParameters,
7067
+ });
7068
+ }
7067
7069
  // Note: Check that all input input parameters are defined
7068
7070
  for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
7069
7071
  if (inputParameters[parameter.name] === undefined) {
@@ -16998,85 +17000,39 @@ const _GoogleRegistration = $llmToolsRegister.register(createGoogleExecutionTool
16998
17000
  * @public exported from `@promptbook/wizzard`
16999
17001
  * @public exported from `@promptbook/cli`
17000
17002
  */
17001
- const _OpenAiMetadataRegistration = $llmToolsMetadataRegister.register({
17002
- title: 'Open AI',
17003
- packageName: '@promptbook/openai',
17004
- className: 'OpenAiExecutionTools',
17005
- envVariables: ['OPENAI_API_KEY'],
17006
- trustLevel: 'CLOSED',
17007
- order: MODEL_ORDERS.TOP_TIER,
17008
- getBoilerplateConfiguration() {
17009
- return {
17010
- title: 'Open AI',
17011
- packageName: '@promptbook/openai',
17012
- className: 'OpenAiExecutionTools',
17013
- options: {
17014
- apiKey: 'sk-',
17015
- maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17016
- },
17017
- };
17018
- },
17019
- createConfigurationFromEnv(env) {
17020
- // Note: Note using `process.env` BUT `env` to pass in the environment variables dynamically
17021
- if (typeof env.OPENAI_API_KEY === 'string') {
17022
- return {
17023
- title: 'Open AI (from env)',
17024
- packageName: '@promptbook/openai',
17025
- className: 'OpenAiExecutionTools',
17026
- options: {
17027
- apiKey: env.OPENAI_API_KEY,
17028
- },
17029
- };
17030
- }
17031
- return null;
17032
- },
17033
- });
17034
- /**
17035
- * Registration of the OpenAI Assistant metadata
17036
- *
17037
- * Note: [🏐] Configurations registrations are done in the metadata registration section, but the constructor registration is handled separately.
17038
- *
17039
- * @public exported from `@promptbook/core`
17040
- * @public exported from `@promptbook/wizzard`
17041
- * @public exported from `@promptbook/cli`
17042
- */
17043
- const _OpenAiAssistantMetadataRegistration = $llmToolsMetadataRegister.register({
17044
- title: 'Open AI Assistant',
17045
- packageName: '@promptbook/openai',
17046
- className: 'OpenAiAssistantExecutionTools',
17047
- envVariables: null,
17048
- // <- TODO: ['OPENAI_API_KEY', 'OPENAI_ASSISTANT_ID']
17049
- trustLevel: 'CLOSED',
17003
+ const _OllamaMetadataRegistration = $llmToolsMetadataRegister.register({
17004
+ title: 'Ollama',
17005
+ packageName: '@promptbook/ollama',
17006
+ className: 'OllamaExecutionTools',
17007
+ envVariables: ['OLLAMA_BASE_URL', 'OLLAMA_MODEL'],
17008
+ trustLevel: 'CLOSED_LOCAL',
17050
17009
  order: MODEL_ORDERS.NORMAL,
17051
17010
  getBoilerplateConfiguration() {
17052
17011
  return {
17053
- title: 'Open AI Assistant',
17054
- packageName: '@promptbook/openai',
17055
- className: 'OpenAiAssistantExecutionTools',
17012
+ title: 'Ollama',
17013
+ packageName: '@promptbook/ollama',
17014
+ className: 'OllamaExecutionTools',
17056
17015
  options: {
17057
- apiKey: 'sk-',
17058
- assistantId: 'asst_',
17016
+ baseURL: 'http://localhost:11434',
17017
+ model: 'llama2',
17059
17018
  maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17060
17019
  },
17061
17020
  };
17062
17021
  },
17063
17022
  createConfigurationFromEnv(env) {
17064
- return null;
17065
- /*
17066
- if (typeof env.OPENAI_API_KEY === 'string' || typeof env.OPENAI_XXX === 'string') {
17023
+ if (typeof env.OLLAMA_BASE_URL === 'string') {
17067
17024
  return {
17068
- title: 'Open AI Assistant (from env)',
17069
- packageName: '@promptbook/openai',
17070
- className: 'OpenAiAssistantExecutionTools',
17025
+ title: 'Ollama (from env)',
17026
+ packageName: '@promptbook/ollama',
17027
+ className: 'OllamaExecutionTools',
17071
17028
  options: {
17072
- apiKey: env.OPENAI_API_KEY!,
17073
- assistantId: env.OPENAI_XXX!
17029
+ baseURL: env.OLLAMA_BASE_URL,
17030
+ model: env.OLLAMA_MODEL || 'llama2',
17031
+ maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17074
17032
  },
17075
17033
  };
17076
17034
  }
17077
-
17078
17035
  return null;
17079
- */
17080
17036
  },
17081
17037
  });
17082
17038
  /**
@@ -17195,7 +17151,7 @@ class OpenAiExecutionTools {
17195
17151
  /**
17196
17152
  * List all available OpenAI models that can be used
17197
17153
  */
17198
- listModels() {
17154
+ async listModels() {
17199
17155
  /*
17200
17156
  Note: Dynamic lising of the models
17201
17157
  const models = await this.openai.models.list({});
@@ -17203,7 +17159,26 @@ class OpenAiExecutionTools {
17203
17159
  console.log({ models });
17204
17160
  console.log(models.data);
17205
17161
  */
17206
- return OPENAI_MODELS;
17162
+ const client = await this.getClient();
17163
+ const rawModelsList = await client.models.list();
17164
+ const availableModels = rawModelsList.data
17165
+ .sort((a, b) => (a.created > b.created ? 1 : -1))
17166
+ .map((modelFromApi) => {
17167
+ // TODO: !!!! What about other model compatibilities?
17168
+ const modelFromList = OPENAI_MODELS.find(({ modelName }) => modelName === modelFromApi.id ||
17169
+ modelName.startsWith(modelFromApi.id) ||
17170
+ modelFromApi.id.startsWith(modelName));
17171
+ if (modelFromList !== undefined) {
17172
+ return modelFromList;
17173
+ }
17174
+ return {
17175
+ modelVariant: 'CHAT',
17176
+ modelTitle: modelFromApi.id,
17177
+ modelName: modelFromApi.id,
17178
+ modelDescription: '',
17179
+ };
17180
+ });
17181
+ return availableModels;
17207
17182
  }
17208
17183
  /**
17209
17184
  * Calls OpenAI API to use a chat model.
@@ -17483,6 +17458,163 @@ class OpenAiExecutionTools {
17483
17458
  * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
17484
17459
  */
17485
17460
 
17461
+ /**
17462
+ * Execution Tools for calling OpenAI API
17463
+ *
17464
+ * Note: This can be also used for other OpenAI compatible APIs, like Ollama
17465
+ *
17466
+ * @public exported from `@promptbook/openai`
17467
+ */
17468
+ const createOpenAiExecutionTools = Object.assign((options) => {
17469
+ // TODO: [🧠][main] !!4 If browser, auto add `dangerouslyAllowBrowser`
17470
+ if (($isRunningInBrowser() || $isRunningInWebWorker()) && !options.dangerouslyAllowBrowser) {
17471
+ options = { ...options, dangerouslyAllowBrowser: true };
17472
+ }
17473
+ return new OpenAiExecutionTools(options);
17474
+ }, {
17475
+ packageName: '@promptbook/openai',
17476
+ className: 'OpenAiExecutionTools',
17477
+ });
17478
+ /**
17479
+ * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
17480
+ * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
17481
+ */
17482
+
17483
+ /**
17484
+ * Default base URL for Ollama API
17485
+ *
17486
+ * @public exported from `@promptbook/ollama`
17487
+ */
17488
+ const DEFAULT_OLLAMA_BASE_URL = 'http://localhost:11434'; // <- TODO: !!!! What is the correct base URL? /v1?
17489
+
17490
+ /**
17491
+ * Execution Tools for calling Ollama API
17492
+ *
17493
+ * @public exported from `@promptbook/ollama`
17494
+ */
17495
+ const createOllamaExecutionTools = Object.assign((ollamaOptions) => {
17496
+ const openAiCompatibleOptions = {
17497
+ baseURL: DEFAULT_OLLAMA_BASE_URL,
17498
+ ...ollamaOptions,
17499
+ userId: 'ollama',
17500
+ };
17501
+ // TODO: !!!! Listing the models - do it dynamically in OpenAiExecutionTools
17502
+ // TODO: !!!! Do not allow to create Assistant from OpenAi compatible tools
17503
+ return createOpenAiExecutionTools(openAiCompatibleOptions);
17504
+ }, {
17505
+ packageName: '@promptbook/ollama',
17506
+ className: 'OllamaExecutionTools',
17507
+ });
17508
+
17509
+ /**
17510
+ * Registration of LLM provider
17511
+ *
17512
+ * Warning: This is not useful for the end user, it is just a side effect of the mechanism that handles all available LLM tools
17513
+ *
17514
+ * @public exported from `@promptbook/ollama`
17515
+ * @public exported from `@promptbook/wizzard`
17516
+ * @public exported from `@promptbook/cli`
17517
+ */
17518
+ const _OllamaRegistration = $llmToolsRegister.register(createOllamaExecutionTools);
17519
+ /**
17520
+ * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
17521
+ * Note: [💞] Ignore a discrepancy between file name and entity name
17522
+ */
17523
+
17524
+ /**
17525
+ * Registration of LLM provider metadata
17526
+ *
17527
+ * Warning: This is not useful for the end user, it is just a side effect of the mechanism that handles all available LLM tools
17528
+ *
17529
+ * @public exported from `@promptbook/core`
17530
+ * @public exported from `@promptbook/wizzard`
17531
+ * @public exported from `@promptbook/cli`
17532
+ */
17533
+ const _OpenAiMetadataRegistration = $llmToolsMetadataRegister.register({
17534
+ title: 'Open AI',
17535
+ packageName: '@promptbook/openai',
17536
+ className: 'OpenAiExecutionTools',
17537
+ envVariables: ['OPENAI_API_KEY'],
17538
+ trustLevel: 'CLOSED',
17539
+ order: MODEL_ORDERS.TOP_TIER,
17540
+ getBoilerplateConfiguration() {
17541
+ return {
17542
+ title: 'Open AI',
17543
+ packageName: '@promptbook/openai',
17544
+ className: 'OpenAiExecutionTools',
17545
+ options: {
17546
+ apiKey: 'sk-',
17547
+ maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17548
+ },
17549
+ };
17550
+ },
17551
+ createConfigurationFromEnv(env) {
17552
+ // Note: Note using `process.env` BUT `env` to pass in the environment variables dynamically
17553
+ if (typeof env.OPENAI_API_KEY === 'string') {
17554
+ return {
17555
+ title: 'Open AI (from env)',
17556
+ packageName: '@promptbook/openai',
17557
+ className: 'OpenAiExecutionTools',
17558
+ options: {
17559
+ apiKey: env.OPENAI_API_KEY,
17560
+ },
17561
+ };
17562
+ }
17563
+ return null;
17564
+ },
17565
+ });
17566
+ /**
17567
+ * Registration of the OpenAI Assistant metadata
17568
+ *
17569
+ * Note: [🏐] Configurations registrations are done in the metadata registration section, but the constructor registration is handled separately.
17570
+ *
17571
+ * @public exported from `@promptbook/core`
17572
+ * @public exported from `@promptbook/wizzard`
17573
+ * @public exported from `@promptbook/cli`
17574
+ */
17575
+ const _OpenAiAssistantMetadataRegistration = $llmToolsMetadataRegister.register({
17576
+ title: 'Open AI Assistant',
17577
+ packageName: '@promptbook/openai',
17578
+ className: 'OpenAiAssistantExecutionTools',
17579
+ envVariables: null,
17580
+ // <- TODO: ['OPENAI_API_KEY', 'OPENAI_ASSISTANT_ID']
17581
+ trustLevel: 'CLOSED',
17582
+ order: MODEL_ORDERS.NORMAL,
17583
+ getBoilerplateConfiguration() {
17584
+ return {
17585
+ title: 'Open AI Assistant',
17586
+ packageName: '@promptbook/openai',
17587
+ className: 'OpenAiAssistantExecutionTools',
17588
+ options: {
17589
+ apiKey: 'sk-',
17590
+ assistantId: 'asst_',
17591
+ maxRequestsPerMinute: DEFAULT_MAX_REQUESTS_PER_MINUTE,
17592
+ },
17593
+ };
17594
+ },
17595
+ createConfigurationFromEnv(env) {
17596
+ return null;
17597
+ /*
17598
+ if (typeof env.OPENAI_API_KEY === 'string' || typeof env.OPENAI_XXX === 'string') {
17599
+ return {
17600
+ title: 'Open AI Assistant (from env)',
17601
+ packageName: '@promptbook/openai',
17602
+ className: 'OpenAiAssistantExecutionTools',
17603
+ options: {
17604
+ apiKey: env.OPENAI_API_KEY!,
17605
+ assistantId: env.OPENAI_XXX!
17606
+ },
17607
+ };
17608
+ }
17609
+
17610
+ return null;
17611
+ */
17612
+ },
17613
+ });
17614
+ /**
17615
+ * Note: [💞] Ignore a discrepancy between file name and entity name
17616
+ */
17617
+
17486
17618
  /**
17487
17619
  * Execution Tools for calling OpenAI API Assistants
17488
17620
  *
@@ -17670,26 +17802,6 @@ const createOpenAiAssistantExecutionTools = Object.assign((options) => {
17670
17802
  * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
17671
17803
  */
17672
17804
 
17673
- /**
17674
- * Execution Tools for calling OpenAI API
17675
- *
17676
- * @public exported from `@promptbook/openai`
17677
- */
17678
- const createOpenAiExecutionTools = Object.assign((options) => {
17679
- // TODO: [🧠][main] !!4 If browser, auto add `dangerouslyAllowBrowser`
17680
- if (($isRunningInBrowser() || $isRunningInWebWorker()) && !options.dangerouslyAllowBrowser) {
17681
- options = { ...options, dangerouslyAllowBrowser: true };
17682
- }
17683
- return new OpenAiExecutionTools(options);
17684
- }, {
17685
- packageName: '@promptbook/openai',
17686
- className: 'OpenAiExecutionTools',
17687
- });
17688
- /**
17689
- * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
17690
- * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
17691
- */
17692
-
17693
17805
  /**
17694
17806
  * Registration of LLM provider
17695
17807
  *
@@ -18947,5 +19059,5 @@ const _WebsiteScraperRegistration = $scrapersRegister.register(createWebsiteScra
18947
19059
  * Note: [💞] Ignore a discrepancy between file name and entity name
18948
19060
  */
18949
19061
 
18950
- export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION, _AnthropicClaudeMetadataRegistration, _AnthropicClaudeRegistration, _AzureOpenAiMetadataRegistration, _AzureOpenAiRegistration, _BoilerplateScraperMetadataRegistration, _BoilerplateScraperRegistration, _CLI, _DeepseekMetadataRegistration, _DeepseekRegistration, _DocumentScraperMetadataRegistration, _DocumentScraperRegistration, _GoogleMetadataRegistration, _GoogleRegistration, _LegacyDocumentScraperMetadataRegistration, _LegacyDocumentScraperRegistration, _MarkdownScraperMetadataRegistration, _MarkdownScraperRegistration, _MarkitdownScraperMetadataRegistration, _MarkitdownScraperRegistration, _OpenAiAssistantMetadataRegistration, _OpenAiAssistantRegistration, _OpenAiMetadataRegistration, _OpenAiRegistration, _PdfScraperMetadataRegistration, _PdfScraperRegistration, _WebsiteScraperMetadataRegistration, _WebsiteScraperRegistration };
19062
+ export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION, _AnthropicClaudeMetadataRegistration, _AnthropicClaudeRegistration, _AzureOpenAiMetadataRegistration, _AzureOpenAiRegistration, _BoilerplateScraperMetadataRegistration, _BoilerplateScraperRegistration, _CLI, _DeepseekMetadataRegistration, _DeepseekRegistration, _DocumentScraperMetadataRegistration, _DocumentScraperRegistration, _GoogleMetadataRegistration, _GoogleRegistration, _LegacyDocumentScraperMetadataRegistration, _LegacyDocumentScraperRegistration, _MarkdownScraperMetadataRegistration, _MarkdownScraperRegistration, _MarkitdownScraperMetadataRegistration, _MarkitdownScraperRegistration, _OllamaMetadataRegistration, _OllamaRegistration, _OpenAiAssistantMetadataRegistration, _OpenAiAssistantRegistration, _OpenAiMetadataRegistration, _OpenAiRegistration, _PdfScraperMetadataRegistration, _PdfScraperRegistration, _WebsiteScraperMetadataRegistration, _WebsiteScraperRegistration };
18951
19063
  //# sourceMappingURL=index.es.js.map