@promptbook/openai 0.52.0-30 → 0.52.0-31

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -52,6 +52,7 @@ const promptbook = await library.getPromptbookByUrl(`https://promptbook.studio/m
 // ▶ Prepare tools
 const tools = {
     llm: new OpenAiExecutionTools({
+        isVerbose: true,
         apiKey: process.env.OPENAI_API_KEY,
     }),
     script: [new JavascriptExecutionTools()],
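For context, this is roughly what the single-provider README sample reads like after the change above, with `isVerbose: true` now passed to `OpenAiExecutionTools`. It is a sketch assembled from the hunk; the import paths and the surrounding `promptbook`/`library` setup are assumptions rather than part of this diff.

```ts
// Sketch of the updated README sample; import paths are assumed, only the
// class names and options appear in the diff itself.
import { OpenAiExecutionTools } from '@promptbook/openai';
import { JavascriptExecutionTools } from '@promptbook/execute-javascript'; // assumed package name

// ▶ Prepare tools
const tools = {
    llm: new OpenAiExecutionTools({
        isVerbose: true, // newly added in 0.52.0-31; presumably enables verbose logging
        apiKey: process.env.OPENAI_API_KEY,
    }),
    script: [new JavascriptExecutionTools()],
};
```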
@@ -97,15 +98,12 @@ const promptbook = await library.getPromptbookByUrl(`https://promptbook.studio/m
 const tools = new MultipleLlmExecutionTools(
     // Note: You can use multiple LLM providers in one Promptbook execution. The best model will be chosen automatically according to the prompt and the model's capabilities.
     new OpenAiExecutionTools({
-        isVerbose: true,
         apiKey: process.env.OPENAI_API_KEY,
     }),
     new AnthropicClaudeExecutionTools({
-        isVerbose: true,
         apiKey: process.env.ANTHROPIC_CLAUDE_API_KEY,
     }),
     new AzureOpenAiExecutionTools({
-        isVerbose: true,
         resourceName: process.env.AZUREOPENAI_RESOURCE_NAME,
         deploymentName: process.env.AZUREOPENAI_DEPLOYMENT_NAME,
         apiKey: process.env.AZUREOPENAI_API_KEY,
@@ -116,7 +114,7 @@ const tools = new MultipleLlmExecutionTools(
 const promptbookExecutor = createPromptbookExecutor({ promptbook, tools });
 
 // ▶ Prepare input parameters
-const inputParameters = { word: 'cat' };
+const inputParameters = { word: 'dog' };
 
 // 🚀▶ Execute the Promptbook
 const result = await promptbookExecutor(inputParameters);
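Putting the multi-provider README hunks together, the sample after this release reads approximately as follows. The constructors, `createPromptbookExecutor`, and the `{ word: 'dog' }` input come straight from the diff; the import paths and the `promptbook` obtained earlier via `library.getPromptbookByUrl(...)` are assumptions.

```ts
// Sketch assembled from the hunks above; import paths are assumed.
import { createPromptbookExecutor, MultipleLlmExecutionTools } from '@promptbook/core';
import { OpenAiExecutionTools } from '@promptbook/openai';
import { AnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude';
import { AzureOpenAiExecutionTools } from '@promptbook/azure-openai';

const tools = new MultipleLlmExecutionTools(
    // Note: Multiple LLM providers in one Promptbook execution; the best model is
    // chosen automatically according to the prompt and each model's capabilities.
    new OpenAiExecutionTools({
        // isVerbose: true was removed from this sample in 0.52.0-31
        apiKey: process.env.OPENAI_API_KEY,
    }),
    new AnthropicClaudeExecutionTools({
        apiKey: process.env.ANTHROPIC_CLAUDE_API_KEY,
    }),
    new AzureOpenAiExecutionTools({
        resourceName: process.env.AZUREOPENAI_RESOURCE_NAME,
        deploymentName: process.env.AZUREOPENAI_DEPLOYMENT_NAME,
        apiKey: process.env.AZUREOPENAI_API_KEY,
    }),
);

// ▶ Create executor - `promptbook` comes from `await library.getPromptbookByUrl(...)`
// shown in the hunk headers above
const promptbookExecutor = createPromptbookExecutor({ promptbook, tools });

// ▶ Prepare input parameters (the sample value changed from 'cat' to 'dog')
const inputParameters = { word: 'dog' };

// 🚀▶ Execute the Promptbook
const result = await promptbookExecutor(inputParameters);
```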
@@ -10,6 +10,7 @@ import { RemoteServerOptions } from './interfaces/RemoteServerOptions';
  */
 export declare function startRemoteServer(options: RemoteServerOptions): IDestroyable;
 /**
+ * TODO: [⚖] Expose the library to be able to connect to same library via createPromptbookLibraryFromUrl
  * TODO: Handle progress - support streaming
  * TODO: [🤹‍♂️] Do not hang up immediately but wait until client closes OR timeout
  * TODO: [🤹‍♂️] Timeout on chat to free up resources
@@ -25,5 +25,6 @@ type CreatePromptbookLibraryFromUrlyOptions = {
 export declare function createPromptbookLibraryFromUrl(url: string_url | URL, options: CreatePromptbookLibraryFromUrlyOptions): Promise<PromptbookLibrary>;
 export {};
 /***
+ * TODO: [⚖] Compatible with remote server
  * TODO: [🍓][🚯] !!! Add to README and samples + maybe make `@promptbook/library` package
  */
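The two new [⚖] TODO notes concern wiring `startRemoteServer` and `createPromptbookLibraryFromUrl` together, i.e. serving a library and connecting back to the same library by URL. Since neither options type is expanded in this diff, the following is only a hedged sketch of how the declared signatures might be used; the option fields, port number, and import paths are hypothetical and not confirmed by the diff.

```ts
// Hypothetical usage sketch based only on the declarations shown above.
// RemoteServerOptions and CreatePromptbookLibraryFromUrlyOptions are NOT expanded
// in this diff, so their fields here are invented for illustration.
import { startRemoteServer } from '@promptbook/remote-server';               // assumed path
import { createPromptbookLibraryFromUrl } from '@promptbook/remote-client';  // assumed path

declare const library: unknown; // some PromptbookLibrary instance created elsewhere

// Serve the library over the network; returns an IDestroyable handle per the declaration.
const server = startRemoteServer({
    port: 4460,                 // hypothetical option
    promptbookLibrary: library, // hypothetical option - the [⚖] TODO is about exposing this
} as any);

// Connect back to (ideally) the same library by URL - what the
// "[⚖] Compatible with remote server" TODO points toward.
const remoteLibrary = await createPromptbookLibraryFromUrl(
    new URL('http://localhost:4460'),
    {} as any, // options shape not shown in this diff
);

// IDestroyable presumably exposes a destroy() method for cleanup.
server.destroy();
```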
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/openai",
-    "version": "0.52.0-30",
+    "version": "0.52.0-31",
     "description": "Library to supercharge your use of large language models",
     "private": false,
     "sideEffects": false,
@@ -48,7 +48,7 @@
         }
     ],
     "peerDependencies": {
-        "@promptbook/core": "0.52.0-30"
+        "@promptbook/core": "0.52.0-31"
     },
     "main": "./umd/index.umd.js",
     "module": "./esm/index.es.js",
@@ -10,6 +10,7 @@ import { RemoteServerOptions } from './interfaces/RemoteServerOptions';
  */
 export declare function startRemoteServer(options: RemoteServerOptions): IDestroyable;
 /**
+ * TODO: [⚖] Expose the library to be able to connect to same library via createPromptbookLibraryFromUrl
  * TODO: Handle progress - support streaming
  * TODO: [🤹‍♂️] Do not hang up immediately but wait until client closes OR timeout
  * TODO: [🤹‍♂️] Timeout on chat to free up resources
@@ -25,5 +25,6 @@ type CreatePromptbookLibraryFromUrlyOptions = {
 export declare function createPromptbookLibraryFromUrl(url: string_url | URL, options: CreatePromptbookLibraryFromUrlyOptions): Promise<PromptbookLibrary>;
 export {};
 /***
+ * TODO: [⚖] Compatible with remote server
  * TODO: [🍓][🚯] !!! Add to README and samples + maybe make `@promptbook/library` package
  */