@promptbook/remote-client 0.63.1 → 0.63.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -4,7 +4,7 @@ import { io } from 'socket.io-client';
4
4
  /**
5
5
  * The version of the Promptbook library
6
6
  */
7
- var PROMPTBOOK_VERSION = '0.63.0';
7
+ var PROMPTBOOK_VERSION = '0.63.2';
8
8
  // TODO: !!!! List here all the versions and annotate + put into script
9
9
 
10
10
  /*! *****************************************************************************
@@ -147,7 +147,7 @@ var RemoteLlmExecutionTools = /** @class */ (function () {
147
147
  if (this.options.isVerbose) {
148
148
  console.info("\uD83D\uDD8B Remote callChatModel call");
149
149
  }
150
- return /* not await */ this.callModelCommon(prompt);
150
+ return /* not await */ this.callCommonModel(prompt);
151
151
  };
152
152
  /**
153
153
  * Calls remote proxy server to use a completion model
@@ -156,7 +156,7 @@ var RemoteLlmExecutionTools = /** @class */ (function () {
156
156
  if (this.options.isVerbose) {
157
157
  console.info("\uD83D\uDCAC Remote callCompletionModel call");
158
158
  }
159
- return /* not await */ this.callModelCommon(prompt);
159
+ return /* not await */ this.callCommonModel(prompt);
160
160
  };
161
161
  /**
162
162
  * Calls remote proxy server to use an embedding model
@@ -165,13 +165,13 @@ var RemoteLlmExecutionTools = /** @class */ (function () {
165
165
  if (this.options.isVerbose) {
166
166
  console.info("\uD83D\uDCAC Remote callEmbeddingModel call");
167
167
  }
168
- return /* not await */ this.callModelCommon(prompt);
168
+ return /* not await */ this.callCommonModel(prompt);
169
169
  };
170
170
  // <- Note: [🤖] callXxxModel
171
171
  /**
172
172
  * Calls remote proxy server to use both completion or chat model
173
173
  */
174
- RemoteLlmExecutionTools.prototype.callModelCommon = function (prompt) {
174
+ RemoteLlmExecutionTools.prototype.callCommonModel = function (prompt) {
175
175
  return __awaiter(this, void 0, void 0, function () {
176
176
  var socket, promptResult;
177
177
  return __generator(this, function (_a) {
@@ -36,6 +36,7 @@ import type { LlmExecutionToolsWithTotalUsage } from '../llm-providers/_common/u
36
36
  import type { AnthropicClaudeExecutionToolsOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
37
37
  import type { AzureOpenAiExecutionToolsOptions } from '../llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions';
38
38
  import type { LangtailExecutionToolsOptions } from '../llm-providers/langtail/LangtailExecutionToolsOptions';
39
+ import type { MultipleLlmExecutionTools } from '../llm-providers/multiple/MultipleLlmExecutionTools';
39
40
  import type { OpenAiExecutionToolsOptions } from '../llm-providers/openai/OpenAiExecutionToolsOptions';
40
41
  import type { Promptbook_Server_Error } from '../llm-providers/remote/interfaces/Promptbook_Server_Error';
41
42
  import type { Promptbook_Server_Progress } from '../llm-providers/remote/interfaces/Promptbook_Server_Progress';
@@ -242,6 +243,7 @@ export type { LlmExecutionToolsWithTotalUsage };
242
243
  export type { AnthropicClaudeExecutionToolsOptions };
243
244
  export type { AzureOpenAiExecutionToolsOptions };
244
245
  export type { LangtailExecutionToolsOptions };
246
+ export type { MultipleLlmExecutionTools };
245
247
  export type { OpenAiExecutionToolsOptions };
246
248
  export type { Promptbook_Server_Error };
247
249
  export type { Promptbook_Server_Progress };
@@ -3,22 +3,25 @@ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
3
3
  import type { ChatPromptResult } from '../../execution/PromptResult';
4
4
  import type { CompletionPromptResult } from '../../execution/PromptResult';
5
5
  import type { EmbeddingPromptResult } from '../../execution/PromptResult';
6
+ import type { PromptResult } from '../../execution/PromptResult';
6
7
  import type { ChatPrompt } from '../../types/Prompt';
7
8
  import type { CompletionPrompt } from '../../types/Prompt';
8
9
  import type { EmbeddingPrompt } from '../../types/Prompt';
10
+ import type { Prompt } from '../../types/Prompt';
9
11
  import type { string_markdown } from '../../types/typeAliases';
10
12
  import type { string_markdown_text } from '../../types/typeAliases';
11
13
  import type { string_title } from '../../types/typeAliases';
12
14
  /**
13
15
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
14
16
  *
15
- * @private internal utility of `joinLlmExecutionTools`
17
+ * Note: Internal utility of `joinLlmExecutionTools` but exposed type
18
+ * @public exported from `@promptbook/types`
16
19
  */
17
20
  export declare class MultipleLlmExecutionTools implements LlmExecutionTools {
18
21
  /**
19
22
  * Array of execution tools in order of priority
20
23
  */
21
- private readonly llmExecutionTools;
24
+ readonly llmExecutionTools: Array<LlmExecutionTools>;
22
25
  /**
23
26
  * Gets array of execution tools in order of priority
24
27
  */
@@ -40,7 +43,7 @@ export declare class MultipleLlmExecutionTools implements LlmExecutionTools {
40
43
  /**
41
44
  * Calls the best available model
42
45
  */
43
- private callModelCommon;
46
+ protected callCommonModel(prompt: Prompt): Promise<PromptResult>;
44
47
  /**
45
48
  * List all available models that can be used
46
49
  * This list is a combination of all available models from all execution tools
@@ -43,7 +43,7 @@ export declare class RemoteLlmExecutionTools implements LlmExecutionTools {
43
43
  /**
44
44
  * Calls remote proxy server to use both completion or chat model
45
45
  */
46
- private callModelCommon;
46
+ private callCommonModel;
47
47
  /**
48
48
  * List all available models that can be used
49
49
  */
@@ -53,4 +53,4 @@ export declare class RemoteLlmExecutionTools implements LlmExecutionTools {
53
53
  * TODO: [🍓] Allow to list compatible models with each variant
54
54
  * TODO: [🗯] RemoteLlmExecutionTools should extend Destroyable and implement IDestroyable
55
55
  * TODO: [🍜] Add anonymous option
56
- */
56
+ */
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/remote-client",
3
- "version": "0.63.1",
3
+ "version": "0.63.3",
4
4
  "description": "Supercharge your use of large language models",
5
5
  "private": false,
6
6
  "sideEffects": false,
@@ -47,7 +47,7 @@
47
47
  "module": "./esm/index.es.js",
48
48
  "typings": "./esm/typings/src/_packages/remote-client.index.d.ts",
49
49
  "peerDependencies": {
50
- "@promptbook/core": "0.63.1"
50
+ "@promptbook/core": "0.63.3"
51
51
  },
52
52
  "dependencies": {
53
53
  "socket.io-client": "4.7.2"
package/umd/index.umd.js CHANGED
@@ -8,7 +8,7 @@
8
8
  /**
9
9
  * The version of the Promptbook library
10
10
  */
11
- var PROMPTBOOK_VERSION = '0.63.0';
11
+ var PROMPTBOOK_VERSION = '0.63.2';
12
12
  // TODO: !!!! List here all the versions and annotate + put into script
13
13
 
14
14
  /*! *****************************************************************************
@@ -151,7 +151,7 @@
151
151
  if (this.options.isVerbose) {
152
152
  console.info("\uD83D\uDD8B Remote callChatModel call");
153
153
  }
154
- return /* not await */ this.callModelCommon(prompt);
154
+ return /* not await */ this.callCommonModel(prompt);
155
155
  };
156
156
  /**
157
157
  * Calls remote proxy server to use a completion model
@@ -160,7 +160,7 @@
160
160
  if (this.options.isVerbose) {
161
161
  console.info("\uD83D\uDCAC Remote callCompletionModel call");
162
162
  }
163
- return /* not await */ this.callModelCommon(prompt);
163
+ return /* not await */ this.callCommonModel(prompt);
164
164
  };
165
165
  /**
166
166
  * Calls remote proxy server to use an embedding model
@@ -169,13 +169,13 @@
169
169
  if (this.options.isVerbose) {
170
170
  console.info("\uD83D\uDCAC Remote callEmbeddingModel call");
171
171
  }
172
- return /* not await */ this.callModelCommon(prompt);
172
+ return /* not await */ this.callCommonModel(prompt);
173
173
  };
174
174
  // <- Note: [🤖] callXxxModel
175
175
  /**
176
176
  * Calls remote proxy server to use both completion or chat model
177
177
  */
178
- RemoteLlmExecutionTools.prototype.callModelCommon = function (prompt) {
178
+ RemoteLlmExecutionTools.prototype.callCommonModel = function (prompt) {
179
179
  return __awaiter(this, void 0, void 0, function () {
180
180
  var socket, promptResult;
181
181
  return __generator(this, function (_a) {