@promptbook/remote-client 0.59.0-31 → 0.59.0-32

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -156,7 +156,7 @@ var RemoteLlmExecutionTools = /** @class */ (function () {
  /**
   * The version of the Promptbook library
   */
- var PROMPTBOOK_VERSION = '0.59.0-30';
+ var PROMPTBOOK_VERSION = '0.59.0-31';

  export { PROMPTBOOK_VERSION, RemoteLlmExecutionTools };
  //# sourceMappingURL=index.es.js.map
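
The two symbols touched by this hunk, PROMPTBOOK_VERSION and RemoteLlmExecutionTools, are the package's public exports. A minimal consumer sketch using only what is visible in this diff (RemoteLlmExecutionTools is not constructed because its options are not part of this diff):

    // Minimal sketch based only on the exports shown in the hunk above.
    import { PROMPTBOOK_VERSION, RemoteLlmExecutionTools } from '@promptbook/remote-client';

    console.info(`Using @promptbook/remote-client ${PROMPTBOOK_VERSION}`); // '0.59.0-31' in this release, per the hunk above
    console.info(typeof RemoteLlmExecutionTools); // 'function' - the exported class constructor
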
@@ -27,4 +27,5 @@ export {};
  * TODO: Report here line/column of error
  * TODO: Use spaceTrim more effectively
  * TODO: [🧠] Parameter flags - isInput, isOutput, isInternal
+ * TODO: [🏏] Leverage the batch API and build queues @see https://platform.openai.com/docs/guides/batch
  */
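
The new TODO references the OpenAI Batch API. A hedged sketch of what such a queue could look like with the official openai SDK follows; this is not part of @promptbook/remote-client, and the request shape is only an assumption drawn from the linked guide:

    // Illustrative sketch only - not @promptbook code; assumes the official `openai` npm package (v4+).
    import OpenAI, { toFile } from 'openai';

    const openai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY });

    async function submitPromptQueue(prompts: Array<{ id: string; content: string }>) {
        // Each queued prompt becomes one JSONL line of the batch input file
        const jsonl = prompts
            .map((prompt) =>
                JSON.stringify({
                    custom_id: prompt.id,
                    method: 'POST',
                    url: '/v1/chat/completions',
                    body: { model: 'gpt-4o-mini', messages: [{ role: 'user', content: prompt.content }] },
                }),
            )
            .join('\n');

        // Upload the queue, then create the batch; results are fetched later via openai.batches.retrieve(batch.id)
        const file = await openai.files.create({ file: await toFile(Buffer.from(jsonl), 'queue.jsonl'), purpose: 'batch' });
        return openai.batches.create({ input_file_id: file.id, endpoint: '/v1/chat/completions', completion_window: '24h' });
    }
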
@@ -27,6 +27,10 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools
  * Calls Anthropic Claude API to use a complete model.
  */
  gptComplete(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptCompletionResult>;
+ /**
+ * Get the model that should be used as default
+ */
+ private getDefaultModel;
  /**
  * Default model for chat variant.
  */
@@ -32,6 +32,10 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
  * Calls OpenAI API to use a embedding model
  */
  embed(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptEmbeddingResult>;
+ /**
+ * Get the model that should be used as default
+ */
+ private getDefaultModel;
  /**
  * Default model for chat variant.
  */
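
Both the Anthropic and the OpenAI execution tools gain a private getDefaultModel member in these hunks; its body is not part of this diff, so the sketch below only illustrates the usual pattern of falling back to a default model when the prompt's modelRequirements do not name one. The local types and model IDs are assumptions, not the package's actual implementation:

    // Hypothetical sketch - types and model names are placeholders, not Promptbook's real code.
    interface ModelRequirements {
        modelVariant: 'CHAT' | 'COMPLETION' | 'EMBEDDING';
        modelName?: string;
    }

    class ExampleExecutionTools {
        /**
         * Get the model that should be used as default (assumed behaviour)
         */
        private getDefaultModel(variant: ModelRequirements['modelVariant']): string {
            const defaults: Record<ModelRequirements['modelVariant'], string> = {
                CHAT: 'gpt-4', // placeholder
                COMPLETION: 'gpt-3.5-turbo-instruct', // placeholder
                EMBEDDING: 'text-embedding-3-small', // placeholder
            };
            return defaults[variant];
        }

        /**
         * An explicit model requirement wins; otherwise fall back to the default model.
         */
        protected resolveModel(requirements: ModelRequirements): string {
            return requirements.modelName ?? this.getDefaultModel(requirements.modelVariant);
        }
    }
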
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/remote-client",
- "version": "0.59.0-31",
+ "version": "0.59.0-32",
  "description": "Library to supercharge your use of large language models",
  "private": false,
  "sideEffects": false,
@@ -47,7 +47,7 @@
  }
  ],
  "peerDependencies": {
- "@promptbook/core": "0.59.0-31"
+ "@promptbook/core": "0.59.0-32"
  },
  "main": "./umd/index.umd.js",
  "module": "./esm/index.es.js",
package/umd/index.umd.js CHANGED
@@ -160,7 +160,7 @@
  /**
   * The version of the Promptbook library
   */
- var PROMPTBOOK_VERSION = '0.59.0-30';
+ var PROMPTBOOK_VERSION = '0.59.0-31';

  exports.PROMPTBOOK_VERSION = PROMPTBOOK_VERSION;
  exports.RemoteLlmExecutionTools = RemoteLlmExecutionTools;
@@ -27,4 +27,5 @@ export {};
  * TODO: Report here line/column of error
  * TODO: Use spaceTrim more effectively
  * TODO: [🧠] Parameter flags - isInput, isOutput, isInternal
+ * TODO: [🏏] Leverage the batch API and build queues @see https://platform.openai.com/docs/guides/batch
  */
@@ -27,6 +27,10 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools
  * Calls Anthropic Claude API to use a complete model.
  */
  gptComplete(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptCompletionResult>;
+ /**
+ * Get the model that should be used as default
+ */
+ private getDefaultModel;
  /**
  * Default model for chat variant.
  */
@@ -32,6 +32,10 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
  * Calls OpenAI API to use a embedding model
  */
  embed(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptEmbeddingResult>;
+ /**
+ * Get the model that should be used as default
+ */
+ private getDefaultModel;
  /**
  * Default model for chat variant.
  */
@@ -1,4 +0,0 @@
- export type IndexPreparer = {};
- /**
-  * TODO: Export via some package
-  */
@@ -1,4 +0,0 @@
- export type IndexPreparer = {};
- /**
-  * TODO: Export via some package
-  */