@promptbook/remote-server 0.66.0-6 → 0.66.0-8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +270 -157
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/azure-openai.index.d.ts +4 -0
- package/esm/typings/src/_packages/cli.index.d.ts +4 -2
- package/esm/typings/src/_packages/core.index.d.ts +2 -0
- package/esm/typings/src/cli/main.d.ts +2 -2
- package/esm/typings/src/execution/LlmExecutionTools.d.ts +1 -0
- package/esm/typings/src/knowledge/prepare-knowledge/_common/prepareKnowledgePieces.test.d.ts +1 -1
- package/esm/typings/src/knowledge/prepare-knowledge/markdown/prepareKnowledgeFromMarkdown.test.d.ts +1 -1
- package/esm/typings/src/knowledge/prepare-knowledge/pdf/prepareKnowledgeFromPdf.test.d.ts +1 -1
- package/esm/typings/src/llm-providers/_common/getLlmToolsForTestingAndScriptsAndPlayground.d.ts +1 -0
- package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +10 -5
- package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts +10 -5
- package/esm/typings/src/llm-providers/azure-openai/createAzureOpenAiExecutionTools.d.ts +15 -0
- package/esm/typings/src/llm-providers/azure-openai/register-configuration.d.ts +9 -0
- package/esm/typings/src/llm-providers/azure-openai/register-constructor.d.ts +11 -0
- package/esm/typings/src/llm-providers/mocked/MockedEchoLlmExecutionTools.d.ts +8 -4
- package/esm/typings/src/llm-providers/mocked/MockedFackedLlmExecutionTools.d.ts +8 -4
- package/esm/typings/src/llm-providers/multiple/MultipleLlmExecutionTools.d.ts +9 -5
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +10 -5
- package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +8 -4
- package/esm/typings/src/personas/preparePersona.test.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +270 -157
- package/umd/index.umd.js.map +1 -1
|
@@ -18,7 +18,7 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
|
|
|
18
18
|
/**
|
|
19
19
|
* OpenAI API client.
|
|
20
20
|
*/
|
|
21
|
-
private
|
|
21
|
+
private client;
|
|
22
22
|
/**
|
|
23
23
|
* Creates OpenAI Execution Tools.
|
|
24
24
|
*
|
|
@@ -27,6 +27,15 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
|
|
|
27
27
|
constructor(options?: OpenAiExecutionToolsOptions);
|
|
28
28
|
get title(): string_title & string_markdown_text;
|
|
29
29
|
get description(): string_markdown;
|
|
30
|
+
private getClient;
|
|
31
|
+
/**
|
|
32
|
+
* Check the `options` passed to `constructor`
|
|
33
|
+
*/
|
|
34
|
+
checkConfiguration(): Promise<void>;
|
|
35
|
+
/**
|
|
36
|
+
* List all available OpenAI models that can be used
|
|
37
|
+
*/
|
|
38
|
+
listModels(): Array<AvailableModel>;
|
|
30
39
|
/**
|
|
31
40
|
* Calls OpenAI API to use a chat model.
|
|
32
41
|
*/
|
|
@@ -55,10 +64,6 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
|
|
|
55
64
|
* Default model for completion variant.
|
|
56
65
|
*/
|
|
57
66
|
private getDefaultEmbeddingModel;
|
|
58
|
-
/**
|
|
59
|
-
* List all available OpenAI models that can be used
|
|
60
|
-
*/
|
|
61
|
-
listModels(): Array<AvailableModel>;
|
|
62
67
|
}
|
|
63
68
|
/**
|
|
64
69
|
* TODO: [🧠][🧙♂️] Maybe there can be some wizzard for thoose who want to use just OpenAI
|
|
@@ -24,6 +24,14 @@ export declare class RemoteLlmExecutionTools implements LlmExecutionTools {
|
|
|
24
24
|
constructor(options: RemoteLlmExecutionToolsOptions);
|
|
25
25
|
get title(): string_title & string_markdown_text;
|
|
26
26
|
get description(): string_markdown;
|
|
27
|
+
/**
|
|
28
|
+
* Check the configuration of all execution tools
|
|
29
|
+
*/
|
|
30
|
+
checkConfiguration(): Promise<void>;
|
|
31
|
+
/**
|
|
32
|
+
* List all available models that can be used
|
|
33
|
+
*/
|
|
34
|
+
listModels(): Promise<Array<AvailableModel>>;
|
|
27
35
|
/**
|
|
28
36
|
* Creates a connection to the remote proxy server.
|
|
29
37
|
*/
|
|
@@ -44,10 +52,6 @@ export declare class RemoteLlmExecutionTools implements LlmExecutionTools {
|
|
|
44
52
|
* Calls remote proxy server to use both completion or chat model
|
|
45
53
|
*/
|
|
46
54
|
private callCommonModel;
|
|
47
|
-
/**
|
|
48
|
-
* List all available models that can be used
|
|
49
|
-
*/
|
|
50
|
-
listModels(): Promise<Array<AvailableModel>>;
|
|
51
55
|
}
|
|
52
56
|
/**
|
|
53
57
|
* TODO: [🍓] Allow to list compatible models with each variant
|
|
@@ -1 +1 @@
|
|
|
1
|
-
|
|
1
|
+
export {};
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@promptbook/remote-server",
|
|
3
|
-
"version": "0.66.0-6",
|
|
3
|
+
"version": "0.66.0-8",
|
|
4
4
|
"description": "Supercharge your use of large language models",
|
|
5
5
|
"private": false,
|
|
6
6
|
"sideEffects": false,
|
|
@@ -47,7 +47,7 @@
|
|
|
47
47
|
"module": "./esm/index.es.js",
|
|
48
48
|
"typings": "./esm/typings/src/_packages/remote-server.index.d.ts",
|
|
49
49
|
"peerDependencies": {
|
|
50
|
-
"@promptbook/core": "0.66.0-6"
|
|
50
|
+
"@promptbook/core": "0.66.0-8"
|
|
51
51
|
},
|
|
52
52
|
"dependencies": {
|
|
53
53
|
"@anthropic-ai/sdk": "0.26.1",
|