@promptbook/openai 0.94.0-1 → 0.94.0-4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +6 -8
- package/esm/index.es.js +159 -121
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/ollama.index.d.ts +6 -0
- package/esm/typings/src/_packages/openai.index.d.ts +2 -0
- package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
- package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
- package/esm/typings/src/llm-providers/{openai/computeUsage.d.ts → _common/utils/pricing.d.ts} +2 -2
- package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +36 -11
- package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -12
- package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +3 -3
- package/esm/typings/src/llm-providers/ollama/ollama-models.d.ts +14 -0
- package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +91 -0
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -53
- package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
- package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
- package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +159 -120
- package/umd/index.umd.js.map +1 -1
- /package/esm/typings/src/llm-providers/{openai/computeUsage.test.d.ts → _common/utils/pricing.test.d.ts} +0 -0
package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts
CHANGED

@@ -1,79 +1,38 @@
-import OpenAI from 'openai';
 import type { AvailableModel } from '../../execution/AvailableModel';
 import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
-import type { ChatPromptResult } from '../../execution/PromptResult';
-import type { CompletionPromptResult } from '../../execution/PromptResult';
-import type { EmbeddingPromptResult } from '../../execution/PromptResult';
-import type { Prompt } from '../../types/Prompt';
 import type { string_markdown } from '../../types/typeAliases';
 import type { string_markdown_text } from '../../types/typeAliases';
 import type { string_title } from '../../types/typeAliases';
-import
+import { computeOpenAiUsage } from './computeOpenAiUsage';
+import { OpenAiCompatibleExecutionTools } from './OpenAiCompatibleExecutionTools';
 /**
  * Execution Tools for calling OpenAI API
  *
  * @public exported from `@promptbook/openai`
  */
-export declare class OpenAiExecutionTools implements LlmExecutionTools {
-    protected readonly options: OpenAiExecutionToolsOptions;
-    /**
-     * OpenAI API client.
-     */
-    private client;
-    /**
-     * Rate limiter instance
-     */
-    private limiter;
-    /**
-     * Creates OpenAI Execution Tools.
-     *
-     * @param options which are relevant are directly passed to the OpenAI client
-     */
-    constructor(options: OpenAiExecutionToolsOptions);
+export declare class OpenAiExecutionTools extends OpenAiCompatibleExecutionTools implements LlmExecutionTools {
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
-    getClient(): Promise<OpenAI>;
-    /**
-     * Check the `options` passed to `constructor`
-     */
-    checkConfiguration(): Promise<void>;
     /**
-     * List all available
-
-
-    /**
-     * Calls OpenAI API to use a chat model.
-     */
-    callChatModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'format'>): Promise<ChatPromptResult>;
-    /**
-     * Calls OpenAI API to use a complete model.
-     */
-    callCompletionModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements'>): Promise<CompletionPromptResult>;
-    /**
-     * Calls OpenAI API to use a embedding model
+     * List all available models (non dynamically)
+     *
+     * Note: Purpose of this is to provide more information about models than standard listing from API
      */
-
+    protected get HARDCODED_MODELS(): ReadonlyArray<AvailableModel>;
     /**
-     *
+     * Computes the usage of the OpenAI API based on the response from OpenAI
      */
-
+    protected computeUsage: typeof computeOpenAiUsage;
     /**
      * Default model for chat variant.
      */
-
+    protected getDefaultChatModel(): AvailableModel;
     /**
      * Default model for completion variant.
      */
-
+    protected getDefaultCompletionModel(): AvailableModel;
     /**
      * Default model for completion variant.
      */
-
+    protected getDefaultEmbeddingModel(): AvailableModel;
 }
-/**
- * TODO: [🧠][🧙♂️] Maybe there can be some wizzard for thoose who want to use just OpenAI
- * TODO: Maybe Create some common util for callChatModel and callCompletionModel
- * TODO: Maybe make custom OpenAiError
- * TODO: [🧠][🈁] Maybe use `isDeterministic` from options
- * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
- */
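The net effect of this hunk is that `OpenAiExecutionTools` keeps its public `LlmExecutionTools` surface but inherits the client handling, rate limiting, and model-calling logic from the new shared `OpenAiCompatibleExecutionTools` base class. A minimal usage sketch, assuming the factory and the `checkConfiguration()`/`listModels()` members behave as in previous releases (the API key below is a placeholder):

```ts
import { createOpenAiExecutionTools } from '@promptbook/openai';

async function main(): Promise<void> {
    // Construct the tools exactly as before the refactor; the options object is unchanged
    const tools = createOpenAiExecutionTools({
        apiKey: 'sk-your-openai-api-key', // <- hypothetical placeholder key
    });

    // These members are assumed to now be inherited from `OpenAiCompatibleExecutionTools`
    await tools.checkConfiguration();
    const models = await tools.listModels();
    console.info(`Available OpenAI models: ${models.length}`);
}

main().catch(console.error);
```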
package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts
CHANGED

@@ -1,7 +1,7 @@
 import type { ClientOptions } from 'openai';
 import type { CommonToolsOptions } from '../../execution/CommonToolsOptions';
 /**
- * Options for `OpenAiExecutionTools`
+ * Options for `createOpenAiExecutionTools` and `OpenAiExecutionTools`
  *
  * This extends OpenAI's `ClientOptions` with are directly passed to the OpenAI client.
  * Rest is used by the `OpenAiExecutionTools`.
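For context, `OpenAiExecutionToolsOptions` merges OpenAI's `ClientOptions` (forwarded to the OpenAI client) with Promptbook's `CommonToolsOptions`. A sketch of a typical options object, assuming the type is re-exported from the package index and that `isVerbose` is still one of the `CommonToolsOptions` fields:

```ts
import type { OpenAiExecutionToolsOptions } from '@promptbook/openai';

// `apiKey` comes from OpenAI's `ClientOptions` and is passed straight to the OpenAI client;
// `isVerbose` is assumed here to come from Promptbook's `CommonToolsOptions`
export const exampleOptions: OpenAiExecutionToolsOptions = {
    apiKey: 'sk-your-openai-api-key', // <- hypothetical placeholder key
    isVerbose: true,
};
```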
package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts
CHANGED

@@ -3,6 +3,8 @@ import type { OpenAiExecutionToolsOptions } from './OpenAiExecutionToolsOptions'
 /**
  * Execution Tools for calling OpenAI API
  *
+ * Note: This can be also used for other OpenAI compatible APIs, like Ollama
+ *
  * @public exported from `@promptbook/openai`
  */
 export declare const createOpenAiExecutionTools: ((options: OpenAiExecutionToolsOptions) => OpenAiExecutionTools) & {
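The newly added note means the factory can target any server that speaks the OpenAI wire protocol. A hedged sketch pointing it at a local Ollama instance; the URL and dummy key are assumptions about a local setup (Ollama exposes an OpenAI-compatible endpoint under `/v1`), and this release also ships a dedicated `createOllamaExecutionTools` per the file list above:

```ts
import { createOpenAiExecutionTools } from '@promptbook/openai';

// Reuse the OpenAI tools against an OpenAI-compatible server
const ollamaCompatibleTools = createOpenAiExecutionTools({
    baseURL: 'http://localhost:11434/v1', // <- assumed local Ollama endpoint
    apiKey: 'ollama', // many OpenAI-compatible servers accept any non-empty key
});

console.info(ollamaCompatibleTools.title, '-', ollamaCompatibleTools.description);
```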
package/esm/typings/src/llm-providers/openai/openai-models.d.ts
CHANGED

@@ -1,5 +1,4 @@
 import type { AvailableModel } from '../../execution/AvailableModel';
-import type { number_usd } from '../../types/typeAliases';
 /**
  * List of available OpenAI models with pricing
  *
@@ -9,12 +8,7 @@ import type { number_usd } from '../../types/typeAliases';
  * @see https://openai.com/api/pricing/
  * @public exported from `@promptbook/openai`
  */
-export declare const OPENAI_MODELS: ReadonlyArray<AvailableModel & {
-    pricing?: {
-        readonly prompt: number_usd;
-        readonly output: number_usd;
-    };
-}>;
+export declare const OPENAI_MODELS: ReadonlyArray<AvailableModel>;
 /**
  * Note: [🤖] Add models of new variant
  * TODO: [🧠] Some mechanism to propagate unsureness
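`OPENAI_MODELS` is now typed as a plain `ReadonlyArray<AvailableModel>`; the inline pricing shape removed here appears to have moved into `AvailableModel` itself (see the `AvailableModel.d.ts +9 -1` entry in the file list). A sketch of reading the list, assuming `AvailableModel` keeps a `modelName` identifier and now carries the optional `pricing` field with per-token USD prices:

```ts
import { OPENAI_MODELS } from '@promptbook/openai';

// Print pricing for models that declare it; `modelName` and `pricing` are assumed
// fields on `AvailableModel`, mirroring the shape removed from this file
for (const model of OPENAI_MODELS) {
    if (model.pricing !== undefined) {
        console.info(`${model.modelName}: prompt $${model.pricing.prompt}, output $${model.pricing.output}`);
    }
}
```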
package/esm/typings/src/version.d.ts
CHANGED

@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
 export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
 /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.94.0-
+ * It follows semantic versioning (e.g., `0.94.0-3`).
  *
  * @generated
  */
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/openai",
-    "version": "0.94.0-1",
+    "version": "0.94.0-4",
     "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
     "private": false,
     "sideEffects": false,
@@ -63,7 +63,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/openai.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.94.0-1"
+        "@promptbook/core": "0.94.0-4"
     },
     "dependencies": {
         "bottleneck": "^2.19.5",
|