@promptbook/cli 0.50.0-16 → 0.50.0-18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +1 -1
- package/esm/typings/_packages/openai.index.d.ts +2 -1
- package/esm/typings/execution/plugins/llm-execution-tools/openai/computeOpenaiUsage.d.ts +0 -3
- package/esm/typings/execution/plugins/llm-execution-tools/openai/computeUsage.d.ts +13 -0
- package/esm/typings/execution/plugins/llm-execution-tools/openai/computeUsage.test.d.ts +1 -0
- package/esm/typings/execution/plugins/llm-execution-tools/openai/models.d.ts +25 -0
- package/package.json +2 -2
- package/umd/index.umd.js +1 -1
- package/umd/typings/_packages/openai.index.d.ts +2 -1
- package/umd/typings/execution/plugins/llm-execution-tools/openai/computeOpenaiUsage.d.ts +0 -3
- package/umd/typings/execution/plugins/llm-execution-tools/openai/computeUsage.d.ts +13 -0
- package/umd/typings/execution/plugins/llm-execution-tools/openai/computeUsage.test.d.ts +1 -0
- package/umd/typings/execution/plugins/llm-execution-tools/openai/models.d.ts +25 -0
package/esm/index.es.js
CHANGED
|
@@ -143,7 +143,7 @@ new Function("\n try {\n if (typeof WorkerGlobalScope !== 'undefined'
|
|
|
143
143
|
/**
|
|
144
144
|
* The version of the Promptbook library
|
|
145
145
|
*/
|
|
146
|
-
var PROMPTBOOK_VERSION = '0.50.0-16';
|
|
146
|
+
var PROMPTBOOK_VERSION = '0.50.0-17';
|
|
147
147
|
|
|
148
148
|
/**
|
|
149
149
|
* This error indicates that the promptbook in a markdown format cannot be parsed into a valid promptbook object
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { OPENAI_MODELS } from '../execution/plugins/llm-execution-tools/openai/models';
|
|
1
2
|
import { OpenAiExecutionTools } from '../execution/plugins/llm-execution-tools/openai/OpenAiExecutionTools';
|
|
2
3
|
import { OpenAiExecutionToolsOptions } from '../execution/plugins/llm-execution-tools/openai/OpenAiExecutionToolsOptions';
|
|
3
|
-
export { OpenAiExecutionTools, OpenAiExecutionToolsOptions };
|
|
4
|
+
export { OPENAI_MODELS, OpenAiExecutionTools, OpenAiExecutionToolsOptions };
|
|
@@ -6,6 +6,3 @@ import type { PromptResult } from '../../../PromptResult';
|
|
|
6
6
|
* @throws {PromptbookExecutionError} If the usage is not defined in the response from OpenAI
|
|
7
7
|
*/
|
|
8
8
|
export declare function computeOpenaiUsage(rawResponse: Pick<OpenAI.Chat.Completions.ChatCompletion | OpenAI.Completions.Completion, 'model' | 'usage'>): PromptResult['usage'];
|
|
9
|
-
/**
|
|
10
|
-
* TODO: [🍓] Make better
|
|
11
|
-
*/
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* String value found on openai page
|
|
3
|
+
*
|
|
4
|
+
* @private within the library, used only as internal helper for `OPENAI_MODELS` and `computeUsage`
|
|
5
|
+
*/
|
|
6
|
+
type string_openai_price = `$${number}.${number} / ${number}M tokens`;
|
|
7
|
+
/**
|
|
8
|
+
* Function computeUsage will create price per one token based on the string value found on openai page
|
|
9
|
+
*
|
|
10
|
+
* @private within the library, used only as internal helper for `OPENAI_MODELS`
|
|
11
|
+
*/
|
|
12
|
+
export declare function computeUsage(value: string_openai_price): number;
|
|
13
|
+
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { number_usd } from '../../../../types/typeAliases';
|
|
2
|
+
import type { AvailableModel } from '../../../LlmExecutionTools';
|
|
3
|
+
/**
|
|
4
|
+
* List of available OpenAI models with pricing
|
|
5
|
+
*
|
|
6
|
+
* Note: Done at 2024-05-20
|
|
7
|
+
*
|
|
8
|
+
* @see https://platform.openai.com/docs/models/
|
|
9
|
+
* @see https://openai.com/api/pricing/
|
|
10
|
+
*/
|
|
11
|
+
export declare const OPENAI_MODELS: Array<AvailableModel & {
|
|
12
|
+
pricing?: {
|
|
13
|
+
prompt: number_usd;
|
|
14
|
+
output: number_usd;
|
|
15
|
+
};
|
|
16
|
+
}>;
|
|
17
|
+
/**
|
|
18
|
+
* TODO: [🧠] Some mechanism to propagate unsureness
|
|
19
|
+
* TODO: [🕚] Make this list dynamic - dynamically can be listed modelNames but not modelVariant, legacy status, context length and pricing
|
|
20
|
+
* @see https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4
|
|
21
|
+
* @see https://openai.com/api/pricing/
|
|
22
|
+
* @see /other/playground/playground.ts
|
|
23
|
+
* TODO: [🍓] Make better
|
|
24
|
+
* TODO: Change model titles to human eg: "gpt-4-turbo-2024-04-09" -> "GPT-4 Turbo (2024-04-09)"
|
|
25
|
+
*/
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@promptbook/cli",
|
|
3
|
-
"version": "0.50.0-16",
|
|
3
|
+
"version": "0.50.0-18",
|
|
4
4
|
"description": "Library to supercharge your use of large language models",
|
|
5
5
|
"private": false,
|
|
6
6
|
"sideEffects": false,
|
|
@@ -52,7 +52,7 @@
|
|
|
52
52
|
}
|
|
53
53
|
],
|
|
54
54
|
"peerDependencies": {
|
|
55
|
-
"@promptbook/core": "0.50.0-16"
|
|
55
|
+
"@promptbook/core": "0.50.0-18"
|
|
56
56
|
},
|
|
57
57
|
"main": "./umd/index.umd.js",
|
|
58
58
|
"module": "./esm/index.es.js",
|
package/umd/index.umd.js
CHANGED
|
@@ -146,7 +146,7 @@
|
|
|
146
146
|
/**
|
|
147
147
|
* The version of the Promptbook library
|
|
148
148
|
*/
|
|
149
|
-
var PROMPTBOOK_VERSION = '0.50.0-16';
|
|
149
|
+
var PROMPTBOOK_VERSION = '0.50.0-17';
|
|
150
150
|
|
|
151
151
|
/**
|
|
152
152
|
* This error indicates that the promptbook in a markdown format cannot be parsed into a valid promptbook object
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { OPENAI_MODELS } from '../execution/plugins/llm-execution-tools/openai/models';
|
|
1
2
|
import { OpenAiExecutionTools } from '../execution/plugins/llm-execution-tools/openai/OpenAiExecutionTools';
|
|
2
3
|
import { OpenAiExecutionToolsOptions } from '../execution/plugins/llm-execution-tools/openai/OpenAiExecutionToolsOptions';
|
|
3
|
-
export { OpenAiExecutionTools, OpenAiExecutionToolsOptions };
|
|
4
|
+
export { OPENAI_MODELS, OpenAiExecutionTools, OpenAiExecutionToolsOptions };
|
|
@@ -6,6 +6,3 @@ import type { PromptResult } from '../../../PromptResult';
|
|
|
6
6
|
* @throws {PromptbookExecutionError} If the usage is not defined in the response from OpenAI
|
|
7
7
|
*/
|
|
8
8
|
export declare function computeOpenaiUsage(rawResponse: Pick<OpenAI.Chat.Completions.ChatCompletion | OpenAI.Completions.Completion, 'model' | 'usage'>): PromptResult['usage'];
|
|
9
|
-
/**
|
|
10
|
-
* TODO: [🍓] Make better
|
|
11
|
-
*/
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* String value found on openai page
|
|
3
|
+
*
|
|
4
|
+
* @private within the library, used only as internal helper for `OPENAI_MODELS` and `computeUsage`
|
|
5
|
+
*/
|
|
6
|
+
type string_openai_price = `$${number}.${number} / ${number}M tokens`;
|
|
7
|
+
/**
|
|
8
|
+
* Function computeUsage will create price per one token based on the string value found on openai page
|
|
9
|
+
*
|
|
10
|
+
* @private within the library, used only as internal helper for `OPENAI_MODELS`
|
|
11
|
+
*/
|
|
12
|
+
export declare function computeUsage(value: string_openai_price): number;
|
|
13
|
+
export {};
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { number_usd } from '../../../../types/typeAliases';
|
|
2
|
+
import type { AvailableModel } from '../../../LlmExecutionTools';
|
|
3
|
+
/**
|
|
4
|
+
* List of available OpenAI models with pricing
|
|
5
|
+
*
|
|
6
|
+
* Note: Done at 2024-05-20
|
|
7
|
+
*
|
|
8
|
+
* @see https://platform.openai.com/docs/models/
|
|
9
|
+
* @see https://openai.com/api/pricing/
|
|
10
|
+
*/
|
|
11
|
+
export declare const OPENAI_MODELS: Array<AvailableModel & {
|
|
12
|
+
pricing?: {
|
|
13
|
+
prompt: number_usd;
|
|
14
|
+
output: number_usd;
|
|
15
|
+
};
|
|
16
|
+
}>;
|
|
17
|
+
/**
|
|
18
|
+
* TODO: [🧠] Some mechanism to propagate unsureness
|
|
19
|
+
* TODO: [🕚] Make this list dynamic - dynamically can be listed modelNames but not modelVariant, legacy status, context length and pricing
|
|
20
|
+
* @see https://platform.openai.com/docs/models/gpt-4-turbo-and-gpt-4
|
|
21
|
+
* @see https://openai.com/api/pricing/
|
|
22
|
+
* @see /other/playground/playground.ts
|
|
23
|
+
* TODO: [🍓] Make better
|
|
24
|
+
* TODO: Change model titles to human eg: "gpt-4-turbo-2024-04-09" -> "GPT-4 Turbo (2024-04-09)"
|
|
25
|
+
*/
|