@promptbook/openai 0.98.0-6 → 0.98.0-9

This diff shows the changes between two publicly released versions of this package as they appear in the supported public registries. It is provided for informational purposes only.
@@ -1,4 +1,14 @@
+import type { AvailableModel } from '../../execution/AvailableModel';
+import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
+import type { Usage } from '../../execution/Usage';
+import type { string_markdown } from '../../types/typeAliases';
+import type { string_markdown_text } from '../../types/typeAliases';
+import type { string_model_name } from '../../types/typeAliases';
+import type { string_title } from '../../types/typeAliases';
+import { RemoteLlmExecutionTools } from '../remote/RemoteLlmExecutionTools';
+import { computeOpenAiUsage } from './computeOpenAiUsage';
 import { OpenAiCompatibleExecutionTools } from './OpenAiCompatibleExecutionTools';
+import type { OpenAiCompatibleExecutionToolsNonProxiedOptions } from './OpenAiCompatibleExecutionToolsOptions';
 import type { OpenAiCompatibleExecutionToolsOptions } from './OpenAiCompatibleExecutionToolsOptions';
 /**
  * Execution Tools for calling OpenAI compatible API
@@ -7,10 +17,57 @@ import type { OpenAiCompatibleExecutionToolsOptions } from './OpenAiCompatibleEx
  *
  * @public exported from `@promptbook/openai`
  */
-export declare const createOpenAiCompatibleExecutionTools: ((options: OpenAiCompatibleExecutionToolsOptions) => OpenAiCompatibleExecutionTools) & {
+export declare const createOpenAiCompatibleExecutionTools: ((options: OpenAiCompatibleExecutionToolsOptions & {
+    /**
+     * The model name to use for all operations
+     *
+     * This will be the only model available through this LLM provider and it will be a chat model.
+     * Other variants won't be available for now.
+     */
+    defaultModelName: string_model_name;
+}) => OpenAiCompatibleExecutionTools | RemoteLlmExecutionTools) & {
     packageName: string;
     className: string;
 };
+/**
+ * Execution Tools for calling ONE SPECIFIC PRECONFIGURED OpenAI compatible provider
+ *
+ * @private for `createOpenAiCompatibleExecutionTools`
+ */
+export declare class HardcodedOpenAiCompatibleExecutionTools extends OpenAiCompatibleExecutionTools implements LlmExecutionTools {
+    private readonly defaultModelName;
+    protected readonly options: OpenAiCompatibleExecutionToolsNonProxiedOptions;
+    /**
+     * Creates OpenAI compatible Execution Tools.
+     *
+     * @param options which are relevant are directly passed to the OpenAI compatible client
+     */
+    constructor(defaultModelName: string_model_name, options: OpenAiCompatibleExecutionToolsNonProxiedOptions);
+    get title(): string_title & string_markdown_text;
+    get description(): string_markdown;
+    /**
+     * List all available models (non dynamically)
+     *
+     * Note: Purpose of this is to provide more information about models than standard listing from API
+     */
+    protected get HARDCODED_MODELS(): ReadonlyArray<AvailableModel>;
+    /**
+     * Computes the usage
+     */
+    protected computeUsage(...args: Parameters<typeof computeOpenAiUsage>): Usage;
+    /**
+     * Default model for chat variant.
+     */
+    protected getDefaultChatModel(): AvailableModel;
+    /**
+     * Default model for completion variant.
+     */
+    protected getDefaultCompletionModel(): AvailableModel;
+    /**
+     * Default model for completion variant.
+     */
+    protected getDefaultEmbeddingModel(): AvailableModel;
+}
 /**
  * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
  * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
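
Read together, these hunks change the public factory in one way that matters to callers: `createOpenAiCompatibleExecutionTools` now requires a `defaultModelName` alongside the existing options, and its return type widens to `OpenAiCompatibleExecutionTools | RemoteLlmExecutionTools`. A minimal sketch of a call against the new signature follows; the `apiKey` and `baseURL` fields are assumptions about `OpenAiCompatibleExecutionToolsOptions` and are not part of this diff.

```typescript
import { createOpenAiCompatibleExecutionTools } from '@promptbook/openai';

// Sketch only: `apiKey` and `baseURL` are assumed fields of
// OpenAiCompatibleExecutionToolsOptions; this diff does not show them.
const llmTools = createOpenAiCompatibleExecutionTools({
    apiKey: process.env.MY_PROVIDER_API_KEY,
    baseURL: 'https://my-openai-compatible-provider.example/v1',
    // New in this release: the single (chat) model this provider will expose
    defaultModelName: 'my-chat-model',
});

// The result may be a local OpenAiCompatibleExecutionTools or a proxied
// RemoteLlmExecutionTools; both are consumed through the LlmExecutionTools surface.
```
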
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
 export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
 /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.98.0-5`).
+ * It follows semantic versioning (e.g., `0.98.0-8`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@promptbook/openai",
-  "version": "0.98.0-6",
+  "version": "0.98.0-9",
   "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
   "private": false,
   "sideEffects": false,
@@ -74,13 +74,14 @@
   "module": "./esm/index.es.js",
   "typings": "./esm/typings/src/_packages/openai.index.d.ts",
   "peerDependencies": {
-    "@promptbook/core": "0.98.0-6"
+    "@promptbook/core": "0.98.0-9"
   },
   "dependencies": {
     "bottleneck": "^2.19.5",
     "colors": "1.4.0",
     "crypto": "1.0.1",
     "openai": "4.63.0",
+    "socket.io-client": "4.7.2",
     "spacetrim": "0.11.59"
   }
 }
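
On the dependency side, the `@promptbook/core` peer dependency moves in lockstep with the package version, and `socket.io-client` is added, which lines up with the new `RemoteLlmExecutionTools` import in the typings above: a proxied configuration is presumably served over a socket connection instead of calling the provider directly. The proxied option shape is not visible in this diff, so the sketch below uses hypothetical `isProxied` and `remoteServerUrl` fields purely for illustration.

```typescript
import { createOpenAiCompatibleExecutionTools } from '@promptbook/openai';

// Hypothetical proxied configuration; `isProxied` and `remoteServerUrl` are
// illustrative names only and are not confirmed by this diff.
const proxiedTools = createOpenAiCompatibleExecutionTools({
    isProxied: true,
    remoteServerUrl: 'https://promptbook-server.example/',
    defaultModelName: 'my-chat-model',
} as any); // cast only because the proxied option shape is assumed here

// With a proxied configuration the factory is expected to return
// RemoteLlmExecutionTools, which would talk to the server via socket.io-client.
```
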