@promptbook/wizard 0.95.0 → 0.98.0-10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. package/README.md +12 -0
  2. package/esm/index.es.js +345 -70
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/src/_packages/anthropic-claude.index.d.ts +2 -2
  5. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  6. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  7. package/esm/typings/src/_packages/openai.index.d.ts +10 -0
  8. package/esm/typings/src/_packages/types.index.d.ts +12 -2
  9. package/esm/typings/src/_packages/wizard.index.d.ts +4 -0
  10. package/esm/typings/src/config.d.ts +1 -1
  11. package/esm/typings/src/execution/createPipelineExecutor/$OngoingTaskResult.d.ts +8 -0
  12. package/esm/typings/src/execution/utils/validatePromptResult.d.ts +53 -0
  13. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +3 -3
  14. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +2 -2
  15. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +2 -2
  16. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +4 -4
  17. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionToolsOptions.d.ts +52 -0
  18. package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +3 -5
  19. package/esm/typings/src/llm-providers/openai/createOpenAiCompatibleExecutionTools.d.ts +74 -0
  20. package/esm/typings/src/llm-providers/openai/register-configuration.d.ts +11 -0
  21. package/esm/typings/src/llm-providers/openai/register-constructor.d.ts +14 -0
  22. package/esm/typings/src/version.d.ts +1 -1
  23. package/package.json +2 -2
  24. package/umd/index.umd.js +346 -69
  25. package/umd/index.umd.js.map +1 -1
package/esm/typings/src/_packages/anthropic-claude.index.d.ts CHANGED
@@ -2,7 +2,7 @@ import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '../version';
  import { ANTHROPIC_CLAUDE_MODELS } from '../llm-providers/anthropic-claude/anthropic-claude-models';
  import { AnthropicClaudeExecutionTools } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionTools';
  import type { AnthropicClaudeExecutionToolsOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
- import type { AnthropicClaudeExecutionToolsDirectOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
+ import type { AnthropicClaudeExecutionToolsNonProxiedOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
  import type { AnthropicClaudeExecutionToolsProxiedOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
  import { createAnthropicClaudeExecutionTools } from '../llm-providers/anthropic-claude/createAnthropicClaudeExecutionTools';
  import { _AnthropicClaudeRegistration } from '../llm-providers/anthropic-claude/register-constructor';
@@ -10,7 +10,7 @@ export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
  export { ANTHROPIC_CLAUDE_MODELS };
  export { AnthropicClaudeExecutionTools };
  export type { AnthropicClaudeExecutionToolsOptions };
- export type { AnthropicClaudeExecutionToolsDirectOptions };
+ export type { AnthropicClaudeExecutionToolsNonProxiedOptions };
  export type { AnthropicClaudeExecutionToolsProxiedOptions };
  export { createAnthropicClaudeExecutionTools };
  export { _AnthropicClaudeRegistration };
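For consumers of `@promptbook/anthropic-claude`, the `Direct` → `NonProxied` rename is a type-only change. A minimal migration sketch, assuming the old alias was imported directly and that `CommonToolsOptions` contributes no required fields:

```ts
// Before (0.95.0):
// import type { AnthropicClaudeExecutionToolsDirectOptions } from '@promptbook/anthropic-claude';

// After (0.98.0-x), the same option shape is exported under the new name:
import type { AnthropicClaudeExecutionToolsNonProxiedOptions } from '@promptbook/anthropic-claude';

// Illustrative value only; `apiKey` comes from Anthropic's ClientOptions and
// `isProxied: false` matches the discriminant declared in this diff.
const options: AnthropicClaudeExecutionToolsNonProxiedOptions = {
    apiKey: process.env.ANTHROPIC_API_KEY,
    isProxied: false,
};
```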
package/esm/typings/src/_packages/cli.index.d.ts CHANGED
@@ -12,8 +12,10 @@ import { _OllamaMetadataRegistration } from '../llm-providers/ollama/register-co
  import { _OllamaRegistration } from '../llm-providers/ollama/register-constructor';
  import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
  import { _OpenAiAssistantMetadataRegistration } from '../llm-providers/openai/register-configuration';
+ import { _OpenAiCompatibleMetadataRegistration } from '../llm-providers/openai/register-configuration';
  import { _OpenAiRegistration } from '../llm-providers/openai/register-constructor';
  import { _OpenAiAssistantRegistration } from '../llm-providers/openai/register-constructor';
+ import { _OpenAiCompatibleRegistration } from '../llm-providers/openai/register-constructor';
  import { _BoilerplateScraperRegistration } from '../scrapers/_boilerplate/register-constructor';
  import { _BoilerplateScraperMetadataRegistration } from '../scrapers/_boilerplate/register-metadata';
  import { _LegacyDocumentScraperRegistration } from '../scrapers/document-legacy/register-constructor';
@@ -42,8 +44,10 @@ export { _OllamaMetadataRegistration };
  export { _OllamaRegistration };
  export { _OpenAiMetadataRegistration };
  export { _OpenAiAssistantMetadataRegistration };
+ export { _OpenAiCompatibleMetadataRegistration };
  export { _OpenAiRegistration };
  export { _OpenAiAssistantRegistration };
+ export { _OpenAiCompatibleRegistration };
  export { _BoilerplateScraperRegistration };
  export { _BoilerplateScraperMetadataRegistration };
  export { _LegacyDocumentScraperRegistration };
package/esm/typings/src/_packages/core.index.d.ts CHANGED
@@ -111,6 +111,7 @@ import { _GoogleMetadataRegistration } from '../llm-providers/google/register-co
  import { _OllamaMetadataRegistration } from '../llm-providers/ollama/register-configuration';
  import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
  import { _OpenAiAssistantMetadataRegistration } from '../llm-providers/openai/register-configuration';
+ import { _OpenAiCompatibleMetadataRegistration } from '../llm-providers/openai/register-configuration';
  import { migratePipeline } from '../migrations/migratePipeline';
  import { preparePersona } from '../personas/preparePersona';
  import { book } from '../pipeline/book-notation';
@@ -259,6 +260,7 @@ export { _GoogleMetadataRegistration };
  export { _OllamaMetadataRegistration };
  export { _OpenAiMetadataRegistration };
  export { _OpenAiAssistantMetadataRegistration };
+ export { _OpenAiCompatibleMetadataRegistration };
  export { migratePipeline };
  export { preparePersona };
  export { book };
package/esm/typings/src/_packages/openai.index.d.ts CHANGED
@@ -1,22 +1,32 @@
  import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '../version';
  import { createOpenAiAssistantExecutionTools } from '../llm-providers/openai/createOpenAiAssistantExecutionTools';
+ import { createOpenAiCompatibleExecutionTools } from '../llm-providers/openai/createOpenAiCompatibleExecutionTools';
  import { createOpenAiExecutionTools } from '../llm-providers/openai/createOpenAiExecutionTools';
  import { OPENAI_MODELS } from '../llm-providers/openai/openai-models';
  import { OpenAiAssistantExecutionTools } from '../llm-providers/openai/OpenAiAssistantExecutionTools';
  import type { OpenAiAssistantExecutionToolsOptions } from '../llm-providers/openai/OpenAiAssistantExecutionToolsOptions';
  import { OpenAiCompatibleExecutionTools } from '../llm-providers/openai/OpenAiCompatibleExecutionTools';
+ import type { OpenAiCompatibleExecutionToolsOptions } from '../llm-providers/openai/OpenAiCompatibleExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsNonProxiedOptions } from '../llm-providers/openai/OpenAiCompatibleExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsProxiedOptions } from '../llm-providers/openai/OpenAiCompatibleExecutionToolsOptions';
  import { OpenAiExecutionTools } from '../llm-providers/openai/OpenAiExecutionTools';
  import type { OpenAiExecutionToolsOptions } from '../llm-providers/openai/OpenAiExecutionToolsOptions';
  import { _OpenAiRegistration } from '../llm-providers/openai/register-constructor';
  import { _OpenAiAssistantRegistration } from '../llm-providers/openai/register-constructor';
+ import { _OpenAiCompatibleRegistration } from '../llm-providers/openai/register-constructor';
  export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
  export { createOpenAiAssistantExecutionTools };
+ export { createOpenAiCompatibleExecutionTools };
  export { createOpenAiExecutionTools };
  export { OPENAI_MODELS };
  export { OpenAiAssistantExecutionTools };
  export type { OpenAiAssistantExecutionToolsOptions };
  export { OpenAiCompatibleExecutionTools };
+ export type { OpenAiCompatibleExecutionToolsOptions };
+ export type { OpenAiCompatibleExecutionToolsNonProxiedOptions };
+ export type { OpenAiCompatibleExecutionToolsProxiedOptions };
  export { OpenAiExecutionTools };
  export type { OpenAiExecutionToolsOptions };
  export { _OpenAiRegistration };
  export { _OpenAiAssistantRegistration };
+ export { _OpenAiCompatibleRegistration };
package/esm/typings/src/_packages/types.index.d.ts CHANGED
@@ -61,6 +61,8 @@ import type { Usage } from '../execution/Usage';
  import type { UsageCounts } from '../execution/Usage';
  import type { UserInterfaceTools } from '../execution/UserInterfaceTools';
  import type { UserInterfaceToolsPromptDialogOptions } from '../execution/UserInterfaceTools';
+ import type { ValidatePromptResultOptions } from '../execution/utils/validatePromptResult';
+ import type { ValidatePromptResultResult } from '../execution/utils/validatePromptResult';
  import type { FormatSubvalueParser } from '../formats/_common/FormatSubvalueParser';
  import type { FormatSubvalueParserMapValuesOptions } from '../formats/_common/FormatSubvalueParser';
  import type { CsvSettings } from '../formats/csv/CsvSettings';
@@ -74,13 +76,16 @@ import type { CacheItem } from '../llm-providers/_common/utils/cache/CacheItem';
  import type { CacheLlmToolsOptions } from '../llm-providers/_common/utils/cache/CacheLlmToolsOptions';
  import type { LlmExecutionToolsWithTotalUsage } from '../llm-providers/_common/utils/count-total-usage/LlmExecutionToolsWithTotalUsage';
  import type { AnthropicClaudeExecutionToolsOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
- import type { AnthropicClaudeExecutionToolsDirectOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
+ import type { AnthropicClaudeExecutionToolsNonProxiedOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
  import type { AnthropicClaudeExecutionToolsProxiedOptions } from '../llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions';
  import type { AzureOpenAiExecutionToolsOptions } from '../llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions';
  import type { DeepseekExecutionToolsOptions } from '../llm-providers/deepseek/DeepseekExecutionToolsOptions';
  import type { GoogleExecutionToolsOptions } from '../llm-providers/google/GoogleExecutionToolsOptions';
  import type { OllamaExecutionToolsOptions } from '../llm-providers/ollama/OllamaExecutionToolsOptions';
  import type { OpenAiAssistantExecutionToolsOptions } from '../llm-providers/openai/OpenAiAssistantExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsOptions } from '../llm-providers/openai/OpenAiCompatibleExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsNonProxiedOptions } from '../llm-providers/openai/OpenAiCompatibleExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsProxiedOptions } from '../llm-providers/openai/OpenAiCompatibleExecutionToolsOptions';
  import type { OpenAiExecutionToolsOptions } from '../llm-providers/openai/OpenAiExecutionToolsOptions';
  import type { VercelExecutionToolsOptions } from '../llm-providers/vercel/VercelExecutionToolsOptions';
  import type { VercelProvider } from '../llm-providers/vercel/VercelProvider';
@@ -360,6 +365,8 @@ export type { Usage };
  export type { UsageCounts };
  export type { UserInterfaceTools };
  export type { UserInterfaceToolsPromptDialogOptions };
+ export type { ValidatePromptResultOptions };
+ export type { ValidatePromptResultResult };
  export type { FormatSubvalueParser };
  export type { FormatSubvalueParserMapValuesOptions };
  export type { CsvSettings };
@@ -373,13 +380,16 @@ export type { CacheItem };
  export type { CacheLlmToolsOptions };
  export type { LlmExecutionToolsWithTotalUsage };
  export type { AnthropicClaudeExecutionToolsOptions };
- export type { AnthropicClaudeExecutionToolsDirectOptions };
+ export type { AnthropicClaudeExecutionToolsNonProxiedOptions };
  export type { AnthropicClaudeExecutionToolsProxiedOptions };
  export type { AzureOpenAiExecutionToolsOptions };
  export type { DeepseekExecutionToolsOptions };
  export type { GoogleExecutionToolsOptions };
  export type { OllamaExecutionToolsOptions };
  export type { OpenAiAssistantExecutionToolsOptions };
+ export type { OpenAiCompatibleExecutionToolsOptions };
+ export type { OpenAiCompatibleExecutionToolsNonProxiedOptions };
+ export type { OpenAiCompatibleExecutionToolsProxiedOptions };
  export type { OpenAiExecutionToolsOptions };
  export type { VercelExecutionToolsOptions };
  export type { VercelProvider };
package/esm/typings/src/_packages/wizard.index.d.ts CHANGED
@@ -11,8 +11,10 @@ import { _OllamaMetadataRegistration } from '../llm-providers/ollama/register-co
  import { _OllamaRegistration } from '../llm-providers/ollama/register-constructor';
  import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
  import { _OpenAiAssistantMetadataRegistration } from '../llm-providers/openai/register-configuration';
+ import { _OpenAiCompatibleMetadataRegistration } from '../llm-providers/openai/register-configuration';
  import { _OpenAiRegistration } from '../llm-providers/openai/register-constructor';
  import { _OpenAiAssistantRegistration } from '../llm-providers/openai/register-constructor';
+ import { _OpenAiCompatibleRegistration } from '../llm-providers/openai/register-constructor';
  import { _BoilerplateScraperRegistration } from '../scrapers/_boilerplate/register-constructor';
  import { _BoilerplateScraperMetadataRegistration } from '../scrapers/_boilerplate/register-metadata';
  import { _LegacyDocumentScraperRegistration } from '../scrapers/document-legacy/register-constructor';
@@ -41,8 +43,10 @@ export { _OllamaMetadataRegistration };
  export { _OllamaRegistration };
  export { _OpenAiMetadataRegistration };
  export { _OpenAiAssistantMetadataRegistration };
+ export { _OpenAiCompatibleMetadataRegistration };
  export { _OpenAiRegistration };
  export { _OpenAiAssistantRegistration };
+ export { _OpenAiCompatibleRegistration };
  export { _BoilerplateScraperRegistration };
  export { _BoilerplateScraperMetadataRegistration };
  export { _LegacyDocumentScraperRegistration };
package/esm/typings/src/config.d.ts CHANGED
@@ -176,7 +176,7 @@ export declare const DEFAULT_MAX_PARALLEL_COUNT = 5;
   *
   * @public exported from `@promptbook/core`
   */
- export declare const DEFAULT_MAX_EXECUTION_ATTEMPTS = 10;
+ export declare const DEFAULT_MAX_EXECUTION_ATTEMPTS = 7;
  /**
   * The maximum depth to which knowledge sources will be scraped when building a knowledge base.
   * This prevents infinite recursion and limits resource usage.
package/esm/typings/src/execution/createPipelineExecutor/$OngoingTaskResult.d.ts CHANGED
@@ -45,4 +45,12 @@ export type $OngoingTaskResult = {
       * List of errors encountered during script postprocessing or execution.
       */
      $scriptPipelineExecutionErrors: Array<Error>;
+     /**
+      * Array of all failed attempts, storing both the result string and the error for each failure
+      */
+     $failedResults: Array<{
+         attemptIndex: number;
+         result: string | null;
+         error: ExpectError;
+     }>;
  };
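For orientation, a self-contained sketch of what one element of the new `$failedResults` array looks like; the values are hypothetical, and `ExpectError` is assumed here to take a message string like a plain `Error` (real entries are produced by the executor's retry loop, which is not part of this diff):

```ts
import { ExpectError } from '../../errors/ExpectError';

type FailedResult = {
    attemptIndex: number; // zero-based index of the rejected attempt
    result: string | null; // raw result string of that attempt, or null if none was produced
    error: ExpectError; // the expectation/format error that caused the rejection
};

// Hypothetical entry, shaped like the declaration above
const failedAttempt: FailedResult = {
    attemptIndex: 0,
    result: 'Too short answer',
    error: new ExpectError('Expected at least 10 words but got 3'),
};
```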
package/esm/typings/src/execution/utils/validatePromptResult.d.ts ADDED
@@ -0,0 +1,53 @@
+ import type { FormatCommand } from '../../commands/FORMAT/FormatCommand';
+ import { ExpectError } from '../../errors/ExpectError';
+ import type { Expectations } from '../../pipeline/PipelineJson/Expectations';
+ import type { string_postprocessing_function_name } from '../../types/typeAliases';
+ /**
+  * Options for validating a prompt result
+  */
+ export interface ValidatePromptResultOptions {
+     /**
+      * The result string to validate
+      */
+     resultString: string;
+     /**
+      * Expectations for the result (word count, sentence count, etc.)
+      */
+     expectations?: Expectations;
+     /**
+      * Expected format of the result (e.g., 'JSON')
+      */
+     format?: FormatCommand['format'];
+     /**
+      * List of postprocessing function names that should be applied
+      * Note: This is for validation purposes only - postprocessing should be done before calling this function
+      */
+     postprocessingFunctionNames?: ReadonlyArray<string_postprocessing_function_name>;
+ }
+ /**
+  * Result of prompt result validation
+  */
+ export interface ValidatePromptResultResult {
+     /**
+      * Whether the result is valid (passes all expectations and format checks)
+      */
+     isValid: boolean;
+     /**
+      * The processed result string (may be modified if format extraction was needed)
+      */
+     processedResultString: string;
+     /**
+      * Error that occurred during validation, if any
+      */
+     error?: ExpectError;
+ }
+ /**
+  * Validates a prompt result against expectations and format requirements.
+  * This function provides a common abstraction for result validation that can be used
+  * by both execution logic and caching logic to ensure consistency.
+  *
+  * @param options - The validation options including result string, expectations, and format
+  * @returns Validation result with processed string and validity status
+  * @private internal function of `createPipelineExecutor` and `cacheLlmTools`
+  */
+ export declare function validatePromptResult(options: ValidatePromptResultOptions): ValidatePromptResultResult;
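A hedged usage sketch of the new helper (note it is marked `@private` for `createPipelineExecutor` and `cacheLlmTools`, so it is called from inside the package rather than by applications). The `expectations` shape below is an assumption for illustration; only `format: 'JSON'` is spelled out in the declaration's comments:

```ts
// Relative import path depends on the caller's location inside the package
import { validatePromptResult } from '../utils/validatePromptResult';

const { isValid, processedResultString, error } = validatePromptResult({
    resultString: '{"answer": "42"}',
    // Assumed Expectations shape (word-count bounds); not spelled out in this diff
    expectations: { words: { min: 1, max: 100 } },
    format: 'JSON',
});

if (!isValid) {
    // `error` is an ExpectError describing which expectation or format check failed
    console.error(error?.message);
} else {
    // `processedResultString` may differ from the input if format extraction was applied
    console.info(processedResultString);
}
```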
package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts CHANGED
@@ -7,7 +7,7 @@ import type { Prompt } from '../../types/Prompt';
  import type { string_markdown } from '../../types/typeAliases';
  import type { string_markdown_text } from '../../types/typeAliases';
  import type { string_title } from '../../types/typeAliases';
- import type { AnthropicClaudeExecutionToolsDirectOptions } from './AnthropicClaudeExecutionToolsOptions';
+ import type { AnthropicClaudeExecutionToolsNonProxiedOptions } from './AnthropicClaudeExecutionToolsOptions';
  /**
   * Execution Tools for calling Anthropic Claude API.
   *
@@ -15,7 +15,7 @@ import type { AnthropicClaudeExecutionToolsDirectOptions } from './AnthropicClau
   * @deprecated use `createAnthropicClaudeExecutionTools` instead
   */
  export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools {
-     protected readonly options: AnthropicClaudeExecutionToolsDirectOptions;
+     protected readonly options: AnthropicClaudeExecutionToolsNonProxiedOptions;
      /**
       * Anthropic Claude API client.
       */
@@ -26,7 +26,7 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools
       *
       * @param options which are relevant are directly passed to the Anthropic Claude client
       */
-     constructor(options?: AnthropicClaudeExecutionToolsDirectOptions);
+     constructor(options?: AnthropicClaudeExecutionToolsNonProxiedOptions);
      get title(): string_title & string_markdown_text;
      get description(): string_markdown;
      getClient(): Promise<Anthropic>;
package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts CHANGED
@@ -7,14 +7,14 @@ import type { RemoteClientOptions } from '../../remote-server/types/RemoteClient
   * This extends Anthropic's `ClientOptions` with are directly passed to the Anthropic client.
   * @public exported from `@promptbook/anthropic-claude`
   */
- export type AnthropicClaudeExecutionToolsOptions = AnthropicClaudeExecutionToolsDirectOptions | AnthropicClaudeExecutionToolsProxiedOptions;
+ export type AnthropicClaudeExecutionToolsOptions = AnthropicClaudeExecutionToolsNonProxiedOptions | AnthropicClaudeExecutionToolsProxiedOptions;
  /**
   * Options for directly used `AnthropicClaudeExecutionTools`
   *
   * This extends Anthropic's `ClientOptions` with are directly passed to the Anthropic client.
   * @public exported from `@promptbook/anthropic-claude`
   */
- export type AnthropicClaudeExecutionToolsDirectOptions = CommonToolsOptions & ClientOptions & {
+ export type AnthropicClaudeExecutionToolsNonProxiedOptions = CommonToolsOptions & ClientOptions & {
      isProxied?: false;
  };
  /**
package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts CHANGED
@@ -1,12 +1,12 @@
  import type { ClientOptions } from 'openai';
  import type { string_token } from '../../types/typeAliases';
- import type { OpenAiExecutionToolsOptions } from './OpenAiExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsOptions } from './OpenAiCompatibleExecutionToolsOptions';
  /**
   * Options for `createOpenAiAssistantExecutionTools` and `OpenAiAssistantExecutionTools`
   *
   * @public exported from `@promptbook/openai`
   */
- export type OpenAiAssistantExecutionToolsOptions = OpenAiExecutionToolsOptions & ClientOptions & {
+ export type OpenAiAssistantExecutionToolsOptions = OpenAiCompatibleExecutionToolsOptions & ClientOptions & {
      /**
       * Which assistant to use
       */
package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts CHANGED
@@ -11,14 +11,14 @@ import type { string_markdown_text } from '../../types/typeAliases';
  import type { string_model_name } from '../../types/typeAliases';
  import type { string_title } from '../../types/typeAliases';
  import { computeOpenAiUsage } from './computeOpenAiUsage';
- import type { OpenAiExecutionToolsOptions } from './OpenAiExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsNonProxiedOptions } from './OpenAiCompatibleExecutionToolsOptions';
  /**
-  * Execution Tools for calling OpenAI API or other OpeenAI compatible provider
+  * Execution Tools for calling OpenAI API or other OpenAI compatible provider
   *
   * @public exported from `@promptbook/openai`
   */
  export declare abstract class OpenAiCompatibleExecutionTools implements LlmExecutionTools {
-     protected readonly options: OpenAiExecutionToolsOptions;
+     protected readonly options: OpenAiCompatibleExecutionToolsNonProxiedOptions;
      /**
       * OpenAI API client.
       */
@@ -32,7 +32,7 @@ export declare abstract class OpenAiCompatibleExecutionTools implements LlmExecu
       *
       * @param options which are relevant are directly passed to the OpenAI compatible client
       */
-     constructor(options: OpenAiExecutionToolsOptions);
+     constructor(options: OpenAiCompatibleExecutionToolsNonProxiedOptions);
      abstract get title(): string_title & string_markdown_text;
      abstract get description(): string_markdown;
      getClient(): Promise<OpenAI>;
package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionToolsOptions.d.ts ADDED
@@ -0,0 +1,52 @@
+ import type { ClientOptions } from 'openai';
+ import type { CommonToolsOptions } from '../../execution/CommonToolsOptions';
+ import type { RemoteClientOptions } from '../../remote-server/types/RemoteClientOptions';
+ /**
+  * Options for `createOpenAiCompatibleExecutionTools` and `OpenAiCompatibleExecutionTools`
+  *
+  * This extends OpenAI's `ClientOptions` with are directly passed to the OpenAI client.
+  * Rest is used by the `OpenAiCompatibleExecutionTools`.
+  *
+  * @public exported from `@promptbook/openai`
+  */
+ export type OpenAiCompatibleExecutionToolsOptions = OpenAiCompatibleExecutionToolsNonProxiedOptions | OpenAiCompatibleExecutionToolsProxiedOptions;
+ /**
+  * Options for directly used `OpenAiCompatibleExecutionTools`
+  *
+  * This extends OpenAI's `ClientOptions` with are directly passed to the OpenAI client.
+  * @public exported from `@promptbook/openai`
+  */
+ export type OpenAiCompatibleExecutionToolsNonProxiedOptions = CommonToolsOptions & ClientOptions & {
+     /**
+      * Base URL for the OpenAI-compatible API endpoint
+      *
+      * This allows connecting to any OpenAI-compatible LLM service by specifying their API endpoint.
+      *
+      * @example 'https://https://promptbook.s5.ptbk.io/' (Promptbook)
+      * @example 'https://api.openai.com/v1' (OpenAI)
+      * @example 'http://localhost:11434/v1' (Ollama)
+      * @example 'https://api.deepseek.com/v1' (DeepSeek)
+      */
+     baseURL?: string;
+     isProxied?: false;
+ };
+ /**
+  * Options for proxied `OpenAiCompatibleExecutionTools`
+  *
+  * This extends OpenAI's `ClientOptions` with are directly passed to the OpenAI client.
+  * @public exported from `@promptbook/openai`
+  */
+ export type OpenAiCompatibleExecutionToolsProxiedOptions = CommonToolsOptions & ClientOptions & {
+     /**
+      * Base URL for the OpenAI-compatible API endpoint
+      *
+      * This allows connecting to any OpenAI-compatible LLM service by specifying their API endpoint.
+      *
+      * @example 'https://https://promptbook.s5.ptbk.io/' (Promptbook)
+      * @example 'https://api.openai.com/v1' (OpenAI)
+      * @example 'http://localhost:11434/v1' (Ollama)
+      * @example 'https://api.deepseek.com/v1' (DeepSeek)
+      */
+     baseURL?: string;
+     isProxied: true;
+ } & Pick<RemoteClientOptions<undefined>, 'remoteServerUrl'>;
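A minimal sketch of how the two option shapes might be filled in, using only the fields declared above plus `apiKey` from OpenAI's `ClientOptions`; the endpoint and server URLs are placeholders, and `CommonToolsOptions` is assumed to add no required fields:

```ts
import type {
    OpenAiCompatibleExecutionToolsNonProxiedOptions,
    OpenAiCompatibleExecutionToolsProxiedOptions,
} from '@promptbook/openai';

// Non-proxied: talk to an OpenAI-compatible endpoint directly (here a local Ollama server)
const directOptions: OpenAiCompatibleExecutionToolsNonProxiedOptions = {
    baseURL: 'http://localhost:11434/v1',
    apiKey: 'ollama', // from OpenAI's ClientOptions; local servers typically accept any value
    isProxied: false,
};

// Proxied: route calls through a Promptbook remote server (placeholder URL)
const proxiedOptions: OpenAiCompatibleExecutionToolsProxiedOptions = {
    isProxied: true,
    remoteServerUrl: 'https://promptbook-server.example.com', // from RemoteClientOptions
};
```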
package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts CHANGED
@@ -1,11 +1,9 @@
- import type { ClientOptions } from 'openai';
- import type { CommonToolsOptions } from '../../execution/CommonToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsOptions } from './OpenAiCompatibleExecutionToolsOptions';
  /**
   * Options for `createOpenAiExecutionTools` and `OpenAiExecutionTools`
   *
-  * This extends OpenAI's `ClientOptions` with are directly passed to the OpenAI client.
-  * Rest is used by the `OpenAiExecutionTools`.
+  * This extends OpenAiCompatibleExecutionToolsOptions.
   *
   * @public exported from `@promptbook/openai`
   */
- export type OpenAiExecutionToolsOptions = CommonToolsOptions & ClientOptions;
+ export type OpenAiExecutionToolsOptions = OpenAiCompatibleExecutionToolsOptions;
package/esm/typings/src/llm-providers/openai/createOpenAiCompatibleExecutionTools.d.ts ADDED
@@ -0,0 +1,74 @@
+ import type { AvailableModel } from '../../execution/AvailableModel';
+ import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
+ import type { Usage } from '../../execution/Usage';
+ import type { string_markdown } from '../../types/typeAliases';
+ import type { string_markdown_text } from '../../types/typeAliases';
+ import type { string_model_name } from '../../types/typeAliases';
+ import type { string_title } from '../../types/typeAliases';
+ import { RemoteLlmExecutionTools } from '../remote/RemoteLlmExecutionTools';
+ import { computeOpenAiUsage } from './computeOpenAiUsage';
+ import { OpenAiCompatibleExecutionTools } from './OpenAiCompatibleExecutionTools';
+ import type { OpenAiCompatibleExecutionToolsNonProxiedOptions } from './OpenAiCompatibleExecutionToolsOptions';
+ import type { OpenAiCompatibleExecutionToolsOptions } from './OpenAiCompatibleExecutionToolsOptions';
+ /**
+  * Execution Tools for calling OpenAI compatible API
+  *
+  * Note: This can be used for any OpenAI compatible APIs
+  *
+  * @public exported from `@promptbook/openai`
+  */
+ export declare const createOpenAiCompatibleExecutionTools: ((options: OpenAiCompatibleExecutionToolsOptions & {
+     /**
+      * The model name to use for all operations
+      *
+      * This will be the only model available through this LLM provider and it will be a chat model.
+      * Other variants won't be available for now.
+      */
+     defaultModelName: string_model_name;
+ }) => OpenAiCompatibleExecutionTools | RemoteLlmExecutionTools) & {
+     packageName: string;
+     className: string;
+ };
+ /**
+  * Execution Tools for calling ONE SPECIFIC PRECONFIGURED OpenAI compatible provider
+  *
+  * @private for `createOpenAiCompatibleExecutionTools`
+  */
+ export declare class HardcodedOpenAiCompatibleExecutionTools extends OpenAiCompatibleExecutionTools implements LlmExecutionTools {
+     private readonly defaultModelName;
+     protected readonly options: OpenAiCompatibleExecutionToolsNonProxiedOptions;
+     /**
+      * Creates OpenAI compatible Execution Tools.
+      *
+      * @param options which are relevant are directly passed to the OpenAI compatible client
+      */
+     constructor(defaultModelName: string_model_name, options: OpenAiCompatibleExecutionToolsNonProxiedOptions);
+     get title(): string_title & string_markdown_text;
+     get description(): string_markdown;
+     /**
+      * List all available models (non dynamically)
+      *
+      * Note: Purpose of this is to provide more information about models than standard listing from API
+      */
+     protected get HARDCODED_MODELS(): ReadonlyArray<AvailableModel>;
+     /**
+      * Computes the usage
+      */
+     protected computeUsage(...args: Parameters<typeof computeOpenAiUsage>): Usage;
+     /**
+      * Default model for chat variant.
+      */
+     protected getDefaultChatModel(): AvailableModel;
+     /**
+      * Default model for completion variant.
+      */
+     protected getDefaultCompletionModel(): AvailableModel;
+     /**
+      * Default model for completion variant.
+      */
+     protected getDefaultEmbeddingModel(): AvailableModel;
+ }
+ /**
+  * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
+  * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
+  */
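A hedged usage sketch of the new factory, following the call signature declared above; the endpoint, key, and model name are placeholder values, and whether a direct `OpenAiCompatibleExecutionTools` subclass or a `RemoteLlmExecutionTools` instance comes back depends on the `isProxied` discriminant:

```ts
import { createOpenAiCompatibleExecutionTools } from '@promptbook/openai';

// Connect to any OpenAI-compatible endpoint (placeholder values)
const llmTools = createOpenAiCompatibleExecutionTools({
    baseURL: 'https://api.deepseek.com/v1',
    apiKey: process.env.DEEPSEEK_API_KEY,
    defaultModelName: 'deepseek-chat', // the single chat model this provider instance will expose
    isProxied: false,
});

// `llmTools` implements LlmExecutionTools, so it can be handed to whatever consumes
// execution tools (e.g. a pipeline executor); that wiring is outside this diff.
```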
package/esm/typings/src/llm-providers/openai/register-configuration.d.ts CHANGED
@@ -19,6 +19,17 @@ export declare const _OpenAiMetadataRegistration: Registration;
   * @public exported from `@promptbook/cli`
   */
  export declare const _OpenAiAssistantMetadataRegistration: Registration;
+ /**
+  * Registration of the OpenAI Compatible metadata
+  *
+  * Note: OpenAiCompatibleExecutionTools is an abstract class and cannot be instantiated directly.
+  * It serves as a base class for OpenAiExecutionTools and other compatible implementations.
+  *
+  * @public exported from `@promptbook/core`
+  * @public exported from `@promptbook/wizard`
+  * @public exported from `@promptbook/cli`
+  */
+ export declare const _OpenAiCompatibleMetadataRegistration: Registration;
  /**
   * Note: [💞] Ignore a discrepancy between file name and entity name
   */
package/esm/typings/src/llm-providers/openai/register-constructor.d.ts CHANGED
@@ -19,6 +19,20 @@ export declare const _OpenAiRegistration: Registration;
   * @public exported from `@promptbook/cli`
   */
  export declare const _OpenAiAssistantRegistration: Registration;
+ /**
+  * Registration of the OpenAI Compatible provider
+  *
+  * Note: [🏐] Configurations registrations are done in register-constructor.ts BUT constructor register-constructor.ts
+  *
+  * @public exported from `@promptbook/openai`
+  * @public exported from `@promptbook/wizard`
+  * @public exported from `@promptbook/cli`
+  */
+ export declare const _OpenAiCompatibleRegistration: Registration;
+ /**
+  * Note: OpenAiCompatibleExecutionTools is an abstract class and cannot be registered directly.
+  * It serves as a base class for OpenAiExecutionTools and other compatible implementations.
+  */
  /**
   * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
   * Note: [💞] Ignore a discrepancy between file name and entity name
package/esm/typings/src/version.d.ts CHANGED
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
   * Represents the version string of the Promptbook engine.
-  * It follows semantic versioning (e.g., `0.94.0`).
+  * It follows semantic versioning (e.g., `0.98.0-9`).
   *
   * @generated
   */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
      "name": "@promptbook/wizard",
-     "version": "0.95.0",
+     "version": "0.98.0-10",
      "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
      "private": false,
      "sideEffects": false,
@@ -74,7 +74,7 @@
      "module": "./esm/index.es.js",
      "typings": "./esm/typings/src/_packages/wizard.index.d.ts",
      "peerDependencies": {
-         "@promptbook/core": "0.95.0"
+         "@promptbook/core": "0.98.0-10"
      },
      "dependencies": {
          "@ai-sdk/deepseek": "0.1.6",