@promptbook/remote-server 0.71.0-0 → 0.72.0-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (29)
  1. package/esm/index.es.js +1 -1
  2. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  3. package/esm/typings/src/_packages/core.index.d.ts +6 -2
  4. package/esm/typings/src/_packages/openai.index.d.ts +8 -0
  5. package/esm/typings/src/_packages/types.index.d.ts +2 -0
  6. package/esm/typings/src/execution/createPipelineExecutor/10-executePipeline.d.ts +1 -1
  7. package/esm/typings/src/execution/translation/automatic-translate/automatic-translators/LindatAutomaticTranslator.d.ts +1 -1
  8. package/esm/typings/src/execution/utils/addUsage.d.ts +0 -56
  9. package/esm/typings/src/execution/utils/usage-constants.d.ts +127 -0
  10. package/esm/typings/src/knowledge/dialogs/callback/CallbackInterfaceTools.d.ts +1 -1
  11. package/esm/typings/src/knowledge/dialogs/simple-prompt/SimplePromptInterfaceTools.d.ts +1 -1
  12. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +3 -2
  13. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts +3 -2
  14. package/esm/typings/src/llm-providers/mocked/MockedEchoLlmExecutionTools.d.ts +1 -1
  15. package/esm/typings/src/llm-providers/mocked/MockedFackedLlmExecutionTools.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionTools.d.ts +37 -0
  17. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +14 -0
  18. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -2
  19. package/esm/typings/src/llm-providers/openai/createOpenAiAssistantExecutionTools.d.ts +15 -0
  20. package/esm/typings/src/llm-providers/openai/register-configuration.d.ts +9 -0
  21. package/esm/typings/src/llm-providers/openai/register-constructor.d.ts +9 -0
  22. package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -1
  23. package/esm/typings/src/scripting/javascript/JavascriptEvalExecutionTools.d.ts +1 -1
  24. package/esm/typings/src/scripting/python/PythonExecutionTools.d.ts +1 -1
  25. package/esm/typings/src/scripting/typescript/TypescriptExecutionTools.d.ts +1 -1
  26. package/esm/typings/src/storage/files-storage/FilesStorage.d.ts +1 -1
  27. package/esm/typings/src/types/PipelineJson/KnowledgeSourceJson.d.ts +2 -9
  28. package/package.json +2 -2
  29. package/umd/index.umd.js +1 -1
package/esm/index.es.js CHANGED
@@ -7,7 +7,7 @@ import spaceTrim$1, { spaceTrim } from 'spacetrim';
 /**
  * The version of the Promptbook library
  */
-var PROMPTBOOK_VERSION = '0.70.0-1';
+var PROMPTBOOK_VERSION = '0.72.0-0';
 // TODO:[main] !!!! List here all the versions and annotate + put into script
 
 /*! *****************************************************************************
package/esm/typings/src/_packages/cli.index.d.ts CHANGED
@@ -5,7 +5,9 @@ import { _AnthropicClaudeRegistration } from '../llm-providers/anthropic-claude/
 import { _AzureOpenAiMetadataRegistration } from '../llm-providers/azure-openai/register-configuration';
 import { _AzureOpenAiRegistration } from '../llm-providers/azure-openai/register-constructor';
 import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
+import { _OpenAiAssistantMetadataRegistration } from '../llm-providers/openai/register-configuration';
 import { _OpenAiRegistration } from '../llm-providers/openai/register-constructor';
+import { _OpenAiAssistantRegistration } from '../llm-providers/openai/register-constructor';
 export { PROMPTBOOK_VERSION };
 export { _CLI };
 export { _AnthropicClaudeMetadataRegistration };
@@ -13,4 +15,6 @@ export { _AnthropicClaudeRegistration };
 export { _AzureOpenAiMetadataRegistration };
 export { _AzureOpenAiRegistration };
 export { _OpenAiMetadataRegistration };
+export { _OpenAiAssistantMetadataRegistration };
 export { _OpenAiRegistration };
+export { _OpenAiAssistantRegistration };
package/esm/typings/src/_packages/core.index.d.ts CHANGED
@@ -41,9 +41,10 @@ import { UnexpectedError } from '../errors/UnexpectedError';
 import { assertsExecutionSuccessful } from '../execution/assertsExecutionSuccessful';
 import { createPipelineExecutor } from '../execution/createPipelineExecutor/00-createPipelineExecutor';
 import { embeddingVectorToString } from '../execution/embeddingVectorToString';
-import { ZERO_USAGE } from '../execution/utils/addUsage';
 import { addUsage } from '../execution/utils/addUsage';
 import { isPassingExpectations } from '../execution/utils/checkExpectations';
+import { ZERO_USAGE } from '../execution/utils/usage-constants';
+import { UNCERTAIN_USAGE } from '../execution/utils/usage-constants';
 import { usageToHuman } from '../execution/utils/usageToHuman';
 import { usageToWorktime } from '../execution/utils/usageToWorktime';
 import { CsvFormatDefinition } from '../formats/csv/CsvFormatDefinition';
@@ -63,6 +64,7 @@ import { _AnthropicClaudeMetadataRegistration } from '../llm-providers/anthropic
 import { _AzureOpenAiMetadataRegistration } from '../llm-providers/azure-openai/register-configuration';
 import { joinLlmExecutionTools } from '../llm-providers/multiple/joinLlmExecutionTools';
 import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
+import { _OpenAiAssistantMetadataRegistration } from '../llm-providers/openai/register-configuration';
 import { preparePersona } from '../personas/preparePersona';
 import { isPipelinePrepared } from '../prepare/isPipelinePrepared';
 import { preparePipeline } from '../prepare/preparePipeline';
@@ -118,9 +120,10 @@ export { UnexpectedError };
 export { assertsExecutionSuccessful };
 export { createPipelineExecutor };
 export { embeddingVectorToString };
-export { ZERO_USAGE };
 export { addUsage };
 export { isPassingExpectations };
+export { ZERO_USAGE };
+export { UNCERTAIN_USAGE };
 export { usageToHuman };
 export { usageToWorktime };
 export { CsvFormatDefinition };
@@ -140,6 +143,7 @@ export { _AnthropicClaudeMetadataRegistration };
 export { _AzureOpenAiMetadataRegistration };
 export { joinLlmExecutionTools };
 export { _OpenAiMetadataRegistration };
+export { _OpenAiAssistantMetadataRegistration };
 export { preparePersona };
 export { isPipelinePrepared };
 export { preparePipeline };
package/esm/typings/src/_packages/openai.index.d.ts CHANGED
@@ -1,12 +1,20 @@
 import { PROMPTBOOK_VERSION } from '../version';
+import { createOpenAiAssistantExecutionTools } from '../llm-providers/openai/createOpenAiAssistantExecutionTools';
 import { createOpenAiExecutionTools } from '../llm-providers/openai/createOpenAiExecutionTools';
 import { OPENAI_MODELS } from '../llm-providers/openai/openai-models';
+import { OpenAiAssistantExecutionTools } from '../llm-providers/openai/OpenAiAssistantExecutionTools';
+import type { OpenAiAssistantExecutionToolsOptions } from '../llm-providers/openai/OpenAiAssistantExecutionToolsOptions';
 import { OpenAiExecutionTools } from '../llm-providers/openai/OpenAiExecutionTools';
 import type { OpenAiExecutionToolsOptions } from '../llm-providers/openai/OpenAiExecutionToolsOptions';
 import { _OpenAiRegistration } from '../llm-providers/openai/register-constructor';
+import { _OpenAiAssistantRegistration } from '../llm-providers/openai/register-constructor';
 export { PROMPTBOOK_VERSION };
+export { createOpenAiAssistantExecutionTools };
 export { createOpenAiExecutionTools };
 export { OPENAI_MODELS };
+export { OpenAiAssistantExecutionTools };
+export type { OpenAiAssistantExecutionToolsOptions };
 export { OpenAiExecutionTools };
 export type { OpenAiExecutionToolsOptions };
 export { _OpenAiRegistration };
+export { _OpenAiAssistantRegistration };
package/esm/typings/src/_packages/types.index.d.ts CHANGED
@@ -50,6 +50,7 @@ import type { AnthropicClaudeExecutionToolsProxiedOptions } from '../llm-provide
 import type { AzureOpenAiExecutionToolsOptions } from '../llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions';
 import type { LangtailExecutionToolsOptions } from '../llm-providers/langtail/LangtailExecutionToolsOptions';
 import type { MultipleLlmExecutionTools } from '../llm-providers/multiple/MultipleLlmExecutionTools';
+import type { OpenAiAssistantExecutionToolsOptions } from '../llm-providers/openai/OpenAiAssistantExecutionToolsOptions';
 import type { OpenAiExecutionToolsOptions } from '../llm-providers/openai/OpenAiExecutionToolsOptions';
 import type { PromptbookServer_Error } from '../llm-providers/remote/interfaces/PromptbookServer_Error';
 import type { PromptbookServer_ListModels_Request } from '../llm-providers/remote/interfaces/PromptbookServer_ListModels_Request';
@@ -281,6 +282,7 @@ export type { AnthropicClaudeExecutionToolsProxiedOptions };
 export type { AzureOpenAiExecutionToolsOptions };
 export type { LangtailExecutionToolsOptions };
 export type { MultipleLlmExecutionTools };
+export type { OpenAiAssistantExecutionToolsOptions };
 export type { OpenAiExecutionToolsOptions };
 export type { PromptbookServer_Error };
 export type { PromptbookServer_ListModels_Request };
package/esm/typings/src/execution/createPipelineExecutor/10-executePipeline.d.ts CHANGED
@@ -22,7 +22,7 @@ type ExecutePipelineOptions = {
     /**
      * @@@
      */
-    readonly onProgress?: (taskProgress: TaskProgress) => Promisable<void>;
+    onProgress?(taskProgress: TaskProgress): Promisable<void>;
     /**
      * @@@
      */
package/esm/typings/src/execution/translation/automatic-translate/automatic-translators/LindatAutomaticTranslator.d.ts CHANGED
@@ -7,7 +7,7 @@ interface LindatAutomaticTranslatorOptions extends TranslatorOptions {
  * @private still in development [🏳]
  */
 export declare class LindatAutomaticTranslator implements AutomaticTranslator {
-    private readonly options;
+    protected readonly options: LindatAutomaticTranslatorOptions;
     constructor(options: LindatAutomaticTranslatorOptions);
     translate(message: string): Promise<string>;
 }
package/esm/typings/src/execution/utils/addUsage.d.ts CHANGED
@@ -1,60 +1,4 @@
 import type { PromptResultUsage } from '../PromptResultUsage';
-/**
- * @@@
- *
- * @public exported from `@promptbook/core`
- */
-export declare const ZERO_USAGE: import("type-fest/source/readonly-deep").ReadonlyObjectDeep<{
-    readonly price: {
-        readonly value: 0;
-    };
-    readonly input: {
-        readonly tokensCount: {
-            readonly value: 0;
-        };
-        readonly charactersCount: {
-            readonly value: 0;
-        };
-        readonly wordsCount: {
-            readonly value: 0;
-        };
-        readonly sentencesCount: {
-            readonly value: 0;
-        };
-        readonly linesCount: {
-            readonly value: 0;
-        };
-        readonly paragraphsCount: {
-            readonly value: 0;
-        };
-        readonly pagesCount: {
-            readonly value: 0;
-        };
-    };
-    readonly output: {
-        readonly tokensCount: {
-            readonly value: 0;
-        };
-        readonly charactersCount: {
-            readonly value: 0;
-        };
-        readonly wordsCount: {
-            readonly value: 0;
-        };
-        readonly sentencesCount: {
-            readonly value: 0;
-        };
-        readonly linesCount: {
-            readonly value: 0;
-        };
-        readonly paragraphsCount: {
-            readonly value: 0;
-        };
-        readonly pagesCount: {
-            readonly value: 0;
-        };
-    };
-}>;
 /**
  * Function `addUsage` will add multiple usages into one
  *
package/esm/typings/src/execution/utils/usage-constants.d.ts ADDED
@@ -0,0 +1,127 @@
+/**
+ * Represents the usage with no resources consumed
+ *
+ * @public exported from `@promptbook/core`
+ */
+export declare const ZERO_USAGE: import("type-fest/source/readonly-deep").ReadonlyObjectDeep<{
+    readonly price: {
+        readonly value: 0;
+    };
+    readonly input: {
+        readonly tokensCount: {
+            readonly value: 0;
+        };
+        readonly charactersCount: {
+            readonly value: 0;
+        };
+        readonly wordsCount: {
+            readonly value: 0;
+        };
+        readonly sentencesCount: {
+            readonly value: 0;
+        };
+        readonly linesCount: {
+            readonly value: 0;
+        };
+        readonly paragraphsCount: {
+            readonly value: 0;
+        };
+        readonly pagesCount: {
+            readonly value: 0;
+        };
+    };
+    readonly output: {
+        readonly tokensCount: {
+            readonly value: 0;
+        };
+        readonly charactersCount: {
+            readonly value: 0;
+        };
+        readonly wordsCount: {
+            readonly value: 0;
+        };
+        readonly sentencesCount: {
+            readonly value: 0;
+        };
+        readonly linesCount: {
+            readonly value: 0;
+        };
+        readonly paragraphsCount: {
+            readonly value: 0;
+        };
+        readonly pagesCount: {
+            readonly value: 0;
+        };
+    };
+}>;
+/**
+ * Represents the usage with unknown resources consumed
+ *
+ * @public exported from `@promptbook/core`
+ */
+export declare const UNCERTAIN_USAGE: import("type-fest/source/readonly-deep").ReadonlyObjectDeep<{
+    readonly price: {
+        readonly value: 0;
+        readonly isUncertain: true;
+    };
+    readonly input: {
+        readonly tokensCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly charactersCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly wordsCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly sentencesCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly linesCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly paragraphsCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly pagesCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+    };
+    readonly output: {
+        readonly tokensCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly charactersCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly wordsCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly sentencesCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly linesCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly paragraphsCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+        readonly pagesCount: {
+            readonly value: 0;
+            readonly isUncertain: true;
+        };
+    };
+}>;
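The two new constants pair with `addUsage` (still declared in `addUsage.d.ts` above), which folds several `PromptResultUsage` objects into one. A minimal sketch, assuming `addUsage` accepts any number of usage objects:

```typescript
import { addUsage, UNCERTAIN_USAGE, ZERO_USAGE } from '@promptbook/core';

// ZERO_USAGE is a natural starting accumulator; UNCERTAIN_USAGE stands in for a
// call whose real consumption is unknown (every counter is 0 with `isUncertain: true`).
const totalUsage = addUsage(ZERO_USAGE, UNCERTAIN_USAGE);
console.info(totalUsage.price); // value stays 0; the uncertainty flag should carry over
```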
package/esm/typings/src/knowledge/dialogs/callback/CallbackInterfaceTools.d.ts CHANGED
@@ -8,7 +8,7 @@ import type { CallbackInterfaceToolsOptions } from './CallbackInterfaceToolsOpti
  * @public exported from `@promptbook/core`
  */
 export declare class CallbackInterfaceTools implements UserInterfaceTools {
-    private readonly options;
+    protected readonly options: CallbackInterfaceToolsOptions;
     constructor(options: CallbackInterfaceToolsOptions);
     /**
      * Trigger the custom callback function
package/esm/typings/src/knowledge/dialogs/simple-prompt/SimplePromptInterfaceTools.d.ts CHANGED
@@ -10,7 +10,7 @@ import type { UserInterfaceToolsPromptDialogOptions } from '../../../execution/U
  * @public exported from `@promptbook/browser`
  */
 export declare class SimplePromptInterfaceTools implements UserInterfaceTools {
-    private readonly options;
+    protected readonly options: CommonExecutionToolsOptions;
     constructor(options?: CommonExecutionToolsOptions);
     /**
      * Trigger window.DIALOG TEMPLATE
package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts CHANGED
@@ -1,3 +1,4 @@
+import Anthropic from '@anthropic-ai/sdk';
 import type { AvailableModel } from '../../execution/AvailableModel';
 import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
 import type { ChatPromptResult } from '../../execution/PromptResult';
@@ -13,7 +14,7 @@ import type { AnthropicClaudeExecutionToolsDirectOptions } from './AnthropicClau
  * @deprecated use `createAnthropicClaudeExecutionTools` instead
  */
 export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools {
-    private readonly options;
+    protected readonly options: AnthropicClaudeExecutionToolsDirectOptions;
     /**
      * Anthropic Claude API client.
      */
@@ -26,7 +27,7 @@ export declare class AnthropicClaudeExecutionTools implements LlmExecutionTools
     constructor(options?: AnthropicClaudeExecutionToolsDirectOptions);
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
-    private getClient;
+    getClient(): Promise<Anthropic>;
     /**
      * Check the `options` passed to `constructor`
      */
package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts CHANGED
@@ -1,3 +1,4 @@
+import { OpenAIClient } from '@azure/openai';
 import type { AvailableModel } from '../../execution/AvailableModel';
 import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
 import type { ChatPromptResult } from '../../execution/PromptResult';
@@ -13,7 +14,7 @@ import type { AzureOpenAiExecutionToolsOptions } from './AzureOpenAiExecutionToo
  * @public exported from `@promptbook/azure-openai`
  */
 export declare class AzureOpenAiExecutionTools implements LlmExecutionTools {
-    private readonly options;
+    protected readonly options: AzureOpenAiExecutionToolsOptions;
     /**
      * OpenAI Azure API client.
      */
@@ -26,7 +27,7 @@ export declare class AzureOpenAiExecutionTools implements LlmExecutionTools {
     constructor(options: AzureOpenAiExecutionToolsOptions);
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
-    private getClient;
+    getClient(): Promise<OpenAIClient>;
     /**
      * Check the `options` passed to `constructor`
      */
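Across the provider tools, `getClient` is no longer `private` and now has an explicit return type, so the underlying SDK client can be reached directly. A sketch against the Azure variant, assuming the usual `resourceName`/`deploymentName`/`apiKey` fields on `AzureOpenAiExecutionToolsOptions`:

```typescript
import { AzureOpenAiExecutionTools } from '@promptbook/azure-openai';

async function getRawAzureClient() {
    const tools = new AzureOpenAiExecutionTools({
        resourceName: 'my-azure-openai-resource', // assumed option names, see AzureOpenAiExecutionToolsOptions
        deploymentName: 'gpt-4o',
        apiKey: process.env.AZURE_OPENAI_API_KEY!,
    });

    // Resolves to the OpenAIClient from '@azure/openai', per the typing above
    return await tools.getClient();
}
```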
package/esm/typings/src/llm-providers/mocked/MockedEchoLlmExecutionTools.d.ts CHANGED
@@ -13,7 +13,7 @@ import type { string_title } from '../../types/typeAliases';
  * @public exported from `@promptbook/fake-llm`
  */
 export declare class MockedEchoLlmExecutionTools implements LlmExecutionTools {
-    private readonly options;
+    protected readonly options: CommonExecutionToolsOptions;
     constructor(options?: CommonExecutionToolsOptions);
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
package/esm/typings/src/llm-providers/mocked/MockedFackedLlmExecutionTools.d.ts CHANGED
@@ -14,7 +14,7 @@ import type { string_title } from '../../types/typeAliases';
  * @public exported from `@promptbook/fake-llm`
  */
 export declare class MockedFackedLlmExecutionTools implements LlmExecutionTools {
-    private readonly options;
+    protected readonly options: CommonExecutionToolsOptions;
     constructor(options?: CommonExecutionToolsOptions);
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionTools.d.ts ADDED
@@ -0,0 +1,37 @@
+import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
+import type { ChatPromptResult } from '../../execution/PromptResult';
+import type { Prompt } from '../../types/Prompt';
+import type { string_markdown } from '../../types/typeAliases';
+import type { string_markdown_text } from '../../types/typeAliases';
+import type { string_title } from '../../types/typeAliases';
+import type { OpenAiAssistantExecutionToolsOptions } from './OpenAiAssistantExecutionToolsOptions';
+import { OpenAiExecutionTools } from './OpenAiExecutionTools';
+/**
+ * Execution Tools for calling OpenAI API Assistants
+ *
+ * This is usefull for calling OpenAI API with a single assistant, for more wide usage use `OpenAiExecutionTools`.
+ *
+ * @public exported from `@promptbook/openai`
+ */
+export declare class OpenAiAssistantExecutionTools extends OpenAiExecutionTools implements LlmExecutionTools {
+    private readonly assistantId?;
+    /**
+     * Creates OpenAI Execution Tools.
+     *
+     * @param options which are relevant are directly passed to the OpenAI client
+     */
+    constructor(options: OpenAiAssistantExecutionToolsOptions);
+    get title(): string_title & string_markdown_text;
+    get description(): string_markdown;
+    /**
+     * Calls OpenAI API to use a chat model.
+     */
+    callChatModel(prompt: Pick<Prompt, 'content' | 'parameters' | 'modelRequirements' | 'format'>): Promise<ChatPromptResult>;
+}
+/**
+ * TODO: !!!!!! DO not use colors - can be used in browser
+ * TODO: [🧠][🧙‍♂️] Maybe there can be some wizzard for thoose who want to use just OpenAI
+ * TODO: Maybe make custom OpenAiError
+ * TODO: [🧠][🈁] Maybe use `isDeterministic` from options
+ * TODO: [🧠][🌰] Allow to pass `title` for tracking purposes
+ */
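A minimal usage sketch for the new class; `apiKey` comes from OpenAI's `ClientOptions`, and the prompt literal below only illustrates the `Pick<Prompt, ...>` signature (the exact `modelRequirements` shape is an assumption):

```typescript
import { OpenAiAssistantExecutionTools } from '@promptbook/openai';

async function askAssistant() {
    const assistantTools = new OpenAiAssistantExecutionTools({
        apiKey: process.env.OPENAI_API_KEY!,
        assistantId: 'asst_xxxxxxxxxxxx', // hypothetical assistant id (string_token)
    });

    // callChatModel takes a Pick of Prompt, as declared above
    return await assistantTools.callChatModel({
        content: 'Summarize the latest release notes.',
        parameters: {},
        modelRequirements: { modelVariant: 'CHAT' }, // assumed ModelRequirements shape
    });
}
```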
package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts ADDED
@@ -0,0 +1,14 @@
+import type { ClientOptions } from 'openai';
+import type { string_token } from '../../types/typeAliases';
+import type { OpenAiExecutionToolsOptions } from './OpenAiExecutionToolsOptions';
+/**
+ * Options for `OpenAiAssistantExecutionTools`
+ *
+ * @public exported from `@promptbook/openai`
+ */
+export type OpenAiAssistantExecutionToolsOptions = OpenAiExecutionToolsOptions & ClientOptions & {
+    /**
+     * Which assistant to use
+     */
+    assistantId: string_token;
+};
package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts CHANGED
@@ -1,3 +1,4 @@
+import OpenAI from 'openai';
 import type { AvailableModel } from '../../execution/AvailableModel';
 import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
 import type { ChatPromptResult } from '../../execution/PromptResult';
@@ -7,6 +8,8 @@ import type { Prompt } from '../../types/Prompt';
 import type { string_markdown } from '../../types/typeAliases';
 import type { string_markdown_text } from '../../types/typeAliases';
 import type { string_title } from '../../types/typeAliases';
+import type { string_token } from '../../types/typeAliases';
+import { OpenAiAssistantExecutionTools } from './OpenAiAssistantExecutionTools';
 import type { OpenAiExecutionToolsOptions } from './OpenAiExecutionToolsOptions';
 /**
  * Execution Tools for calling OpenAI API
@@ -14,7 +17,7 @@ import type { OpenAiExecutionToolsOptions } from './OpenAiExecutionToolsOptions'
  * @public exported from `@promptbook/openai`
  */
 export declare class OpenAiExecutionTools implements LlmExecutionTools {
-    private readonly options;
+    protected readonly options: OpenAiExecutionToolsOptions;
     /**
      * OpenAI API client.
      */
@@ -27,7 +30,14 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
     constructor(options?: OpenAiExecutionToolsOptions);
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
-    private getClient;
+    getClient(): Promise<OpenAI>;
+    /**
+     * Create (sub)tools for calling OpenAI API Assistants
+     *
+     * @param assistantId Which assistant to use
+     * @returns Tools for calling OpenAI API Assistants with same token
+     */
+    createAssistantSubtools(assistantId: string_token): OpenAiAssistantExecutionTools;
     /**
      * Check the `options` passed to `constructor`
      */
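The new `createAssistantSubtools` method derives assistant-scoped tools from an already configured `OpenAiExecutionTools`, reusing the same token. A sketch, assuming `OpenAiExecutionToolsOptions` accepts an `apiKey` that is forwarded to the OpenAI client:

```typescript
import { createOpenAiExecutionTools } from '@promptbook/openai';

const openAiTools = createOpenAiExecutionTools({
    apiKey: process.env.OPENAI_API_KEY!, // assumed option, forwarded to the OpenAI client
});

// Same connection and credentials, narrowed to a single assistant
const assistantTools = openAiTools.createAssistantSubtools('asst_xxxxxxxxxxxx');
```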
package/esm/typings/src/llm-providers/openai/createOpenAiAssistantExecutionTools.d.ts ADDED
@@ -0,0 +1,15 @@
+import { OpenAiAssistantExecutionTools } from './OpenAiAssistantExecutionTools';
+import type { OpenAiAssistantExecutionToolsOptions } from './OpenAiAssistantExecutionToolsOptions';
+/**
+ * Execution Tools for calling OpenAI API
+ *
+ * @public exported from `@promptbook/openai`
+ */
+export declare const createOpenAiAssistantExecutionTools: ((options: OpenAiAssistantExecutionToolsOptions) => OpenAiAssistantExecutionTools) & {
+    packageName: string;
+    className: string;
+};
+/**
+ * TODO: [🦺] Is there some way how to put `packageName` and `className` on top and function definition on bottom?
+ * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
+ */
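The registered creator appears interchangeable with calling the constructor directly, but it also carries `packageName` and `className` metadata for the registration machinery; a short sketch:

```typescript
import { createOpenAiAssistantExecutionTools } from '@promptbook/openai';

const tools = createOpenAiAssistantExecutionTools({
    apiKey: process.env.OPENAI_API_KEY!, // assumed field from OpenAI's ClientOptions
    assistantId: 'asst_xxxxxxxxxxxx',    // hypothetical assistant id
});

console.info(tools.title, createOpenAiAssistantExecutionTools.packageName);
```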
package/esm/typings/src/llm-providers/openai/register-configuration.d.ts CHANGED
@@ -7,3 +7,12 @@
  * @public exported from `@promptbook/cli`
  */
 export declare const _OpenAiMetadataRegistration: void;
+/**
+ * @@@ registration1 of default configuration for Open AI
+ *
+ * Note: [🏐] Configurations registrations are done in @@@ BUT constructor @@@
+ *
+ * @public exported from `@promptbook/core`
+ * @public exported from `@promptbook/cli`
+ */
+export declare const _OpenAiAssistantMetadataRegistration: void;
package/esm/typings/src/llm-providers/openai/register-constructor.d.ts CHANGED
@@ -7,6 +7,15 @@
  * @public exported from `@promptbook/cli`
  */
 export declare const _OpenAiRegistration: void;
+/**
+ * @@@ registration2
+ *
+ * Note: [🏐] Configurations registrations are done in @@@ BUT constructor @@@
+ *
+ * @public exported from `@promptbook/openai`
+ * @public exported from `@promptbook/cli`
+ */
+export declare const _OpenAiAssistantRegistration: void;
 /**
  * TODO: [🎶] Naming "constructor" vs "creator" vs "factory"
  */
package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts CHANGED
@@ -20,7 +20,7 @@ import type { RemoteLlmExecutionToolsOptions } from './interfaces/RemoteLlmExecu
  * @public exported from `@promptbook/remote-client`
  */
 export declare class RemoteLlmExecutionTools implements LlmExecutionTools {
-    private readonly options;
+    protected readonly options: RemoteLlmExecutionToolsOptions;
     constructor(options: RemoteLlmExecutionToolsOptions);
     get title(): string_title & string_markdown_text;
     get description(): string_markdown;
package/esm/typings/src/scripting/javascript/JavascriptEvalExecutionTools.d.ts CHANGED
@@ -9,7 +9,7 @@ import type { JavascriptExecutionToolsOptions } from './JavascriptExecutionTools
  * @public exported from `@promptbook/execute-javascript`
  */
 export declare class JavascriptEvalExecutionTools implements ScriptExecutionTools {
-    private readonly options;
+    protected readonly options: JavascriptExecutionToolsOptions;
     constructor(options?: JavascriptExecutionToolsOptions);
     /**
      * Executes a JavaScript
package/esm/typings/src/scripting/python/PythonExecutionTools.d.ts CHANGED
@@ -9,7 +9,7 @@ import type { ScriptExecutionToolsExecuteOptions } from '../../execution/ScriptE
  * @private still in development
  */
 export declare class PythonExecutionTools implements ScriptExecutionTools {
-    private readonly options;
+    protected readonly options: CommonExecutionToolsOptions;
     constructor(options?: CommonExecutionToolsOptions);
     /**
      * Executes a Python
package/esm/typings/src/scripting/typescript/TypescriptExecutionTools.d.ts CHANGED
@@ -9,7 +9,7 @@ import type { ScriptExecutionToolsExecuteOptions } from '../../execution/ScriptE
  * @private still in development
  */
 export declare class TypescriptExecutionTools implements ScriptExecutionTools {
-    private readonly options;
+    protected readonly options: CommonExecutionToolsOptions;
     constructor(options?: CommonExecutionToolsOptions);
     /**
      * Executes a TypeScript
package/esm/typings/src/storage/files-storage/FilesStorage.d.ts CHANGED
@@ -6,7 +6,7 @@ import type { FilesStorageOptions } from './FilesStorageOptions';
  * @public exported from `@promptbook/node`
  */
 export declare class FilesStorage<TItem> implements PromptbookStorage<TItem> {
-    private readonly options;
+    protected readonly options: FilesStorageOptions;
     constructor(options: FilesStorageOptions);
     /**
      * @@@
package/esm/typings/src/types/PipelineJson/KnowledgeSourceJson.d.ts CHANGED
@@ -11,18 +11,11 @@ import type { string_name } from '../typeAliases';
  */
 export type KnowledgeSourceJson = {
     /**
-     * Unique identifier of the knowledge source
+     * @@@
      */
     readonly name: string_name;
     /**
-     * Source of one knowledge
-     *
-     * It can be a link, a relative path to file or direct text or combination of those
-     *
-     * For example `"https://pavolhejny.com/"`
-     * For example `"./pavol-hejny-cv.pdf"`
-     * For example `"Pavol Hejný has web https://pavolhejny.com/"`
-     * For example `"Pavol Hejný is web developer and creator of Promptbook and Collboard"`
+     * @@@
      */
     readonly sourceContent: string_knowledge_source_content;
 };
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/remote-server",
-    "version": "0.71.0-0",
+    "version": "0.72.0-1",
     "description": "Supercharge your use of large language models",
     "private": false,
     "sideEffects": false,
@@ -51,7 +51,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/remote-server.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.71.0-0"
+        "@promptbook/core": "0.72.0-1"
     },
     "dependencies": {
         "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -14,7 +14,7 @@
     /**
      * The version of the Promptbook library
      */
-    var PROMPTBOOK_VERSION = '0.70.0-1';
+    var PROMPTBOOK_VERSION = '0.72.0-0';
     // TODO:[main] !!!! List here all the versions and annotate + put into script
 
     /*! *****************************************************************************