@promptbook/remote-server 0.101.0-20 → 0.101.0-21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. package/esm/index.es.js +8 -70
  2. package/esm/index.es.js.map +1 -1
  3. package/esm/typings/src/book-2.0/commitments/ACTION/ACTION.d.ts +0 -12
  4. package/esm/typings/src/book-2.0/commitments/DELETE/DELETE.d.ts +0 -24
  5. package/esm/typings/src/book-2.0/commitments/FORMAT/FORMAT.d.ts +0 -12
  6. package/esm/typings/src/book-2.0/commitments/GOAL/GOAL.d.ts +0 -12
  7. package/esm/typings/src/book-2.0/commitments/KNOWLEDGE/KNOWLEDGE.d.ts +0 -6
  8. package/esm/typings/src/book-2.0/commitments/MEMORY/MEMORY.d.ts +0 -12
  9. package/esm/typings/src/book-2.0/commitments/MESSAGE/MESSAGE.d.ts +0 -12
  10. package/esm/typings/src/book-2.0/commitments/META/META.d.ts +0 -6
  11. package/esm/typings/src/book-2.0/commitments/META_IMAGE/META_IMAGE.d.ts +0 -6
  12. package/esm/typings/src/book-2.0/commitments/META_LINK/META_LINK.d.ts +0 -6
  13. package/esm/typings/src/book-2.0/commitments/MODEL/MODEL.d.ts +0 -12
  14. package/esm/typings/src/book-2.0/commitments/NOTE/NOTE.d.ts +0 -24
  15. package/esm/typings/src/book-2.0/commitments/PERSONA/PERSONA.d.ts +0 -12
  16. package/esm/typings/src/book-2.0/commitments/RULE/RULE.d.ts +0 -12
  17. package/esm/typings/src/book-2.0/commitments/SAMPLE/SAMPLE.d.ts +0 -12
  18. package/esm/typings/src/book-2.0/commitments/SCENARIO/SCENARIO.d.ts +0 -12
  19. package/esm/typings/src/book-2.0/commitments/STYLE/STYLE.d.ts +0 -12
  20. package/esm/typings/src/book-2.0/commitments/_base/createEmptyAgentModelRequirements.d.ts +1 -1
  21. package/esm/typings/src/book-components/AvatarProfile/AvatarChip/AvatarChip.d.ts +3 -0
  22. package/esm/typings/src/book-components/AvatarProfile/AvatarProfile/AvatarProfile.d.ts +3 -0
  23. package/esm/typings/src/book-components/BookEditor/BookEditor.d.ts +3 -0
  24. package/esm/typings/src/book-components/BookEditor/BookEditorInner.d.ts +2 -16
  25. package/esm/typings/src/book-components/Chat/Chat/ChatProps.d.ts +3 -0
  26. package/esm/typings/src/execution/PromptResult.d.ts +2 -4
  27. package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts +2 -5
  28. package/esm/typings/src/llm-providers/agent/AgentLlmExecutionTools.d.ts +6 -2
  29. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionTools.d.ts +3 -8
  30. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionTools.d.ts +4 -5
  31. package/esm/typings/src/llm-providers/mocked/MockedEchoLlmExecutionTools.d.ts +2 -5
  32. package/esm/typings/src/llm-providers/mocked/MockedFackedLlmExecutionTools.d.ts +2 -0
  33. package/esm/typings/src/llm-providers/mocked/test/joker.test.d.ts +4 -0
  34. package/esm/typings/src/llm-providers/mocked/test/mocked-chat.test.d.ts +5 -0
  35. package/esm/typings/src/llm-providers/mocked/test/mocked-completion.test.d.ts +4 -0
  36. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +3 -3
  37. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +1 -0
  38. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +3 -8
  39. package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +5 -14
  40. package/esm/typings/src/scripting/_test/postprocessing.test.d.ts +1 -0
  41. package/esm/typings/src/utils/markdown/humanizeAiText.d.ts +0 -1
  42. package/esm/typings/src/utils/markdown/promptbookifyAiText.d.ts +2 -2
  43. package/esm/typings/src/version.d.ts +1 -1
  44. package/package.json +2 -2
  45. package/umd/index.umd.js +8 -70
  46. package/umd/index.umd.js.map +1 -1
  47. package/esm/typings/src/book-components/Chat/examples/ChatMarkdownDemo.d.ts +0 -16
  48. package/esm/typings/src/expectations/drafts/isDomainNameFree.d.ts +0 -10
  49. package/esm/typings/src/expectations/drafts/isGithubNameFree.d.ts +0 -10
  50. package/esm/typings/src/llm-providers/_common/profiles/llmProviderProfiles.d.ts +0 -81
  51. /package/esm/typings/src/{llm-providers/_common/profiles/test/llmProviderProfiles.test.d.ts → cli/test/ptbk.test.d.ts} +0 -0
package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts CHANGED
@@ -1,15 +1,10 @@
+ import type { ChatParticipant } from '../../book-components/Chat/types/ChatParticipant';
  import type { AvailableModel } from '../../execution/AvailableModel';
  import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
- import type { ChatPromptResult } from '../../execution/PromptResult';
- import type { CompletionPromptResult } from '../../execution/PromptResult';
- import type { EmbeddingPromptResult } from '../../execution/PromptResult';
+ import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult } from '../../execution/PromptResult';
  import type { RemoteClientOptions } from '../../remote-server/types/RemoteClientOptions';
- import type { ChatPrompt } from '../../types/Prompt';
- import type { CompletionPrompt } from '../../types/Prompt';
- import type { EmbeddingPrompt } from '../../types/Prompt';
- import type { string_markdown } from '../../types/typeAliases';
- import type { string_markdown_text } from '../../types/typeAliases';
- import type { string_title } from '../../types/typeAliases';
+ import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt } from '../../types/Prompt';
+ import type { string_markdown, string_markdown_text, string_title } from '../../types/typeAliases';
  /**
  * Remote server is a proxy server that uses its execution tools internally and exposes the executor interface externally.
  *
@@ -24,11 +19,7 @@ export declare class RemoteLlmExecutionTools<TCustomOptions = undefined> impleme
  constructor(options: RemoteClientOptions<TCustomOptions>);
  get title(): string_title & string_markdown_text;
  get description(): string_markdown;
- get profile(): {
- name: string;
- fullname: string;
- color: string;
- };
+ get profile(): ChatParticipant;
  /**
  * Check the configuration of all execution tools
  */
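The `profile` getter is now typed as the shared `ChatParticipant` interface instead of an ad-hoc `{ name; fullname; color }` object. A minimal consumer-side sketch, assuming `ChatParticipant` exposes at least those three fields (the real type lives in `src/book-components/Chat/types/ChatParticipant` and may carry additional optional fields this diff does not show):

```ts
// Sketch only: AssumedChatParticipant stands in for the real ChatParticipant type,
// assuming it includes at least the fields the removed inline type had.
type AssumedChatParticipant = {
    name: string;
    fullname: string;
    color: string;
};

// Render a provider profile (e.g. `remoteLlmExecutionTools.profile`) as a chat label.
function describeProfile(profile: AssumedChatParticipant): string {
    return `${profile.fullname} [${profile.name}] (${profile.color})`;
}
```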
package/esm/typings/src/utils/markdown/humanizeAiText.d.ts CHANGED
@@ -10,5 +10,4 @@ import { string_markdown } from '../../types/typeAliases';
  export declare function humanizeAiText(aiText: string_markdown): string_markdown;
  /**
  * TODO: [🧠] Maybe this should be exported from `@promptbook/utils` not `@promptbook/markdown-utils`
- * TODO: [🅾️] !!! Use this across the project where AI text is involved
  */
package/esm/typings/src/utils/markdown/promptbookifyAiText.d.ts CHANGED
@@ -2,11 +2,11 @@ import { string_markdown } from '../../types/typeAliases';
  /**
  * Function `promptbookifyAiText` will slightly modify the text so we know it was processed by Promptbook
  *
+ * Note: [🔂] This function is idempotent.
+ *
  * @public exported from `@promptbook/markdown-utils`
  */
  export declare function promptbookifyAiText(text: string_markdown): string_markdown;
  /**
- * TODO: !!!!! Make the function idempotent and add "Note: [🔂] This function is idempotent."
- * TODO: [🅾️]!!! Use this across the project where AI text is involved
  * TODO: [🧠][✌️] Make some Promptbook-native token system
  */
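The newly documented idempotence note means a second pass over already-processed text changes nothing. A minimal sketch of how a consumer could check this, assuming the function is imported from `@promptbook/markdown-utils` as the `@public` annotation states:

```ts
import { promptbookifyAiText } from '@promptbook/markdown-utils';

// Idempotence check: applying the function twice must equal applying it once.
const once = promptbookifyAiText('Some AI-generated answer.');
const twice = promptbookifyAiText(once);

console.assert(once === twice, 'promptbookifyAiText should be idempotent');
```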
package/esm/typings/src/version.d.ts CHANGED
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.101.0-19`).
+ * It follows semantic versioning (e.g., `0.101.0-20`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/remote-server",
- "version": "0.101.0-20",
+ "version": "0.101.0-21",
  "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
  "private": false,
  "sideEffects": false,
@@ -95,7 +95,7 @@
  "module": "./esm/index.es.js",
  "typings": "./esm/typings/src/_packages/remote-server.index.d.ts",
  "peerDependencies": {
- "@promptbook/core": "0.101.0-20"
+ "@promptbook/core": "0.101.0-21"
  },
  "dependencies": {
  "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -47,7 +47,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.101.0-20';
+ const PROMPTBOOK_ENGINE_VERSION = '0.101.0-21';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2913,75 +2913,13 @@
  }

  /**
- * Predefined profiles for LLM providers to maintain consistency across the application
- * These profiles represent each provider as a virtual persona in chat interfaces
- *
- * @private !!!!
+ * Profile for Multiple providers aggregation
  */
- const LLM_PROVIDER_PROFILES = {
- OPENAI: {
- name: 'OPENAI',
- fullname: 'OpenAI GPT',
- color: '#10a37f', // OpenAI's signature green
- // Note: avatarSrc could be added when we have provider logos available
- },
- ANTHROPIC: {
- name: 'ANTHROPIC',
- fullname: 'Anthropic Claude',
- color: '#d97706', // Anthropic's orange/amber color
- },
- AZURE_OPENAI: {
- name: 'AZURE_OPENAI',
- fullname: 'Azure OpenAI',
- color: '#0078d4', // Microsoft Azure blue
- },
- GOOGLE: {
- name: 'GOOGLE',
- fullname: 'Google Gemini',
- color: '#4285f4', // Google blue
- },
- DEEPSEEK: {
- name: 'DEEPSEEK',
- fullname: 'DeepSeek',
- color: '#7c3aed', // Purple color for DeepSeek
- },
- OLLAMA: {
- name: 'OLLAMA',
- fullname: 'Ollama',
- color: '#059669', // Emerald green for local models
- },
- REMOTE: {
- name: 'REMOTE',
- fullname: 'Remote Server',
- color: '#6b7280', // Gray for remote/proxy connections
- },
- MOCKED_ECHO: {
- name: 'MOCKED_ECHO',
- fullname: 'Echo (Test)',
- color: '#8b5cf6', // Purple for test/mock tools
- },
- MOCKED_FAKE: {
- name: 'MOCKED_FAKE',
- fullname: 'Fake LLM (Test)',
- color: '#ec4899', // Pink for fake/test tools
- },
- VERCEL: {
- name: 'VERCEL',
- fullname: 'Vercel AI',
- color: '#000000', // Vercel's black
- },
- MULTIPLE: {
- name: 'MULTIPLE',
- fullname: 'Multiple Providers',
- color: '#6366f1', // Indigo for combined/multiple providers
- },
+ const MULTIPLE_PROVIDER_PROFILE = {
+ name: 'MULTIPLE',
+ fullname: 'Multiple Providers',
+ color: '#6366f1',
  };
- /**
- * TODO: Refactor this - each profile must be alongside the provider definition
- * TODO: [🕛] Unite `AgentBasicInformation`, `ChatParticipant`, `LlmExecutionTools` + `LlmToolsMetadata`
- * Note: [💞] Ignore a discrepancy between file name and entity name
- */
-
  /**
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
  *
@@ -3017,7 +2955,7 @@
  `);
  }
  get profile() {
- return LLM_PROVIDER_PROFILES.MULTIPLE;
+ return MULTIPLE_PROVIDER_PROFILE;
  }
  /**
  * Check the configuration of all execution tools
@@ -8241,7 +8179,7 @@
  });
  // Note: OpenAI-compatible chat completions endpoint
  app.post('/v1/chat/completions', async (request, response) => {
- // TODO: !!!! Make more promptbook-native:
+ // TODO: [🧠][🦒] Make OpenAI compatible more promptbook-native - make reverse adapter from LlmExecutionTools to OpenAI-compatible:
  try {
  const params = request.body;
  const { model, messages } = params;
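Since the handler destructures `model` and `messages` from the request body, the endpoint accepts the standard OpenAI chat-completions payload shape. A minimal client-side sketch; the base URL, port, and model name are placeholders for illustration and are not confirmed by this diff:

```ts
// Illustrative only: URL and model name are assumptions, not part of this diff.
const response = await fetch('http://localhost:3000/v1/chat/completions', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
        model: 'gpt-4o', // whichever model the server-side execution tools expose
        messages: [{ role: 'user', content: 'Hello from an OpenAI-compatible client' }],
    }),
});

// The server is expected to respond with an OpenAI-style completion object.
const completion = await response.json();
console.log(completion);
```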