@promptbook/wizard 0.100.0-48 → 0.100.0-49

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -39,7 +39,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.100.0-48';
+ const PROMPTBOOK_ENGINE_VERSION = '0.100.0-49';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -14,6 +14,8 @@ import type { ChatProps } from '../book-components/Chat/Chat/Chat';
  import { Chat } from '../book-components/Chat/Chat/Chat';
  import type { ChatMessage } from '../book-components/Chat/interfaces/ChatMessage';
  import type { ChatParticipant } from '../book-components/Chat/interfaces/ChatParticipant';
+ import type { LlmChatProps } from '../book-components/Chat/LlmChat/LlmChat';
+ import { LlmChat } from '../book-components/Chat/LlmChat/LlmChat';
  export { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION };
  export type { AvatarChipProps };
  export { AvatarChip };
@@ -30,3 +32,5 @@ export type { ChatProps };
  export { Chat };
  export type { ChatMessage };
  export type { ChatParticipant };
+ export type { LlmChatProps };
+ export { LlmChat };
@@ -19,6 +19,7 @@ import type { BookEditorProps } from '../book-components/BookEditor/BookEditor';
  import type { ChatProps } from '../book-components/Chat/Chat/Chat';
  import type { ChatMessage } from '../book-components/Chat/interfaces/ChatMessage';
  import type { ChatParticipant } from '../book-components/Chat/interfaces/ChatParticipant';
+ import type { LlmChatProps } from '../book-components/Chat/LlmChat/LlmChat';
  import type { PipelineCollection } from '../collection/PipelineCollection';
  import type { Command } from '../commands/_common/types/Command';
  import type { CommandParser } from '../commands/_common/types/CommandParser';
@@ -347,6 +348,7 @@ export type { BookEditorProps };
  export type { ChatProps };
  export type { ChatMessage };
  export type { ChatParticipant };
+ export type { LlmChatProps };
  export type { PipelineCollection };
  export type { Command };
  export type { CommandParser };
@@ -0,0 +1,108 @@
+ import type { CSSProperties, ReactNode } from 'react';
+ import type { Promisable } from 'type-fest';
+ import type { LlmExecutionTools } from '../../../execution/LlmExecutionTools';
+ import type { ChatMessage } from '../interfaces/ChatMessage';
+ import type { ChatParticipant } from '../interfaces/ChatParticipant';
+ /**
+ * Props for LlmChat component, derived from ChatProps but with LLM-specific modifications
+ *
+ * @public exported from `@promptbook/components`
+ */
+ export interface LlmChatProps {
+ /**
+ * LLM execution tools for chatting with the model
+ */
+ readonly llmTools: LlmExecutionTools;
+ /**
+ * Called when the chat state changes (messages, participants, etc.)
+ */
+ onChange?(messages: ReadonlyArray<ChatMessage>, participants: ReadonlyArray<ChatParticipant>): void;
+ /**
+ * Optional callback, when set, button for resetting chat will be shown
+ */
+ onReset?(): Promisable<void>;
+ /**
+ * Determines whether the voice recognition button is rendered
+ */
+ readonly isVoiceRecognitionButtonShown?: boolean;
+ /**
+ * The language code to use for voice recognition
+ */
+ readonly voiceLanguage?: string;
+ /**
+ * Optional placeholder message for the textarea
+ *
+ * @default "Write a message"
+ */
+ readonly placeholderMessageContent?: string;
+ /**
+ * Optional preset message in chat
+ */
+ readonly defaultMessage?: string;
+ /**
+ * Content to be shown inside the chat bar in head
+ * If not provided, the chat bar will not be rendered
+ */
+ readonly children?: ReactNode;
+ /**
+ * Optional CSS class name which will be added to root <div/> element
+ */
+ readonly className?: string;
+ /**
+ * Optional CSS style which will be added to root <div/> element
+ */
+ readonly style?: CSSProperties;
+ /**
+ * Voice call props - when provided, voice call button will be shown
+ */
+ readonly voiceCallProps?: {
+ selectedModel: string;
+ providerClients: Map<string, unknown>;
+ currentPersonaContent?: string;
+ onVoiceMessage?: (content: string, isVoiceCall: boolean) => void;
+ onAssistantVoiceResponse?: (content: string, isVoiceCall: boolean) => void;
+ onVoiceCallStateChange?: (isVoiceCalling: boolean) => void;
+ };
+ /**
+ * Indicates whether a voice call is currently active
+ */
+ readonly isVoiceCalling?: boolean;
+ /**
+ * Whether experimental features are enabled (required for voice calling)
+ */
+ readonly isExperimental?: boolean;
+ /**
+ * Whether the save button is enabled and shown
+ */
+ readonly isSaveButtonEnabled?: boolean;
+ /**
+ * Optional markdown header to include at the top of exported files.
+ * Example: "## Discussion Topic\n\nSome topic here"
+ */
+ readonly exportHeaderMarkdown?: string;
+ /**
+ * Optional callback to create a new agent from the template.
+ * If provided, renders the [Use this template] button.
+ */
+ onUseTemplate?(): void;
+ }
+ /**
+ * LlmChat component that provides chat functionality with LLM integration
+ *
+ * This component internally manages messages, participants, and task progress,
+ * and uses the provided LLM tools to generate responses via callChatModel.
+ *
+ * Note: There are multiple chat components:
+ * - <Chat/> renders chat as it is without any logic
+ * - <SimpleChat/> with callback function after each message 🔵->🟢->🔵->🟢->🔵->🟢->...
+ * - <WorkerChat/> with continuously running worker function on background which binds on dialogues queue 🔵->🟢->🔵->🟢->🔵->🟢->...
+ * - <SignalChat/> fully controlled by signal that is passed in 🔵->🟢->🟢->🟢->🔵->🟢->...
+ * - <LlmChat/> connected to LLM Execution Tools of Promptbook
+ * - <AgentChat/> direct OpenAI API integration with streaming responses and model selection
+ * - <ChatbotMiniapp/> Fully working chatbot miniapp created from book
+ * - <AssistantChatPage/> page for assistant chat with welcome message and avatar
+ * - <ModelAwareChat/> wrapper around <Chat/> that provides model-aware avatars
+ *
+ * @public exported from `@promptbook/components`
+ */
+ export declare function LlmChat(props: LlmChatProps): import("react/jsx-runtime").JSX.Element;
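For context on how the newly exported <LlmChat/> component could be consumed, here is a minimal TSX sketch. It assumes an LlmExecutionTools instance is available to the application; the getLlmTools() helper below is a hypothetical placeholder (not part of this package), and the import of the LlmExecutionTools type from @promptbook/types is likewise an assumption. Only llmTools is required; the other props shown are optional, per the declaration above.

import { LlmChat } from '@promptbook/components';
import type { LlmExecutionTools } from '@promptbook/types';

// Placeholder: obtain an `LlmExecutionTools` instance however your app does
// (for example from an OpenAI-backed provider); this helper is NOT part of the package.
declare function getLlmTools(): LlmExecutionTools;

export function SupportChat() {
    return (
        <LlmChat
            llmTools={getLlmTools()}
            placeholderMessageContent="Ask the assistant anything"
            onChange={(messages, participants) => {
                // React to the evolving conversation (persist it, update UI, ...)
                console.info(`${messages.length} messages, ${participants.length} participants`);
            }}
            onReset={() => {
                // Per the props declaration, the reset button is shown only when onReset is provided
                console.info('Chat was reset');
            }}
        />
    );
}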
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
  export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
  /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.100.0-47`).
+ * It follows semantic versioning (e.g., `0.100.0-48`).
  *
  * @generated
  */
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@promptbook/wizard",
- "version": "0.100.0-48",
+ "version": "0.100.0-49",
  "description": "Promptbook: Run AI apps in plain human language across multiple models and platforms",
  "private": false,
  "sideEffects": false,
@@ -95,7 +95,7 @@
  "module": "./esm/index.es.js",
  "typings": "./esm/typings/src/_packages/wizard.index.d.ts",
  "peerDependencies": {
- "@promptbook/core": "0.100.0-48"
+ "@promptbook/core": "0.100.0-49"
  },
  "dependencies": {
  "@ai-sdk/deepseek": "0.1.6",
package/umd/index.umd.js CHANGED
@@ -50,7 +50,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.100.0-48';
+ const PROMPTBOOK_ENGINE_VERSION = '0.100.0-49';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name