react-optimistic-chat 1.0.0 → 1.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.mts +103 -48
- package/dist/index.d.ts +103 -48
- package/dist/index.js +266 -96
- package/dist/index.mjs +264 -96
- package/package.json +1 -1
package/dist/index.d.mts
CHANGED
@@ -2,12 +2,13 @@ import * as react_jsx_runtime from 'react/jsx-runtime';
 import React$1 from 'react';
 
 type ChatRole = "AI" | "USER";
-type Message = {
+type BaseMessage = {
     id: number | string;
    role: ChatRole;
    content: string;
    isLoading?: boolean;
 };
+type Message<T = {}> = BaseMessage & T;
 
 type Size$1 = 'xs' | 'sm' | 'md' | 'lg';
 type Props$5 = {
@@ -34,14 +35,83 @@ type Props$3 = Message & {
 };
 declare function ChatMessage({ id, role, content, isLoading, wrapperClassName, icon, aiIconWrapperClassName, aiIconColor, bubbleClassName, aiBubbleClassName, userBubbleClassName, position, loadingRenderer, }: Props$3): react_jsx_runtime.JSX.Element;
 
-type Props$2<T> = {
+type MessagePatch = Partial<BaseMessage> & Record<string, unknown>;
+type Props$2<T extends Message = Message> = {
     messages: T[];
-    messageMapper?: (msg: T) =>
-    messageRenderer?: (msg:
+    messageMapper?: (msg: T) => MessagePatch;
+    messageRenderer?: (msg: T) => React$1.ReactNode;
     className?: string;
    loadingRenderer?: React$1.ReactNode;
 };
-declare function ChatList<T>({ messages, messageMapper, messageRenderer, className, loadingRenderer, }: Props$2<T>): react_jsx_runtime.JSX.Element;
+declare function ChatList<T extends Message>({ messages, messageMapper, messageRenderer, className, loadingRenderer, }: Props$2<T>): react_jsx_runtime.JSX.Element;
+
+type VoiceRecognitionController$1 = {
+    start: () => void;
+    stop: () => void;
+    isRecording: boolean;
+};
+type ButtonConfig = {
+    className?: string;
+    icon?: React.ReactNode;
+};
+type Props$1 = {
+    onSend: (value: string) => void | Promise<void>;
+    voice?: boolean | VoiceRecognitionController$1;
+    placeholder?: string;
+    className?: string;
+    inputClassName?: string;
+    micButton?: ButtonConfig;
+    recordingButton?: ButtonConfig;
+    sendButton?: ButtonConfig;
+    sendingButton?: ButtonConfig;
+    maxHeight?: number;
+    value?: string;
+    onChange?: (value: string) => void;
+    isSending: boolean;
+    submitOnEnter?: boolean;
+};
+declare function ChatInput({ onSend, voice, placeholder, className, inputClassName, micButton, recordingButton, sendButton, sendingButton, maxHeight, value, onChange, isSending, submitOnEnter, }: Props$1): react_jsx_runtime.JSX.Element;
+
+type MessageProps = {
+    messages: Message[];
+    messageMapper?: never;
+};
+type RawProps<T> = {
+    messages: T[];
+    messageMapper: (msg: T) => Message;
+};
+type CommonProps = {
+    messageRenderer?: (msg: Message) => React.ReactNode;
+    loadingRenderer?: React.ReactNode;
+    listClassName?: string;
+    onSend: (value: string) => void | Promise<void>;
+    isSending: boolean;
+    disableVoice?: boolean;
+    placeholder?: string;
+    inputClassName?: string;
+    className?: string;
+};
+type Props<T> = CommonProps & (MessageProps | RawProps<T>);
+declare function ChatContainer<T>(props: Props<T>): react_jsx_runtime.JSX.Element;
+
+type ExtraFromRaw$1<TRaw> = Omit<TRaw, keyof BaseMessage>;
+type MessageMapperResult$1 = Pick<BaseMessage, "id" | "role" | "content">;
+type MessageMapper$1<TRaw> = Message<ExtraFromRaw$1<TRaw>>;
+type Options$2<TRaw> = {
+    queryKey: readonly unknown[];
+    queryFn: () => Promise<TRaw[]>;
+    mutationFn: (content: string) => Promise<TRaw>;
+    map: (raw: TRaw) => MessageMapperResult$1;
+    onError?: (error: unknown) => void;
+    staleTime?: number;
+    gcTime?: number;
+};
+declare function useOptimisticChat<TRaw>({ queryKey, queryFn, mutationFn, map, onError, staleTime, gcTime, }: Options$2<TRaw>): {
+    messages: MessageMapper$1<TRaw>[];
+    sendUserMessage: (content: string) => void;
+    isPending: boolean;
+    isInitialLoading: boolean;
+};
 
 interface SpeechGrammar {
     src: string;
@@ -100,59 +170,44 @@ declare global {
         webkitSpeechRecognition: new () => SpeechRecognition;
     }
 }
-type
-
-
+type Options$1 = {
+    lang?: string;
+    onStart?: () => void;
+    onEnd?: () => void;
+    onError?: (error: unknown) => void;
 };
-
-
-
-
-
-    inputClassName?: string;
-    micButton?: ButtonConfig;
-    recordingButton?: ButtonConfig;
-    sendButton?: ButtonConfig;
-    sendingButton?: ButtonConfig;
-    maxHeight?: number;
-    value?: string;
-    onChange?: (value: string) => void;
-    isSending: boolean;
-    submitOnEnter?: boolean;
-    speechLang?: string;
+declare function useBrowserSpeechRecognition({ lang, onStart, onEnd, onError, }?: Options$1): {
+    start: () => void;
+    stop: () => void;
+    isRecording: boolean;
+    onTranscript: (text: string) => void;
 };
-declare function ChatInput({ onSend, disableVoice, placeholder, className, inputClassName, micButton, recordingButton, sendButton, sendingButton, maxHeight, value, onChange, isSending, submitOnEnter, speechLang, }: Props$1): react_jsx_runtime.JSX.Element;
 
-type
-
-
-
-
-    listClassName?: string;
-    onSend: (value: string) => void | Promise<void>;
-    isSending: boolean;
-    disableVoice?: boolean;
-    placeholder?: string;
-    inputClassName?: string;
-    className?: string;
+type VoiceRecognitionController = {
+    start: () => void;
+    stop: () => void;
+    isRecording: boolean;
+    onTranscript: (text: string) => void;
 };
-
-
-type MessageMapper<TRaw> =
-type Options<
+type ExtraFromRaw<TRaw> = Omit<TRaw, keyof BaseMessage>;
+type MessageMapperResult = Pick<BaseMessage, "id" | "role" | "content">;
+type MessageMapper<TRaw> = Message<ExtraFromRaw<TRaw>>;
+type Options<TRaw> = {
     queryKey: readonly unknown[];
-    queryFn: () => Promise<
-    mutationFn: (content: string) => Promise<
-    map:
+    queryFn: () => Promise<TRaw[]>;
+    mutationFn: (content: string) => Promise<TRaw>;
+    map: (raw: TRaw) => MessageMapperResult;
+    voice: VoiceRecognitionController;
     onError?: (error: unknown) => void;
    staleTime?: number;
    gcTime?: number;
 };
-declare function
-    messages:
-    sendUserMessage: (content: string) => void;
+declare function useVoiceOptimisticChat<TRaw>({ queryKey, queryFn, mutationFn, map, voice, onError, staleTime, gcTime, }: Options<TRaw>): {
+    messages: MessageMapper<TRaw>[];
     isPending: boolean;
    isInitialLoading: boolean;
+    startRecording: () => Promise<void>;
+    stopRecording: () => void;
 };
 
-export { ChatContainer, ChatInput, ChatList, ChatMessage, LoadingSpinner, type Message, SendingDots, useOptimisticChat };
+export { ChatContainer, ChatInput, ChatList, ChatMessage, LoadingSpinner, type Message, SendingDots, useBrowserSpeechRecognition, useOptimisticChat, useVoiceOptimisticChat };
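The reworked declarations thread the raw server type straight through the hook: `map` only has to supply `id`, `role`, and `content`, and every other field of `TRaw` survives on the returned `Message<ExtraFromRaw<TRaw>>`. A minimal usage sketch, untested — the endpoint and the `ServerMessage` shape are invented for illustration, and a `@tanstack/react-query` `QueryClientProvider` is assumed to be mounted above:

```tsx
import { ChatContainer, useOptimisticChat } from "react-optimistic-chat";

// Hypothetical server payload -- only id/role/content are required by map().
type ServerMessage = {
  messageId: string;
  sender: "AI" | "USER";
  text: string;
  createdAt: string; // extra field, preserved on the mapped Message
};

function SupportChat() {
  const { messages, sendUserMessage, isPending } =
    useOptimisticChat<ServerMessage>({
      queryKey: ["chat", "support"],
      queryFn: () => fetch("/api/messages").then((r) => r.json()),
      mutationFn: (content) =>
        fetch("/api/messages", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ content }),
        }).then((r) => r.json()),
      // map() normalizes the raw record; the hook spreads the rest of the
      // raw object back onto each message, so createdAt stays available.
      map: (raw) => ({ id: raw.messageId, role: raw.sender, content: raw.text }),
    });

  return (
    <ChatContainer
      messages={messages}
      onSend={sendUserMessage}
      isSending={isPending}
    />
  );
}
```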
package/dist/index.d.ts
CHANGED
@@ -2,12 +2,13 @@ import * as react_jsx_runtime from 'react/jsx-runtime';
 import React$1 from 'react';
 
 type ChatRole = "AI" | "USER";
-type Message = {
+type BaseMessage = {
     id: number | string;
    role: ChatRole;
    content: string;
    isLoading?: boolean;
 };
+type Message<T = {}> = BaseMessage & T;
 
 type Size$1 = 'xs' | 'sm' | 'md' | 'lg';
 type Props$5 = {
@@ -34,14 +35,83 @@ type Props$3 = Message & {
 };
 declare function ChatMessage({ id, role, content, isLoading, wrapperClassName, icon, aiIconWrapperClassName, aiIconColor, bubbleClassName, aiBubbleClassName, userBubbleClassName, position, loadingRenderer, }: Props$3): react_jsx_runtime.JSX.Element;
 
-type Props$2<T> = {
+type MessagePatch = Partial<BaseMessage> & Record<string, unknown>;
+type Props$2<T extends Message = Message> = {
     messages: T[];
-    messageMapper?: (msg: T) =>
-    messageRenderer?: (msg:
+    messageMapper?: (msg: T) => MessagePatch;
+    messageRenderer?: (msg: T) => React$1.ReactNode;
     className?: string;
    loadingRenderer?: React$1.ReactNode;
 };
-declare function ChatList<T>({ messages, messageMapper, messageRenderer, className, loadingRenderer, }: Props$2<T>): react_jsx_runtime.JSX.Element;
+declare function ChatList<T extends Message>({ messages, messageMapper, messageRenderer, className, loadingRenderer, }: Props$2<T>): react_jsx_runtime.JSX.Element;
+
+type VoiceRecognitionController$1 = {
+    start: () => void;
+    stop: () => void;
+    isRecording: boolean;
+};
+type ButtonConfig = {
+    className?: string;
+    icon?: React.ReactNode;
+};
+type Props$1 = {
+    onSend: (value: string) => void | Promise<void>;
+    voice?: boolean | VoiceRecognitionController$1;
+    placeholder?: string;
+    className?: string;
+    inputClassName?: string;
+    micButton?: ButtonConfig;
+    recordingButton?: ButtonConfig;
+    sendButton?: ButtonConfig;
+    sendingButton?: ButtonConfig;
+    maxHeight?: number;
+    value?: string;
+    onChange?: (value: string) => void;
+    isSending: boolean;
+    submitOnEnter?: boolean;
+};
+declare function ChatInput({ onSend, voice, placeholder, className, inputClassName, micButton, recordingButton, sendButton, sendingButton, maxHeight, value, onChange, isSending, submitOnEnter, }: Props$1): react_jsx_runtime.JSX.Element;
+
+type MessageProps = {
+    messages: Message[];
+    messageMapper?: never;
+};
+type RawProps<T> = {
+    messages: T[];
+    messageMapper: (msg: T) => Message;
+};
+type CommonProps = {
+    messageRenderer?: (msg: Message) => React.ReactNode;
+    loadingRenderer?: React.ReactNode;
+    listClassName?: string;
+    onSend: (value: string) => void | Promise<void>;
+    isSending: boolean;
+    disableVoice?: boolean;
+    placeholder?: string;
+    inputClassName?: string;
+    className?: string;
+};
+type Props<T> = CommonProps & (MessageProps | RawProps<T>);
+declare function ChatContainer<T>(props: Props<T>): react_jsx_runtime.JSX.Element;
+
+type ExtraFromRaw$1<TRaw> = Omit<TRaw, keyof BaseMessage>;
+type MessageMapperResult$1 = Pick<BaseMessage, "id" | "role" | "content">;
+type MessageMapper$1<TRaw> = Message<ExtraFromRaw$1<TRaw>>;
+type Options$2<TRaw> = {
+    queryKey: readonly unknown[];
+    queryFn: () => Promise<TRaw[]>;
+    mutationFn: (content: string) => Promise<TRaw>;
+    map: (raw: TRaw) => MessageMapperResult$1;
+    onError?: (error: unknown) => void;
+    staleTime?: number;
+    gcTime?: number;
+};
+declare function useOptimisticChat<TRaw>({ queryKey, queryFn, mutationFn, map, onError, staleTime, gcTime, }: Options$2<TRaw>): {
+    messages: MessageMapper$1<TRaw>[];
+    sendUserMessage: (content: string) => void;
+    isPending: boolean;
+    isInitialLoading: boolean;
+};
 
 interface SpeechGrammar {
     src: string;
@@ -100,59 +170,44 @@ declare global {
         webkitSpeechRecognition: new () => SpeechRecognition;
     }
 }
-type
-
-
+type Options$1 = {
+    lang?: string;
+    onStart?: () => void;
+    onEnd?: () => void;
+    onError?: (error: unknown) => void;
 };
-
-
-
-
-
-    inputClassName?: string;
-    micButton?: ButtonConfig;
-    recordingButton?: ButtonConfig;
-    sendButton?: ButtonConfig;
-    sendingButton?: ButtonConfig;
-    maxHeight?: number;
-    value?: string;
-    onChange?: (value: string) => void;
-    isSending: boolean;
-    submitOnEnter?: boolean;
-    speechLang?: string;
+declare function useBrowserSpeechRecognition({ lang, onStart, onEnd, onError, }?: Options$1): {
+    start: () => void;
+    stop: () => void;
+    isRecording: boolean;
+    onTranscript: (text: string) => void;
 };
-declare function ChatInput({ onSend, disableVoice, placeholder, className, inputClassName, micButton, recordingButton, sendButton, sendingButton, maxHeight, value, onChange, isSending, submitOnEnter, speechLang, }: Props$1): react_jsx_runtime.JSX.Element;
 
-type
-
-
-
-
-    listClassName?: string;
-    onSend: (value: string) => void | Promise<void>;
-    isSending: boolean;
-    disableVoice?: boolean;
-    placeholder?: string;
-    inputClassName?: string;
-    className?: string;
+type VoiceRecognitionController = {
+    start: () => void;
+    stop: () => void;
+    isRecording: boolean;
+    onTranscript: (text: string) => void;
 };
-
-
-type MessageMapper<TRaw> =
-type Options<
+type ExtraFromRaw<TRaw> = Omit<TRaw, keyof BaseMessage>;
+type MessageMapperResult = Pick<BaseMessage, "id" | "role" | "content">;
+type MessageMapper<TRaw> = Message<ExtraFromRaw<TRaw>>;
+type Options<TRaw> = {
     queryKey: readonly unknown[];
-    queryFn: () => Promise<
-    mutationFn: (content: string) => Promise<
-    map:
+    queryFn: () => Promise<TRaw[]>;
+    mutationFn: (content: string) => Promise<TRaw>;
+    map: (raw: TRaw) => MessageMapperResult;
+    voice: VoiceRecognitionController;
     onError?: (error: unknown) => void;
    staleTime?: number;
    gcTime?: number;
 };
-declare function
-    messages:
-    sendUserMessage: (content: string) => void;
+declare function useVoiceOptimisticChat<TRaw>({ queryKey, queryFn, mutationFn, map, voice, onError, staleTime, gcTime, }: Options<TRaw>): {
+    messages: MessageMapper<TRaw>[];
     isPending: boolean;
    isInitialLoading: boolean;
+    startRecording: () => Promise<void>;
+    stopRecording: () => void;
 };
 
-export { ChatContainer, ChatInput, ChatList, ChatMessage, LoadingSpinner, type Message, SendingDots, useOptimisticChat };
+export { ChatContainer, ChatInput, ChatList, ChatMessage, LoadingSpinner, type Message, SendingDots, useBrowserSpeechRecognition, useOptimisticChat, useVoiceOptimisticChat };
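`ChatInput`'s old `disableVoice`/`speechLang` pair is replaced by a single `voice` prop: `true` (the default) uses the built-in browser recognizer, `false` hides the mic, and an object plugs in any `VoiceRecognitionController`. A sketch of the controller case — illustrative only; note that per the implementation diff below, `ChatInput` auto-wires transcripts only for its own built-in recognizer, so a caller-supplied controller should push text through `value`/`onChange` as done here:

```tsx
import { useEffect, useState } from "react";
import { ChatInput, useBrowserSpeechRecognition } from "react-optimistic-chat";

function EnglishVoiceInput({ onSend, isSending }: {
  onSend: (value: string) => void;
  isSending: boolean;
}) {
  const [draft, setDraft] = useState("");
  // Built-in recognizer, but with an explicit lang (the default is "ko-KR").
  const voice = useBrowserSpeechRecognition({
    lang: "en-US",
    onError: (e) => console.warn("speech recognition unavailable:", e),
  });

  // Route transcripts into our controlled value; onTranscript is a setter
  // that stores the handler in a ref inside the hook.
  useEffect(() => {
    voice.onTranscript = (text) => setDraft(text);
  }, [voice]);

  return (
    <ChatInput
      onSend={onSend}
      isSending={isSending}
      voice={voice}
      value={draft}
      onChange={setDraft}
      submitOnEnter
    />
  );
}
```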
package/dist/index.js
CHANGED
@@ -53,7 +53,9 @@ __export(index_exports, {
   ChatMessage: () => ChatMessage,
   LoadingSpinner: () => LoadingSpinner,
   SendingDots: () => SendingDots,
-  useOptimisticChat: () => useOptimisticChat
+  useBrowserSpeechRecognition: () => useBrowserSpeechRecognition,
+  useOptimisticChat: () => useOptimisticChat,
+  useVoiceOptimisticChat: () => useVoiceOptimisticChat
 });
 module.exports = __toCommonJS(index_exports);
 
@@ -204,7 +206,7 @@ function ChatList({
   className,
   loadingRenderer
 }) {
-  const mappedMessages = messageMapper ? messages.map(messageMapper) : messages;
+  const mappedMessages = messageMapper ? messages.map((msg) => __spreadValues(__spreadValues({}, msg), messageMapper(msg))) : messages;
   return /* @__PURE__ */ (0, import_jsx_runtime4.jsx)("div", { className: `flex flex-col ${className}`, children: mappedMessages.map((msg) => {
     if (messageRenderer) {
       return /* @__PURE__ */ (0, import_jsx_runtime4.jsx)(import_react2.default.Fragment, { children: messageRenderer(msg) }, msg.id);
@@ -220,11 +222,82 @@ function ChatList({
 }
 
 // src/components/ChatInput.tsx
+var import_react4 = require("react");
+
+// src/hooks/useBrowserSpeechRecognition.ts
 var import_react3 = require("react");
+function useBrowserSpeechRecognition({
+  lang = "ko-KR",
+  onStart,
+  onEnd,
+  onError
+} = {}) {
+  const [isRecording, setIsRecording] = (0, import_react3.useState)(false);
+  const recognitionRef = (0, import_react3.useRef)(null);
+  const onTranscriptRef = (0, import_react3.useRef)(void 0);
+  const start = () => {
+    const Speech = window.SpeechRecognition || window.webkitSpeechRecognition;
+    if (!Speech) {
+      onError == null ? void 0 : onError(new Error("SpeechRecognition not supported"));
+      return;
+    }
+    const recognition = new Speech();
+    recognition.lang = lang;
+    recognition.continuous = true;
+    recognition.interimResults = true;
+    recognition.onstart = () => {
+      setIsRecording(true);
+      onStart == null ? void 0 : onStart();
+    };
+    recognition.onend = () => {
+      setIsRecording(false);
+      onEnd == null ? void 0 : onEnd();
+    };
+    recognition.onresult = (event) => {
+      var _a;
+      const transcript = Array.from(event.results).map((r) => {
+        var _a2;
+        return (_a2 = r[0]) == null ? void 0 : _a2.transcript;
+      }).join("");
+      (_a = onTranscriptRef.current) == null ? void 0 : _a.call(onTranscriptRef, transcript);
+    };
+    recognition.onerror = (e) => {
+      onError == null ? void 0 : onError(e);
+    };
+    recognitionRef.current = recognition;
+    recognition.start();
+  };
+  const stop = () => {
+    var _a;
+    (_a = recognitionRef.current) == null ? void 0 : _a.stop();
+  };
+  (0, import_react3.useEffect)(() => {
+    return () => {
+      var _a;
+      (_a = recognitionRef.current) == null ? void 0 : _a.stop();
+      recognitionRef.current = null;
+    };
+  }, []);
+  return {
+    start,
+    // start speech recognition
+    stop,
+    // stop speech recognition
+    isRecording,
+    // current recording state
+    // setter that lets the caller inject transcript handling from outside;
+    // recognition events fire outside the React lifecycle, so the handler is kept in a ref
+    set onTranscript(fn) {
+      onTranscriptRef.current = fn;
+    }
+  };
+}
+
+// src/components/ChatInput.tsx
 var import_jsx_runtime5 = require("react/jsx-runtime");
 function ChatInput({
   onSend,
-
+  voice = true,
   placeholder = "\uBA54\uC2DC\uC9C0\uB97C \uC785\uB825\uD558\uC138\uC694...",
   className = "",
   inputClassName = "",
@@ -236,34 +309,29 @@ function ChatInput({
   value,
   onChange,
   isSending,
-  submitOnEnter = false
-  speechLang = "ko-KR"
+  submitOnEnter = false
 }) {
-
-  const [
-  const textareaRef = (0,
+  var _a;
+  const [innerText, setInnerText] = (0, import_react4.useState)("");
+  const textareaRef = (0, import_react4.useRef)(null);
   const isControlled = value !== void 0;
   const text = isControlled ? value : innerText;
   const isEmpty = text.trim().length === 0;
-  const
-
-
-
-
-
-      r.onresult = null;
-      r.onstart = null;
-      r.onend = null;
-      try {
-        r.stop();
-      } catch (e) {
-        console.warn("SpeechRecognition stop error:", e);
-      }
+  const defaultVoice = useBrowserSpeechRecognition();
+  (0, import_react4.useEffect)(() => {
+    if (!defaultVoice) return;
+    defaultVoice.onTranscript = (text2) => {
+      if (!isControlled) {
+        setInnerText(text2);
       }
-
+      onChange == null ? void 0 : onChange(text2);
     };
-  }, []);
-
+  }, [defaultVoice, isControlled, onChange]);
+  const voiceController = voice === true ? defaultVoice : typeof voice === "object" ? voice : null;
+  const isRecording = (_a = voiceController == null ? void 0 : voiceController.isRecording) != null ? _a : false;
+  const isVoiceEnabled = Boolean(voiceController);
+  const isVoiceMode = isVoiceEnabled && !isSending && (isEmpty || isRecording);
+  (0, import_react4.useEffect)(() => {
     const el = textareaRef.current;
    if (!el) return;
    el.style.height = "auto";
@@ -299,53 +367,21 @@ function ChatInput({
     }
   };
   const handleRecord = () => {
-
-
-
-
-
-        console.error("Browser does not support SpeechRecognition");
-        alert("\uD604\uC7AC \uBE0C\uB77C\uC6B0\uC800\uC5D0\uC11C\uB294 \uC74C\uC131 \uC778\uC2DD \uAE30\uB2A5\uC744 \uC0AC\uC6A9\uD560 \uC218 \uC5C6\uC2B5\uB2C8\uB2E4.");
-        return;
-      }
-      recognition.current = new Speech();
-      recognition.current.lang = speechLang;
-      recognition.current.continuous = true;
-      recognition.current.interimResults = true;
-      recognition.current.onstart = () => {
-        setIsRecording(true);
-      };
-      recognition.current.onend = () => {
-        setIsRecording(false);
-      };
-      recognition.current.onresult = (event) => {
-        const newTranscript = Array.from(event.results).map((r) => {
-          var _a2;
-          return (_a2 = r[0]) == null ? void 0 : _a2.transcript;
-        }).join("");
-        setInnerText(newTranscript);
-      };
-      (_a = recognition.current) == null ? void 0 : _a.start();
-      } else {
-        (_b = recognition.current) == null ? void 0 : _b.stop();
-      }
-    } catch (e) {
-      console.error("Speech Recognition error: ", e);
-      alert("\uC74C\uC131 \uC785\uB825\uC744 \uC0AC\uC6A9\uD560 \uC218 \uC5C6\uC2B5\uB2C8\uB2E4. \uD14D\uC2A4\uD2B8\uB85C \uC785\uB825\uD574\uC8FC\uC138\uC694.");
-      setIsRecording(false);
+    if (!voiceController) return;
+    if (isRecording) {
+      voiceController.stop();
+    } else {
+      voiceController.start();
     }
   };
   const getActivityLayer = () => {
     if (isSending) return "sending";
-    if (
+    if (isVoiceEnabled) {
       if (isRecording) return "recording";
      if (isVoiceMode) return "mic";
      return "send";
     }
-    if (
-    if (!isEmpty) return "send";
-    return null;
-    }
+    if (!isEmpty) return "send";
     return null;
   };
   const activeLayer = getActivityLayer();
@@ -480,24 +516,25 @@ function ChatInput({
 }
 
 // src/components/ChatContainer.tsx
-var
+var import_react5 = require("react");
 var import_jsx_runtime6 = require("react/jsx-runtime");
-function ChatContainer({
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+function ChatContainer(props) {
+  const [isAtBottom, setIsAtBottom] = (0, import_react5.useState)(true);
+  const scrollRef = (0, import_react5.useRef)(null);
+  const {
+    messages,
+    messageRenderer,
+    loadingRenderer,
+    listClassName,
+    onSend,
+    isSending,
+    disableVoice,
+    placeholder,
+    inputClassName,
+    className
+  } = props;
+  const mappedMessages = typeof props.messageMapper === "function" ? props.messages.map(props.messageMapper) : messages;
+  (0, import_react5.useEffect)(() => {
     const el = scrollRef.current;
    if (!el) return;
    el.scrollTop = el.scrollHeight;
@@ -508,7 +545,7 @@ function ChatContainer({
     el.addEventListener("scroll", handleScroll);
    return () => el.removeEventListener("scroll", handleScroll);
   }, []);
-  (0,
+  (0, import_react5.useEffect)(() => {
     const el = scrollRef.current;
    if (!el) return;
    if (isAtBottom) {
@@ -542,9 +579,9 @@ function ChatContainer({
           className: `flex-1 overflow-y-auto chatContainer-scroll p-2`,
          children: /* @__PURE__ */ (0, import_jsx_runtime6.jsx)(
            ChatList,
-            __spreadValues(__spreadValues(__spreadValues(
-              messages
-            },
+            __spreadValues(__spreadValues(__spreadValues({
+              messages: mappedMessages
+            }, messageRenderer && { messageRenderer }), loadingRenderer && { loadingRenderer }), listClassName && { className: listClassName })
           )
         }
       ),
@@ -590,7 +627,7 @@ function ChatContainer({
 
 // src/hooks/useOptimisticChat.ts
 var import_react_query = require("@tanstack/react-query");
-var
+var import_react6 = require("react");
 function useOptimisticChat({
   queryKey,
  queryFn,
@@ -600,7 +637,7 @@ function useOptimisticChat({
   staleTime = 0,
  gcTime = 0
 }) {
-  const [isPending, setIsPending] = (0,
+  const [isPending, setIsPending] = (0, import_react6.useState)(false);
   const queryClient = (0, import_react_query.useQueryClient)();
  const {
    data: messages = [],
@@ -608,8 +645,8 @@ function useOptimisticChat({
   } = (0, import_react_query.useQuery)({
     queryKey,
    queryFn: async () => {
-      const
-      return
+      const raw = await queryFn();
+      return raw.map((r) => __spreadValues(__spreadValues({}, map(r)), r));
     },
    staleTime,
    gcTime
@@ -642,10 +679,10 @@ function useOptimisticChat({
         }
       ];
      });
-      return { prev };
+      return prev ? { prev } : {};
     },
    onSuccess: (rawAiResponse) => {
-      const aiMessage = map(rawAiResponse);
+      const aiMessage = __spreadValues(__spreadValues({}, map(rawAiResponse)), rawAiResponse);
       queryClient.setQueryData(queryKey, (old) => {
        if (!old || old.length === 0) {
          return [aiMessage];
@@ -665,10 +702,6 @@ function useOptimisticChat({
         queryClient.setQueryData(queryKey, context.prev);
       }
      onError == null ? void 0 : onError(error);
-    },
-    // resynchronize with the latest server data after the mutation
-    onSettled: () => {
-      queryClient.invalidateQueries({ queryKey });
     }
   });
  const sendUserMessage = (content) => {
@@ -686,6 +719,141 @@ function useOptimisticChat({
     // initial loading state
   };
 }
+
+// src/hooks/useVoiceOptimisticChat.ts
+var import_react_query2 = require("@tanstack/react-query");
+var import_react7 = require("react");
+function useVoiceOptimisticChat({
+  queryKey,
+  queryFn,
+  mutationFn,
+  map,
+  voice,
+  onError,
+  staleTime = 0,
+  gcTime = 0
+}) {
+  const [isPending, setIsPending] = (0, import_react7.useState)(false);
+  const queryClient = (0, import_react_query2.useQueryClient)();
+  const currentTextRef = (0, import_react7.useRef)("");
+  const rollbackRef = (0, import_react7.useRef)(void 0);
+  const {
+    data: messages = [],
+    isLoading: isInitialLoading
+  } = (0, import_react_query2.useQuery)({
+    queryKey,
+    queryFn: async () => {
+      const raw = await queryFn();
+      return raw.map((r) => __spreadValues(__spreadValues({}, map(r)), r));
+    },
+    staleTime,
+    gcTime
+  });
+  const mutation = (0, import_react_query2.useMutation)({
+    mutationFn,
+    // (content: string) => Promise<TMutationRaw>
+    onMutate: async () => {
+      setIsPending(true);
+      const prev = queryClient.getQueryData(queryKey);
+      if (prev) {
+        await queryClient.cancelQueries({ queryKey });
+      }
+      queryClient.setQueryData(queryKey, (old) => {
+        const base = old != null ? old : [];
+        return [
+          ...base,
+          // append the AI placeholder
+          {
+            id: crypto.randomUUID(),
+            role: "AI",
+            content: "",
+            isLoading: true
+          }
+        ];
+      });
+      return prev ? { prev } : {};
+    },
+    onSuccess: (rawAiResponse) => {
+      const aiMessage = __spreadValues(__spreadValues({}, map(rawAiResponse)), rawAiResponse);
+      queryClient.setQueryData(queryKey, (old) => {
+        if (!old || old.length === 0) {
+          return [aiMessage];
+        }
+        const next = [...old];
+        const lastIndex = next.length - 1;
+        next[lastIndex] = __spreadProps(__spreadValues(__spreadValues({}, next[lastIndex]), aiMessage), {
+          isLoading: false
+        });
+        return next;
+      });
+      setIsPending(false);
+    },
+    onError: (error, _variables, context) => {
+      setIsPending(false);
+      if (context == null ? void 0 : context.prev) {
+        queryClient.setQueryData(queryKey, context.prev);
+      }
+      onError == null ? void 0 : onError(error);
+    }
+  });
+  const startRecording = async () => {
+    currentTextRef.current = "";
+    const prev = queryClient.getQueryData(queryKey);
+    rollbackRef.current = prev;
+    if (prev) {
+      await queryClient.cancelQueries({ queryKey });
+    }
+    queryClient.setQueryData(queryKey, (old) => [
+      ...old != null ? old : [],
+      {
+        id: crypto.randomUUID(),
+        role: "USER",
+        content: ""
+      }
+    ]);
+    voice.start();
+  };
+  const onTranscript = (text) => {
+    currentTextRef.current = text;
+    queryClient.setQueryData(queryKey, (old) => {
+      var _a;
+      if (!old) return old;
+      const next = [...old];
+      const last = next.length - 1;
+      if (((_a = next[last]) == null ? void 0 : _a.role) !== "USER") return old;
+      next[last] = __spreadProps(__spreadValues({}, next[last]), {
+        content: text
+      });
+      return next;
+    });
+  };
+  (0, import_react7.useEffect)(() => {
+    voice.onTranscript = onTranscript;
+  }, [voice]);
+  const stopRecording = () => {
+    voice.stop();
+    const finalText = currentTextRef.current.trim();
+    if (!finalText) {
+      if (rollbackRef.current) {
+        queryClient.setQueryData(queryKey, rollbackRef.current);
+      }
+      return;
+    }
+    mutation.mutate(finalText);
+  };
+  return {
+    messages,
+    // Message<TExtra>[]
+    isPending,
+    // loading from the moment the user sends a message until the AI reply arrives
+    isInitialLoading,
+    // initial loading state
+    startRecording,
+    // start voice recording
+    stopRecording
+    // stop voice recording
+  };
+}
+
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   ChatContainer,
@@ -694,5 +862,7 @@ function useOptimisticChat({
   ChatMessage,
   LoadingSpinner,
   SendingDots,
-  useOptimisticChat
+  useBrowserSpeechRecognition,
+  useOptimisticChat,
+  useVoiceOptimisticChat
 });
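Both builds now ship `useVoiceOptimisticChat`, which pairs the optimistic-cache machinery with a `VoiceRecognitionController`: `startRecording()` appends an empty optimistic USER message and starts the recognizer, live transcripts are written into that placeholder, and `stopRecording()` either rolls the placeholder back (empty transcript) or submits the final text through the mutation. A usage sketch, untested — the endpoint and the `RawMessage` shape are invented for illustration:

```tsx
import {
  useBrowserSpeechRecognition,
  useVoiceOptimisticChat,
} from "react-optimistic-chat";

// Hypothetical raw shape that already matches id/role/content.
type RawMessage = { id: string; role: "AI" | "USER"; content: string };

function VoiceChat() {
  const voice = useBrowserSpeechRecognition({ lang: "en-US" });
  const { messages, isPending, startRecording, stopRecording } =
    useVoiceOptimisticChat<RawMessage>({
      queryKey: ["voice-chat"],
      queryFn: () => fetch("/api/voice-chat").then((r) => r.json()),
      mutationFn: (content) =>
        fetch("/api/voice-chat", {
          method: "POST",
          headers: { "Content-Type": "application/json" },
          body: JSON.stringify({ content }),
        }).then((r) => r.json()),
      map: (raw) => raw, // already id/role/content
      voice,
    });

  return (
    <div>
      <ul>
        {messages.map((m) => (
          <li key={m.id}>{m.role}: {m.content}</li>
        ))}
      </ul>
      <button onClick={startRecording} disabled={voice.isRecording || isPending}>
        Record
      </button>
      <button onClick={stopRecording} disabled={!voice.isRecording}>
        Stop &amp; send
      </button>
    </div>
  );
}
```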
package/dist/index.mjs
CHANGED
@@ -165,7 +165,7 @@ function ChatList({
   className,
   loadingRenderer
 }) {
-  const mappedMessages = messageMapper ? messages.map(messageMapper) : messages;
+  const mappedMessages = messageMapper ? messages.map((msg) => __spreadValues(__spreadValues({}, msg), messageMapper(msg))) : messages;
   return /* @__PURE__ */ jsx4("div", { className: `flex flex-col ${className}`, children: mappedMessages.map((msg) => {
     if (messageRenderer) {
       return /* @__PURE__ */ jsx4(React2.Fragment, { children: messageRenderer(msg) }, msg.id);
@@ -181,11 +181,82 @@ function ChatList({
 }
 
 // src/components/ChatInput.tsx
-import { useState, useRef, useEffect } from "react";
+import { useState as useState2, useRef as useRef2, useEffect as useEffect2 } from "react";
+
+// src/hooks/useBrowserSpeechRecognition.ts
+import { useEffect, useRef, useState } from "react";
+function useBrowserSpeechRecognition({
+  lang = "ko-KR",
+  onStart,
+  onEnd,
+  onError
+} = {}) {
+  const [isRecording, setIsRecording] = useState(false);
+  const recognitionRef = useRef(null);
+  const onTranscriptRef = useRef(void 0);
+  const start = () => {
+    const Speech = window.SpeechRecognition || window.webkitSpeechRecognition;
+    if (!Speech) {
+      onError == null ? void 0 : onError(new Error("SpeechRecognition not supported"));
+      return;
+    }
+    const recognition = new Speech();
+    recognition.lang = lang;
+    recognition.continuous = true;
+    recognition.interimResults = true;
+    recognition.onstart = () => {
+      setIsRecording(true);
+      onStart == null ? void 0 : onStart();
+    };
+    recognition.onend = () => {
+      setIsRecording(false);
+      onEnd == null ? void 0 : onEnd();
+    };
+    recognition.onresult = (event) => {
+      var _a;
+      const transcript = Array.from(event.results).map((r) => {
+        var _a2;
+        return (_a2 = r[0]) == null ? void 0 : _a2.transcript;
+      }).join("");
+      (_a = onTranscriptRef.current) == null ? void 0 : _a.call(onTranscriptRef, transcript);
+    };
+    recognition.onerror = (e) => {
+      onError == null ? void 0 : onError(e);
+    };
+    recognitionRef.current = recognition;
+    recognition.start();
+  };
+  const stop = () => {
+    var _a;
+    (_a = recognitionRef.current) == null ? void 0 : _a.stop();
+  };
+  useEffect(() => {
+    return () => {
+      var _a;
+      (_a = recognitionRef.current) == null ? void 0 : _a.stop();
+      recognitionRef.current = null;
+    };
+  }, []);
+  return {
+    start,
+    // start speech recognition
+    stop,
+    // stop speech recognition
+    isRecording,
+    // current recording state
+    // setter that lets the caller inject transcript handling from outside;
+    // recognition events fire outside the React lifecycle, so the handler is kept in a ref
+    set onTranscript(fn) {
+      onTranscriptRef.current = fn;
+    }
+  };
+}
+
+// src/components/ChatInput.tsx
 import { jsx as jsx5, jsxs as jsxs3 } from "react/jsx-runtime";
 function ChatInput({
   onSend,
-
+  voice = true,
   placeholder = "\uBA54\uC2DC\uC9C0\uB97C \uC785\uB825\uD558\uC138\uC694...",
   className = "",
   inputClassName = "",
@@ -197,34 +268,29 @@ function ChatInput({
   value,
   onChange,
   isSending,
-  submitOnEnter = false
-  speechLang = "ko-KR"
+  submitOnEnter = false
 }) {
-
-  const [
-  const textareaRef =
+  var _a;
+  const [innerText, setInnerText] = useState2("");
+  const textareaRef = useRef2(null);
   const isControlled = value !== void 0;
   const text = isControlled ? value : innerText;
   const isEmpty = text.trim().length === 0;
-  const
-
-
-
-
-
-      r.onresult = null;
-      r.onstart = null;
-      r.onend = null;
-      try {
-        r.stop();
-      } catch (e) {
-        console.warn("SpeechRecognition stop error:", e);
-      }
+  const defaultVoice = useBrowserSpeechRecognition();
+  useEffect2(() => {
+    if (!defaultVoice) return;
+    defaultVoice.onTranscript = (text2) => {
+      if (!isControlled) {
+        setInnerText(text2);
       }
-
+      onChange == null ? void 0 : onChange(text2);
     };
-  }, []);
-
+  }, [defaultVoice, isControlled, onChange]);
+  const voiceController = voice === true ? defaultVoice : typeof voice === "object" ? voice : null;
+  const isRecording = (_a = voiceController == null ? void 0 : voiceController.isRecording) != null ? _a : false;
+  const isVoiceEnabled = Boolean(voiceController);
+  const isVoiceMode = isVoiceEnabled && !isSending && (isEmpty || isRecording);
+  useEffect2(() => {
     const el = textareaRef.current;
    if (!el) return;
    el.style.height = "auto";
@@ -260,53 +326,21 @@ function ChatInput({
     }
   };
   const handleRecord = () => {
-
-
-
-
-
-        console.error("Browser does not support SpeechRecognition");
-        alert("\uD604\uC7AC \uBE0C\uB77C\uC6B0\uC800\uC5D0\uC11C\uB294 \uC74C\uC131 \uC778\uC2DD \uAE30\uB2A5\uC744 \uC0AC\uC6A9\uD560 \uC218 \uC5C6\uC2B5\uB2C8\uB2E4.");
-        return;
-      }
-      recognition.current = new Speech();
-      recognition.current.lang = speechLang;
-      recognition.current.continuous = true;
-      recognition.current.interimResults = true;
-      recognition.current.onstart = () => {
-        setIsRecording(true);
-      };
-      recognition.current.onend = () => {
-        setIsRecording(false);
-      };
-      recognition.current.onresult = (event) => {
-        const newTranscript = Array.from(event.results).map((r) => {
-          var _a2;
-          return (_a2 = r[0]) == null ? void 0 : _a2.transcript;
-        }).join("");
-        setInnerText(newTranscript);
-      };
-      (_a = recognition.current) == null ? void 0 : _a.start();
-      } else {
-        (_b = recognition.current) == null ? void 0 : _b.stop();
-      }
-    } catch (e) {
-      console.error("Speech Recognition error: ", e);
-      alert("\uC74C\uC131 \uC785\uB825\uC744 \uC0AC\uC6A9\uD560 \uC218 \uC5C6\uC2B5\uB2C8\uB2E4. \uD14D\uC2A4\uD2B8\uB85C \uC785\uB825\uD574\uC8FC\uC138\uC694.");
-      setIsRecording(false);
+    if (!voiceController) return;
+    if (isRecording) {
+      voiceController.stop();
+    } else {
+      voiceController.start();
     }
   };
   const getActivityLayer = () => {
     if (isSending) return "sending";
-    if (
+    if (isVoiceEnabled) {
       if (isRecording) return "recording";
      if (isVoiceMode) return "mic";
      return "send";
     }
-    if (
-    if (!isEmpty) return "send";
-    return null;
-    }
+    if (!isEmpty) return "send";
     return null;
   };
   const activeLayer = getActivityLayer();
@@ -441,24 +475,25 @@ function ChatInput({
 }
 
 // src/components/ChatContainer.tsx
-import { useEffect as
+import { useEffect as useEffect3, useRef as useRef3, useState as useState3 } from "react";
 import { Fragment, jsx as jsx6, jsxs as jsxs4 } from "react/jsx-runtime";
-function ChatContainer({
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+function ChatContainer(props) {
+  const [isAtBottom, setIsAtBottom] = useState3(true);
+  const scrollRef = useRef3(null);
+  const {
+    messages,
+    messageRenderer,
+    loadingRenderer,
+    listClassName,
+    onSend,
+    isSending,
+    disableVoice,
+    placeholder,
+    inputClassName,
+    className
+  } = props;
+  const mappedMessages = typeof props.messageMapper === "function" ? props.messages.map(props.messageMapper) : messages;
+  useEffect3(() => {
     const el = scrollRef.current;
    if (!el) return;
    el.scrollTop = el.scrollHeight;
@@ -469,7 +504,7 @@ function ChatContainer({
     el.addEventListener("scroll", handleScroll);
    return () => el.removeEventListener("scroll", handleScroll);
   }, []);
-
+  useEffect3(() => {
     const el = scrollRef.current;
    if (!el) return;
    if (isAtBottom) {
@@ -503,9 +538,9 @@ function ChatContainer({
           className: `flex-1 overflow-y-auto chatContainer-scroll p-2`,
          children: /* @__PURE__ */ jsx6(
            ChatList,
-            __spreadValues(__spreadValues(__spreadValues(
-              messages
-            },
+            __spreadValues(__spreadValues(__spreadValues({
+              messages: mappedMessages
+            }, messageRenderer && { messageRenderer }), loadingRenderer && { loadingRenderer }), listClassName && { className: listClassName })
           )
         }
       ),
@@ -551,7 +586,7 @@ function ChatContainer({
 
 // src/hooks/useOptimisticChat.ts
 import { useMutation, useQuery, useQueryClient } from "@tanstack/react-query";
-import { useState as
+import { useState as useState4 } from "react";
 function useOptimisticChat({
   queryKey,
  queryFn,
@@ -561,7 +596,7 @@ function useOptimisticChat({
   staleTime = 0,
  gcTime = 0
 }) {
-  const [isPending, setIsPending] =
+  const [isPending, setIsPending] = useState4(false);
   const queryClient = useQueryClient();
  const {
    data: messages = [],
@@ -569,8 +604,8 @@ function useOptimisticChat({
   } = useQuery({
     queryKey,
    queryFn: async () => {
-      const
-      return
+      const raw = await queryFn();
+      return raw.map((r) => __spreadValues(__spreadValues({}, map(r)), r));
     },
    staleTime,
    gcTime
@@ -603,10 +638,10 @@ function useOptimisticChat({
         }
       ];
      });
-      return { prev };
+      return prev ? { prev } : {};
     },
    onSuccess: (rawAiResponse) => {
-      const aiMessage = map(rawAiResponse);
+      const aiMessage = __spreadValues(__spreadValues({}, map(rawAiResponse)), rawAiResponse);
       queryClient.setQueryData(queryKey, (old) => {
        if (!old || old.length === 0) {
          return [aiMessage];
@@ -626,10 +661,6 @@ function useOptimisticChat({
         queryClient.setQueryData(queryKey, context.prev);
       }
      onError == null ? void 0 : onError(error);
-    },
-    // resynchronize with the latest server data after the mutation
-    onSettled: () => {
-      queryClient.invalidateQueries({ queryKey });
     }
   });
  const sendUserMessage = (content) => {
@@ -647,6 +678,141 @@ function useOptimisticChat({
     // initial loading state
   };
 }
+
+// src/hooks/useVoiceOptimisticChat.ts
+import { useMutation as useMutation2, useQuery as useQuery2, useQueryClient as useQueryClient2 } from "@tanstack/react-query";
+import { useEffect as useEffect4, useRef as useRef4, useState as useState5 } from "react";
+function useVoiceOptimisticChat({
+  queryKey,
+  queryFn,
+  mutationFn,
+  map,
+  voice,
+  onError,
+  staleTime = 0,
+  gcTime = 0
+}) {
+  const [isPending, setIsPending] = useState5(false);
+  const queryClient = useQueryClient2();
+  const currentTextRef = useRef4("");
+  const rollbackRef = useRef4(void 0);
+  const {
+    data: messages = [],
+    isLoading: isInitialLoading
+  } = useQuery2({
+    queryKey,
+    queryFn: async () => {
+      const raw = await queryFn();
+      return raw.map((r) => __spreadValues(__spreadValues({}, map(r)), r));
+    },
+    staleTime,
+    gcTime
+  });
+  const mutation = useMutation2({
+    mutationFn,
+    // (content: string) => Promise<TMutationRaw>
+    onMutate: async () => {
+      setIsPending(true);
+      const prev = queryClient.getQueryData(queryKey);
+      if (prev) {
+        await queryClient.cancelQueries({ queryKey });
+      }
+      queryClient.setQueryData(queryKey, (old) => {
+        const base = old != null ? old : [];
+        return [
+          ...base,
+          // append the AI placeholder
+          {
+            id: crypto.randomUUID(),
+            role: "AI",
+            content: "",
+            isLoading: true
+          }
+        ];
+      });
+      return prev ? { prev } : {};
+    },
+    onSuccess: (rawAiResponse) => {
+      const aiMessage = __spreadValues(__spreadValues({}, map(rawAiResponse)), rawAiResponse);
+      queryClient.setQueryData(queryKey, (old) => {
+        if (!old || old.length === 0) {
+          return [aiMessage];
+        }
+        const next = [...old];
+        const lastIndex = next.length - 1;
+        next[lastIndex] = __spreadProps(__spreadValues(__spreadValues({}, next[lastIndex]), aiMessage), {
+          isLoading: false
+        });
+        return next;
+      });
+      setIsPending(false);
+    },
+    onError: (error, _variables, context) => {
+      setIsPending(false);
+      if (context == null ? void 0 : context.prev) {
+        queryClient.setQueryData(queryKey, context.prev);
+      }
+      onError == null ? void 0 : onError(error);
+    }
+  });
+  const startRecording = async () => {
+    currentTextRef.current = "";
+    const prev = queryClient.getQueryData(queryKey);
+    rollbackRef.current = prev;
+    if (prev) {
+      await queryClient.cancelQueries({ queryKey });
+    }
+    queryClient.setQueryData(queryKey, (old) => [
+      ...old != null ? old : [],
+      {
+        id: crypto.randomUUID(),
+        role: "USER",
+        content: ""
+      }
+    ]);
+    voice.start();
+  };
+  const onTranscript = (text) => {
+    currentTextRef.current = text;
+    queryClient.setQueryData(queryKey, (old) => {
+      var _a;
+      if (!old) return old;
+      const next = [...old];
+      const last = next.length - 1;
+      if (((_a = next[last]) == null ? void 0 : _a.role) !== "USER") return old;
+      next[last] = __spreadProps(__spreadValues({}, next[last]), {
+        content: text
+      });
+      return next;
+    });
+  };
+  useEffect4(() => {
+    voice.onTranscript = onTranscript;
+  }, [voice]);
+  const stopRecording = () => {
+    voice.stop();
+    const finalText = currentTextRef.current.trim();
+    if (!finalText) {
+      if (rollbackRef.current) {
+        queryClient.setQueryData(queryKey, rollbackRef.current);
+      }
+      return;
+    }
+    mutation.mutate(finalText);
+  };
+  return {
+    messages,
+    // Message<TExtra>[]
+    isPending,
+    // loading from the moment the user sends a message until the AI reply arrives
+    isInitialLoading,
+    // initial loading state
+    startRecording,
+    // start voice recording
+    stopRecording
+    // stop voice recording
+  };
+}
 export {
   ChatContainer,
  ChatInput,
@@ -654,5 +820,7 @@ export {
   ChatMessage,
   LoadingSpinner,
   SendingDots,
-  useOptimisticChat
+  useBrowserSpeechRecognition,
+  useOptimisticChat,
+  useVoiceOptimisticChat
 };
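One behavioral change shared by both builds is easy to miss in the noise: 1.0.0's mutation had an `onSettled` handler that invalidated the query after every send, forcing a refetch from the server; 1.2.1 removes it, so the cache keeps the locally merged optimistic result instead. Callers that relied on the post-send resynchronization can restore it themselves. A sketch, assuming the same `@tanstack/react-query` peer dependency — this helper is hypothetical, not part of the package:

```tsx
import { useQueryClient } from "@tanstack/react-query";

// Hypothetical helper: call the returned function once a send settles
// (e.g. when isPending flips back to false) to re-fetch server truth,
// mirroring the onSettled invalidation that 1.2.1 dropped.
function useResyncChat(queryKey: readonly unknown[]) {
  const queryClient = useQueryClient();
  return () => queryClient.invalidateQueries({ queryKey });
}
```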