rt-chat-input 1.0.3 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +113 -12
- package/dist/chat-input.es.js +3 -2
- package/dist/chat-input.umd.js +1 -1
- package/dist/style.css +1 -1
- package/dist/types/components/ChatInput.vue.d.ts +2 -1
- package/dist/types/types/index.d.ts +8 -0
- package/package.json +1 -1
package/README.md
CHANGED
@@ -79,18 +79,30 @@ const handleAttach = (files) => {
 
 #### Props
 
-| Prop | Type | Default | Description
-| ---------------------- | ----------------------------- | ------------------------- |
-| `placeholder` | `string` | `'请输入,或按住说话...'` | Input placeholder
-| `disabled` | `boolean` | `false` | Whether the input is disabled
-| `wsUrl` | `string` | `undefined` |
-| `maxVoiceDuration` | `number` | `60` | Maximum recording duration (seconds)
-| `theme` | `'light' \| 'dark' \| 'auto'` | `'light'` | Theme mode
-| `fixed` | `boolean` | `false` | Enable fixed-bottom positioning (out-of-the-box mode)
-| `bottomOffset` | `number \| string` | `36` | Bottom offset (only when fixed=true); numeric values are px
-| `showVoiceButton` | `boolean` | `true` | Show the voice button
-| `showAttachmentButton` | `boolean` | `true` | Show the attachment button
-| `acceptFileTypes` | `string` | `'image/*,.pdf...'`
+| Prop | Type | Default | Description |
+| ---------------------- | ----------------------------- | ------------------------- | -------------------------------------------------------------- |
+| `placeholder` | `string` | `'请输入,或按住说话...'` | Input placeholder |
+| `disabled` | `boolean` | `false` | Whether the input is disabled |
+| `wsUrl` | `string` | `undefined` | WebSocket URL for voice transcription (see the [Backend Integration Guide](#backend-integration-guide) below) |
+| `maxVoiceDuration` | `number` | `60` | Maximum recording duration (seconds) |
+| `theme` | `'light' \| 'dark' \| 'auto'` | `'light'` | Theme mode |
+| `fixed` | `boolean` | `false` | Enable fixed-bottom positioning (out-of-the-box mode) |
+| `bottomOffset` | `number \| string` | `36` | Bottom offset (only when fixed=true); numeric values are px |
+| `showVoiceButton` | `boolean` | `true` | Show the voice button |
+| `showAttachmentButton` | `boolean` | `true` | Show the attachment button |
+| `acceptFileTypes` | `string` | `'image/*,.pdf,.doc...'` | Accepted upload file types (defaults to common image and office document formats) |
+| `agentSteps` | `AgentStep[]` | `[]` | Agent execution steps (for displaying the reasoning process); format defined below |
+
+#### AgentStep Type Definition
+
+```typescript
+interface AgentStep {
+  id: string | number;
+  title: string;
+  status: "pending" | "loading" | "success" | "error";
+  details?: string;
+}
+```
 
 #### Events
 
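To illustrate the new `agentSteps` prop and the `AgentStep` interface added in the hunk above, here is a minimal TypeScript sketch. The interface is copied from the README diff; the sample step data and the template binding shown in the comments are assumptions for illustration, not part of the package.

```typescript
// The AgentStep interface mirrors the definition added in this release;
// the sample data and the `:agent-steps` binding in the comment are illustrative only.
interface AgentStep {
  id: string | number;
  title: string;
  status: "pending" | "loading" | "success" | "error";
  details?: string;
}

const steps: AgentStep[] = [
  { id: 1, title: "Parse the question", status: "success" },
  { id: 2, title: "Search the knowledge base", status: "loading", details: "3 documents matched" },
  { id: 3, title: "Compose the answer", status: "pending" },
];

// In a Vue template this would typically be bound as:
// <ChatInput :agent-steps="steps" />
```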
@@ -101,6 +113,9 @@ const handleAttach = (files) => {
 | `change` | `(text: string)` | Fired when the input content changes |
 | `attach` | `(files: File[])` | Fired when the attachment list changes |
 | `error` | `(message: string)` | Fired when an error occurs |
+| `stop` | `-` | Fired when the stop button is clicked (only while loading=true) |
+| `focus` | `(e: FocusEvent)` | Fired when the input gains focus |
+| `blur` | `(e: FocusEvent)` | Fired when the input loses focus |
 
 ### VoiceRecorder Component
 
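A short sketch of handlers for the newly documented `stop`, `focus`, and `blur` events; the signatures follow the events table above, while the handler bodies and the template binding in the comments are placeholders.

```typescript
// Template binding (illustrative): <ChatInput @stop="onStop" @focus="onFocus" @blur="onBlur" />
function onStop(): void {
  // `stop` carries no payload and is only emitted while loading=true
  console.log("user requested to stop the current response");
}

function onFocus(e: FocusEvent): void {
  console.log("input focused", e.target);
}

function onBlur(e: FocusEvent): void {
  console.log("input blurred", e.target);
}
```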
@@ -116,6 +131,92 @@ import { VoiceRecorder } from "rt-chat-input";
 </template>
 ```
 
+## Backend Integration Guide
+
+The component sends a PCM audio stream at a 16 kHz sample rate over WebSocket and expects to receive transcription results in JSON format.
+
+### 1. Communication Protocol
+
+- **Client sends**: raw PCM audio (Int16, 16000 Hz, mono)
+- **Server returns**: a JSON string
+```json
+{
+  "text": "transcribed text content",
+  "isFinal": false // true = end of utterance (final result), false = intermediate result
+}
+```
+
+### 2. Spring WebFlux Example
+
+We recommend letting the backend act as a proxy to the FunASR service (so the frontend does not have to handle the handshake protocol itself).
+
+```kotlin
+// WebSocketHandler implementation
+@Component
+class RealTimeTranscriptionHandler(
+    private val objectMapper: ObjectMapper
+) : WebSocketHandler {
+
+    // FunASR service address (e.g. ws://192.168.1.100:10095)
+    @Value("\${asr.service.url}")
+    private lateinit var asrServiceUrl: String
+
+    override fun handle(session: WebSocketSession): Mono<Void> {
+        val client = ReactorNettyWebSocketClient()
+        return client.execute(URI(asrServiceUrl)) { asrSession ->
+            // 1. Send the FunASR handshake message (these parameters must match the model configuration)
+            val handshake = mapOf(
+                "mode" to "2pass",
+                "chunk_size" to listOf(5, 10, 5),
+                "encoder_chunk_look_back" to 4,
+                "decoder_chunk_look_back" to 1,
+                "wav_name" to "microphone",
+                "wav_format" to "pcm",
+                "audio_fs" to 16000,
+                "is_speaking" to true
+            )
+            val handshakeMsg = asrSession.textMessage(objectMapper.writeValueAsString(handshake))
+
+            // 2. Forward the audio stream (frontend -> FunASR)
+            val upstream = session.receive()
+                .filter { it.type == WebSocketMessage.Type.BINARY }
+                .map { msg ->
+                    // Copy the binary payload and forward it
+                    val bytes = ByteArray(msg.payload.readableByteCount())
+                    msg.payload.read(bytes)
+                    asrSession.binaryMessage { it.wrap(bytes) }
+                }
+
+            // 3. Receive results and convert them (FunASR -> frontend)
+            val downstream = asrSession.receive()
+                .map { it.payloadAsText }
+                .mapNotNull { json ->
+                    // Parse the FunASR response and convert it to the format the component expects
+                    val node = objectMapper.readTree(json)
+                    val text = node.path("text").asText()
+                    val mode = node.path("mode").asText()
+                    if (!text.isNullOrBlank()) {
+                        val response = mapOf(
+                            "text" to text,
+                            "isFinal" to (mode == "2pass-offline")
+                        )
+                        session.textMessage(objectMapper.writeValueAsString(response))
+                    } else null
+                }
+
+            // Merge the streams: send the handshake, then forward in both directions
+            // Key fix: when the frontend disconnects or the stream completes, send {"is_speaking": false} so FunASR emits the final 2pass-offline result
+            val endSignal = Mono.defer {
+                Mono.just(asrSession.textMessage("{\"is_speaking\":false}"))
+            }
+
+            asrSession.send(upstream.startWith(handshakeMsg).concatWith(endSignal))
+                .then(session.send(downstream))
+        }
+    }
+}
+```
+
 ## Theme Customization
 
 The component is styled with CSS variables, which you can override to customize the theme:
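The protocol documented in the new Backend Integration Guide (Int16 16 kHz mono PCM upstream, `{ text, isFinal }` JSON downstream) is handled internally by the component on the client side; the sketch below only illustrates the wire format from a browser's perspective. The endpoint URL, the forced 16 kHz `AudioContext`, and the deprecated `ScriptProcessorNode` are simplifying assumptions, not how the package is implemented.

```typescript
// Wire-format illustration only; the published component implements its own capture logic.
const ws = new WebSocket("ws://localhost:8080/asr"); // placeholder for your wsUrl
ws.binaryType = "arraybuffer";

ws.onmessage = (ev: MessageEvent<string>) => {
  // The proxy replies with the JSON documented above: { text, isFinal }
  const { text, isFinal } = JSON.parse(ev.data) as { text: string; isFinal: boolean };
  console.log(isFinal ? "final:" : "partial:", text);
};

async function startStreaming(): Promise<void> {
  const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
  // Assumes the browser honours the requested 16 kHz rate; otherwise resample before sending.
  const ctx = new AudioContext({ sampleRate: 16000 });
  const source = ctx.createMediaStreamSource(stream);

  // ScriptProcessorNode keeps the sketch short; prefer an AudioWorklet in production code.
  const processor = ctx.createScriptProcessor(4096, 1, 1);
  processor.onaudioprocess = (e: AudioProcessingEvent) => {
    const float32 = e.inputBuffer.getChannelData(0);
    const int16 = new Int16Array(float32.length);
    for (let i = 0; i < float32.length; i++) {
      // Clamp Float32 samples to [-1, 1] and scale to the Int16 range
      const s = Math.max(-1, Math.min(1, float32[i]));
      int16[i] = s < 0 ? s * 0x8000 : s * 0x7fff;
    }
    if (ws.readyState === WebSocket.OPEN) ws.send(int16.buffer);
  };

  source.connect(processor);
  processor.connect(ctx.destination);
}
```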
package/dist/chat-input.es.js
CHANGED
@@ -962,7 +962,8 @@ const sg = { class: "chat-modal-container" }, yg = {
   showAttachmentButton: { type: Boolean, default: !0 },
   acceptFileTypes: { default: "image/*,.pdf,.doc,.docx,.xls,.xlsx,.ppt,.pptx,.txt" },
   fixed: { type: Boolean, default: !1 },
-  bottomOffset: { default: 36 }
+  bottomOffset: { default: 36 },
+  agentSteps: { default: () => [] }
 },
 emits: ["send", "stop", "voice", "change", "attach", "error", "focus", "blur"],
 setup(f, { expose: L, emit: w }) {
@@ -1285,7 +1286,7 @@ const sg = { class: "chat-modal-container" }, yg = {
       ], 6)
     ], 6));
   }
-}), GB = /* @__PURE__ */ EA(SB, [["__scopeId", "data-v-
+}), GB = /* @__PURE__ */ EA(SB, [["__scopeId", "data-v-454751bc"]]), xB = {
   install(f) {
     f.component("ChatInput", GB), f.component("VoiceRecorder", aA);
   }