@lee-zg/melange 1.2.2 → 1.2.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. package/README.md +257 -256
  2. package/dist/{chunk-YZVCK6VZ.cjs → chunk-AAWWAPSG.cjs} +1425 -2
  3. package/dist/{chunk-3RM45M64.js → chunk-R3BPDZ3R.js} +1388 -3
  4. package/dist/container-B8n-ok8M.d.cts +201 -0
  5. package/dist/container-n8Q2Xa_y.d.ts +201 -0
  6. package/dist/core/index.d.cts +3 -200
  7. package/dist/core/index.d.ts +3 -200
  8. package/dist/generator-CEj5qxFh.d.cts +1752 -0
  9. package/dist/generator-D7ofMxfc.d.ts +1752 -0
  10. package/dist/index.cjs +138 -34
  11. package/dist/index.d.cts +4 -3
  12. package/dist/index.d.ts +4 -3
  13. package/dist/index.js +2 -2
  14. package/dist/plugins/index.cjs +162 -9
  15. package/dist/plugins/index.d.cts +108 -755
  16. package/dist/plugins/index.d.ts +108 -755
  17. package/dist/plugins/index.js +2 -1
  18. package/package.json +143 -135
  19. package/dist/chunk-3RM45M64.js.map +0 -1
  20. package/dist/chunk-7QVYU63E.js.map +0 -1
  21. package/dist/chunk-BEY4UAYF.cjs.map +0 -1
  22. package/dist/chunk-GXFWPL5M.js.map +0 -1
  23. package/dist/chunk-GZBY4BUP.js.map +0 -1
  24. package/dist/chunk-ILNGTDQ4.js.map +0 -1
  25. package/dist/chunk-JLBTZPBY.cjs.map +0 -1
  26. package/dist/chunk-PK6SKIKE.cjs.map +0 -1
  27. package/dist/chunk-UYJUSNDI.cjs.map +0 -1
  28. package/dist/chunk-YZVCK6VZ.cjs.map +0 -1
  29. package/dist/core/index.cjs.map +0 -1
  30. package/dist/core/index.js.map +0 -1
  31. package/dist/fp/index.cjs.map +0 -1
  32. package/dist/fp/index.js.map +0 -1
  33. package/dist/index.cjs.map +0 -1
  34. package/dist/index.js.map +0 -1
  35. package/dist/plugins/index.cjs.map +0 -1
  36. package/dist/plugins/index.js.map +0 -1
  37. package/dist/utils/index.cjs.map +0 -1
  38. package/dist/utils/index.js.map +0 -1
@@ -1,6 +1,14 @@
1
+ import { globalContainer } from './chunk-ILNGTDQ4.js';
1
2
  import { __name } from './chunk-7QVYU63E.js';
2
3
 
3
4
  // src/plugins/speech/synthesis.ts
5
// Lifecycle states of the speech-synthesis plugin (transpiled TS enum shape).
var SynthesisStatus = /* @__PURE__ */ ((SynthesisStatus2) => {
  for (const state of ["IDLE", "LOADING", "SPEAKING", "PAUSED"]) {
    SynthesisStatus2[state] = state;
  }
  return SynthesisStatus2;
})(SynthesisStatus || {});
4
12
  var SynthesisAudioUtils = {
5
13
  /**
6
14
  * 创建 AudioContext
@@ -51,6 +59,380 @@ var SynthesisAudioUtils = {
51
59
  return Math.ceil(byteLength * 8 / bitRates[format] * 1e3);
52
60
  }
53
61
  };
62
// Generic/BFF TTS adapter: delegates synthesis to a caller-supplied HTTP backend.
var GenericSynthesisAdapter = class {
  constructor(baseUrl) {
    this.baseUrl = baseUrl;
  }
  static {
    __name(this, "GenericSynthesisAdapter");
  }
  name = "Generic/BFF";
  /**
   * POST `text` to `${baseUrl}/synthesize` and resolve with the raw audio,
   * its format and an estimated duration. Throws on any non-2xx response.
   */
  async synthesize(text, config) {
    const format = config?.audioFormat ?? "mp3";
    const payload = {
      text,
      lang: config?.lang ?? "zh-CN",
      voice: typeof config?.voice === "string" ? config.voice : config?.voice?.id,
      rate: config?.rate ?? 1,
      pitch: config?.pitch ?? 1,
      volume: config?.volume ?? 1,
      format
    };
    const res = await fetch(`${this.baseUrl}/synthesize`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(payload)
    });
    if (!res.ok) {
      throw new Error(`HTTP ${res.status}: ${res.statusText}`);
    }
    const audioData = await res.arrayBuffer();
    const duration = SynthesisAudioUtils.estimateDuration(audioData.byteLength, format);
    return { audioData, format, duration };
  }
  // Fetch the backend's voice list; any HTTP or parse failure degrades to [].
  async getVoices() {
    try {
      const res = await fetch(`${this.baseUrl}/voices`);
      if (!res.ok) return [];
      const body = await res.json();
      return body.voices ?? [];
    } catch {
      return [];
    }
  }
};
108
// Azure Cognitive Services TTS adapter (REST endpoint, SSML request body).
var AzureSynthesisAdapter = class {
  constructor(subscriptionKey, region, defaultVoice = "zh-CN-XiaoxiaoNeural") {
    this.subscriptionKey = subscriptionKey;
    this.region = region;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "AzureSynthesisAdapter");
  }
  name = "Azure";
  /**
   * Build an SSML document from the config and POST it to the regional Azure
   * endpoint; resolves with MP3 audio. Throws on non-2xx responses.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const rate = config?.rate ?? 1;
    const pitch = config?.pitch ?? 1;
    // Azure prosody pitch is a percentage offset around 0; the SDK multiplier
    // (neutral 1) is shifted and scaled accordingly.
    const ssml = `
<speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="${config?.lang ?? "zh-CN"}">
<voice name="${voice}">
<prosody rate="${rate}" pitch="${(pitch - 1) * 50}%">
${this.escapeXml(text)}
</prosody>
</voice>
</speak>`;
    const endpoint = `https://${this.region}.tts.speech.microsoft.com/cognitiveservices/v1`;
    const res = await fetch(endpoint, {
      method: "POST",
      headers: {
        "Ocp-Apim-Subscription-Key": this.subscriptionKey,
        "Content-Type": "application/ssml+xml",
        "X-Microsoft-OutputFormat": "audio-16khz-128kbitrate-mono-mp3"
      },
      body: ssml
    });
    if (!res.ok) {
      throw new Error(`Azure TTS Error: ${res.status}`);
    }
    const audioData = await res.arrayBuffer();
    return { audioData, format: "mp3" };
  }
  // List the region's voice catalogue; [] on HTTP failure.
  async getVoices() {
    const res = await fetch(
      `https://${this.region}.tts.speech.microsoft.com/cognitiveservices/voices/list`,
      { headers: { "Ocp-Apim-Subscription-Key": this.subscriptionKey } }
    );
    if (!res.ok) return [];
    const catalogue = await res.json();
    return catalogue.map((v) => ({
      id: v.ShortName,
      name: v.DisplayName,
      lang: v.Locale,
      gender: v.Gender.toLowerCase(),
      provider: "Azure"
    }));
  }
  // Escape the five XML special characters so `text` is safe inside SSML.
  escapeXml(text) {
    return text
      .replace(/&/g, "&amp;")
      .replace(/</g, "&lt;")
      .replace(/>/g, "&gt;")
      .replace(/"/g, "&quot;")
      .replace(/'/g, "&apos;");
  }
};
169
// Google Cloud Text-to-Speech adapter (REST `text:synthesize` endpoint).
var GoogleSynthesisAdapter = class {
  constructor(apiKey, defaultVoice = "zh-CN-Wavenet-A") {
    this.apiKey = apiKey;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "GoogleSynthesisAdapter");
  }
  name = "Google";
  /**
   * Synthesize `text` (plain or SSML when `config.enableSSML`) and return the
   * decoded MP3 audio. Throws with the API's error message on failure.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const lang = config?.lang ?? "zh-CN";
    const body = {
      input: config?.enableSSML ? { ssml: text } : { text },
      voice: {
        languageCode: lang,
        name: voice
      },
      audioConfig: {
        audioEncoding: "MP3",
        speakingRate: config?.rate ?? 1,
        pitch: config?.pitch ?? 0,
        // Google expects a dB gain; map the SDK multiplier (neutral 1) to 0 dB.
        volumeGainDb: ((config?.volume ?? 1) - 1) * 10
      }
    };
    const res = await fetch(
      `https://texttospeech.googleapis.com/v1/text:synthesize?key=${this.apiKey}`,
      {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body)
      }
    );
    if (!res.ok) {
      const failure = await res.json();
      throw new Error(failure.error?.message ?? "Google TTS Error");
    }
    const result = await res.json();
    // The API returns base64 audio in `audioContent`.
    const audioData = SynthesisAudioUtils.base64ToArrayBuffer(result.audioContent);
    return { audioData, format: "mp3" };
  }
  // List available voices; [] on HTTP failure.
  async getVoices() {
    const res = await fetch(
      `https://texttospeech.googleapis.com/v1/voices?key=${this.apiKey}`
    );
    if (!res.ok) return [];
    const result = await res.json();
    return result.voices.map((v) => ({
      id: v.name,
      name: v.name,
      lang: v.languageCodes[0] ?? "en-US",
      gender: v.ssmlGender.toLowerCase(),
      provider: "Google"
    }));
  }
};
225
// AWS Polly TTS adapter, proxied through a same-origin BFF route.
var AWSSynthesisAdapter = class {
  constructor(accessKeyId, secretAccessKey, region, defaultVoice = "Zhiyu") {
    this.accessKeyId = accessKeyId;
    this.secretAccessKey = secretAccessKey;
    this.region = region;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "AWSSynthesisAdapter");
  }
  name = "AWS";
  /**
   * Synthesize `text` via the `/api/aws-polly/synthesize` proxy.
   * NOTE(review): the access key pair is forwarded in the request body —
   * presumably the BFF signs the Polly call server-side; confirm credentials
   * never reach a third party from the browser.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const requestBody = {
      text,
      voiceId: voice,
      languageCode: config?.lang ?? "cmn-CN",
      outputFormat: "mp3",
      accessKeyId: this.accessKeyId,
      secretAccessKey: this.secretAccessKey,
      region: this.region
    };
    const res = await fetch("/api/aws-polly/synthesize", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(requestBody)
    });
    if (!res.ok) {
      throw new Error(`AWS Polly Error: ${res.status}`);
    }
    const audioData = await res.arrayBuffer();
    return { audioData, format: "mp3" };
  }
  // Static catalogue: the single Mandarin Polly voice this adapter supports.
  getVoices() {
    return Promise.resolve([
      { id: "Zhiyu", name: "\u667A\u745C", lang: "cmn-CN", gender: "female", provider: "AWS" }
    ]);
  }
};
263
// iFlytek (Xunfei) TTS adapter, proxied through a same-origin BFF route.
var XunfeiSynthesisAdapter = class {
  // Note: apiKey and apiSecret are reserved for future direct API authentication
  // Currently using BFF proxy mode which handles auth server-side
  constructor(appId, _apiKey, _apiSecret, defaultVoice = "xiaoyan") {
    this.appId = appId;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "XunfeiSynthesisAdapter");
  }
  name = "Xunfei";
  /**
   * Synthesize `text` via `/api/xunfei-tts/synthesize`. Rate/pitch multipliers
   * (neutral 1) are scaled to Xunfei's 0-100 integer ranges.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const requestBody = {
      text,
      appId: this.appId,
      vcn: voice,
      speed: Math.round((config?.rate ?? 1) * 50),
      pitch: Math.round((config?.pitch ?? 1) * 50),
      volume: Math.round((config?.volume ?? 1) * 100),
      aue: "lame"
      // mp3
    };
    const res = await fetch("/api/xunfei-tts/synthesize", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(requestBody)
    });
    if (!res.ok) {
      throw new Error(`\u8BAF\u98DE TTS Error: ${res.status}`);
    }
    const audioData = await res.arrayBuffer();
    return { audioData, format: "mp3" };
  }
  // Static catalogue of common Xunfei voices.
  getVoices() {
    return Promise.resolve([
      { id: "xiaoyan", name: "\u5C0F\u71D5", lang: "zh-CN", gender: "female", provider: "Xunfei" },
      { id: "aisjiuxu", name: "\u8BB8\u4E45", lang: "zh-CN", gender: "male", provider: "Xunfei" },
      { id: "aisxping", name: "\u5C0F\u840D", lang: "zh-CN", gender: "female", provider: "Xunfei" },
      { id: "aisjinger", name: "\u5C0F\u5A67", lang: "zh-CN", gender: "female", provider: "Xunfei" }
    ]);
  }
};
305
// Tencent Cloud TTS adapter, proxied through a same-origin BFF route.
var TencentSynthesisAdapter = class {
  constructor(secretId, secretKey, defaultVoice = "101001") {
    this.secretId = secretId;
    this.secretKey = secretKey;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "TencentSynthesisAdapter");
  }
  name = "Tencent";
  /**
   * Synthesize `text` via `/api/tencent-tts/synthesize`; voice ids are numeric
   * Tencent `VoiceType` codes. Throws on non-2xx responses.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const requestBody = {
      text,
      secretId: this.secretId,
      secretKey: this.secretKey,
      voiceType: Number(voice),
      speed: config?.rate ?? 1,
      volume: config?.volume ?? 0,
      codec: "mp3"
    };
    const res = await fetch("/api/tencent-tts/synthesize", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(requestBody)
    });
    if (!res.ok) {
      throw new Error(`\u817E\u8BAF\u4E91 TTS Error: ${res.status}`);
    }
    const audioData = await res.arrayBuffer();
    return { audioData, format: "mp3" };
  }
  // Static catalogue of common Tencent voice-type codes.
  getVoices() {
    return Promise.resolve([
      { id: "101001", name: "\u667A\u745C", lang: "zh-CN", gender: "female", provider: "Tencent" },
      { id: "101002", name: "\u667A\u8046", lang: "zh-CN", gender: "male", provider: "Tencent" },
      { id: "101003", name: "\u667A\u7F8E", lang: "zh-CN", gender: "female", provider: "Tencent" }
    ]);
  }
};
344
// Baidu TTS adapter, proxied through a same-origin BFF route.
var BaiduSynthesisAdapter = class {
  constructor(accessToken, defaultVoice = "0") {
    this.accessToken = accessToken;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "BaiduSynthesisAdapter");
  }
  name = "Baidu";
  /**
   * Synthesize `text` via `/api/baidu-tts/synthesize` (form-encoded body).
   * Success is detected by an `audio/*` Content-Type; otherwise the JSON error
   * payload is surfaced.
   * NOTE(review): `tex` is encodeURIComponent'd AND then form-encoded by
   * URLSearchParams — presumably matching Baidu's documented double-urlencode
   * requirement for POST bodies; verify against the BFF's decoding.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const form = new URLSearchParams({
      tex: encodeURIComponent(text),
      tok: this.accessToken,
      cuid: `sdk-user-${Date.now()}`,
      ctp: "1",
      lan: config?.lang === "en-US" ? "en" : "zh",
      spd: String(Math.round((config?.rate ?? 1) * 5)),
      pit: String(Math.round((config?.pitch ?? 1) * 5)),
      vol: String(Math.round((config?.volume ?? 1) * 15)),
      per: voice,
      aue: "3"
      // mp3
    });
    const res = await fetch("/api/baidu-tts/synthesize", {
      method: "POST",
      headers: { "Content-Type": "application/x-www-form-urlencoded" },
      body: form.toString()
    });
    const contentType = res.headers.get("Content-Type") ?? "";
    if (contentType.includes("audio")) {
      const audioData = await res.arrayBuffer();
      return { audioData, format: "mp3" };
    }
    const failure = await res.json();
    throw new Error(failure.err_msg ?? "\u767E\u5EA6 TTS Error");
  }
  // Static catalogue of Baidu's basic voice ids.
  getVoices() {
    return Promise.resolve([
      { id: "0", name: "\u5EA6\u5C0F\u7F8E", lang: "zh-CN", gender: "female", provider: "Baidu" },
      { id: "1", name: "\u5EA6\u5C0F\u5B87", lang: "zh-CN", gender: "male", provider: "Baidu" },
      { id: "3", name: "\u5EA6\u900D\u9065", lang: "zh-CN", gender: "male", provider: "Baidu" },
      { id: "4", name: "\u5EA6\u4E2B\u4E2B", lang: "zh-CN", gender: "female", provider: "Baidu" }
    ]);
  }
};
390
// Alibaba Cloud NLS TTS adapter, proxied through a same-origin BFF route.
var AlibabaSynthesisAdapter = class {
  // Note: accessKeySecret is reserved for future direct API authentication
  // Currently using BFF proxy mode which handles auth server-side
  constructor(accessKeyId, _accessKeySecret, appKey, defaultVoice = "xiaoyun") {
    this.accessKeyId = accessKeyId;
    this.appKey = appKey;
    this.defaultVoice = defaultVoice;
  }
  static {
    __name(this, "AlibabaSynthesisAdapter");
  }
  name = "Alibaba";
  /**
   * Synthesize `text` via `/api/alibaba-tts/synthesize`.
   * NLS takes speech_rate/pitch_rate as offsets around a neutral 0, so the
   * SDK multipliers (neutral 1) are shifted by -1 before scaling by 500.
   */
  async synthesize(text, config) {
    const voice = typeof config?.voice === "string" ? config.voice : config?.voice?.id ?? this.defaultVoice;
    const response = await fetch("/api/alibaba-tts/synthesize", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "X-NLS-Token": this.accessKeyId
      },
      body: JSON.stringify({
        appkey: this.appKey,
        text,
        format: "mp3",
        voice,
        sample_rate: 16e3,
        // FIX: `config?.rate ?? 1 - 1` parsed as `config?.rate ?? (1 - 1)`,
        // so an explicit neutral rate of 1 was sent as 500 (maximum) instead
        // of 0. Parenthesize so the multiplier is shifted before scaling,
        // matching the Azure `(pitch - 1) * 50` and Google
        // `((volume ?? 1) - 1) * 10` adapters in this file.
        speech_rate: Math.round(((config?.rate ?? 1) - 1) * 500),
        pitch_rate: Math.round(((config?.pitch ?? 1) - 1) * 500),
        volume: Math.round((config?.volume ?? 1) * 100)
      })
    });
    if (!response.ok) {
      throw new Error(`\u963F\u91CC\u4E91 TTS Error: ${response.status}`);
    }
    const audioData = await response.arrayBuffer();
    return { audioData, format: "mp3" };
  }
  // Static catalogue of common NLS voices.
  getVoices() {
    return Promise.resolve([
      { id: "xiaoyun", name: "\u5C0F\u4E91", lang: "zh-CN", gender: "female", provider: "Alibaba" },
      { id: "xiaogang", name: "\u5C0F\u521A", lang: "zh-CN", gender: "male", provider: "Alibaba" },
      { id: "ruoxi", name: "\u82E5\u516E", lang: "zh-CN", gender: "female", provider: "Alibaba" },
      { id: "siqi", name: "\u601D\u742A", lang: "zh-CN", gender: "female", provider: "Alibaba" }
    ]);
  }
};
54
436
  var BaseSynthesisStrategy = class {
55
437
  constructor(config) {
56
438
  this.config = config;
@@ -629,8 +1011,28 @@ async function speak(text, config) {
629
1011
  }
630
1012
  }
631
1013
  __name(speak, "speak");
1014
// One-shot cloud TTS helper: create a cloud-mode synthesizer around `adapter`,
// speak `text`, and always dispose the synthesizer — even if speaking throws.
async function speakWithCloud(text, adapter, config) {
  const options = { ...config, mode: "cloud", cloudAdapter: adapter };
  const synthesizer = await createSpeechSynthesizer(options);
  try {
    await synthesizer.speak(text, config);
  } finally {
    synthesizer.dispose();
  }
}
1026
+ __name(speakWithCloud, "speakWithCloud");
632
1027
 
633
1028
  // src/plugins/speech/recognition.ts
1029
// Lifecycle states of the speech-recognition plugin (transpiled TS enum shape).
var RecognitionStatus = /* @__PURE__ */ ((RecognitionStatus2) => {
  for (const state of ["IDLE", "CONNECTING", "RECORDING", "PROCESSING"]) {
    RecognitionStatus2[state] = state;
  }
  return RecognitionStatus2;
})(RecognitionStatus || {});
634
1036
  var AudioUtils = {
635
1037
  /**
636
1038
  * 重采样音频数据
@@ -826,6 +1228,455 @@ class SpeechProcessor extends AudioWorkletProcessor {
826
1228
  }
827
1229
  registerProcessor('speech-processor', SpeechProcessor);
828
1230
  `;
1231
// Generic/BFF speech-recognition adapter: multipart upload to a custom backend.
var GenericAdapter = class {
  constructor(baseUrl) {
    this.baseUrl = baseUrl;
  }
  static {
    __name(this, "GenericAdapter");
  }
  name = "Generic/BFF";
  // Derive the streaming endpoint by swapping the scheme (http→ws, https→wss).
  getConnectUrl() {
    return this.baseUrl.replace(/^http/, "ws");
  }
  /**
   * Upload one short WAV clip to `${baseUrl}/recognize` and return the parsed
   * transcript. Throws on HTTP errors or unparseable responses.
   */
  async recognizeShortAudio(audioData) {
    const body = new FormData();
    body.append("file", new Blob([audioData], { type: "audio/wav" }));
    const response = await fetch(`${this.baseUrl}/recognize`, {
      method: "POST",
      body
    });
    if (!response.ok) {
      throw new Error(`HTTP ${response.status}: ${response.statusText}`);
    }
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("\u89E3\u6790\u7ED3\u679C\u5931\u8D25");
    return parsed;
  }
  // Normalize a loosely-shaped backend payload into the SDK result shape.
  parseResult(data) {
    const transcript = data["text"] || data["transcript"] || "";
    const confidence = data["score"] || data["confidence"] || 0.9;
    return { transcript, isFinal: true, confidence, original: data };
  }
};
1266
// iFlytek (Xunfei) IAT speech-recognition adapter.
var XunfeiAdapter = class {
  constructor(appId, apiKey, apiSecret) {
    this.appId = appId;
    this.apiKey = apiKey;
    this.apiSecret = apiSecret;
  }
  static {
    __name(this, "XunfeiAdapter");
  }
  name = "Xunfei";
  /** API Key - should be used server-side in production */
  apiKey;
  /** API Secret - should be used server-side in production */
  apiSecret;
  // WebSocket endpoint; the auth query parameters are placeholders here.
  getConnectUrl() {
    const host = "iat-api.xfyun.cn";
    const path = "/v2/iat";
    return `wss://${host}${path}?authorization=...&date=...&host=${host}`;
  }
  // First frame sent on the IAT WebSocket: app id, language model, audio format.
  getHandshakeParams() {
    return {
      common: { app_id: this.appId },
      business: {
        language: "zh_cn",
        domain: "iat",
        accent: "mandarin",
        vad_eos: 3e3
      },
      data: {
        status: 0,
        format: "audio/L16;rate=16000",
        encoding: "raw"
      }
    };
  }
  /**
   * Recognize one short WAV clip through the legacy HTTP IAT endpoint.
   * Throws when the response cannot be parsed into a transcript.
   */
  async recognizeShortAudio(audioData) {
    const body = new FormData();
    body.append("audio", new Blob([audioData], { type: "audio/wav" }));
    body.append("engine_type", "sms16k");
    const response = await fetch("https://api.xfyun.cn/v1/service/v1/iat", {
      method: "POST",
      headers: {
        "X-Appid": this.appId
        // production also needs: 'X-CurTime', 'X-Param', 'X-CheckSum'
      },
      body
    });
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("\u8BAF\u98DE\u8BC6\u522B\u5931\u8D25");
    return parsed;
  }
  // Handle both the WebSocket payload shape (data.result.ws[]) and the legacy
  // HTTP shape (desc === "success" with a plain string in data).
  parseResult(data) {
    if (data["code"] !== void 0 && data["code"] !== 0) {
      return null;
    }
    const wsData = data["data"];
    let transcript = "";
    if (wsData?.["result"]) {
      const segments = wsData["result"];
      transcript = segments.ws?.map((w) => w.cw[0]?.w ?? "").join("") ?? "";
    }
    if (data["desc"] === "success" && typeof data["data"] === "string") {
      transcript = data["data"];
    }
    if (!transcript) return null;
    return {
      transcript,
      isFinal: wsData?.["status"] === 2 || !!data["desc"],
      confidence: 0.9,
      original: data
    };
  }
};
1341
// Tencent Cloud one-sentence speech-recognition adapter.
var TencentAdapter = class {
  static {
    __name(this, "TencentAdapter");
  }
  name = "Tencent";
  /** Secret ID - should be used server-side in production */
  secretId;
  /** Secret Key - should be used server-side in production */
  secretKey;
  constructor(secretId, secretKey) {
    this.secretId = secretId;
    this.secretKey = secretKey;
  }
  /**
   * Recognize one short WAV clip via SentenceRecognition.
   * Throws on API errors or when no transcript is returned.
   */
  async recognizeShortAudio(audioData) {
    const encoded = AudioUtils.arrayBufferToBase64(audioData);
    const requestBody = {
      ProjectId: 0,
      SubServiceType: 2,
      EngSerViceType: "16k_zh",
      SourceType: 1,
      VoiceFormat: "wav",
      UsrAudioKey: `session-${Date.now()}`,
      Data: encoded,
      DataLen: audioData.byteLength
    };
    const response = await fetch("https://asr.tencentcloudapi.com", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "X-TC-Action": "SentenceRecognition"
        // production also needs: 'Authorization', 'X-TC-Timestamp', etc.
      },
      body: JSON.stringify(requestBody)
    });
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("\u817E\u8BAF\u4E91\u8BC6\u522B\u5931\u8D25");
    return parsed;
  }
  // Unwrap the Tencent `Response` envelope; propagate its Error as an exception.
  parseResult(data) {
    const envelope = data["Response"];
    if (envelope?.["Error"]) {
      const failure = envelope["Error"];
      throw new Error(failure.Message ?? "\u817E\u8BAF\u4E91 API \u9519\u8BEF");
    }
    if (envelope?.["Result"]) {
      return {
        transcript: envelope["Result"],
        isFinal: true,
        confidence: 0.9,
        original: data
      };
    }
    return null;
  }
};
1397
// Baidu speech-recognition adapter: REST `pro_api` for short clips and a
// realtime WebSocket handshake for streaming.
var BaiduAdapter = class {
  constructor(accessToken, appId, appKey, devPid = 1537) {
    this.accessToken = accessToken;
    this.appId = appId;
    this.appKey = appKey;
    this.devPid = devPid;
  }
  static {
    __name(this, "BaiduAdapter");
  }
  name = "Baidu";
  // Realtime ASR endpoint with a per-session serial number.
  getConnectUrl() {
    const sn = Math.random().toString(36).substring(2) + Date.now();
    return `wss://vop.baidu.com/realtime_asr?sn=${sn}`;
  }
  // START frame for the realtime WebSocket; appId/appKey are required there.
  getHandshakeParams() {
    if (!this.appId || !this.appKey) {
      console.warn("[BaiduAdapter] WebSocket \u6A21\u5F0F\u9700\u8981 appId \u548C appKey");
    }
    return {
      type: "START",
      data: {
        appid: Number(this.appId),
        appkey: this.appKey,
        dev_pid: this.devPid,
        cuid: `sdk-user-${Date.now()}`,
        format: "pcm",
        sample: 16e3
      }
    };
  }
  /**
   * Recognize one short WAV clip via the BFF-proxied `pro_api` endpoint.
   * Throws on API errors or when no transcript is returned.
   */
  async recognizeShortAudio(audioData) {
    const encoded = AudioUtils.arrayBufferToBase64(audioData);
    const requestBody = {
      format: "wav",
      rate: 16e3,
      channel: 1,
      cuid: `sdk-user-${Date.now()}`,
      token: this.accessToken,
      dev_pid: this.devPid,
      speech: encoded,
      len: audioData.byteLength
    };
    const response = await fetch("/api/baidu-speech/pro_api", {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(requestBody)
    });
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("\u767E\u5EA6\u8BC6\u522B\u5931\u8D25");
    return parsed;
  }
  // Handle both the REST shape (err_no/result[]) and the WebSocket frame shape
  // (type: HEARTBEAT | ERROR | MID_TEXT | FIN_TEXT).
  parseResult(data) {
    if (data["err_no"] !== void 0) {
      if (data["err_no"] !== 0) {
        throw new Error(`Baidu API Error [${String(data["err_no"])}]: ${String(data["err_msg"])}`);
      }
      const candidates = data["result"];
      if (candidates && candidates.length > 0) {
        return {
          transcript: candidates[0] ?? "",
          isFinal: true,
          confidence: 0.9,
          original: data
        };
      }
    }
    const frameType = data["type"];
    if (frameType) {
      if (frameType === "HEARTBEAT") return null;
      if (frameType === "ERROR") {
        throw new Error(`Baidu WS Error: ${String(data["err_msg"])}`);
      }
      if (frameType === "MID_TEXT" || frameType === "FIN_TEXT") {
        return {
          transcript: data["result"],
          isFinal: frameType === "FIN_TEXT",
          confidence: 0.9,
          original: data
        };
      }
    }
    return null;
  }
};
1482
// Alibaba Cloud NLS speech-recognition adapter (WebSocket handshake + BFF REST).
var AlibabaAdapter = class {
  constructor(accessKeyId, accessKeySecret, appKey) {
    this.accessKeyId = accessKeyId;
    this.appKey = appKey;
    this.accessKeySecret = accessKeySecret;
  }
  static {
    __name(this, "AlibabaAdapter");
  }
  name = "Alibaba";
  /** Access Key Secret - should be used server-side in production */
  accessKeySecret;
  // NLS gateway streaming endpoint (Shanghai region).
  getConnectUrl() {
    return `wss://nls-gateway.cn-shanghai.aliyuncs.com/ws/v1`;
  }
  // StartRecognition frame per the NLS SpeechRecognizer protocol.
  getHandshakeParams() {
    return {
      header: {
        message_id: this.generateUUID(),
        task_id: this.generateUUID(),
        namespace: "SpeechRecognizer",
        name: "StartRecognition",
        appkey: this.appKey
      },
      payload: {
        format: "pcm",
        sample_rate: 16e3,
        enable_intermediate_result: true,
        enable_punctuation_prediction: true,
        enable_inverse_text_normalization: true
      }
    };
  }
  /**
   * Recognize one short WAV clip via the BFF proxy.
   * Throws on NLS errors or when no transcript is returned.
   */
  async recognizeShortAudio(audioData) {
    const encoded = AudioUtils.arrayBufferToBase64(audioData);
    const response = await fetch("/api/alibaba-speech/recognize", {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        "X-NLS-Token": this.accessKeyId
      },
      body: JSON.stringify({
        appkey: this.appKey,
        format: "wav",
        sample_rate: 16e3,
        audio: encoded
      })
    });
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("\u963F\u91CC\u4E91\u8BC6\u522B\u5931\u8D25");
    return parsed;
  }
  // Handle both the NLS envelope shape (header/payload, status 20000000 = OK)
  // and a flat `{ result, confidence }` shape from the BFF.
  parseResult(data) {
    const header = data["header"];
    if (header) {
      const status = header["status"];
      if (status !== 2e7) {
        throw new Error(`Alibaba Error [${status}]: ${String(header["status_text"])}`);
      }
      const body = data["payload"];
      if (body?.["result"]) {
        return {
          transcript: body["result"],
          isFinal: header["name"] === "RecognitionCompleted",
          confidence: 0.9,
          original: data
        };
      }
    }
    if (data["result"]) {
      return {
        transcript: data["result"],
        isFinal: true,
        confidence: data["confidence"] || 0.9,
        original: data
      };
    }
    return null;
  }
  // RFC-4122-style v4 UUID from Math.random (not cryptographically secure).
  generateUUID() {
    return "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, (c) => {
      const r = Math.random() * 16 | 0;
      const v = c === "x" ? r : r & 3 | 8;
      return v.toString(16);
    });
  }
};
1570
// Google Cloud Speech-to-Text adapter (REST `speech:recognize` endpoint).
var GoogleAdapter = class {
  constructor(apiKey, languageCode = "zh-CN") {
    this.apiKey = apiKey;
    this.languageCode = languageCode;
  }
  static {
    __name(this, "GoogleAdapter");
  }
  name = "Google";
  /**
   * Recognize one short LINEAR16 clip; throws on API errors or when the
   * response contains no alternatives.
   */
  async recognizeShortAudio(audioData) {
    const encoded = AudioUtils.arrayBufferToBase64(audioData);
    const requestBody = {
      config: {
        encoding: "LINEAR16",
        sampleRateHertz: 16e3,
        languageCode: this.languageCode,
        enableAutomaticPunctuation: true
      },
      audio: {
        content: encoded
      }
    };
    const response = await fetch(
      `https://speech.googleapis.com/v1/speech:recognize?key=${this.apiKey}`,
      {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(requestBody)
      }
    );
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("Google \u8BC6\u522B\u5931\u8D25");
    return parsed;
  }
  // Take the first alternative of the first result; null when none exists.
  parseResult(data) {
    if (data["error"]) {
      throw new Error(data["error"].message ?? "Google API Error");
    }
    const alternatives = data["results"]?.[0]?.alternatives;
    if (!alternatives || alternatives.length === 0) return null;
    const best = alternatives[0];
    return {
      transcript: best?.transcript ?? "",
      isFinal: true,
      confidence: best?.confidence ?? 0.9,
      original: data
    };
  }
};
1625
// Azure Cognitive Services speech-recognition adapter.
var AzureAdapter = class {
  constructor(subscriptionKey, region, language = "zh-CN") {
    this.subscriptionKey = subscriptionKey;
    this.region = region;
    this.language = language;
  }
  static {
    __name(this, "AzureAdapter");
  }
  name = "Azure";
  // Regional conversation STT WebSocket endpoint.
  getConnectUrl() {
    return `wss://${this.region}.stt.speech.microsoft.com/speech/recognition/conversation/cognitiveservices/v1?language=${this.language}`;
  }
  // Handshake headers for the WebSocket connection.
  getHandshakeParams() {
    return {
      "Ocp-Apim-Subscription-Key": this.subscriptionKey
    };
  }
  /**
   * Recognize one short 16 kHz PCM/WAV clip via the regional REST endpoint.
   * Throws when no transcript can be extracted from the response.
   */
  async recognizeShortAudio(audioData) {
    const endpoint = `https://${this.region}.stt.speech.microsoft.com/speech/recognition/conversation/cognitiveservices/v1?language=${this.language}`;
    const response = await fetch(endpoint, {
      method: "POST",
      headers: {
        "Ocp-Apim-Subscription-Key": this.subscriptionKey,
        "Content-Type": "audio/wav; codecs=audio/pcm; samplerate=16000"
      },
      body: audioData
    });
    const parsed = this.parseResult(await response.json());
    if (!parsed) throw new Error("Azure \u8BC6\u522B\u5931\u8D25");
    return parsed;
  }
  // Prefer the Success shape; fall back to any payload carrying `Text`.
  parseResult(data) {
    if (data["RecognitionStatus"] === "Success") {
      return {
        transcript: data["DisplayText"] || data["Text"] || "",
        isFinal: true,
        confidence: data["Confidence"] || 0.9,
        original: data
      };
    }
    if (data["Text"]) {
      return {
        transcript: data["Text"],
        // RecognitionStatus is not "Success" on this path (handled above).
        isFinal: false,
        confidence: 0.9,
        original: data
      };
    }
    return null;
  }
};
829
1680
  var BaseRecognitionStrategy = class {
830
1681
  constructor(config) {
831
1682
  this.config = config;
@@ -1631,7 +2482,541 @@ async function listen(config) {
1631
2482
  });
1632
2483
  }
1633
2484
  __name(listen, "listen");
2485
/**
 * Run `listen(config)` but reject with "\u8BC6\u522B\u8D85\u65F6" if it does not
 * settle within `timeout` milliseconds.
 *
 * @param config - forwarded unchanged to `listen`
 * @param timeout - milliseconds to wait (default 10s)
 * @returns the value `listen` resolves with
 * @throws Error("\u8BC6\u522B\u8D85\u65F6") on timeout, or whatever `listen` rejects with
 */
async function listenWithTimeout(config, timeout = 1e4) {
  let timer;
  try {
    return await Promise.race([
      listen(config),
      new Promise((_, reject) => {
        timer = setTimeout(() => reject(new Error("\u8BC6\u522B\u8D85\u65F6")), timeout);
      })
    ]);
  } finally {
    // Previously the timer was never cleared, keeping the event loop alive for
    // up to `timeout` ms after a successful recognition.
    clearTimeout(timer);
  }
}
__name(listenWithTimeout, "listenWithTimeout");
2494
+
2495
// src/plugins/fingerprint/utils.ts
// 64-bit FNV-1a hashing parameters; unsigned 64-bit arithmetic is emulated with BigInt.
var FNV_OFFSET_BASIS = 0xcbf29ce484222325n; // standard 64-bit FNV offset basis
var FNV_PRIME = 0x100000001b3n; // standard 64-bit FNV prime
var UINT64_MASK = 0xffffffffffffffffn; // truncates products back into the u64 range
2499
/**
 * Deterministic JSON serialization: object keys are emitted in sorted order so
 * that equal values always produce identical strings. `undefined` property
 * values are coerced to null rather than dropped.
 */
function stableStringify(value) {
  if (value === null || typeof value !== "object") {
    return JSON.stringify(value);
  }
  if (Array.isArray(value)) {
    const parts = [];
    for (const element of value) {
      parts.push(stableStringify(element));
    }
    return `[${parts.join(",")}]`;
  }
  const source = value;
  const pieces = Object.keys(source)
    .sort()
    .map((propName) => `${JSON.stringify(propName)}:${stableStringify(source[propName] ?? null)}`);
  return `{${pieces.join(",")}}`;
}
__name(stableStringify, "stableStringify");
2511
/**
 * Flatten collected components into a canonical string for hashing.
 * Keys are iterated in sorted order; falsy component slots are skipped,
 * and the salt is embedded so different salts yield different hashes.
 */
function serializeComponents(components, salt) {
  const values = {};
  const sortedKeys = Object.keys(components).sort();
  for (const componentKey of sortedKeys) {
    const entry = components[componentKey];
    if (entry) {
      values[componentKey] = entry.value;
    }
  }
  return stableStringify({ salt, values });
}
__name(serializeComponents, "serializeComponents");
2522
/**
 * 64-bit FNV-1a hash over the string's UTF-16 code units.
 * Returns a zero-padded 16-character lowercase hex string.
 */
function fnv1a64(input) {
  let digest = FNV_OFFSET_BASIS;
  let index = 0;
  while (index < input.length) {
    digest = (digest ^ BigInt(input.charCodeAt(index))) * FNV_PRIME & UINT64_MASK;
    index += 1;
  }
  return digest.toString(16).padStart(16, "0");
}
__name(fnv1a64, "fnv1a64");
2531
/**
 * SHA-256 hex digest via WebCrypto. Returns null when `crypto.subtle` is
 * unavailable (e.g. insecure contexts), letting callers fall back to FNV.
 */
async function sha256(input) {
  const subtleCrypto = globalThis.crypto?.subtle;
  if (!subtleCrypto) {
    return null;
  }
  const encoded = new TextEncoder().encode(input);
  const digestBuffer = await subtleCrypto.digest("SHA-256", encoded);
  let hex = "";
  for (const byte of new Uint8Array(digestBuffer)) {
    hex += byte.toString(16).padStart(2, "0");
  }
  return hex;
}
__name(sha256, "sha256");
2541
/**
 * Hash a string with the requested algorithm. "sha256" silently falls back to
 * FNV-1a when WebCrypto is unavailable; anything else uses FNV-1a directly.
 */
async function hashString(input, algorithm = "fnv1a64") {
  if (algorithm !== "sha256") {
    return fnv1a64(input);
  }
  const digest = await sha256(input);
  return digest ?? fnv1a64(input);
}
__name(hashString, "hashString");
2548
/**
 * Round a number to the nearest multiple of `bucketSize`. Non-finite values
 * and non-positive bucket sizes are passed through unchanged.
 */
function bucketNumber(value, bucketSize) {
  const canBucket = Number.isFinite(value) && bucketSize > 0;
  return canBucket ? Math.round(value / bucketSize) * bucketSize : value;
}
__name(bucketNumber, "bucketNumber");
2555
/**
 * Reduce UA churn: collapse 3+-part version numbers to major.minor, mask
 * Android build identifiers, collapse Safari-style Version/ tokens, then trim.
 */
function normalizeUserAgent(userAgent) {
  const collapsedVersions = userAgent.replace(/\b(\d+)\.(\d+)(?:\.\d+)+\b/g, "$1.$2");
  const maskedBuilds = collapsedVersions.replace(/\bBuild\/[^\s;)]+/gi, "Build/*");
  return maskedBuilds.replace(/\bVersion\/(\d+)\.(\d+)(?:\.\d+)+/gi, "Version/$1.$2").trim();
}
__name(normalizeUserAgent, "normalizeUserAgent");
2559
/**
 * Coarsen CPU thread counts: 1-2 pass through, then snap to 4, 8, or 16.
 */
function bucketHardwareConcurrency(value) {
  if (value <= 2) {
    return value;
  }
  for (const bucket of [4, 8]) {
    if (value <= bucket) {
      return bucket;
    }
  }
  return 16;
}
__name(bucketHardwareConcurrency, "bucketHardwareConcurrency");
2566
/**
 * Snap reported device memory (GB) into the coarse ladder 1/2/4/8/16.
 */
function bucketDeviceMemory(value) {
  for (const bucket of [1, 2, 4, 8]) {
    if (value <= bucket) {
      return bucket;
    }
  }
  return 16;
}
__name(bucketDeviceMemory, "bucketDeviceMemory");
2574
+
2575
+ // src/plugins/fingerprint/collectors.ts
2576
/** Safe accessor for the global `navigator`; undefined outside browsers. */
function getNavigator() {
  if (typeof globalThis.navigator === "undefined") {
    return void 0;
  }
  return globalThis.navigator;
}
__name(getNavigator, "getNavigator");
2580
/** Safe accessor for the global `screen`; undefined outside browsers. */
function getScreen() {
  if (typeof globalThis.screen === "undefined") {
    return void 0;
  }
  return globalThis.screen;
}
__name(getScreen, "getScreen");
2584
/** Safe accessor for `window`; undefined in non-browser runtimes. */
function getWindow() {
  if (typeof window === "undefined") {
    return void 0;
  }
  return window;
}
__name(getWindow, "getWindow");
2588
/**
 * Report whether `window[type]` (localStorage/sessionStorage) is accessible.
 * Never reads or writes the storage; property access alone can throw in
 * privacy-hardened browsers, which is reported as unavailable.
 */
function getStorageAvailability(type) {
  try {
    const activeWindow = getWindow();
    return activeWindow ? Boolean(activeWindow[type]) : false;
  } catch {
    return false;
  }
}
__name(getStorageAvailability, "getStorageAvailability");
2600
/** Existence check only for IndexedDB; no database is ever opened. */
function getIndexedDBAvailability() {
  let available = false;
  try {
    available = typeof globalThis.indexedDB !== "undefined";
  } catch {
    // Sandboxed environments may throw on access; report unavailable.
  }
  return available;
}
__name(getIndexedDBAvailability, "getIndexedDBAvailability");
2608
/**
 * Resolve the IANA timezone. In strict privacy mode only availability is
 * reported ("available"/"unknown") so the actual region is not exposed.
 */
function getTimezone(context) {
  const zone = Intl.DateTimeFormat().resolvedOptions().timeZone;
  if (!zone) {
    return "unknown";
  }
  return context.options.privacyMode === "strict" ? "available" : zone;
}
__name(getTimezone, "getTimezone");
2616
/**
 * Screen-size component. Debug mode keeps exact dimensions; every other mode
 * sorts the two dimensions (orientation-agnostic) and buckets them by the
 * configured screen bucket size.
 */
function getScreenValue(context) {
  const currentScreen = getScreen();
  if (!currentScreen) {
    return null;
  }
  const width = currentScreen.width ?? 0;
  const height = currentScreen.height ?? 0;
  if (context.options.privacyMode === "debug") {
    return {
      width,
      height,
      availWidth: currentScreen.availWidth ?? width,
      availHeight: currentScreen.availHeight ?? height
    };
  }
  const [smaller, larger] = [width, height].sort((left, right) => left - right);
  const bucketSize = context.options.screenBucketSize;
  return {
    min: bucketNumber(smaller ?? 0, bucketSize),
    max: bucketNumber(larger ?? 0, bucketSize)
  };
}
__name(getScreenValue, "getScreenValue");
2638
/**
 * User-Agent component. Strict mode reports only "available"; otherwise the
 * raw UA is optionally normalized to cut version/build churn.
 */
function collectUserAgent(context) {
  const rawUserAgent = getNavigator()?.userAgent;
  if (!rawUserAgent) {
    return null;
  }
  const { privacyMode, normalizeUserAgent: shouldNormalize } = context.options;
  if (privacyMode === "strict") {
    return "available";
  }
  return shouldNormalize ? normalizeUserAgent(rawUserAgent) : rawUserAgent;
}
__name(collectUserAgent, "collectUserAgent");
2649
/**
 * Language-list component. Falls back to the single preferred language when
 * `navigator.languages` is missing; strict mode keeps only the first entry.
 */
function collectLanguages(context) {
  const nav = getNavigator();
  let languageList;
  if (nav?.languages?.length) {
    languageList = [...nav.languages];
  } else {
    languageList = [nav?.language ?? "unknown"];
  }
  return context.options.privacyMode === "strict" ? languageList.slice(0, 1) : languageList;
}
__name(collectLanguages, "collectLanguages");
2658
var defaultFingerprintCollectors = [
  {
    // User-Agent is a staple component of FingerprintJS-style tools; normalized by default to reduce churn.
    key: "userAgent",
    source: "low-entropy",
    confidence: 0.72,
    collect: collectUserAgent
  },
  {
    // Platform is usually stable but coarse-grained, hence a medium weight.
    key: "platform",
    source: "low-entropy",
    confidence: 0.55,
    collect: /* @__PURE__ */ __name(() => getNavigator()?.platform ?? null, "collect")
  },
  {
    // Preferred language distinguishes locale preferences but must not identify a user on its own.
    key: "language",
    source: "low-entropy",
    confidence: 0.5,
    collect: /* @__PURE__ */ __name(() => getNavigator()?.language ?? "unknown", "collect")
  },
  {
    // The language list collapses to the preferred language in strict mode.
    key: "languages",
    source: "low-entropy",
    confidence: 0.48,
    collect: collectLanguages
  },
  {
    // In strict mode only timezone availability is recorded, avoiding over-exposure of region info.
    key: "timezone",
    source: "low-entropy",
    confidence: 0.64,
    collect: getTimezone
  },
  {
    // The UTC offset is coarser than the IANA timezone; kept as a compatibility supplement.
    key: "timezoneOffset",
    source: "low-entropy",
    confidence: 0.55,
    collect: /* @__PURE__ */ __name(() => (/* @__PURE__ */ new Date()).getTimezoneOffset(), "collect")
  },
  {
    // Screen size is bucketed and orientation-agnostic by default, reducing precise device signals.
    key: "screen",
    source: "low-entropy",
    confidence: 0.62,
    collect: getScreenValue
  },
  {
    // Color depth has a tiny value space, hence the low weight.
    key: "colorDepth",
    source: "low-entropy",
    confidence: 0.38,
    collect: /* @__PURE__ */ __name(() => getScreen()?.colorDepth ?? null, "collect")
  },
  {
    // Pixel ratio can be affected by page zoom, so it is bucketed by 0.25 and weighted lower.
    key: "pixelRatio",
    source: "low-entropy",
    confidence: 0.42,
    collect: /* @__PURE__ */ __name(() => {
      const currentWindow = getWindow();
      return currentWindow ? bucketNumber(currentWindow.devicePixelRatio || 1, 0.25) : 1;
    }, "collect")
  },
  {
    // Hardware concurrency is bucketed by default; the raw value survives only in debug mode.
    key: "hardwareConcurrency",
    source: "low-entropy",
    confidence: 0.5,
    collect: /* @__PURE__ */ __name((context) => {
      const value = getNavigator()?.hardwareConcurrency;
      if (!value) {
        return null;
      }
      return context.options.privacyMode === "debug" ? value : bucketHardwareConcurrency(value);
    }, "collect")
  },
  {
    // deviceMemory is not supported by every browser; bucketed by default to reduce uniqueness.
    key: "deviceMemory",
    source: "low-entropy",
    confidence: 0.44,
    collect: /* @__PURE__ */ __name((context) => {
      const value = getNavigator()?.deviceMemory;
      if (!value) {
        return null;
      }
      return context.options.privacyMode === "debug" ? value : bucketDeviceMemory(value);
    }, "collect")
  },
  {
    // Max touch points help separate desktop from touch environments, still coarse-grained.
    key: "maxTouchPoints",
    source: "low-entropy",
    confidence: 0.45,
    collect: /* @__PURE__ */ __name(() => getNavigator()?.maxTouchPoints ?? 0, "collect")
  },
  {
    // Only detects cookie capability; never reads or writes any cookie.
    key: "cookiesEnabled",
    source: "capability",
    confidence: 0.32,
    collect: /* @__PURE__ */ __name(() => getNavigator()?.cookieEnabled ?? false, "collect")
  },
  {
    // Only checks whether the storage APIs are accessible; writes no probe data.
    key: "storage",
    source: "capability",
    confidence: 0.28,
    collect: /* @__PURE__ */ __name(() => ({
      localStorage: getStorageAvailability("localStorage"),
      sessionStorage: getStorageAvailability("sessionStorage")
    }), "collect")
  },
  {
    // IndexedDB is existence-checked only; no database is ever opened.
    key: "indexedDB",
    source: "capability",
    confidence: 0.24,
    collect: /* @__PURE__ */ __name(() => getIndexedDBAvailability(), "collect")
  },
  {
    // Records the browser privacy preference; callers may use it to further degrade collection.
    key: "doNotTrack",
    source: "capability",
    confidence: 0.2,
    collect: /* @__PURE__ */ __name(() => getNavigator()?.doNotTrack ?? null, "collect")
  }
];
2790
+
2791
// src/plugins/fingerprint/generator.ts
// Version tag embedded in every fingerprint result.
var FINGERPRINT_VERSION = "1.0.0";
// DI token used to register/resolve the fingerprint generator in a container.
var FINGERPRINT_GENERATOR = /* @__PURE__ */ Symbol("melange:fingerprint-generator");
// Fallbacks applied when neither base nor per-call options provide a field.
var DEFAULT_OPTIONS = {
  salt: "melange",
  cache: true,
  cacheTtl: 5 * 60 * 1e3, // 5 minutes
  componentTimeout: 80, // per-collector budget in ms
  privacyMode: "balanced",
  hashAlgorithm: "fnv1a64",
  screenBucketSize: 100,
  include: [],
  exclude: [],
  collectors: []
};
2806
/** Monotonic-ish clock: `performance.now()` where available, else `Date.now()`. */
function now() {
  if (typeof performance === "undefined") {
    return Date.now();
  }
  return performance.now();
}
__name(now, "now");
2810
/**
 * Merge per-call options over base options over DEFAULT_OPTIONS (nullish
 * precedence, so explicit `false`/0 survive). `normalizeUserAgent` defaults to
 * true outside debug mode, and collector lists from base and override are
 * concatenated rather than replaced.
 */
function resolveOptions(baseOptions, overrideOptions = {}) {
  const pick = (key) => overrideOptions[key] ?? baseOptions[key] ?? DEFAULT_OPTIONS[key];
  const privacyMode = pick("privacyMode");
  return {
    salt: pick("salt"),
    cache: pick("cache"),
    cacheTtl: pick("cacheTtl"),
    componentTimeout: pick("componentTimeout"),
    privacyMode,
    hashAlgorithm: pick("hashAlgorithm"),
    screenBucketSize: pick("screenBucketSize"),
    normalizeUserAgent: overrideOptions.normalizeUserAgent ?? baseOptions.normalizeUserAgent ?? privacyMode !== "debug",
    include: pick("include"),
    exclude: pick("exclude"),
    collectors: [
      ...baseOptions.collectors ?? DEFAULT_OPTIONS.collectors,
      ...overrideOptions.collectors ?? DEFAULT_OPTIONS.collectors
    ]
  };
}
__name(resolveOptions, "resolveOptions");
2831
/**
 * Serialize the hash-relevant option fields (plus collector keys) so a cache
 * entry is reused only when the configuration is identical.
 */
function createCacheKey(options) {
  const { salt, privacyMode, hashAlgorithm, screenBucketSize, normalizeUserAgent, include, exclude } = options;
  return JSON.stringify({
    salt,
    privacyMode,
    hashAlgorithm,
    screenBucketSize,
    normalizeUserAgent,
    include,
    exclude,
    collectors: options.collectors.map((entry) => entry.key)
  });
}
__name(createCacheKey, "createCacheKey");
2844
/**
 * Include/exclude filter: a non-empty include list is an allow-list, and the
 * exclude list always wins over inclusion.
 */
function shouldCollect(collector, options) {
  const { include, exclude } = options;
  const allowed = include.length === 0 || include.includes(collector.key);
  return allowed && !exclude.includes(collector.key);
}
__name(shouldCollect, "shouldCollect");
2851
/**
 * Await `operation`, but resolve with `undefined` (not reject) once `timeout`
 * elapses — a timed-out collector is simply skipped. Rejections are normalized
 * to Error instances; the timer is cleared as soon as the operation settles.
 */
function withTimeout(operation, timeout) {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => resolve(void 0), timeout);
    const settle = (callback) => (outcome) => {
      clearTimeout(timer);
      callback(outcome);
    };
    operation.then(
      settle(resolve),
      settle((error) => reject(error instanceof Error ? error : new Error(String(error))))
    );
  });
}
__name(withTimeout, "withTimeout");
2867
/**
 * Run one collector under the component timeout. An `undefined` value (timeout
 * or deliberate skip) yields `{ key }` with no component; otherwise the value
 * is wrapped with its source, elapsed duration, and confidence clamped to [0, 1]
 * (defaulting to 0.4 / "custom" when the collector declares neither).
 */
async function collectWithTimeout(collector, context) {
  const startedAt = context.now();
  const collected = await withTimeout(
    Promise.resolve(collector.collect(context)),
    context.options.componentTimeout
  );
  if (collected === void 0) {
    return { key: collector.key };
  }
  const clampedConfidence = Math.min(1, Math.max(0, collector.confidence ?? 0.4));
  return {
    key: collector.key,
    component: {
      value: collected,
      source: collector.source ?? "custom",
      duration: Math.max(0, context.now() - startedAt),
      confidence: clampedConfidence
    }
  };
}
__name(collectWithTimeout, "collectWithTimeout");
2887
/**
 * Score the fingerprint: 70% average component confidence + 30% coverage
 * (collected / (collected + skipped)), clamped to [0, 0.99] and rounded to
 * three decimals. No components at all scores 0.
 */
function calculateConfidence(components, skippedCount) {
  const componentList = Object.values(components);
  const componentCount = componentList.length;
  if (componentCount === 0) {
    return { score: 0, componentCount: 0, skippedCount };
  }
  let totalWeight = 0;
  for (const component of componentList) {
    totalWeight += component.confidence;
  }
  const averageWeight = totalWeight / componentCount;
  const coverage = componentCount / (componentCount + skippedCount);
  const rawScore = averageWeight * 0.7 + coverage * 0.3;
  const boundedScore = Math.min(0.99, Math.max(0, rawScore));
  return {
    score: Number(boundedScore.toFixed(3)),
    componentCount,
    skippedCount
  };
}
__name(calculateConfidence, "calculateConfidence");
2907
var FingerprintGeneratorImpl = class {
  /**
   * Create a fingerprint generator.
   *
   * @param baseOptions - Default generator options; later `get(options)` calls may override them
   */
  constructor(baseOptions = {}) {
    this.baseOptions = baseOptions;
  }
  static {
    __name(this, "FingerprintGeneratorImpl");
  }
  /**
   * In-memory cache for this generator instance.
   *
   * @remarks
   * Only the most recent result for a matching configuration is cached; nothing is persisted.
   */
  cacheEntry;
  /**
   * Produce a fingerprint result.
   *
   * @description
   * Generation flow:
   * 1. Merge the constructor defaults with this call's options;
   * 2. Return the in-memory cache entry directly on a hit;
   * 3. Run the collectors concurrently, skipping any that fail;
   * 4. Stably serialize the component values and hash them;
   * 5. Return the visitorId, component details, confidence, and timing.
   *
   * @param options - Options for this call; they override same-named constructor defaults
   * @returns the fingerprint generation result
   */
  async get(options = {}) {
    const resolvedOptions = resolveOptions(this.baseOptions, options);
    const cacheKey = createCacheKey(resolvedOptions);
    const generatedAt = Date.now();
    // Cache hit: identical configuration and the entry has not expired yet.
    if (resolvedOptions.cache && this.cacheEntry?.key === cacheKey && this.cacheEntry.expiresAt > generatedAt) {
      return this.cacheEntry.result;
    }
    const startedAt = now();
    const collectors = [...defaultFingerprintCollectors, ...resolvedOptions.collectors].filter(
      (collector) => shouldCollect(collector, resolvedOptions)
    );
    const context = {
      options: resolvedOptions,
      now
    };
    // Collect concurrently; a throwing collector is treated as skipped rather than failing the run.
    const collected = await Promise.all(
      collectors.map(
        (collector) => collectWithTimeout(collector, context).catch(
          () => ({ key: collector.key })
        )
      )
    );
    const components = {};
    // NOTE: collectors filtered out by include/exclude also count toward
    // skippedCount here, lowering the coverage term of the confidence score.
    let skippedCount = defaultFingerprintCollectors.length + resolvedOptions.collectors.length;
    for (const item of collected) {
      if (item.component) {
        components[item.key] = item.component;
        skippedCount--;
      }
    }
    const input = serializeComponents(components, resolvedOptions.salt);
    const visitorId = await hashString(input, resolvedOptions.hashAlgorithm);
    const result = {
      visitorId,
      components,
      confidence: calculateConfidence(components, skippedCount),
      version: FINGERPRINT_VERSION,
      duration: Math.max(0, now() - startedAt),
      generatedAt
    };
    if (resolvedOptions.cache) {
      this.cacheEntry = {
        key: cacheKey,
        expiresAt: generatedAt + resolvedOptions.cacheTtl,
        result
      };
    }
    return result;
  }
  /**
   * Clear the in-memory cache.
   *
   * @description
   * The next `get()` call will re-collect all components. This never touches
   * browser storage, since the plugin does not write to persistent storage.
   */
  clearCache() {
    this.cacheEntry = void 0;
  }
};
3000
/** Factory for a fingerprint generator with the given default options. */
function createFingerprintGenerator(options = {}) {
  const generator = new FingerprintGeneratorImpl(options);
  return generator;
}
__name(createFingerprintGenerator, "createFingerprintGenerator");
3004
/** One-shot convenience: build a throwaway generator and return its result. */
async function getFingerprint(options = {}) {
  const generator = createFingerprintGenerator(options);
  return generator.get();
}
__name(getFingerprint, "getFingerprint");
3008
/**
 * Register the fingerprint generator as a singleton under the
 * FINGERPRINT_GENERATOR token; defaults to the global container.
 */
function registerFingerprintPlugin(container = globalContainer, options = {}) {
  const factory = () => new FingerprintGeneratorImpl(options);
  return container.registerSingleton(FINGERPRINT_GENERATOR, factory);
}
__name(registerFingerprintPlugin, "registerFingerprintPlugin");
3015
/** True when at least one browser signal source (navigator or screen) exists. */
function isFingerprintSupported() {
  const hasNavigator = typeof globalThis.navigator !== "undefined";
  const hasScreen = typeof globalThis.screen !== "undefined";
  return hasNavigator || hasScreen;
}
__name(isFingerprintSupported, "isFingerprintSupported");
1634
3019
 
1635
- export { SpeechRecognizerImpl, SpeechSynthesizerImpl, createSpeechRecognizer, createSpeechSynthesizer, isSpeechRecognitionSupported, isSpeechSynthesisSupported, listen, speak };
1636
- //# sourceMappingURL=chunk-3RM45M64.js.map
1637
- //# sourceMappingURL=chunk-3RM45M64.js.map
3020
+ export { AWSSynthesisAdapter, AlibabaAdapter, AlibabaSynthesisAdapter, AudioUtils, AzureAdapter, AzureSynthesisAdapter, BaiduAdapter, BaiduSynthesisAdapter, FINGERPRINT_GENERATOR, FINGERPRINT_VERSION, FingerprintGeneratorImpl, GenericAdapter, GenericSynthesisAdapter, GoogleAdapter, GoogleSynthesisAdapter, RecognitionStatus, SpeechRecognizerImpl, SpeechSynthesizerImpl, SynthesisAudioUtils, SynthesisStatus, TencentAdapter, TencentSynthesisAdapter, XunfeiAdapter, XunfeiSynthesisAdapter, bucketDeviceMemory, bucketHardwareConcurrency, bucketNumber, createFingerprintGenerator, createSpeechRecognizer, createSpeechSynthesizer, defaultFingerprintCollectors, fnv1a64, getFingerprint, hashString, isFingerprintSupported, isSpeechRecognitionSupported, isSpeechSynthesisSupported, listen, listenWithTimeout, normalizeUserAgent, registerFingerprintPlugin, serializeComponents, sha256, speak, speakWithCloud, stableStringify };
3021
+ //# sourceMappingURL=chunk-R3BPDZ3R.js.map
3022
+ //# sourceMappingURL=chunk-R3BPDZ3R.js.map