@easyv/biz-components 0.0.13 → 0.0.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. package/dist/hooks/index.d.ts +1 -0
  2. package/dist/hooks/useXunFeiSteamVoiceManager.d.ts +8 -0
  3. package/dist/hooks/useXunFeiSteamVoiceManager.es.js +15 -0
  4. package/dist/hooks/useXunFeiSteamVoiceManager.es.js.map +1 -0
  5. package/dist/index.es.js +10 -6
  6. package/dist/index.es.js.map +1 -1
  7. package/dist/utils/xunFeiVoice/RecorderManager/RecorderManager-draft.d.ts +38 -0
  8. package/dist/utils/xunFeiVoice/RecorderManager/RecorderManager.d.ts +42 -0
  9. package/dist/utils/xunFeiVoice/RecorderManager/RecorderManager.es.js +118 -0
  10. package/dist/utils/xunFeiVoice/RecorderManager/RecorderManager.es.js.map +1 -0
  11. package/dist/utils/xunFeiVoice/RecorderManager/index.d.ts +1 -0
  12. package/dist/utils/xunFeiVoice/RecorderManager/processorSource/processor.worker.d.ts +0 -0
  13. package/dist/utils/xunFeiVoice/RecorderManager/processorSource/processor.worker.es.js +233 -0
  14. package/dist/utils/xunFeiVoice/RecorderManager/processorSource/processor.worker.es.js.map +1 -0
  15. package/dist/utils/xunFeiVoice/RecorderManager/processorSource/processor.worklet.d.ts +0 -0
  16. package/dist/utils/xunFeiVoice/RecorderManager/processorSource/processor.worklet.es.js +379 -0
  17. package/dist/utils/xunFeiVoice/RecorderManager/processorSource/processor.worklet.es.js.map +1 -0
  18. package/dist/utils/xunFeiVoice/{xunFeiStreamVoiceManager.d.ts → XunFeiStreamVoiceManager.d.ts} +16 -15
  19. package/dist/utils/xunFeiVoice/XunFeiStreamVoiceManager.es.js +177 -0
  20. package/dist/utils/xunFeiVoice/XunFeiStreamVoiceManager.es.js.map +1 -0
  21. package/dist/utils/xunFeiVoice/index.d.ts +3 -1
  22. package/dist/utils/xunFeiVoice/test/RecorderManager.cy.d.ts +1 -0
  23. package/dist/utils/xunFeiVoice/test/XunFeiVoiceManager.cy.d.ts +1 -0
  24. package/dist/utils/xunFeiVoice/types.d.ts +11 -2
  25. package/dist/utils/xunFeiVoice/utils.d.ts +0 -1
  26. package/dist/utils/xunFeiVoice/utils.es.js.map +1 -1
  27. package/package.json +4 -2
  28. package/dist/utils/xunFeiVoice/recordManager.es.js +0 -175
  29. package/dist/utils/xunFeiVoice/recordManager.es.js.map +0 -1
  30. package/dist/utils/xunFeiVoice/xunFeiStreamVoiceManager.es.js +0 -140
  31. package/dist/utils/xunFeiVoice/xunFeiStreamVoiceManager.es.js.map +0 -1
  32. /package/dist/utils/xunFeiVoice/{recordManager.d.ts → RecorderManager/RecordManager-origin.d.ts} +0 -0
@@ -1 +1,2 @@
  export * from './useDivAutoScroll';
+ export * from './useXunFeiSteamVoiceManager';
@@ -0,0 +1,8 @@
+ import { XunFeiStreamVoiceManager, XunFeiStreamVoiceManagerConfig } from '../utils';
+
+ /**
+ * 讯飞流式语音识别的 hook,注意 config 只在初识化时注册一次,后续更改,请使用 manager.setConfig 方法更新
+ * @param config XunFeiStreamVoiceManagerConfig
+ * @returns
+ */
+ export declare const useXunFeiSteamVoiceManager: (config: XunFeiStreamVoiceManagerConfig) => XunFeiStreamVoiceManager | null;
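A minimal consumer-side sketch of the hook declared above, in TypeScript. The fields of XunFeiStreamVoiceManagerConfig and the exact setConfig signature mentioned in the JSDoc are not part of this diff, so the config values and the setConfig call below are placeholders, not the package's documented API.

```ts
import { useEffect } from 'react';
import { useXunFeiSteamVoiceManager } from '@easyv/biz-components';

// Hypothetical config: XunFeiStreamVoiceManagerConfig's fields are not shown in this diff.
const config = { appId: 'demo-app-id' } as any;

export function VoiceInput() {
  // The hook creates the manager once on mount and calls destroy() on unmount;
  // `config` is only read during that first initialization.
  const manager = useXunFeiSteamVoiceManager(config);

  useEffect(() => {
    if (!manager) return;
    // Per the JSDoc above, later changes go through manager.setConfig
    // (its exact signature is not shown in this diff).
    (manager as any).setConfig?.({ appId: 'updated-app-id' });
  }, [manager]);

  return null;
}
```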
@@ -0,0 +1,15 @@
+ import { useState as o, useEffect as a } from "react";
+ import { XunFeiStreamVoiceManager as s } from "../utils/xunFeiVoice/XunFeiStreamVoiceManager.es.js";
+ const c = (t) => {
+ const [r, n] = o(null);
+ return a(() => {
+ const e = new s(t);
+ return n(e), () => {
+ e == null || e.destroy();
+ };
+ }, []), r;
+ };
+ export {
+ c as useXunFeiSteamVoiceManager
+ };
+ //# sourceMappingURL=useXunFeiSteamVoiceManager.es.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"useXunFeiSteamVoiceManager.es.js","sources":["../../src/hooks/useXunFeiSteamVoiceManager.ts"],"sourcesContent":["import { useEffect, useState } from 'react';\nimport { XunFeiStreamVoiceManager, XunFeiStreamVoiceManagerConfig } from '@/utils';\n\n/**\n * 讯飞流式语音识别的 hook,注意 config 只在初识化时注册一次,后续更改,请使用 manager.setConfig 方法更新\n * @param config XunFeiStreamVoiceManagerConfig\n * @returns\n */\nexport const useXunFeiSteamVoiceManager = (config: XunFeiStreamVoiceManagerConfig) => {\n const [manager, setManager] = useState<XunFeiStreamVoiceManager | null>(null);\n\n useEffect(() => {\n const newManager = new XunFeiStreamVoiceManager(config);\n setManager(newManager);\n return () => {\n newManager?.destroy();\n };\n }, []);\n return manager;\n};\n"],"names":["useXunFeiSteamVoiceManager","config","manager","setManager","useState","useEffect","newManager","XunFeiStreamVoiceManager"],"mappings":";;AAQa,MAAAA,IAA6B,CAACC,MAA2C;AACpF,QAAM,CAACC,GAASC,CAAU,IAAIC,EAA0C,IAAI;AAE5E,SAAAC,EAAU,MAAM;AACR,UAAAC,IAAa,IAAIC,EAAyBN,CAAM;AACtD,WAAAE,EAAWG,CAAU,GACd,MAAM;AACX,MAAAA,KAAA,QAAAA,EAAY;AAAA,IACd;AAAA,EACF,GAAG,EAAE,GACEJ;AACT;"}
package/dist/index.es.js CHANGED
@@ -3,17 +3,21 @@ import { AiMessageRender as t } from "./components/AiMessageRender/AiMessageRend
  import { ShadowDom as n } from "./components/ShadowDom/ShadowDom.es.js";
  import { ScrollController as i } from "./components/ScrollController/ScrollController.es.js";
  import { VoiceAnimation as f } from "./components/VoiceAnimation/VoiceAnimation.es.js";
- import { useDivAutoScroll as S } from "./hooks/useDivAutoScroll.es.js";
- import { FunASRManager as l, getWebSocketConnectForFunASR as u } from "./utils/funASR/funASRManager.es.js";
- import { XunFeiStreamVoiceManager as g } from "./utils/xunFeiVoice/xunFeiStreamVoiceManager.es.js";
+ import { useDivAutoScroll as c } from "./hooks/useDivAutoScroll.es.js";
+ import { useXunFeiSteamVoiceManager as u } from "./hooks/useXunFeiSteamVoiceManager.es.js";
+ import { FunASRManager as l, getWebSocketConnectForFunASR as A } from "./utils/funASR/funASRManager.es.js";
+ import { XunFeiStreamVoiceManager as M } from "./utils/xunFeiVoice/XunFeiStreamVoiceManager.es.js";
+ import { RecorderManager as R } from "./utils/xunFeiVoice/RecorderManager/RecorderManager.es.js";
  export {
  t as AiMessageRender,
  l as FunASRManager,
+ R as RecorderManager,
  i as ScrollController,
  n as ShadowDom,
  f as VoiceAnimation,
- g as XunFeiStreamVoiceManager,
- u as getWebSocketConnectForFunASR,
- S as useDivAutoScroll
+ M as XunFeiStreamVoiceManager,
+ A as getWebSocketConnectForFunASR,
+ c as useDivAutoScroll,
+ u as useXunFeiSteamVoiceManager
  };
  //# sourceMappingURL=index.es.js.map
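For consumers, the entry point now exposes the new hook and the recorder alongside the existing exports. A package-level import (package name taken from the header of this diff) would look like the sketch below:

```ts
import {
  AiMessageRender,
  FunASRManager,
  RecorderManager,
  ScrollController,
  ShadowDom,
  VoiceAnimation,
  XunFeiStreamVoiceManager,
  getWebSocketConnectForFunASR,
  useDivAutoScroll,
  useXunFeiSteamVoiceManager,
} from '@easyv/biz-components';
```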
@@ -1 +1 @@
- {"version":3,"file":"index.es.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
+ {"version":3,"file":"index.es.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;;;"}
@@ -0,0 +1,38 @@
+ import { RecordFrameInfo } from '../types';
+
+ export interface RecorderManagerConfig {
+ /** 需要传入 一个 AudioWorklet 脚本,应用到 audioContext.audioWorklet.addModule(workletJsPath).
+ * AudioWorklet 运行在 Web Audio API 的 AudioWorklet 环境中,延迟更低,更适合实时音频处理。 */
+ workletJsPath: string;
+ /** 传入一个 一个 Web Worker 脚本。
+ * 用于后台对音频数据的处理。
+ */
+ workerJsPath?: string;
+ onStop?: (audioBuffers: ArrayBuffer[]) => void;
+ onFrameRecorded?: (params: RecordFrameInfo) => void;
+ /**
+ * 监听录音开始事件
+ */
+ onStart?: () => void;
+ onError?: (error: string) => void;
+ }
+ export interface startConfig {
+ toSampleRate?: number;
+ fromSampleRate?: number;
+ frameSize?: number;
+ arrayBufferType?: 'short16' | 'float32';
+ }
+ export declare class RecorderManager {
+ config: RecorderManagerConfig;
+ audioContext: AudioContext | null;
+ audioSource: MediaStreamAudioSourceNode | undefined;
+ workletNode: AudioWorkletNode | undefined;
+ status: 'beforeInit' | 'initialized' | 'start' | 'stop';
+ audioChunks: any[];
+ constructor(config: RecorderManagerConfig);
+ setConfig: (newConfig: Partial<RecorderManagerConfig>) => void;
+ resetAudioContext: () => Promise<void>;
+ start: (config: startConfig) => Promise<void>;
+ stop: () => void;
+ getAudioUrl: () => string;
+ }
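For reference, a config literal matching this draft interface would look like the sketch below. Note that this file is labelled a draft: the shipped RecorderManager.es.js further down takes no constructor arguments, so this only illustrates the draft declaration. The paths and callbacks are placeholders, and whether RecorderManagerConfig is re-exported from the package root is not shown in this diff.

```ts
// Shape matches the RecorderManagerConfig interface declared above.
const draftConfig = {
  workletJsPath: '/static/processor.worklet.js',  // placeholder URL for the AudioWorklet module
  workerJsPath: '/static/processor.worker.js',    // placeholder URL for the fallback Web Worker
  onStart: () => console.log('recording started'),
  onFrameRecorded: (frame: unknown) => console.log('frame', frame),
  onStop: (audioBuffers: ArrayBuffer[]) => console.log('chunks', audioBuffers.length),
  onError: (error: string) => console.error(error),
};
```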
@@ -0,0 +1,42 @@
+ /**
+ * 录音管理器
+ */
+ export class RecorderManager {
+ /** @type {Array<ArrayBuffer>} */
+ audioBuffers: Array<ArrayBuffer>;
+ /** @type {(params: RecordFrameInfo) => void} */
+ onFrameRecorded: (params: RecordFrameInfo) => void;
+ onStart: () => void;
+ /** @type {(data: Array<ArrayBuffer>) => void} */
+ onStop: (data: Array<ArrayBuffer>) => void;
+ /**
+ * 开始录音
+ * @param {{
+ * sampleRate?: number,
+ * frameSize?: number,
+ * arrayBufferType?: string
+ * }} config 录音配置
+ */
+ start(config: {
+ sampleRate?: number;
+ frameSize?: number;
+ arrayBufferType?: string;
+ }): Promise<void>;
+ audioWorkletNode: AudioWorkletNode | {
+ port: Worker;
+ } | undefined;
+ /**
+ * 创建音频上下文
+ * @private
+ */
+ private createAudioContext;
+ /**
+ * 配置音频处理管线
+ * @private
+ */
+ private setupAudioProcessing;
+ mediaSource: any;
+ /** 停止录音 */
+ stop(): void;
+ close(): any;
+ }
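Putting this declaration together with the implementation that follows, a typical call sequence is: construct, attach callbacks, start with a frame/sample-rate config, then stop. A minimal sketch, assuming the class is imported from the package root as shown in index.es.js; the concrete numbers (16 kHz, 1280-sample frames) are illustrative, not values mandated by the package:

```ts
import { RecorderManager } from '@easyv/biz-components';

async function record() {
  const recorder = new RecorderManager();

  // Fired for each recorded chunk once frameSize is set (see the processor protocol below).
  recorder.onFrameRecorded = (frame) => console.log('frame', frame);

  // Fired after stop(), once the processor reports the last frame.
  recorder.onStop = (audioBuffers) => console.log('recorded chunks', audioBuffers.length);

  // Illustrative values only: 16 kHz output, 1280-sample frames, 16-bit PCM.
  await recorder.start({ sampleRate: 16000, frameSize: 1280, arrayBufferType: 'short16' });

  // ...later, e.g. when the user releases the record button:
  recorder.stop();
}
```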
@@ -0,0 +1,118 @@
+ var S = Object.defineProperty;
+ var g = (r, e, t) => e in r ? S(r, e, { enumerable: !0, configurable: !0, writable: !0, value: t }) : r[e] = t;
+ var c = (r, e, t) => g(r, typeof e != "symbol" ? e + "" : e, t);
+ import k from "../../../node_modules/.pnpm/co-web-worker@1.0.1/node_modules/co-web-worker/index.es.js";
+ import y from "./processorSource/processor.worker.es.js";
+ import R from "./processorSource/processor.worklet.es.js";
+ const { AudioWorkletNode: h } = window, u = !!h;
+ async function M() {
+ const r = navigator.mediaDevices ?? {};
+ if (typeof r.getUserMedia == "function")
+ return r.getUserMedia({ audio: !0, video: !1 });
+ if (typeof navigator.getUserMedia == "function")
+ return new Promise((e, t) => {
+ navigator.getUserMedia({ audio: !0, video: !1 }, e, t);
+ });
+ throw new Error("浏览器不支持录音!");
+ }
+ const w = (r) => {
+ const e = r === "worker" ? y : R, t = new Blob([e], { type: "application/javascript" });
+ return URL.createObjectURL(t);
+ };
+ async function A(r) {
+ return u ? (await r.audioWorklet.addModule(w("worklet")), new h(r, "processor-worklet")) : { port: new k(w("worker")) };
+ }
+ const d = () => {
+ };
+ class v {
+ constructor() {
+ /** @type {Array<ArrayBuffer>} */
+ c(this, "audioBuffers", []);
+ /** @type {(params: RecordFrameInfo) => void} */
+ c(this, "onFrameRecorded", d);
+ c(this, "onStart", d);
+ /** @type {(data: Array<ArrayBuffer>) => void} */
+ c(this, "onStop", d);
+ }
+ /**
+ * 开始录音
+ * @param {{
+ * sampleRate?: number,
+ * frameSize?: number,
+ * arrayBufferType?: string
+ * }} config 录音配置
+ */
+ async start(e) {
+ var t, s;
+ try {
+ this.audioBuffers = [];
+ const o = await M(), i = this.createAudioContext(o, e.sampleRate);
+ this.audioWorkletNode = await A(i), this.setupAudioProcessing(i, o, this.audioWorkletNode, e), (t = this.audioContext) == null || t.resume(), (s = this.onStart) == null || s.call(this);
+ } catch (o) {
+ throw console.error("Failed to start recording:", o), o;
+ }
+ }
+ /**
+ * 创建音频上下文
+ * @private
+ */
+ createAudioContext(e, t) {
+ try {
+ const s = new (window.AudioContext || window.webkitAudioContext)({
+ sampleRate: t
+ });
+ return s.createMediaStreamSource(e), s;
+ } catch {
+ const o = new (window.AudioContext || window.webkitAudioContext)();
+ return console.warn(`Using default sample rate: ${o.sampleRate}`), o.createMediaStreamSource(e), o;
+ }
+ }
+ /**
+ * 配置音频处理管线
+ * @private
+ */
+ setupAudioProcessing(e, t, s, o) {
+ const i = e.createMediaStreamSource(t);
+ if (this.mediaSource = i, s.port.postMessage({
+ type: "init",
+ data: {
+ frameSize: o.frameSize,
+ toSampleRate: o.sampleRate || e.sampleRate,
+ fromSampleRate: e.sampleRate,
+ arrayBufferType: o.arrayBufferType || "short16"
+ }
+ }), s.port.onmessage = (n) => {
+ var p, f, l, m;
+ const { data: a } = n;
+ o.frameSize && this.onFrameRecorded && this.onFrameRecorded(a), a.frameBuffer && this.audioBuffers.push(a.frameBuffer), a.isLastFrame && (u || (p = s.port) == null || p.terminate(), (f = this.mediaSource) == null || f.disconnect(), (l = this.audioContext) == null || l.close(), (m = this.onStop) == null || m.call(this, this.audioBuffers));
+ }, u)
+ i.connect(s);
+ else {
+ const n = e.createScriptProcessor(0, 1, 1);
+ n.onaudioprocess = ({ inputBuffer: a }) => {
+ s.port.postMessage({
+ type: "message",
+ data: a.getChannelData(0)
+ });
+ }, i.connect(n), n.connect(e.destination);
+ }
+ }
+ /** 停止录音 */
+ stop() {
+ var e;
+ if ((e = this.audioContext) == null || e.suspend(), !this.audioWorkletNode) {
+ console.error("Recorder is not started.");
+ return;
+ }
+ this.audioWorkletNode && this.audioWorkletNode.port.postMessage({ type: "stop" });
+ }
+ close() {
+ var e, t;
+ if (this.audioBuffers = [], ((e = this.audioContext) == null ? void 0 : e.state) !== "closed")
+ return (t = this.audioContext) == null ? void 0 : t.close();
+ }
+ }
+ export {
+ v as RecorderManager
+ };
+ //# sourceMappingURL=RecorderManager.es.js.map
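The manager above drives its audio processor purely through port.postMessage, with three outbound message types ('init', 'message', 'stop') and a single inbound result shape. Below is a TypeScript sketch of that protocol, inferred from the postMessage calls and the onmessage handler shown here; the package itself does not export these types.

```ts
// Inferred from RecorderManager.es.js above; field names match the code,
// but these type aliases are illustrative, not part of the package's public API.
type ProcessorInitMessage = {
  type: 'init';
  data: {
    frameSize?: number;                     // optional per-frame chunking
    toSampleRate: number;                   // config.sampleRate, or the AudioContext rate
    fromSampleRate: number;                 // actual AudioContext sample rate
    arrayBufferType: 'short16' | 'float32';
  };
};

type ProcessorDataMessage = {
  type: 'message';
  data: Float32Array;                       // one channel of raw samples (ScriptProcessor fallback path)
};

type ProcessorStopMessage = { type: 'stop' };

// Messages coming back from the processor, per the onmessage handler above.
type ProcessorResult = {
  frameBuffer?: ArrayBuffer | Float32Array | number[];  // ArrayBuffer when 'short16', raw samples otherwise
  isLastFrame: boolean;
};
```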
@@ -0,0 +1 @@
+ {"version":3,"file":"RecorderManager.es.js","sources":["../../../../src/utils/xunFeiVoice/RecorderManager/RecorderManager.js"],"sourcesContent":["/**\n * 录音管理器 - 用于处理音频录制及处理\n */\nimport CrossOriginWorker from 'co-web-worker';\nimport workerJsString from './processorSource/processor.worker.js?raw';\nimport workletJsString from './processorSource/processor.worklet.js?raw';\n\n//------------------------------ 音频处理核心类 ------------------------------\nconst { AudioWorkletNode } = window;\nconst isAudioWorkletSupported = !!AudioWorkletNode;\n\n/**\n * 获取用户媒体设备权限\n * @returns {Promise<MediaStream>}\n */\nasync function requestMicrophonePermission() {\n const mediaDevices = navigator.mediaDevices ?? {};\n\n if (typeof mediaDevices.getUserMedia === 'function') {\n return mediaDevices.getUserMedia({ audio: true, video: false });\n }\n\n if (typeof navigator.getUserMedia === 'function') {\n return new Promise((resolve, reject) => {\n navigator.getUserMedia({ audio: true, video: false }, resolve, reject);\n });\n }\n\n throw new Error('浏览器不支持录音!');\n}\n\n/** @type {(type: 'worker' | 'worklet') => string} */\nconst generateWorkletUrl = (type) => {\n const jsStr = type === 'worker' ? workerJsString : workletJsString;\n const blob = new Blob([jsStr], { type: 'application/javascript' });\n const scriptUrl = URL.createObjectURL(blob);\n return scriptUrl;\n};\n\n/**\n * 初始化音频处理器\n * @param {AudioContext} audioContext\n * @returns {Promise<AudioWorkletNode|{port: Worker}>}\n */\nasync function initializeAudioProcessor(audioContext) {\n if (isAudioWorkletSupported) {\n await audioContext.audioWorklet.addModule(generateWorkletUrl('worklet'));\n return new AudioWorkletNode(audioContext, 'processor-worklet');\n }\n\n const worker = new CrossOriginWorker(generateWorkletUrl('worker'));\n return { port: worker };\n}\n\nconst NOOP = () => {};\n/**\n * 录音管理器\n */\n//------------------------------ 录音管理器类 ------------------------------\nexport class RecorderManager {\n /** @type {Array<ArrayBuffer>} */\n audioBuffers = [];\n /** @type {(params: RecordFrameInfo) => void} */\n onFrameRecorded = NOOP;\n onStart = NOOP;\n /** @type {(data: Array<ArrayBuffer>) => void} */\n onStop = NOOP;\n\n constructor() {}\n\n /**\n * 开始录音\n * @param {{\n * sampleRate?: number,\n * frameSize?: number,\n * arrayBufferType?: string\n * }} config 录音配置\n */\n async start(config) {\n try {\n this.audioBuffers = [];\n\n // 获取音频流并创建处理节点\n const mediaStream = await requestMicrophonePermission();\n const audioContext = this.createAudioContext(mediaStream, config.sampleRate);\n this.audioWorkletNode = await initializeAudioProcessor(audioContext);\n\n // 初始化音频处理\n this.setupAudioProcessing(audioContext, mediaStream, this.audioWorkletNode, config);\n /** 恢复之前暂停播放的音频。 */\n this.audioContext?.resume();\n\n this.onStart?.();\n } catch (error) {\n console.error('Failed to start recording:', error);\n throw error;\n }\n }\n\n /**\n * 创建音频上下文\n * @private\n */\n createAudioContext(mediaStream, targetSampleRate) {\n try {\n const context = new (window.AudioContext || window.webkitAudioContext)({\n sampleRate: targetSampleRate,\n });\n context.createMediaStreamSource(mediaStream);\n return context;\n } catch (error) {\n // 回退处理:当目标采样率不支持时使用默认采样率\n const fallbackContext = new (window.AudioContext || window.webkitAudioContext)();\n console.warn(`Using default sample rate: ${fallbackContext.sampleRate}`);\n fallbackContext.createMediaStreamSource(mediaStream);\n return fallbackContext;\n }\n }\n\n /**\n * 配置音频处理管线\n * @private\n */\n 
setupAudioProcessing(audioContext, mediaStream, audioWorkletNode, config) {\n // 创建媒体流源节点\n const mediaSource = audioContext.createMediaStreamSource(mediaStream);\n this.mediaSource = mediaSource;\n\n // 配置处理器通信\n audioWorkletNode.port.postMessage({\n type: 'init',\n data: {\n frameSize: config.frameSize,\n toSampleRate: config.sampleRate || audioContext.sampleRate,\n fromSampleRate: audioContext.sampleRate,\n arrayBufferType: config.arrayBufferType || 'short16',\n },\n });\n\n // 处理音频数据回调\n audioWorkletNode.port.onmessage = (e) => {\n const { data } = e;\n if (config.frameSize && this.onFrameRecorded) {\n this.onFrameRecorded(data);\n }\n\n if (data.frameBuffer) {\n this.audioBuffers.push(data.frameBuffer);\n }\n\n if (data.isLastFrame) {\n if (!isAudioWorkletSupported) {\n audioWorkletNode.port?.terminate();\n }\n this.mediaSource?.disconnect();\n this.audioContext?.close();\n this.onStop?.(this.audioBuffers);\n }\n };\n\n // 连接音频节点\n if (isAudioWorkletSupported) {\n mediaSource.connect(audioWorkletNode);\n } else {\n const scriptProcessor = audioContext.createScriptProcessor(0, 1, 1);\n scriptProcessor.onaudioprocess = ({ inputBuffer }) => {\n audioWorkletNode.port.postMessage({\n type: 'message',\n data: inputBuffer.getChannelData(0),\n });\n };\n mediaSource.connect(scriptProcessor);\n scriptProcessor.connect(audioContext.destination);\n }\n }\n\n /** 停止录音 */\n stop() {\n /** 暂停音频上下文对象中的进度,并暂时剥离进程对音频设备硬件的访问权限,减少 CPU 和电池的使用 */\n this.audioContext?.suspend();\n if (!this.audioWorkletNode) {\n console.error('Recorder is not started.');\n return;\n }\n if (this.audioWorkletNode) {\n this.audioWorkletNode.port.postMessage({ type: 'stop' });\n }\n }\n\n close() {\n this.audioBuffers = [];\n if (this.audioContext?.state !== 'closed') {\n return this.audioContext?.close();\n }\n 
}\n}\n"],"names":["AudioWorkletNode","isAudioWorkletSupported","requestMicrophonePermission","mediaDevices","resolve","reject","generateWorkletUrl","type","jsStr","workerJsString","workletJsString","blob","initializeAudioProcessor","audioContext","CrossOriginWorker","NOOP","RecorderManager","__publicField","config","mediaStream","_a","_b","error","targetSampleRate","context","fallbackContext","audioWorkletNode","mediaSource","e","data","_c","_d","scriptProcessor","inputBuffer"],"mappings":";;;;;;AAQA,MAAM,EAAE,kBAAAA,EAAkB,IAAG,QACvBC,IAA0B,CAAC,CAACD;AAMlC,eAAeE,IAA8B;AAC3C,QAAMC,IAAe,UAAU,gBAAgB,CAAE;AAEjD,MAAI,OAAOA,EAAa,gBAAiB;AACvC,WAAOA,EAAa,aAAa,EAAE,OAAO,IAAM,OAAO,IAAO;AAGhE,MAAI,OAAO,UAAU,gBAAiB;AACpC,WAAO,IAAI,QAAQ,CAACC,GAASC,MAAW;AACtC,gBAAU,aAAa,EAAE,OAAO,IAAM,OAAO,GAAK,GAAID,GAASC,CAAM;AAAA,IAC3E,CAAK;AAGH,QAAM,IAAI,MAAM,WAAW;AAC7B;AAGA,MAAMC,IAAqB,CAACC,MAAS;AACnC,QAAMC,IAAQD,MAAS,WAAWE,IAAiBC,GAC7CC,IAAO,IAAI,KAAK,CAACH,CAAK,GAAG,EAAE,MAAM,0BAA0B;AAEjE,SADkB,IAAI,gBAAgBG,CAAI;AAE5C;AAOA,eAAeC,EAAyBC,GAAc;AACpD,SAAIZ,KACF,MAAMY,EAAa,aAAa,UAAUP,EAAmB,SAAS,CAAC,GAChE,IAAIN,EAAiBa,GAAc,mBAAmB,KAIxD,EAAE,MADM,IAAIC,EAAkBR,EAAmB,QAAQ,CAAC,EAC1C;AACzB;AAEA,MAAMS,IAAO,MAAM;AAAE;AAKd,MAAMC,EAAgB;AAAA,EAS3B,cAAc;AAPd;AAAA,IAAAC,EAAA,sBAAe,CAAE;AAEjB;AAAA,IAAAA,EAAA,yBAAkBF;AAClB,IAAAE,EAAA,iBAAUF;AAEV;AAAA,IAAAE,EAAA,gBAASF;AAAA,EAEK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUd,MAAM,MAAMG,GAAQ;;AAClB,QAAI;AACF,WAAK,eAAe,CAAE;AAGtB,YAAMC,IAAc,MAAMjB,EAA6B,GACjDW,IAAe,KAAK,mBAAmBM,GAAaD,EAAO,UAAU;AAC3E,WAAK,mBAAmB,MAAMN,EAAyBC,CAAY,GAGnE,KAAK,qBAAqBA,GAAcM,GAAa,KAAK,kBAAkBD,CAAM,IAElFE,IAAA,KAAK,iBAAL,QAAAA,EAAmB,WAEnBC,IAAA,KAAK,YAAL,QAAAA,EAAA;AAAA,IACD,SAAQC,GAAO;AACd,oBAAQ,MAAM,8BAA8BA,CAAK,GAC3CA;AAAA,IACZ;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAME,mBAAmBH,GAAaI,GAAkB;AAChD,QAAI;AACF,YAAMC,IAAU,KAAK,OAAO,gBAAgB,OAAO,oBAAoB;AAAA,QACrE,YAAYD;AAAA,MACpB,CAAO;AACD,aAAAC,EAAQ,wBAAwBL,CAAW,GACpCK;AAAA,IACR,QAAe;AAEd,YAAMC,IAAkB,KAAK,OAAO,gBAAgB,OAAO,oBAAqB;AAChF,qBAAQ,KAAK,8BAA8BA,EAAgB,UAAU,EAAE,GACvEA,EAAgB,wBAAwBN,CAAW,GAC5CM;AAAA,IACb;AAAA,EACA;AAAA;AAAA;AAAA;AAAA;AAAA,EAME,qBAAqBZ,GAAcM,GAAaO,GAAkBR,GAAQ;AAExE,UAAMS,IAAcd,EAAa,wBAAwBM,CAAW;AAoCpE,QAnCA,KAAK,cAAcQ,GAGnBD,EAAiB,KAAK,YAAY;AAAA,MAChC,MAAM;AAAA,MACN,MAAM;AAAA,QACJ,WAAWR,EAAO;AAAA,QAClB,cAAcA,EAAO,cAAcL,EAAa;AAAA,QAChD,gBAAgBA,EAAa;AAAA,QAC7B,iBAAiBK,EAAO,mBAAmB;AAAA,MAC5C;AAAA,IACP,CAAK,GAGDQ,EAAiB,KAAK,YAAY,CAACE,MAAM;;AACvC,YAAM,EAAE,MAAAC,EAAI,IAAKD;AACjB,MAAIV,EAAO,aAAa,KAAK,mBAC3B,KAAK,gBAAgBW,CAAI,GAGvBA,EAAK,eACP,KAAK,aAAa,KAAKA,EAAK,WAAW,GAGrCA,EAAK,gBACF5B,MACHmB,IAAAM,EAAiB,SAAjB,QAAAN,EAAuB,cAEzBC,IAAA,KAAK,gBAAL,QAAAA,EAAkB,eAClBS,IAAA,KAAK,iBAAL,QAAAA,EAAmB,UACnBC,IAAA,KAAK,WAAL,QAAAA,EAAA,WAAc,KAAK;AAAA,IAEtB,GAGG9B;AACF,MAAA0B,EAAY,QAAQD,CAAgB;AAAA,SAC/B;AACL,YAAMM,IAAkBnB,EAAa,sBAAsB,GAAG,GAAG,CAAC;AAClE,MAAAmB,EAAgB,iBAAiB,CAAC,EAAE,aAAAC,QAAkB;AACpD,QAAAP,EAAiB,KAAK,YAAY;AAAA,UAChC,MAAM;AAAA,UACN,MAAMO,EAAY,eAAe,CAAC;AAAA,QAC5C,CAAS;AAAA,MACF,GACDN,EAAY,QAAQK,CAAe,GACnCA,EAAgB,QAAQnB,EAAa,WAAW;AAAA,IACtD;AAAA,EACA;AAAA;AAAA,EAGE,OAAO;;AAGL,SADAO,IAAA,KAAK,iBAAL,QAAAA,EAAmB,WACf,CAAC,KAAK,kBAAkB;AAC1B,cAAQ,MAAM,0BAA0B;AACxC;AAAA,IACN;AACI,IAAI,KAAK,oBACP,KAAK,iBAAiB,KAAK,YAAY,EAAE,MAAM,QAAQ;AAAA,EAE7D;AAAA,EAEE,QAAQ;;AAEN,QADA,KAAK,eAAe,CAAE,KAClBA,IAAA,KAAK,iBAAL,gBAAAA,EAAmB,WAAU;AAC/B,cAAOC,IAAA,KAAK,iBAAL,gBAAAA,EAAmB;AAAA,EAEhC;AACA;"}
@@ -0,0 +1 @@
+ export * from './RecorderManager';
@@ -0,0 +1,233 @@
+ const n = `!(function () {
+ "use strict";
+ function t(t) {
+ return (
+ (function (t) {
+ if (Array.isArray(t)) return e(t);
+ })(t) ||
+ (function (t) {
+ if (
+ ("undefined" != typeof Symbol && null != t[Symbol.iterator]) ||
+ null != t["@@iterator"]
+ )
+ return Array.from(t);
+ })(t) ||
+ (function (t, r) {
+ if (!t) return;
+ if ("string" == typeof t) return e(t, r);
+ var i = Object.prototype.toString.call(t).slice(8, -1);
+ "Object" === i && t.constructor && (i = t.constructor.name);
+ if ("Map" === i || "Set" === i) return Array.from(t);
+ if (
+ "Arguments" === i ||
+ /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(i)
+ )
+ return e(t, r);
+ })(t) ||
+ (function () {
+ throw new TypeError(
+ "Invalid attempt to spread non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."
+ );
+ })()
+ );
+ }
+ function e(t, e) {
+ (null == e || e > t.length) && (e = t.length);
+ for (var r = 0, i = new Array(e); r < e; r++) i[r] = t[r];
+ return i;
+ }
+ function r(t, e, r, i) {
+ (this.fromSampleRate = t),
+ (this.toSampleRate = e),
+ (this.channels = 0 | r),
+ (this.noReturn = !!i),
+ this.initialize();
+ }
+ (r.prototype.initialize = function () {
+ if (
+ !(this.fromSampleRate > 0 && this.toSampleRate > 0 && this.channels > 0)
+ )
+ throw new Error("Invalid settings specified for the resampler.");
+ this.fromSampleRate == this.toSampleRate
+ ? ((this.resampler = this.bypassResampler), (this.ratioWeight = 1))
+ : (this.fromSampleRate < this.toSampleRate
+ ? ((this.lastWeight = 1),
+ (this.resampler = this.compileLinearInterpolation))
+ : ((this.tailExists = !1),
+ (this.lastWeight = 0),
+ (this.resampler = this.compileMultiTap)),
+ (this.ratioWeight = this.fromSampleRate / this.toSampleRate));
+ }),
+ (r.prototype.compileLinearInterpolation = function (t) {
+ var e = t.length;
+ this.initializeBuffers(e);
+ var r,
+ i,
+ s = this.outputBufferSize,
+ a = this.ratioWeight,
+ f = this.lastWeight,
+ n = 0,
+ o = 0,
+ h = 0,
+ l = this.outputBuffer;
+ if (e % this.channels == 0) {
+ if (e > 0) {
+ for (; f < 1; f += a)
+ for (n = 1 - (o = f % 1), r = 0; r < this.channels; ++r)
+ l[h++] = this.lastOutput[r] * n + t[r] * o;
+ for (
+ f--, e -= this.channels, i = Math.floor(f) * this.channels;
+ h < s && i < e;
+
+ ) {
+ for (n = 1 - (o = f % 1), r = 0; r < this.channels; ++r)
+ l[h++] = t[i + r] * n + t[i + this.channels + r] * o;
+ (f += a), (i = Math.floor(f) * this.channels);
+ }
+ for (r = 0; r < this.channels; ++r) this.lastOutput[r] = t[i++];
+ return (this.lastWeight = f % 1), this.bufferSlice(h);
+ }
+ return this.noReturn ? 0 : [];
+ }
+ throw new Error("Buffer was of incorrect sample length.");
+ }),
+ (r.prototype.compileMultiTap = function (t) {
+ var e = [],
+ r = t.length;
+ this.initializeBuffers(r);
+ var i = this.outputBufferSize;
+ if (r % this.channels == 0) {
+ if (r > 0) {
+ for (var s = this.ratioWeight, a = 0, f = 0; f < this.channels; ++f)
+ e[f] = 0;
+ var n = 0,
+ o = 0,
+ h = !this.tailExists;
+ this.tailExists = !1;
+ var l = this.outputBuffer,
+ u = 0,
+ p = 0;
+ do {
+ if (h) for (a = s, f = 0; f < this.channels; ++f) e[f] = 0;
+ else {
+ for (a = this.lastWeight, f = 0; f < this.channels; ++f)
+ e[f] += this.lastOutput[f];
+ h = !0;
+ }
+ for (; a > 0 && n < r; ) {
+ if (!(a >= (o = 1 + n - p))) {
+ for (f = 0; f < this.channels; ++f) e[f] += t[n + f] * a;
+ (p += a), (a = 0);
+ break;
+ }
+ for (f = 0; f < this.channels; ++f) e[f] += t[n++] * o;
+ (p = n), (a -= o);
+ }
+ if (0 != a) {
+ for (this.lastWeight = a, f = 0; f < this.channels; ++f)
+ this.lastOutput[f] = e[f];
+ this.tailExists = !0;
+ break;
+ }
+ for (f = 0; f < this.channels; ++f) l[u++] = e[f] / s;
+ } while (n < r && u < i);
+ return this.bufferSlice(u);
+ }
+ return this.noReturn ? 0 : [];
+ }
+ throw new Error("Buffer was of incorrect sample length.");
+ }),
+ (r.prototype.bypassResampler = function (t) {
+ return this.noReturn ? ((this.outputBuffer = t), t.length) : t;
+ }),
+ (r.prototype.bufferSlice = function (t) {
+ if (this.noReturn) return t;
+ try {
+ return this.outputBuffer.subarray(0, t);
+ } catch (e) {
+ try {
+ return (this.outputBuffer.length = t), this.outputBuffer;
+ } catch (e) {
+ return this.outputBuffer.slice(0, t);
+ }
+ }
+ }),
+ (r.prototype.initializeBuffers = function (t) {
+ this.outputBufferSize = Math.ceil(
+ (t * this.toSampleRate) / this.fromSampleRate
+ );
+ try {
+ (this.outputBuffer = new Float32Array(this.outputBufferSize)),
+ (this.lastOutput = new Float32Array(this.channels));
+ } catch (t) {
+ (this.outputBuffer = []), (this.lastOutput = []);
+ }
+ }),
+ (self.transData = function (t) {
+ return (
+ "short16" === self.arrayBufferType &&
+ (t = (function (t) {
+ for (
+ var e = new ArrayBuffer(2 * t.length),
+ r = new DataView(e),
+ i = 0,
+ s = 0;
+ s < t.length;
+ s += 1, i += 2
+ ) {
+ var a = Math.max(-1, Math.min(1, t[s]));
+ r.setInt16(i, a < 0 ? 32768 * a : 32767 * a, !0);
+ }
+ return r.buffer;
+ })((t = self.resampler.resampler(t)))),
+ t
+ );
+ }),
+ (self.onmessage = function (e) {
+ var i = e.data,
+ s = i.type,
+ a = i.data;
+ if ("init" === s) {
+ var f = a.frameSize,
+ n = a.toSampleRate,
+ o = a.fromSampleRate,
+ h = a.arrayBufferType;
+ return (
+ (self.frameSize = f * Math.floor(o / n)),
+ (self.resampler = new r(o, n, 1)),
+ (self.frameBuffer = []),
+ void (self.arrayBufferType = h)
+ );
+ }
+ if (
+ ("stop" === s &&
+ (self.postMessage({
+ frameBuffer: self.transData(self.frameBuffer),
+ isLastFrame: !0,
+ }),
+ (self.frameBuffer = [])),
+ "message" === s)
+ ) {
+ var l,
+ u = a;
+ if (self.frameSize)
+ return (
+ (l = self.frameBuffer).push.apply(l, t(u)),
+ self.frameBuffer.length >= self.frameSize &&
+ (self.postMessage({
+ frameBuffer: self.transData(this.frameBuffer),
+ isLastFrame: !1,
+ }),
+ (self.frameBuffer = [])),
+ !0
+ );
+ u &&
+ self.postMessage({ frameBuffer: self.transData(u), isLastFrame: !1 });
+ }
+ });
+ })();
+ `;
+ export {
+ n as default
+ };
+ //# sourceMappingURL=processor.worker.es.js.map
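The worker's transData path resamples and then converts Float32 samples to 16-bit PCM when arrayBufferType is 'short16'. The same conversion, extracted into a standalone TypeScript helper for clarity (an illustration of the logic above, not code shipped by the package):

```ts
// Float32 samples in [-1, 1] -> little-endian 16-bit PCM, mirroring the
// setInt16 loop inside processor.worker.es.js above.
function floatTo16BitPCM(samples: Float32Array): ArrayBuffer {
  const buffer = new ArrayBuffer(samples.length * 2);
  const view = new DataView(buffer);
  for (let i = 0; i < samples.length; i++) {
    const s = Math.max(-1, Math.min(1, samples[i]));
    // Negative values scale by 32768, positive by 32767, exactly as in the worker.
    view.setInt16(i * 2, s < 0 ? s * 32768 : s * 32767, true);
  }
  return buffer;
}
```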
@@ -0,0 +1 @@
+ {"version":3,"file":"processor.worker.es.js","sources":["../../../../../src/utils/xunFeiVoice/RecorderManager/processorSource/processor.worker.js?raw"],"sourcesContent":["export default \"!(function () {\\n \\\"use strict\\\";\\n function t(t) {\\n return (\\n (function (t) {\\n if (Array.isArray(t)) return e(t);\\n })(t) ||\\n (function (t) {\\n if (\\n (\\\"undefined\\\" != typeof Symbol && null != t[Symbol.iterator]) ||\\n null != t[\\\"@@iterator\\\"]\\n )\\n return Array.from(t);\\n })(t) ||\\n (function (t, r) {\\n if (!t) return;\\n if (\\\"string\\\" == typeof t) return e(t, r);\\n var i = Object.prototype.toString.call(t).slice(8, -1);\\n \\\"Object\\\" === i && t.constructor && (i = t.constructor.name);\\n if (\\\"Map\\\" === i || \\\"Set\\\" === i) return Array.from(t);\\n if (\\n \\\"Arguments\\\" === i ||\\n /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(i)\\n )\\n return e(t, r);\\n })(t) ||\\n (function () {\\n throw new TypeError(\\n \\\"Invalid attempt to spread non-iterable instance.\\\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\\\"\\n );\\n })()\\n );\\n }\\n function e(t, e) {\\n (null == e || e > t.length) && (e = t.length);\\n for (var r = 0, i = new Array(e); r < e; r++) i[r] = t[r];\\n return i;\\n }\\n function r(t, e, r, i) {\\n (this.fromSampleRate = t),\\n (this.toSampleRate = e),\\n (this.channels = 0 | r),\\n (this.noReturn = !!i),\\n this.initialize();\\n }\\n (r.prototype.initialize = function () {\\n if (\\n !(this.fromSampleRate > 0 && this.toSampleRate > 0 && this.channels > 0)\\n )\\n throw new Error(\\\"Invalid settings specified for the resampler.\\\");\\n this.fromSampleRate == this.toSampleRate\\n ? ((this.resampler = this.bypassResampler), (this.ratioWeight = 1))\\n : (this.fromSampleRate < this.toSampleRate\\n ? ((this.lastWeight = 1),\\n (this.resampler = this.compileLinearInterpolation))\\n : ((this.tailExists = !1),\\n (this.lastWeight = 0),\\n (this.resampler = this.compileMultiTap)),\\n (this.ratioWeight = this.fromSampleRate / this.toSampleRate));\\n }),\\n (r.prototype.compileLinearInterpolation = function (t) {\\n var e = t.length;\\n this.initializeBuffers(e);\\n var r,\\n i,\\n s = this.outputBufferSize,\\n a = this.ratioWeight,\\n f = this.lastWeight,\\n n = 0,\\n o = 0,\\n h = 0,\\n l = this.outputBuffer;\\n if (e % this.channels == 0) {\\n if (e > 0) {\\n for (; f < 1; f += a)\\n for (n = 1 - (o = f % 1), r = 0; r < this.channels; ++r)\\n l[h++] = this.lastOutput[r] * n + t[r] * o;\\n for (\\n f--, e -= this.channels, i = Math.floor(f) * this.channels;\\n h < s && i < e;\\n\\n ) {\\n for (n = 1 - (o = f % 1), r = 0; r < this.channels; ++r)\\n l[h++] = t[i + r] * n + t[i + this.channels + r] * o;\\n (f += a), (i = Math.floor(f) * this.channels);\\n }\\n for (r = 0; r < this.channels; ++r) this.lastOutput[r] = t[i++];\\n return (this.lastWeight = f % 1), this.bufferSlice(h);\\n }\\n return this.noReturn ? 
0 : [];\\n }\\n throw new Error(\\\"Buffer was of incorrect sample length.\\\");\\n }),\\n (r.prototype.compileMultiTap = function (t) {\\n var e = [],\\n r = t.length;\\n this.initializeBuffers(r);\\n var i = this.outputBufferSize;\\n if (r % this.channels == 0) {\\n if (r > 0) {\\n for (var s = this.ratioWeight, a = 0, f = 0; f < this.channels; ++f)\\n e[f] = 0;\\n var n = 0,\\n o = 0,\\n h = !this.tailExists;\\n this.tailExists = !1;\\n var l = this.outputBuffer,\\n u = 0,\\n p = 0;\\n do {\\n if (h) for (a = s, f = 0; f < this.channels; ++f) e[f] = 0;\\n else {\\n for (a = this.lastWeight, f = 0; f < this.channels; ++f)\\n e[f] += this.lastOutput[f];\\n h = !0;\\n }\\n for (; a > 0 && n < r; ) {\\n if (!(a >= (o = 1 + n - p))) {\\n for (f = 0; f < this.channels; ++f) e[f] += t[n + f] * a;\\n (p += a), (a = 0);\\n break;\\n }\\n for (f = 0; f < this.channels; ++f) e[f] += t[n++] * o;\\n (p = n), (a -= o);\\n }\\n if (0 != a) {\\n for (this.lastWeight = a, f = 0; f < this.channels; ++f)\\n this.lastOutput[f] = e[f];\\n this.tailExists = !0;\\n break;\\n }\\n for (f = 0; f < this.channels; ++f) l[u++] = e[f] / s;\\n } while (n < r && u < i);\\n return this.bufferSlice(u);\\n }\\n return this.noReturn ? 0 : [];\\n }\\n throw new Error(\\\"Buffer was of incorrect sample length.\\\");\\n }),\\n (r.prototype.bypassResampler = function (t) {\\n return this.noReturn ? ((this.outputBuffer = t), t.length) : t;\\n }),\\n (r.prototype.bufferSlice = function (t) {\\n if (this.noReturn) return t;\\n try {\\n return this.outputBuffer.subarray(0, t);\\n } catch (e) {\\n try {\\n return (this.outputBuffer.length = t), this.outputBuffer;\\n } catch (e) {\\n return this.outputBuffer.slice(0, t);\\n }\\n }\\n }),\\n (r.prototype.initializeBuffers = function (t) {\\n this.outputBufferSize = Math.ceil(\\n (t * this.toSampleRate) / this.fromSampleRate\\n );\\n try {\\n (this.outputBuffer = new Float32Array(this.outputBufferSize)),\\n (this.lastOutput = new Float32Array(this.channels));\\n } catch (t) {\\n (this.outputBuffer = []), (this.lastOutput = []);\\n }\\n }),\\n (self.transData = function (t) {\\n return (\\n \\\"short16\\\" === self.arrayBufferType &&\\n (t = (function (t) {\\n for (\\n var e = new ArrayBuffer(2 * t.length),\\n r = new DataView(e),\\n i = 0,\\n s = 0;\\n s < t.length;\\n s += 1, i += 2\\n ) {\\n var a = Math.max(-1, Math.min(1, t[s]));\\n r.setInt16(i, a < 0 ? 
32768 * a : 32767 * a, !0);\\n }\\n return r.buffer;\\n })((t = self.resampler.resampler(t)))),\\n t\\n );\\n }),\\n (self.onmessage = function (e) {\\n var i = e.data,\\n s = i.type,\\n a = i.data;\\n if (\\\"init\\\" === s) {\\n var f = a.frameSize,\\n n = a.toSampleRate,\\n o = a.fromSampleRate,\\n h = a.arrayBufferType;\\n return (\\n (self.frameSize = f * Math.floor(o / n)),\\n (self.resampler = new r(o, n, 1)),\\n (self.frameBuffer = []),\\n void (self.arrayBufferType = h)\\n );\\n }\\n if (\\n (\\\"stop\\\" === s &&\\n (self.postMessage({\\n frameBuffer: self.transData(self.frameBuffer),\\n isLastFrame: !0,\\n }),\\n (self.frameBuffer = [])),\\n \\\"message\\\" === s)\\n ) {\\n var l,\\n u = a;\\n if (self.frameSize)\\n return (\\n (l = self.frameBuffer).push.apply(l, t(u)),\\n self.frameBuffer.length >= self.frameSize &&\\n (self.postMessage({\\n frameBuffer: self.transData(this.frameBuffer),\\n isLastFrame: !1,\\n }),\\n (self.frameBuffer = [])),\\n !0\\n );\\n u &&\\n self.postMessage({ frameBuffer: self.transData(u), isLastFrame: !1 });\\n }\\n });\\n})();\\n\""],"names":["workerJsString"],"mappings":"AAAA,MAAeA,IAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;"}