brilliantsole 0.0.27 → 0.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/3d/anchor.glb +0 -0
- package/assets/3d/coin.glb +0 -0
- package/assets/3d/glasses.glb +0 -0
- package/assets/audio/bounceMedium.wav +0 -0
- package/assets/audio/bounceStrong.wav +0 -0
- package/assets/audio/bounceWeak.wav +0 -0
- package/assets/audio/coin.wav +0 -0
- package/assets/audio/getUp.wav +0 -0
- package/assets/audio/grab.wav +0 -0
- package/assets/audio/kick.wav +0 -0
- package/assets/audio/platterFadeIn old.wav +0 -0
- package/assets/audio/platterFadeIn.wav +0 -0
- package/assets/audio/platterFadeOut.wav +0 -0
- package/assets/audio/punch.wav +0 -0
- package/assets/audio/punchSqueak.wav +0 -0
- package/assets/audio/purr.wav +0 -0
- package/assets/audio/purrFadeOut.wav +0 -0
- package/assets/audio/release.wav +0 -0
- package/assets/audio/splat.wav +0 -0
- package/assets/audio/stomp.wav +0 -0
- package/build/brilliantsole.cjs +3091 -741
- package/build/brilliantsole.cjs.map +1 -1
- package/build/brilliantsole.js +2759 -709
- package/build/brilliantsole.js.map +1 -1
- package/build/brilliantsole.ls.js +2602 -543
- package/build/brilliantsole.ls.js.map +1 -1
- package/build/brilliantsole.min.js +1 -1
- package/build/brilliantsole.min.js.map +1 -1
- package/build/brilliantsole.module.d.ts +295 -65
- package/build/brilliantsole.module.js +2749 -710
- package/build/brilliantsole.module.js.map +1 -1
- package/build/brilliantsole.module.min.d.ts +295 -65
- package/build/brilliantsole.module.min.js +1 -1
- package/build/brilliantsole.module.min.js.map +1 -1
- package/build/brilliantsole.node.module.d.ts +289 -62
- package/build/brilliantsole.node.module.js +3080 -742
- package/build/brilliantsole.node.module.js.map +1 -1
- package/build/dts/BS-output.d.ts +10 -0
- package/build/dts/BS.d.ts +21 -8
- package/build/dts/CameraManager.d.ts +72 -0
- package/build/dts/Device.d.ts +64 -13
- package/build/dts/DeviceInformationManager.d.ts +4 -4
- package/build/dts/DeviceManager.d.ts +2 -0
- package/build/dts/FileTransferManager.d.ts +18 -8
- package/build/dts/InformationManager.d.ts +2 -0
- package/build/dts/MicrophoneManager.d.ts +88 -0
- package/build/dts/TfliteManager.d.ts +22 -2
- package/build/dts/WifiManager.d.ts +61 -0
- package/build/dts/connection/BaseConnectionManager.d.ts +35 -3
- package/build/dts/connection/ClientConnectionManager.d.ts +7 -2
- package/build/dts/connection/bluetooth/NobleConnectionManager.d.ts +2 -1
- package/build/dts/connection/bluetooth/WebBluetoothConnectionManager.d.ts +1 -0
- package/build/dts/connection/bluetooth/bluetoothUUIDs.d.ts +2 -2
- package/build/dts/connection/udp/UDPConnectionManager.d.ts +28 -0
- package/build/dts/connection/webSocket/WebSocketConnectionManager.d.ts +25 -0
- package/build/dts/devicePair/DevicePair.d.ts +5 -5
- package/build/dts/scanner/BaseScanner.d.ts +4 -1
- package/build/dts/scanner/NobleScanner.d.ts +2 -1
- package/build/dts/sensor/MotionSensorDataManager.d.ts +5 -2
- package/build/dts/sensor/SensorDataManager.d.ts +5 -4
- package/build/dts/server/BaseClient.d.ts +5 -3
- package/build/dts/server/ServerUtils.d.ts +1 -1
- package/build/dts/server/websocket/WebSocketUtils.d.ts +1 -1
- package/build/dts/utils/AudioUtils.d.ts +2 -0
- package/build/dts/utils/Console.d.ts +2 -0
- package/build/dts/utils/ThrottleUtils.d.ts +2 -0
- package/build/dts/vibration/VibrationManager.d.ts +19 -2
- package/build/index.d.ts +292 -62
- package/build/index.node.d.ts +286 -59
- package/examples/3d/scene.html +19 -5
- package/examples/3d-generic/index.html +144 -0
- package/examples/3d-generic/script.js +266 -0
- package/examples/basic/index.html +267 -17
- package/examples/basic/script.js +958 -105
- package/examples/camera/barcode-detector.js +109 -0
- package/examples/camera/depth-estimation.js +71 -0
- package/examples/camera/face-detector.js +119 -0
- package/examples/camera/face-landmark.js +111 -0
- package/examples/camera/gesture-recognition.js +97 -0
- package/examples/camera/hand-landmark.js +74 -0
- package/examples/camera/image-segmentation.js +98 -0
- package/examples/camera/image-to-text.js +43 -0
- package/examples/camera/image-upscale.js +75 -0
- package/examples/camera/index.html +129 -0
- package/examples/camera/object-detection.js +98 -0
- package/examples/camera/pose-landmark.js +60 -0
- package/examples/camera/script.js +316 -0
- package/examples/camera/utils.js +165 -0
- package/examples/camera/yolo-tiny.js +54 -0
- package/examples/camera/yolo.js +119 -0
- package/examples/edge-impulse/script.js +157 -48
- package/examples/edge-impulse-test/README.md +11 -0
- package/examples/edge-impulse-test/edge-impulse-standalone.js +7228 -0
- package/examples/edge-impulse-test/edge-impulse-standalone.wasm +0 -0
- package/examples/edge-impulse-test/index.html +75 -0
- package/examples/edge-impulse-test/run-impulse.js +135 -0
- package/examples/edge-impulse-test/script.js +200 -0
- package/examples/glasses-gestures/README.md +11 -0
- package/examples/glasses-gestures/edge-impulse-standalone.js +7228 -0
- package/examples/glasses-gestures/edge-impulse-standalone.wasm +0 -0
- package/examples/glasses-gestures/index.html +69 -0
- package/examples/glasses-gestures/run-impulse.js +135 -0
- package/examples/glasses-gestures/script.js +226 -0
- package/examples/gloves/edge-impulse-standalone.js +7228 -0
- package/examples/gloves/edge-impulse-standalone.wasm +0 -0
- package/examples/gloves/index.html +4 -1
- package/examples/gloves/run-impulse.js +135 -0
- package/examples/gloves/script.js +367 -51
- package/examples/graph/script.js +94 -37
- package/examples/microphone/gender.js +54 -0
- package/examples/microphone/index.html +102 -0
- package/examples/microphone/script.js +394 -0
- package/examples/microphone/utils.js +45 -0
- package/examples/microphone/whisper-realtime.js +166 -0
- package/examples/microphone/whisper.js +132 -0
- package/examples/punch/index.html +135 -0
- package/examples/punch/punch.tflite +0 -0
- package/examples/punch/script.js +169 -0
- package/examples/server/index.html +98 -22
- package/examples/server/script.js +317 -109
- package/examples/ukaton-firmware-update/merged-firmware.bin +0 -0
- package/examples/utils/aframe/aframe-master.min.js +2 -0
- package/examples/utils/aframe/bs-vibration.js +150 -0
- package/examples/utils/aframe/force-pushable.js +80 -0
- package/examples/utils/aframe/grabbable-anchor.js +46 -0
- package/examples/utils/aframe/grabbable-listener.js +31 -0
- package/examples/utils/aframe/grabbable-physics-body.js +190 -0
- package/examples/utils/aframe/grow-shrink.js +25 -0
- package/examples/utils/aframe/hand-punch.js +119 -0
- package/examples/utils/aframe/my-obb-collider.js +293 -0
- package/examples/utils/aframe/occlude-hand-tracking-controls.js +47 -0
- package/examples/utils/aframe/occlude-mesh.js +42 -0
- package/examples/utils/aframe/palm-up-detector.js +47 -0
- package/examples/utils/aframe/shadow-material.js +20 -0
- package/examples/utils/aframe/soft-shadow-light.js +9 -0
- package/examples/webxr-2/assets/3d/soccerBall.glb +0 -0
- package/examples/webxr-2/assets/audio/shellBounce.wav +0 -0
- package/examples/webxr-2/assets/audio/shellHit.wav +0 -0
- package/examples/webxr-2/assets/audio/shellKick.wav +0 -0
- package/examples/webxr-2/assets/audio/soccerBounce.wav +0 -0
- package/examples/webxr-2/assets/audio/soccerKick.mp3 +0 -0
- package/examples/webxr-2/assets/images/shellTexture.png +0 -0
- package/examples/webxr-2/components/bs-ankle.js +337 -0
- package/examples/webxr-2/components/coin.js +84 -0
- package/examples/webxr-2/components/custom-wrap.js +17 -0
- package/examples/webxr-2/components/goomba.js +3250 -0
- package/examples/webxr-2/components/init-shell-material.js +215 -0
- package/examples/webxr-2/components/platter.js +172 -0
- package/examples/webxr-2/components/shell.js +374 -0
- package/examples/webxr-2/components/soccer-ball.js +250 -0
- package/examples/webxr-2/components/squashed-goomba.js +249 -0
- package/examples/webxr-2/edge-impulse-standalone.js +7228 -0
- package/examples/webxr-2/edge-impulse-standalone.wasm +0 -0
- package/examples/webxr-2/index.html +996 -0
- package/examples/webxr-2/kick.tflite +0 -0
- package/examples/webxr-2/kick2.tflite +0 -0
- package/examples/webxr-2/run-impulse.js +135 -0
- package/examples/webxr-2/script.js +384 -0
- package/examples/webxr-3/components/bs-camera.js +65 -0
- package/examples/webxr-3/index.html +134 -0
- package/examples/webxr-3/script.js +432 -0
- package/package.json +2 -1
- package/src/.prettierrc +4 -0
- package/src/BS.ts +79 -8
- package/src/CameraManager.ts +497 -0
- package/src/Device.ts +691 -86
- package/src/DeviceInformationManager.ts +19 -10
- package/src/DeviceManager.ts +85 -25
- package/src/FileTransferManager.ts +145 -20
- package/src/InformationManager.ts +40 -15
- package/src/MicrophoneManager.ts +599 -0
- package/src/TfliteManager.ts +171 -25
- package/src/WifiManager.ts +323 -0
- package/src/connection/BaseConnectionManager.ts +130 -30
- package/src/connection/ClientConnectionManager.ts +34 -10
- package/src/connection/bluetooth/BluetoothConnectionManager.ts +8 -2
- package/src/connection/bluetooth/NobleConnectionManager.ts +147 -41
- package/src/connection/bluetooth/WebBluetoothConnectionManager.ts +99 -34
- package/src/connection/bluetooth/bluetoothUUIDs.ts +40 -13
- package/src/connection/udp/UDPConnectionManager.ts +356 -0
- package/src/connection/websocket/WebSocketConnectionManager.ts +282 -0
- package/src/devicePair/DevicePair.ts +95 -25
- package/src/devicePair/DevicePairPressureSensorDataManager.ts +27 -7
- package/src/scanner/BaseScanner.ts +49 -11
- package/src/scanner/NobleScanner.ts +76 -14
- package/src/sensor/MotionSensorDataManager.ts +21 -6
- package/src/sensor/PressureSensorDataManager.ts +37 -8
- package/src/sensor/SensorConfigurationManager.ts +73 -22
- package/src/sensor/SensorDataManager.ts +109 -23
- package/src/server/BaseClient.ts +150 -36
- package/src/server/BaseServer.ts +50 -2
- package/src/server/ServerUtils.ts +39 -9
- package/src/server/udp/UDPServer.ts +73 -22
- package/src/server/udp/UDPUtils.ts +9 -2
- package/src/server/websocket/WebSocketClient.ts +27 -7
- package/src/server/websocket/WebSocketUtils.ts +4 -2
- package/src/utils/AudioUtils.ts +65 -0
- package/src/utils/Console.ts +62 -9
- package/src/utils/ParseUtils.ts +24 -5
- package/src/utils/ThrottleUtils.ts +62 -0
- package/src/utils/Timer.ts +1 -1
- package/src/vibration/VibrationManager.ts +166 -40
package/src/InformationManager.ts
@@ -1,3 +1,4 @@
+import { ConnectionType } from "./connection/BaseConnectionManager.ts";
 import Device, { SendMessageCallback } from "./Device.ts";
 import { createConsole } from "./utils/Console.ts";
 import { isInBrowser } from "./utils/environment.ts";
@@ -8,7 +9,14 @@ import autoBind from "auto-bind";
 
 const _console = createConsole("InformationManager", { log: false });
 
-export const DeviceTypes = [
+export const DeviceTypes = [
+  "leftInsole",
+  "rightInsole",
+  "leftGlove",
+  "rightGlove",
+  "glasses",
+  "generic",
+] as const;
 export type DeviceType = (typeof DeviceTypes)[number];
 
 export const Sides = ["left", "right"] as const;
@@ -44,8 +52,13 @@ export interface InformationEventMessages {
   getCurrentTime: { currentTime: number };
 }
 
-export type InformationEventDispatcher = EventDispatcher<
-
+export type InformationEventDispatcher = EventDispatcher<
+  Device,
+  InformationEventType,
+  InformationEventMessages
+>;
+export type SendInformationMessageCallback =
+  SendMessageCallback<InformationMessageType>;
 
 class InformationManager {
   constructor() {
@@ -89,7 +102,9 @@ class InformationManager {
     _console.assertTypeWithError(updatedBatteryCurrent, "number");
     this.#batteryCurrent = updatedBatteryCurrent;
     _console.log({ batteryCurrent: this.#batteryCurrent });
-    this.#dispatchEvent("getBatteryCurrent", {
+    this.#dispatchEvent("getBatteryCurrent", {
+      batteryCurrent: this.#batteryCurrent,
+    });
   }
 
   #id!: string;
@@ -116,13 +131,11 @@ class InformationManager {
   }
   async setName(newName: string) {
     _console.assertTypeWithError(newName, "string");
-    _console.
-      newName
-
-
-
-      newName.length < MaxNameLength,
-      `name must be less than ${MaxNameLength} characters long ("${newName}" is ${newName.length} characters long)`
+    _console.assertRangeWithError(
+      "newName",
+      newName.length,
+      MinNameLength,
+      MaxNameLength
     );
     const setNameData = textEncoder.encode(newName);
     _console.log({ setNameData });
@@ -145,7 +158,10 @@ class InformationManager {
   }
   #assertValidDeviceTypeEnum(typeEnum: number) {
     _console.assertTypeWithError(typeEnum, "number");
-    _console.assertWithError(
+    _console.assertWithError(
+      typeEnum in DeviceTypes,
+      `invalid typeEnum ${typeEnum}`
+    );
   }
   updateType(updatedType: DeviceType) {
     this.#assertValidDeviceType(updatedType);
@@ -227,7 +243,8 @@ class InformationManager {
 
   #onCurrentTime(currentTime: number) {
     _console.log({ currentTime });
-    this.#isCurrentTimeSet =
+    this.#isCurrentTimeSet =
+      currentTime != 0 || Math.abs(Date.now() - currentTime) < Uint16Max;
     if (!this.#isCurrentTimeSet) {
       this.#setCurrentTime(false);
     }
@@ -237,7 +254,10 @@ class InformationManager {
     const dataView = new DataView(new ArrayBuffer(8));
     dataView.setBigUint64(0, BigInt(Date.now()), true);
     const promise = this.waitForEvent("getCurrentTime");
-    this.sendMessage(
+    this.sendMessage(
+      [{ type: "setCurrentTime", data: dataView.buffer }],
+      sendImmediately
+    );
     await promise;
   }
 
@@ -276,7 +296,10 @@ class InformationManager {
         break;
       case "getMtu":
         let mtu = dataView.getUint16(0, true);
-        if (
+        if (
+          this.connectionType != "webSocket" &&
+          this.connectionType != "udp"
+        ) {
           mtu = Math.min(mtu, 512);
         }
         _console.log({ mtu });
@@ -295,6 +318,8 @@ class InformationManager {
   clear() {
     this.#isCurrentTimeSet = false;
   }
+
+  connectionType?: ConnectionType;
 }
 
 export default InformationManager;
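The getMtu hunk above means the 512-byte MTU ceiling now only applies to Bluetooth-style transports; the new webSocket and udp connection types keep whatever MTU the peer reports. A minimal standalone sketch of that rule (the function and type names are made up for illustration, and the union is narrowed to just the two values the hunk tests for):

// Illustrative only — not part of the package's public API.
type IllustrativeConnectionType = "webSocket" | "udp" | (string & {});

function clampMtu(
  reportedMtu: number,
  connectionType?: IllustrativeConnectionType
): number {
  // Non-webSocket/udp transports (i.e. BLE connections) are capped at 512 bytes.
  if (connectionType != "webSocket" && connectionType != "udp") {
    return Math.min(reportedMtu, 512);
  }
  return reportedMtu;
}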
package/src/MicrophoneManager.ts (new file)
@@ -0,0 +1,599 @@
import Device, { SendMessageCallback } from "./Device.ts";
import { createConsole } from "./utils/Console.ts";
import EventDispatcher from "./utils/EventDispatcher.ts";
import autoBind from "auto-bind";
import { concatenateArrayBuffers } from "./utils/ArrayBufferUtils.ts";
import { float32ArrayToWav } from "./utils/AudioUtils.ts";

const _console = createConsole("MicrophoneManager", { log: false });

export const MicrophoneSensorTypes = ["microphone"] as const;
export type MicrophoneSensorType = (typeof MicrophoneSensorTypes)[number];

export const MicrophoneCommands = ["start", "stop", "vad"] as const;
export type MicrophoneCommand = (typeof MicrophoneCommands)[number];

export const MicrophoneStatuses = ["idle", "streaming", "vad"] as const;
export type MicrophoneStatus = (typeof MicrophoneStatuses)[number];

export const MicrophoneConfigurationTypes = ["sampleRate", "bitDepth"] as const;
export type MicrophoneConfigurationType =
  (typeof MicrophoneConfigurationTypes)[number];

export const MicrophoneSampleRates = ["8000", "16000"] as const;
export type MicrophoneSampleRate = (typeof MicrophoneSampleRates)[number];

export const MicrophoneBitDepths = ["8", "16"] as const;
export type MicrophoneBitDepth = (typeof MicrophoneBitDepths)[number];

export const MicrophoneMessageTypes = [
  "microphoneStatus",
  "microphoneCommand",
  "getMicrophoneConfiguration",
  "setMicrophoneConfiguration",
  "microphoneData",
] as const;
export type MicrophoneMessageType = (typeof MicrophoneMessageTypes)[number];

export type MicrophoneConfiguration = {
  sampleRate?: MicrophoneSampleRate;
  bitDepth?: MicrophoneBitDepth;
};

export const MicrophoneConfigurationValues = {
  sampleRate: MicrophoneSampleRates,
  bitDepth: MicrophoneBitDepths,
};

export const RequiredMicrophoneMessageTypes: MicrophoneMessageType[] = [
  "getMicrophoneConfiguration",
  "microphoneStatus",
] as const;

export const MicrophoneEventTypes = [
  ...MicrophoneMessageTypes,
  "isRecordingMicrophone",
  "microphoneRecording",
] as const;
export type MicrophoneEventType = (typeof MicrophoneEventTypes)[number];

export interface MicrophoneEventMessages {
  microphoneStatus: {
    microphoneStatus: MicrophoneStatus;
    previousMicrophoneStatus: MicrophoneStatus;
  };
  getMicrophoneConfiguration: {
    microphoneConfiguration: MicrophoneConfiguration;
  };
  microphoneData: {
    samples: Float32Array;
    sampleRate: MicrophoneSampleRate;
    bitDepth: MicrophoneBitDepth;
  };
  isRecordingMicrophone: {
    isRecordingMicrophone: boolean;
  };
  microphoneRecording: {
    samples: Float32Array;
    sampleRate: MicrophoneSampleRate;
    bitDepth: MicrophoneBitDepth;
    blob: Blob;
    url: string;
  };
}

export type MicrophoneEventDispatcher = EventDispatcher<
  Device,
  MicrophoneEventType,
  MicrophoneEventMessages
>;
export type SendMicrophoneMessageCallback =
  SendMessageCallback<MicrophoneMessageType>;

class MicrophoneManager {
  constructor() {
    autoBind(this);
  }

  sendMessage!: SendMicrophoneMessageCallback;

  eventDispatcher!: MicrophoneEventDispatcher;
  get #dispatchEvent() {
    return this.eventDispatcher.dispatchEvent;
  }
  get waitForEvent() {
    return this.eventDispatcher.waitForEvent;
  }

  requestRequiredInformation() {
    _console.log("requesting required microphone information");
    const messages = RequiredMicrophoneMessageTypes.map((messageType) => ({
      type: messageType,
    }));
    this.sendMessage(messages, false);
  }

  // MICROPHONE STATUS
  #microphoneStatus!: MicrophoneStatus;
  get microphoneStatus() {
    return this.#microphoneStatus;
  }
  #parseMicrophoneStatus(dataView: DataView) {
    const microphoneStatusIndex = dataView.getUint8(0);
    const newMicrophoneStatus = MicrophoneStatuses[microphoneStatusIndex];
    this.#updateMicrophoneStatus(newMicrophoneStatus);
  }
  #updateMicrophoneStatus(newMicrophoneStatus: MicrophoneStatus) {
    _console.assertEnumWithError(newMicrophoneStatus, MicrophoneStatuses);
    if (newMicrophoneStatus == this.#microphoneStatus) {
      _console.log(`redundant microphoneStatus ${newMicrophoneStatus}`);
      return;
    }
    const previousMicrophoneStatus = this.#microphoneStatus;
    this.#microphoneStatus = newMicrophoneStatus;
    _console.log(`updated microphoneStatus to "${this.microphoneStatus}"`);
    this.#dispatchEvent("microphoneStatus", {
      microphoneStatus: this.microphoneStatus,
      previousMicrophoneStatus,
    });
  }

  // MICROPHONE COMMAND
  async #sendMicrophoneCommand(
    command: MicrophoneCommand,
    sendImmediately?: boolean
  ) {
    _console.assertEnumWithError(command, MicrophoneCommands);
    _console.log(`sending microphone command "${command}"`);

    const promise = this.waitForEvent("microphoneStatus");
    _console.log(`setting command "${command}"`);
    const commandEnum = MicrophoneCommands.indexOf(command);
    this.sendMessage(
      [
        {
          type: "microphoneCommand",
          data: Uint8Array.from([commandEnum]).buffer,
        },
      ],
      sendImmediately
    );

    await promise;
  }
  #assertIsIdle() {
    _console.assertWithError(
      this.#microphoneStatus == "idle",
      `microphone is not idle - currently ${this.#microphoneStatus}`
    );
  }
  #assertIsNotIdle() {
    _console.assertWithError(
      this.#microphoneStatus != "idle",
      `microphone is idle`
    );
  }
  #assertIsStreaming() {
    _console.assertWithError(
      this.#microphoneStatus == "streaming",
      `microphone is not recording - currently ${this.#microphoneStatus}`
    );
  }

  async start() {
    await this.#sendMicrophoneCommand("start");
  }
  async stop() {
    this.#assertIsNotIdle();
    await this.#sendMicrophoneCommand("stop");
  }
  async vad() {
    await this.#sendMicrophoneCommand("vad");
  }
  async toggle() {
    switch (this.microphoneStatus) {
      case "idle":
        this.start();
        break;
      case "streaming":
        this.stop();
        break;
    }
  }

  // MICROPHONE DATA
  #assertValidBitDepth() {
    _console.assertEnumWithError(this.bitDepth!, MicrophoneBitDepths);
  }
  #fadeDuration = 0.001;
  #playbackTime = 0;
  #parseMicrophoneData(dataView: DataView) {
    this.#assertValidBitDepth();

    _console.log("parsing microphone data", dataView);

    const numberOfSamples = dataView.byteLength / this.#bytesPerSample!;
    const samples = new Float32Array(numberOfSamples);

    for (let i = 0; i < numberOfSamples; i++) {
      let sample;
      switch (this.bitDepth) {
        case "16":
          sample = dataView.getInt16(i * 2, true);
          samples[i] = sample / 2 ** 15; // Normalize to [-1, 1]
          break;
        case "8":
          sample = dataView.getInt8(i);
          samples[i] = sample / 2 ** 7; // Normalize to [-1, 1]
          break;
      }
    }

    _console.log("samples", samples);

    if (this.#isRecording && this.#microphoneRecordingData) {
      this.#microphoneRecordingData!.push(samples);
    }

    if (this.#audioContext) {
      if (this.#gainNode) {
        const audioBuffer = this.#audioContext.createBuffer(
          1,
          samples.length,
          Number(this.sampleRate!)
        );
        audioBuffer.getChannelData(0).set(samples);

        const bufferSource = this.#audioContext.createBufferSource();
        bufferSource.buffer = audioBuffer;

        const channelData = audioBuffer.getChannelData(0);
        const sampleRate = Number(this.sampleRate!);

        for (let i = 0; i < this.#fadeDuration * sampleRate; i++) {
          channelData[i] *= i / (this.#fadeDuration * sampleRate);
        }

        for (
          let i = channelData.length - 1;
          i >= channelData.length - this.#fadeDuration * sampleRate;
          i--
        ) {
          channelData[i] *=
            (channelData.length - i) / (this.#fadeDuration * sampleRate);
        }

        bufferSource.connect(this.#gainNode!);

        if (this.#playbackTime < this.#audioContext.currentTime) {
          this.#playbackTime = this.#audioContext.currentTime;
        }
        bufferSource.start(this.#playbackTime);
        this.#playbackTime += audioBuffer.duration;
      }
    }

    this.#dispatchEvent("microphoneData", {
      samples,
      sampleRate: this.sampleRate!,
      bitDepth: this.bitDepth!,
    });
  }
  get #bytesPerSample() {
    switch (this.bitDepth) {
      case "8":
        return 1;
      case "16":
        return 2;
    }
  }

  // CONFIG
  #microphoneConfiguration: MicrophoneConfiguration = {};
  get microphoneConfiguration() {
    return this.#microphoneConfiguration;
  }
  #availableMicrophoneConfigurationTypes!: MicrophoneConfigurationType[];
  get availableMicrophoneConfigurationTypes() {
    return this.#availableMicrophoneConfigurationTypes;
  }

  get bitDepth() {
    return this.#microphoneConfiguration.bitDepth;
  }
  get sampleRate() {
    return this.#microphoneConfiguration.sampleRate;
  }

  #parseMicrophoneConfiguration(dataView: DataView) {
    const parsedMicrophoneConfiguration: MicrophoneConfiguration = {};

    let byteOffset = 0;
    while (byteOffset < dataView.byteLength) {
      const microphoneConfigurationTypeIndex = dataView.getUint8(byteOffset++);
      const microphoneConfigurationType =
        MicrophoneConfigurationTypes[microphoneConfigurationTypeIndex];
      _console.assertWithError(
        microphoneConfigurationType,
        `invalid microphoneConfigurationTypeIndex ${microphoneConfigurationTypeIndex}`
      );
      let rawValue = dataView.getUint8(byteOffset++);
      const values = MicrophoneConfigurationValues[microphoneConfigurationType];
      const value = values[rawValue];
      _console.assertEnumWithError(value, values);
      _console.log({ microphoneConfigurationType, value });
      // @ts-expect-error
      parsedMicrophoneConfiguration[microphoneConfigurationType] = value;
    }

    _console.log({ parsedMicrophoneConfiguration });
    this.#availableMicrophoneConfigurationTypes = Object.keys(
      parsedMicrophoneConfiguration
    ) as MicrophoneConfigurationType[];
    this.#microphoneConfiguration = parsedMicrophoneConfiguration;
    this.#dispatchEvent("getMicrophoneConfiguration", {
      microphoneConfiguration: this.#microphoneConfiguration,
    });
  }

  #isMicrophoneConfigurationRedundant(
    microphoneConfiguration: MicrophoneConfiguration
  ) {
    let microphoneConfigurationTypes = Object.keys(
      microphoneConfiguration
    ) as MicrophoneConfigurationType[];
    return microphoneConfigurationTypes.every((microphoneConfigurationType) => {
      return (
        this.microphoneConfiguration[microphoneConfigurationType] ==
        microphoneConfiguration[microphoneConfigurationType]
      );
    });
  }
  async setMicrophoneConfiguration(
    newMicrophoneConfiguration: MicrophoneConfiguration
  ) {
    _console.log({ newMicrophoneConfiguration });
    if (this.#isMicrophoneConfigurationRedundant(newMicrophoneConfiguration)) {
      _console.log("redundant microphone configuration");
      return;
    }
    const setMicrophoneConfigurationData = this.#createData(
      newMicrophoneConfiguration
    );
    _console.log({ setMicrophoneConfigurationData });

    const promise = this.waitForEvent("getMicrophoneConfiguration");
    this.sendMessage([
      {
        type: "setMicrophoneConfiguration",
        data: setMicrophoneConfigurationData.buffer,
      },
    ]);
    await promise;
  }

  #assertAvailableMicrophoneConfigurationType(
    microphoneConfigurationType: MicrophoneConfigurationType
  ) {
    _console.assertWithError(
      this.#availableMicrophoneConfigurationTypes,
      "must get initial microphoneConfiguration"
    );
    const isMicrophoneConfigurationTypeAvailable =
      this.#availableMicrophoneConfigurationTypes?.includes(
        microphoneConfigurationType
      );
    _console.assertWithError(
      isMicrophoneConfigurationTypeAvailable,
      `unavailable microphone configuration type "${microphoneConfigurationType}"`
    );
    return isMicrophoneConfigurationTypeAvailable;
  }

  static AssertValidMicrophoneConfigurationType(
    microphoneConfigurationType: MicrophoneConfigurationType
  ) {
    _console.assertEnumWithError(
      microphoneConfigurationType,
      MicrophoneConfigurationTypes
    );
  }
  static AssertValidMicrophoneConfigurationTypeEnum(
    microphoneConfigurationTypeEnum: number
  ) {
    _console.assertTypeWithError(microphoneConfigurationTypeEnum, "number");
    _console.assertWithError(
      microphoneConfigurationTypeEnum in MicrophoneConfigurationTypes,
      `invalid microphoneConfigurationTypeEnum ${microphoneConfigurationTypeEnum}`
    );
  }

  #createData(microphoneConfiguration: MicrophoneConfiguration) {
    let microphoneConfigurationTypes = Object.keys(
      microphoneConfiguration
    ) as MicrophoneConfigurationType[];
    microphoneConfigurationTypes = microphoneConfigurationTypes.filter(
      (microphoneConfigurationType) =>
        this.#assertAvailableMicrophoneConfigurationType(
          microphoneConfigurationType
        )
    );

    const dataView = new DataView(
      new ArrayBuffer(microphoneConfigurationTypes.length * 2)
    );
    microphoneConfigurationTypes.forEach(
      (microphoneConfigurationType, index) => {
        MicrophoneManager.AssertValidMicrophoneConfigurationType(
          microphoneConfigurationType
        );
        const microphoneConfigurationTypeEnum =
          MicrophoneConfigurationTypes.indexOf(microphoneConfigurationType);
        dataView.setUint8(index * 2, microphoneConfigurationTypeEnum);

        let value = microphoneConfiguration[microphoneConfigurationType]!;
        if (typeof value == "number") {
          // @ts-ignore
          value = value.toString();
        }
        const values =
          MicrophoneConfigurationValues[microphoneConfigurationType];
        _console.assertEnumWithError(value, values);
        // @ts-expect-error
        const rawValue = values.indexOf(value);
        dataView.setUint8(index * 2 + 1, rawValue);
      }
    );
    _console.log({ sensorConfigurationData: dataView });
    return dataView;
  }

  // MESSAGE
  parseMessage(messageType: MicrophoneMessageType, dataView: DataView) {
    _console.log({ messageType, dataView });

    switch (messageType) {
      case "microphoneStatus":
        this.#parseMicrophoneStatus(dataView);
        break;
      case "getMicrophoneConfiguration":
      case "setMicrophoneConfiguration":
        this.#parseMicrophoneConfiguration(dataView);
        break;
      case "microphoneData":
        this.#parseMicrophoneData(dataView);
        break;
      default:
        throw Error(`uncaught messageType ${messageType}`);
    }
  }

  #audioContext?: AudioContext;
  get audioContext() {
    return this.#audioContext;
  }
  set audioContext(newAudioContext) {
    if (this.#audioContext == newAudioContext) {
      _console.log("redundant audioContext assignment", this.#audioContext);
      return;
    }

    this.#audioContext = newAudioContext;

    _console.log("assigned new audioContext", this.#audioContext);
    if (this.#audioContext) {
      this.#playbackTime = this.#audioContext.currentTime;
    } else {
      if (this.#mediaStreamDestination) {
        this.#mediaStreamDestination.disconnect();
        this.#mediaStreamDestination = undefined;
      }
      if (this.#gainNode) {
        this.#gainNode.disconnect();
        this.#gainNode = undefined;
      }
    }
  }

  #gainNode?: GainNode;
  get gainNode() {
    _console.assertWithError(
      this.#audioContext,
      "audioContext assignment required for gainNode"
    );
    if (!this.#gainNode) {
      _console.log("creating gainNode...");
      this.#gainNode = this.#audioContext!.createGain();
      _console.log("created gainNode", this.#gainNode);
    }
    return this.#gainNode;
  }

  #mediaStreamDestination?: MediaStreamAudioDestinationNode;
  get mediaStreamDestination() {
    _console.assertWithError(
      this.#audioContext,
      "audioContext assignment required for mediaStreamDestination"
    );
    if (!this.#mediaStreamDestination) {
      _console.log("creating mediaStreamDestination...");
      this.#mediaStreamDestination =
        this.#audioContext!.createMediaStreamDestination();
      this.gainNode?.connect(this.#mediaStreamDestination);
      _console.log(
        "created mediaStreamDestination",
        this.#mediaStreamDestination
      );
    }
    return this.#mediaStreamDestination;
  }

  #isRecording = false;
  get isRecording() {
    return this.#isRecording;
  }
  #microphoneRecordingData?: Float32Array[];
  startRecording() {
    if (this.isRecording) {
      _console.log("already recording");
      return;
    }
    this.#microphoneRecordingData = [];
    this.#isRecording = true;
    this.#dispatchEvent("isRecordingMicrophone", {
      isRecordingMicrophone: this.isRecording,
    });
  }
  stopRecording() {
    if (!this.isRecording) {
      _console.log("already not recording");
      return;
    }
    this.#isRecording = false;
    if (
      this.#microphoneRecordingData &&
      this.#microphoneRecordingData.length > 0
    ) {
      _console.log(
        "parsing microphone data...",
        this.#microphoneRecordingData.length
      );
      const arrayBuffer = concatenateArrayBuffers(
        ...this.#microphoneRecordingData
      );
      const samples = new Float32Array(arrayBuffer);

      const blob = float32ArrayToWav(samples, Number(this.sampleRate)!, 1);
      const url = URL.createObjectURL(blob);
      this.#dispatchEvent("microphoneRecording", {
        samples,
        sampleRate: this.sampleRate!,
        bitDepth: this.bitDepth!,
        blob,
        url,
      });
    }
    this.#microphoneRecordingData = undefined;
    this.#dispatchEvent("isRecordingMicrophone", {
      isRecordingMicrophone: this.isRecording,
    });
  }
  toggleRecording() {
    if (this.#isRecording) {
      this.stopRecording();
    } else {
      this.startRecording();
    }
  }

  clear() {
    // @ts-ignore
    this.#microphoneStatus = undefined;
    this.#microphoneConfiguration = {};
    if (this.isRecording) {
      this.stopRecording();
    }
  }
}

export default MicrophoneManager;
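To make the microphoneData payload format above concrete: each notification carries raw little-endian signed PCM at the configured bitDepth, which #parseMicrophoneData normalizes into [-1, 1] floats. The helper below re-derives that decoding step as a standalone sketch; the function name is illustrative and is not an export of the package:

// Mirrors the normalization loop in MicrophoneManager.#parseMicrophoneData.
function decodePcm(dataView: DataView, bitDepth: "8" | "16"): Float32Array {
  const bytesPerSample = bitDepth == "16" ? 2 : 1;
  const numberOfSamples = dataView.byteLength / bytesPerSample;
  const samples = new Float32Array(numberOfSamples);
  for (let i = 0; i < numberOfSamples; i++) {
    samples[i] =
      bitDepth == "16"
        ? dataView.getInt16(i * 2, true) / 2 ** 15 // little-endian int16 → [-1, 1]
        : dataView.getInt8(i) / 2 ** 7; // int8 → [-1, 1]
  }
  return samples;
}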