brilliantsole 0.0.27 → 0.0.29
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/assets/3d/anchor.glb +0 -0
- package/assets/3d/coin.glb +0 -0
- package/assets/3d/glasses.glb +0 -0
- package/assets/audio/bounceMedium.wav +0 -0
- package/assets/audio/bounceStrong.wav +0 -0
- package/assets/audio/bounceWeak.wav +0 -0
- package/assets/audio/coin.wav +0 -0
- package/assets/audio/getUp.wav +0 -0
- package/assets/audio/grab.wav +0 -0
- package/assets/audio/kick.wav +0 -0
- package/assets/audio/platterFadeIn old.wav +0 -0
- package/assets/audio/platterFadeIn.wav +0 -0
- package/assets/audio/platterFadeOut.wav +0 -0
- package/assets/audio/punch.wav +0 -0
- package/assets/audio/punchSqueak.wav +0 -0
- package/assets/audio/purr.wav +0 -0
- package/assets/audio/purrFadeOut.wav +0 -0
- package/assets/audio/release.wav +0 -0
- package/assets/audio/splat.wav +0 -0
- package/assets/audio/stomp.wav +0 -0
- package/build/brilliantsole.cjs +3091 -741
- package/build/brilliantsole.cjs.map +1 -1
- package/build/brilliantsole.js +2759 -709
- package/build/brilliantsole.js.map +1 -1
- package/build/brilliantsole.ls.js +2602 -543
- package/build/brilliantsole.ls.js.map +1 -1
- package/build/brilliantsole.min.js +1 -1
- package/build/brilliantsole.min.js.map +1 -1
- package/build/brilliantsole.module.d.ts +295 -65
- package/build/brilliantsole.module.js +2749 -710
- package/build/brilliantsole.module.js.map +1 -1
- package/build/brilliantsole.module.min.d.ts +295 -65
- package/build/brilliantsole.module.min.js +1 -1
- package/build/brilliantsole.module.min.js.map +1 -1
- package/build/brilliantsole.node.module.d.ts +289 -62
- package/build/brilliantsole.node.module.js +3080 -742
- package/build/brilliantsole.node.module.js.map +1 -1
- package/build/dts/BS-output.d.ts +10 -0
- package/build/dts/BS.d.ts +21 -8
- package/build/dts/CameraManager.d.ts +72 -0
- package/build/dts/Device.d.ts +64 -13
- package/build/dts/DeviceInformationManager.d.ts +4 -4
- package/build/dts/DeviceManager.d.ts +2 -0
- package/build/dts/FileTransferManager.d.ts +18 -8
- package/build/dts/InformationManager.d.ts +2 -0
- package/build/dts/MicrophoneManager.d.ts +88 -0
- package/build/dts/TfliteManager.d.ts +22 -2
- package/build/dts/WifiManager.d.ts +61 -0
- package/build/dts/connection/BaseConnectionManager.d.ts +35 -3
- package/build/dts/connection/ClientConnectionManager.d.ts +7 -2
- package/build/dts/connection/bluetooth/NobleConnectionManager.d.ts +2 -1
- package/build/dts/connection/bluetooth/WebBluetoothConnectionManager.d.ts +1 -0
- package/build/dts/connection/bluetooth/bluetoothUUIDs.d.ts +2 -2
- package/build/dts/connection/udp/UDPConnectionManager.d.ts +28 -0
- package/build/dts/connection/webSocket/WebSocketConnectionManager.d.ts +25 -0
- package/build/dts/devicePair/DevicePair.d.ts +5 -5
- package/build/dts/scanner/BaseScanner.d.ts +4 -1
- package/build/dts/scanner/NobleScanner.d.ts +2 -1
- package/build/dts/sensor/MotionSensorDataManager.d.ts +5 -2
- package/build/dts/sensor/SensorDataManager.d.ts +5 -4
- package/build/dts/server/BaseClient.d.ts +5 -3
- package/build/dts/server/ServerUtils.d.ts +1 -1
- package/build/dts/server/websocket/WebSocketUtils.d.ts +1 -1
- package/build/dts/utils/AudioUtils.d.ts +2 -0
- package/build/dts/utils/Console.d.ts +2 -0
- package/build/dts/utils/ThrottleUtils.d.ts +2 -0
- package/build/dts/vibration/VibrationManager.d.ts +19 -2
- package/build/index.d.ts +292 -62
- package/build/index.node.d.ts +286 -59
- package/examples/3d/scene.html +19 -5
- package/examples/3d-generic/index.html +144 -0
- package/examples/3d-generic/script.js +266 -0
- package/examples/basic/index.html +267 -17
- package/examples/basic/script.js +958 -105
- package/examples/camera/barcode-detector.js +109 -0
- package/examples/camera/depth-estimation.js +71 -0
- package/examples/camera/face-detector.js +119 -0
- package/examples/camera/face-landmark.js +111 -0
- package/examples/camera/gesture-recognition.js +97 -0
- package/examples/camera/hand-landmark.js +74 -0
- package/examples/camera/image-segmentation.js +98 -0
- package/examples/camera/image-to-text.js +43 -0
- package/examples/camera/image-upscale.js +75 -0
- package/examples/camera/index.html +129 -0
- package/examples/camera/object-detection.js +98 -0
- package/examples/camera/pose-landmark.js +60 -0
- package/examples/camera/script.js +316 -0
- package/examples/camera/utils.js +165 -0
- package/examples/camera/yolo-tiny.js +54 -0
- package/examples/camera/yolo.js +119 -0
- package/examples/edge-impulse/script.js +157 -48
- package/examples/edge-impulse-test/README.md +11 -0
- package/examples/edge-impulse-test/edge-impulse-standalone.js +7228 -0
- package/examples/edge-impulse-test/edge-impulse-standalone.wasm +0 -0
- package/examples/edge-impulse-test/index.html +75 -0
- package/examples/edge-impulse-test/run-impulse.js +135 -0
- package/examples/edge-impulse-test/script.js +200 -0
- package/examples/glasses-gestures/README.md +11 -0
- package/examples/glasses-gestures/edge-impulse-standalone.js +7228 -0
- package/examples/glasses-gestures/edge-impulse-standalone.wasm +0 -0
- package/examples/glasses-gestures/index.html +69 -0
- package/examples/glasses-gestures/run-impulse.js +135 -0
- package/examples/glasses-gestures/script.js +226 -0
- package/examples/gloves/edge-impulse-standalone.js +7228 -0
- package/examples/gloves/edge-impulse-standalone.wasm +0 -0
- package/examples/gloves/index.html +4 -1
- package/examples/gloves/run-impulse.js +135 -0
- package/examples/gloves/script.js +367 -51
- package/examples/graph/script.js +94 -37
- package/examples/microphone/gender.js +54 -0
- package/examples/microphone/index.html +102 -0
- package/examples/microphone/script.js +394 -0
- package/examples/microphone/utils.js +45 -0
- package/examples/microphone/whisper-realtime.js +166 -0
- package/examples/microphone/whisper.js +132 -0
- package/examples/punch/index.html +135 -0
- package/examples/punch/punch.tflite +0 -0
- package/examples/punch/script.js +169 -0
- package/examples/server/index.html +98 -22
- package/examples/server/script.js +317 -109
- package/examples/ukaton-firmware-update/merged-firmware.bin +0 -0
- package/examples/utils/aframe/aframe-master.min.js +2 -0
- package/examples/utils/aframe/bs-vibration.js +150 -0
- package/examples/utils/aframe/force-pushable.js +80 -0
- package/examples/utils/aframe/grabbable-anchor.js +46 -0
- package/examples/utils/aframe/grabbable-listener.js +31 -0
- package/examples/utils/aframe/grabbable-physics-body.js +190 -0
- package/examples/utils/aframe/grow-shrink.js +25 -0
- package/examples/utils/aframe/hand-punch.js +119 -0
- package/examples/utils/aframe/my-obb-collider.js +293 -0
- package/examples/utils/aframe/occlude-hand-tracking-controls.js +47 -0
- package/examples/utils/aframe/occlude-mesh.js +42 -0
- package/examples/utils/aframe/palm-up-detector.js +47 -0
- package/examples/utils/aframe/shadow-material.js +20 -0
- package/examples/utils/aframe/soft-shadow-light.js +9 -0
- package/examples/webxr-2/assets/3d/soccerBall.glb +0 -0
- package/examples/webxr-2/assets/audio/shellBounce.wav +0 -0
- package/examples/webxr-2/assets/audio/shellHit.wav +0 -0
- package/examples/webxr-2/assets/audio/shellKick.wav +0 -0
- package/examples/webxr-2/assets/audio/soccerBounce.wav +0 -0
- package/examples/webxr-2/assets/audio/soccerKick.mp3 +0 -0
- package/examples/webxr-2/assets/images/shellTexture.png +0 -0
- package/examples/webxr-2/components/bs-ankle.js +337 -0
- package/examples/webxr-2/components/coin.js +84 -0
- package/examples/webxr-2/components/custom-wrap.js +17 -0
- package/examples/webxr-2/components/goomba.js +3250 -0
- package/examples/webxr-2/components/init-shell-material.js +215 -0
- package/examples/webxr-2/components/platter.js +172 -0
- package/examples/webxr-2/components/shell.js +374 -0
- package/examples/webxr-2/components/soccer-ball.js +250 -0
- package/examples/webxr-2/components/squashed-goomba.js +249 -0
- package/examples/webxr-2/edge-impulse-standalone.js +7228 -0
- package/examples/webxr-2/edge-impulse-standalone.wasm +0 -0
- package/examples/webxr-2/index.html +996 -0
- package/examples/webxr-2/kick.tflite +0 -0
- package/examples/webxr-2/kick2.tflite +0 -0
- package/examples/webxr-2/run-impulse.js +135 -0
- package/examples/webxr-2/script.js +384 -0
- package/examples/webxr-3/components/bs-camera.js +65 -0
- package/examples/webxr-3/index.html +134 -0
- package/examples/webxr-3/script.js +432 -0
- package/package.json +2 -1
- package/src/.prettierrc +4 -0
- package/src/BS.ts +79 -8
- package/src/CameraManager.ts +497 -0
- package/src/Device.ts +691 -86
- package/src/DeviceInformationManager.ts +19 -10
- package/src/DeviceManager.ts +85 -25
- package/src/FileTransferManager.ts +145 -20
- package/src/InformationManager.ts +40 -15
- package/src/MicrophoneManager.ts +599 -0
- package/src/TfliteManager.ts +171 -25
- package/src/WifiManager.ts +323 -0
- package/src/connection/BaseConnectionManager.ts +130 -30
- package/src/connection/ClientConnectionManager.ts +34 -10
- package/src/connection/bluetooth/BluetoothConnectionManager.ts +8 -2
- package/src/connection/bluetooth/NobleConnectionManager.ts +147 -41
- package/src/connection/bluetooth/WebBluetoothConnectionManager.ts +99 -34
- package/src/connection/bluetooth/bluetoothUUIDs.ts +40 -13
- package/src/connection/udp/UDPConnectionManager.ts +356 -0
- package/src/connection/websocket/WebSocketConnectionManager.ts +282 -0
- package/src/devicePair/DevicePair.ts +95 -25
- package/src/devicePair/DevicePairPressureSensorDataManager.ts +27 -7
- package/src/scanner/BaseScanner.ts +49 -11
- package/src/scanner/NobleScanner.ts +76 -14
- package/src/sensor/MotionSensorDataManager.ts +21 -6
- package/src/sensor/PressureSensorDataManager.ts +37 -8
- package/src/sensor/SensorConfigurationManager.ts +73 -22
- package/src/sensor/SensorDataManager.ts +109 -23
- package/src/server/BaseClient.ts +150 -36
- package/src/server/BaseServer.ts +50 -2
- package/src/server/ServerUtils.ts +39 -9
- package/src/server/udp/UDPServer.ts +73 -22
- package/src/server/udp/UDPUtils.ts +9 -2
- package/src/server/websocket/WebSocketClient.ts +27 -7
- package/src/server/websocket/WebSocketUtils.ts +4 -2
- package/src/utils/AudioUtils.ts +65 -0
- package/src/utils/Console.ts +62 -9
- package/src/utils/ParseUtils.ts +24 -5
- package/src/utils/ThrottleUtils.ts +62 -0
- package/src/utils/Timer.ts +1 -1
- package/src/vibration/VibrationManager.ts +166 -40
|
@@ -0,0 +1,394 @@
|
|
|
1
|
+
// Load the library from the local build and expose it for console debugging.
import * as BS from "../../build/brilliantsole.module.js";
window.BS = BS;
console.log(BS);

// DEVICE

// Single device instance this page controls; exposed on window for debugging.
const device = new BS.Device();
console.log({ device });
window.device = device;

// CONNECT

const toggleConnectionButton = document.getElementById("toggleConnection");
toggleConnectionButton.addEventListener("click", () =>
  device.toggleConnection()
);

// Labels for the two settled connection states; transitional statuses
// (e.g. "connecting") fall through and display the raw status string.
const connectionButtonLabels = {
  notConnected: "connect",
  connected: "disconnect",
};
device.addEventListener("connectionStatus", () => {
  const status = device.connectionStatus;
  // The button is never disabled by this handler (same as before).
  toggleConnectionButton.disabled = false;
  toggleConnectionButton.innerText = connectionButtonLabels[status] ?? status;
});
|
|
31
|
+
|
|
32
|
+
// MICROPHONE

// On connect, configure the microphone; if the device has none, bail out and
// disconnect (this example is microphone-only).
device.addEventListener("connected", () => {
  if (!device.hasMicrophone) {
    console.error("device doesn't have microphone");
    device.disconnect();
    return;
  }
  device.setSensorConfiguration({ microphone: 5 });
  device.setMicrophoneConfiguration({ sampleRate: "16000", bitDepth: "16" });
});
|
|
43
|
+
|
|
44
|
+
// Mirror the microphone status string into the page.
/** @type {HTMLSpanElement} */
const microphoneStatusSpan = document.getElementById("microphoneStatus");
device.addEventListener("microphoneStatus", () => {
  microphoneStatusSpan.innerText = device.microphoneStatus;
});

// Pretty-print the full microphone configuration whenever it is (re)read.
/** @type {HTMLPreElement} */
const microphoneConfigurationPre = document.getElementById(
  "microphoneConfigurationPre"
);
device.addEventListener("getMicrophoneConfiguration", () => {
  const prettyJson = JSON.stringify(device.microphoneConfiguration, null, 2);
  microphoneConfigurationPre.textContent = prettyJson;
});
|
|
61
|
+
|
|
62
|
+
const microphoneConfigurationContainer = document.getElementById(
  "microphoneConfiguration"
);
/** @type {HTMLTemplateElement} */
const microphoneConfigurationTypeTemplate = document.getElementById(
  "microphoneConfigurationTypeTemplate"
);
// Build one labeled <select> row per configurable microphone parameter,
// populated from the library's allowed values for that parameter.
BS.MicrophoneConfigurationTypes.forEach((microphoneConfigurationType) => {
  const microphoneConfigurationTypeContainer =
    microphoneConfigurationTypeTemplate.content
      .cloneNode(true)
      .querySelector(".microphoneConfigurationType");

  microphoneConfigurationContainer.appendChild(
    microphoneConfigurationTypeContainer
  );

  microphoneConfigurationTypeContainer.querySelector(".type").innerText =
    microphoneConfigurationType;

  /** @type {HTMLSelectElement} */
  const select = microphoneConfigurationTypeContainer.querySelector("select");
  /** @type {HTMLOptGroupElement} */
  const optgroup = select.querySelector("optgroup");
  optgroup.label = microphoneConfigurationType;

  // Fill the optgroup with every value the library allows for this type.
  BS.MicrophoneConfigurationValues[microphoneConfigurationType].forEach(
    (value) => {
      optgroup.appendChild(new Option(value));
    }
  );

  /** @type {HTMLSpanElement} */
  const span = microphoneConfigurationTypeContainer.querySelector("span");

  // The select is only editable while connected, with a microphone present,
  // and while the microphone is idle (not streaming).
  // (Renamed from "updateisInputDisabled" for consistent camelCase.)
  const updateInputDisabled = () => {
    select.disabled =
      !device.isConnected ||
      !device.hasMicrophone ||
      device.microphoneStatus != "idle";
  };
  device.addEventListener("isConnected", () => {
    updateInputDisabled();
  });
  device.addEventListener("microphoneStatus", () => {
    updateInputDisabled();
  });

  // Sync the UI (value label + selection) from the device's current value.
  const updateSelect = () => {
    const value = device.microphoneConfiguration[microphoneConfigurationType];
    span.innerText = value;
    select.value = value;
  };

  device.addEventListener("connected", () => {
    if (!device.hasMicrophone) {
      return;
    }
    updateSelect();
  });

  device.addEventListener("getMicrophoneConfiguration", () => {
    updateSelect();
  });

  // Push the user's selection to the device.
  select.addEventListener("input", () => {
    const value = select.value;
    // console.log(`updating ${microphoneConfigurationType} to ${value}`);
    device.setMicrophoneConfiguration({
      [microphoneConfigurationType]: value,
    });
  });
});
|
|
135
|
+
|
|
136
|
+
/** @type {HTMLButtonElement} */
const toggleMicrophoneButton = document.getElementById("toggleMicrophone");
toggleMicrophoneButton.addEventListener("click", () => {
  device.toggleMicrophone();
});

// Refresh the toggle button's label and enabled state from device state.
const updateToggleMicrophoneButton = () => {
  const disabled =
    !device.isConnected ||
    device.sensorConfiguration.microphone == 0 ||
    !device.hasMicrophone;

  // Only the two settled statuses change the label; others leave it as-is.
  if (device.microphoneStatus == "streaming") {
    toggleMicrophoneButton.innerText = "stop microphone";
  } else if (device.microphoneStatus == "idle") {
    toggleMicrophoneButton.innerText = "start microphone";
  }
  toggleMicrophoneButton.disabled = disabled;
};
device.addEventListener("connected", updateToggleMicrophoneButton);
device.addEventListener("getSensorConfiguration", updateToggleMicrophoneButton);
device.addEventListener("microphoneStatus", updateToggleMicrophoneButton);
|
|
166
|
+
|
|
167
|
+
/** @type {HTMLButtonElement} */
const startMicrophoneButton = document.getElementById("startMicrophone");
startMicrophoneButton.addEventListener("click", () => {
  device.startMicrophone();
});

/** @type {HTMLButtonElement} */
const stopMicrophoneButton = document.getElementById("stopMicrophone");
stopMicrophoneButton.addEventListener("click", () => {
  device.stopMicrophone();
});

// NOTE: the element id "enableMicrphoneVad" (sic) matches the page's HTML —
// don't "fix" the spelling here without changing the HTML as well.
/** @type {HTMLButtonElement} */
const enableMicrophoneVadButton = document.getElementById("enableMicrphoneVad");
enableMicrophoneVadButton.addEventListener("click", () => {
  device.enableMicrophoneVad();
});

// Each button is disabled when its action would be a no-op for the current
// microphone status, or when no usable microphone is available at all.
const updateMicrophoneButtons = () => {
  const unavailable =
    !device.isConnected ||
    device.sensorConfiguration.microphone == 0 ||
    !device.hasMicrophone;

  const status = device.microphoneStatus;
  startMicrophoneButton.disabled = unavailable || status == "streaming";
  stopMicrophoneButton.disabled = unavailable || status == "idle";
  enableMicrophoneVadButton.disabled = unavailable || status == "vad";
};
device.addEventListener("microphoneStatus", updateMicrophoneButtons);
device.addEventListener("connected", updateMicrophoneButtons);
device.addEventListener("getSensorConfiguration", updateMicrophoneButtons);
|
|
204
|
+
|
|
205
|
+
// Shared AudioContext at the device's 16 kHz sample rate (webkit fallback for
// older Safari). Exposed on window for debugging and for sibling modules.
const AudioContextClass = window.AudioContext || window.webkitAudioContext;
const audioContext = new AudioContextClass({ sampleRate: 16_000 });
window.audioContext = audioContext;

// Browsers keep AudioContexts suspended until a user gesture; whenever the
// context is not running, resume it on the next click.
const checkAudioContextState = () => {
  const { state } = audioContext;
  console.log({ audioContextState: state });
  if (state != "running") {
    document.addEventListener("click", () => audioContext.resume(), {
      once: true,
    });
  }
};
audioContext.addEventListener("statechange", checkAudioContextState);
checkAudioContextState();

// Route the device's microphone audio through this context, boosted 10x.
device.audioContext = audioContext;
device.microphoneGainNode.gain.value = 10;
|
|
225
|
+
|
|
226
|
+
// Live monitor: play the device's microphone media stream directly.
/** @type {HTMLAudioElement} */
const microphoneStreamAudioElement =
  document.getElementById("microphoneStream");
microphoneStreamAudioElement.srcObject =
  device.microphoneMediaStreamDestination.stream;

// Playback element for finished recordings.
/** @type {HTMLAudioElement} */
const microphoneRecordingAudioElement = document.getElementById(
  "microphoneRecording"
);
// NOTE: "Micrphone" (sic) below matches the id used in the page's HTML.
/** @type {HTMLInputElement} */
const autoPlayMicrphoneRecordingCheckbox = document.getElementById(
  "autoPlayMicrphoneRecording"
);
let autoPlayMicrphoneRecording = autoPlayMicrphoneRecordingCheckbox.checked;
console.log("autoPlayMicrphoneRecording", autoPlayMicrphoneRecording);
autoPlayMicrphoneRecordingCheckbox.addEventListener("input", () => {
  autoPlayMicrphoneRecording = autoPlayMicrphoneRecordingCheckbox.checked;
  console.log({ autoPlayMicrphoneRecording });
});
// When a finished recording arrives, point the <audio> element at its URL and
// optionally start playback right away.
device.addEventListener("microphoneRecording", (event) => {
  microphoneRecordingAudioElement.src = event.message.url;
  if (autoPlayMicrphoneRecording) {
    microphoneRecordingAudioElement.play();
  }
});
|
|
252
|
+
|
|
253
|
+
/** @type {HTMLButtonElement} */
const toggleMicrophoneRecordingButton = document.getElementById(
  "toggleMicrophoneRecording"
);
toggleMicrophoneRecordingButton.addEventListener("click", () => {
  device.toggleMicrophoneRecording();
});

// Recording can only be toggled while audio is actively streaming.
const updateToggleMicrophoneRecordingButton = () => {
  const disabled =
    !device.isConnected ||
    device.sensorConfiguration.microphone == 0 ||
    !device.hasMicrophone ||
    device.microphoneStatus != "streaming";

  const label = device.isRecordingMicrophone
    ? "stop recording"
    : "start recording";
  toggleMicrophoneRecordingButton.innerText = label;
  toggleMicrophoneRecordingButton.disabled = disabled;
};
device.addEventListener("connected", updateToggleMicrophoneRecordingButton);
device.addEventListener(
  "getSensorConfiguration",
  updateToggleMicrophoneRecordingButton
);
device.addEventListener(
  "microphoneStatus",
  updateToggleMicrophoneRecordingButton
);

// When recording ends, also stop the microphone stream itself.
device.addEventListener("isRecordingMicrophone", () => {
  updateToggleMicrophoneRecordingButton();
  if (!device.isRecordingMicrophone) {
    device.stopMicrophone();
  }
});
|
|
288
|
+
|
|
289
|
+
// Peaks.js waveform view over the recorded audio. The `peaks` global is
// provided by the Peaks.js script loaded by the page.
const peaksOptions = {
  zoomview: {
    container: document.getElementById("zoomview-container"),
  },
  overview: {
    container: document.getElementById("overview-container"),
  },
  mediaElement: document.getElementById("microphoneRecording"),
  webAudio: {
    audioContext,
    scale: 128,
    multiChannel: false,
  },
};

// (Re)initialize the waveform view each time a new recording loads.
microphoneRecordingAudioElement.addEventListener("loadeddata", () => {
  peaks.init(peaksOptions, (error) => {
    if (error) {
      console.error("error initializing peaks", error);
    }
  });
});
|
|
311
|
+
|
|
312
|
+
/** @type {HTMLCanvasElement} */
const canvas = document.getElementById("audioVisualizer");
const canvasCtx = canvas.getContext("2d");

// Tap the microphone signal with an AnalyserNode for visualization.
const analyser = audioContext.createAnalyser();
device.microphoneGainNode.connect(analyser);
analyser.fftSize = 1024;

// frequencyBinCount is fftSize / 2 (512 bins); dataArray is reused per frame.
const bufferLength = analyser.frequencyBinCount;
const dataArray = new Uint8Array(bufferLength);
analyser.getByteTimeDomainData(dataArray);

// Visualization mode ("waveform" or "fft") chosen by the user; read by draw()
// every animation frame.
const audioVisualizationTypeSelect = document.getElementById(
  "audioVisualizationType"
);
let audioVisualizationType = audioVisualizationTypeSelect.value;
audioVisualizationTypeSelect.addEventListener("input", () => {
  audioVisualizationType = audioVisualizationTypeSelect.value;
  console.log({ audioVisualizationType });
});
|
|
332
|
+
|
|
333
|
+
// Render loop: draws the analyser data onto the canvas every animation frame,
// as either a time-domain waveform or a frequency-bar (FFT) display.
// Skips drawing entirely (leaving the last frame) unless audio is streaming.
function draw() {
  requestAnimationFrame(draw);

  if (device.microphoneStatus != "streaming") {
    return;
  }

  if (audioVisualizationType == "waveform") {
    analyser.getByteTimeDomainData(dataArray);

    // Light gray background, black 2px trace.
    canvasCtx.fillStyle = "rgb(200 200 200)";
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

    canvasCtx.lineWidth = 2;
    canvasCtx.strokeStyle = "rgb(0 0 0)";

    canvasCtx.beginPath();

    // One horizontal slice per sample across the full canvas width.
    const sliceWidth = (canvas.width * 1.0) / bufferLength;
    let x = 0;

    for (let i = 0; i < bufferLength; i++) {
      // Byte samples are centered at 128; v == 1.0 is the vertical midline.
      const v = dataArray[i] / 128.0;
      const y = (v * canvas.height) / 2;

      if (i === 0) {
        canvasCtx.moveTo(x, y);
      } else {
        canvasCtx.lineTo(x, y);
      }

      x += sliceWidth;
    }

    // Close the trace out to the right edge at the midline.
    canvasCtx.lineTo(canvas.width, canvas.height / 2);
    canvasCtx.stroke();
  } else if (audioVisualizationType == "fft") {
    analyser.getByteFrequencyData(dataArray); // Fill dataArray with frequency data

    canvasCtx.fillStyle = "rgb(0, 0, 0)";
    canvasCtx.fillRect(0, 0, canvas.width, canvas.height);

    // Bars are widened 2.5x, so only the lower part of the spectrum fits.
    const barWidth = (canvas.width / bufferLength) * 2.5;
    let x = 0;

    for (let i = 0; i < bufferLength; i++) {
      const barHeight = dataArray[i];

      // Louder bins get a brighter red.
      canvasCtx.fillStyle = `rgb(${barHeight + 100}, 50, 50)`;
      canvasCtx.fillRect(
        x,
        canvas.height - barHeight / 2,
        barWidth,
        barHeight / 2
      );

      x += barWidth + 1;
    }
  }
}

draw();
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
// Shared plumbing for the model demos: tracks which model is selected in the
// <select> and dispatches select/deselect/audio callbacks registered via
// registerModel() below.

/** @type {HTMLAudioElement} */
const microphoneRecordingAudio = document.getElementById("microphoneRecording");

/** @type {HTMLAudioElement} */
const microphoneStreamAudio = document.getElementById("microphoneStream");

const modelResultsElement = document.getElementById("modelResults");

/** @type {HTMLSelectElement} */
const modelTypeSelect = document.getElementById("modelType");
/** @type {HTMLOptGroupElement} */
const modelTypeOptgroup = modelTypeSelect.querySelector("optgroup");

// Keyed by model name (the option label passed to registerModel).
const modelTypeSelectedCallbacks = {};
const modelTypeDeselectedCallbacks = {};

let modelType = modelTypeSelect.value;
modelTypeSelect.addEventListener("input", () => {
  // Deselect the outgoing model before switching to the new one.
  modelTypeDeselectedCallbacks[modelType]?.();
  modelType = modelTypeSelect.value;
  console.log({ modelType });
  modelTypeSelectedCallbacks[modelType]?.(
    microphoneStreamAudio,
    modelResultsElement
  );
});

const audioCallbacks = {};
// Forward each newly loaded recording to the active model's audio callback,
// clearing any previous results first.
microphoneRecordingAudio.addEventListener("loadeddata", () => {
  modelResultsElement.innerHTML = "";
  audioCallbacks[modelType]?.(microphoneRecordingAudio, modelResultsElement);
});
|
|
33
|
+
|
|
34
|
+
/**
 * Registers an audio model with the model-type <select>, adding an option for
 * it and storing its lifecycle callbacks under its name.
 *
 * @param {string} name - label shown in the select and used as the callback key
 * @param {(microphoneStreamAudio: HTMLAudioElement, modelResultsElement: HTMLElement) => void} onSelect
 *   invoked when the user switches to this model
 * @param {() => void} onDeselect - invoked when the user switches away
 * @param {(microphoneRecordingAudio: HTMLAudioElement, modelResultsElement: HTMLElement) => void} onAudio
 *   invoked whenever a new recording finishes loading
 */
export function registerModel(name, onSelect, onDeselect, onAudio) {
  const option = new Option(name);
  modelTypeOptgroup.appendChild(option);

  modelTypeSelectedCallbacks[name] = onSelect;
  modelTypeDeselectedCallbacks[name] = onDeselect;
  audioCallbacks[name] = onAudio;
}
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
import {
  AutoTokenizer,
  AutoProcessor,
  WhisperForConditionalGeneration,
  TextStreamer,
  full,
} from "https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.4";

import { registerModel } from "./utils.js";

// Whisper operates on 16 kHz audio; cap each transcription pass at 30 s.
const WHISPER_SAMPLING_RATE = 16_000;
const MAX_AUDIO_LENGTH = 30; // seconds
const MAX_SAMPLES = WHISPER_SAMPLING_RATE * MAX_AUDIO_LENGTH;
const MAX_NEW_TOKENS = 64;

// Set once on first selection of this model (see registerModel call below).
/** @type {HTMLAudioElement} */
let _microphoneStreamAudio;
/** @type {MediaStream} */
let stream;

/** @type {MediaRecorder} */
let mediaRecorder;

// Lazily-loaded model state, populated by loadModel().
let model_id = null;
let tokenizer = null;
let processor = null;
let model = null;
let loadedModel = false;
// Guards against overlapping transcription passes.
let isProcessing = false;
// Recorded audio chunks accumulated since recording started.
let chunks = [];
let isRunning = false;

// Download/compile progress events from transformers.js; logging disabled.
const progress_callback = (progress) => {
  //console.log("progress_callback", progress);
};
|
|
36
|
+
|
|
37
|
+
// Downloads and initializes the Whisper tokenizer, processor, and model on
// WebGPU, then runs a 1-token generation as a warm-up pass before marking the
// model loaded. Errors propagate to the awaiting caller (no try/catch here).
const loadModel = async () => {
  console.log("creating model");
  model_id = "onnx-community/whisper-base";

  tokenizer = await AutoTokenizer.from_pretrained(model_id, {
    progress_callback,
  });
  processor = await AutoProcessor.from_pretrained(model_id, {
    progress_callback,
  });

  model = await WhisperForConditionalGeneration.from_pretrained(model_id, {
    dtype: {
      encoder_model: "fp32", // 'fp16' works too
      decoder_model_merged: "q4", // or 'fp32' ('fp16' is broken)
    },
    device: "webgpu",
    progress_callback,
  });

  console.log(model);

  // Warm-up: [1, 80, 3000] matches Whisper's mel-spectrogram input shape
  // (80 mel bins x 3000 frames) — presumably to trigger shader compilation.
  await model.generate({
    input_features: full([1, 80, 3000], 0.0),
    max_new_tokens: 1,
  });
  loadedModel = true;
  console.log("created model", model);
};
|
|
66
|
+
|
|
67
|
+
// Register the realtime-Whisper demo: on selection, lazily load the model,
// record the device's microphone stream with a MediaRecorder, and transcribe
// the accumulated audio every time a data chunk arrives.
registerModel(
  "whisper realtime",
  async (microphoneStreamAudio, modelResultsElement) => {
    if (!loadedModel) {
      await loadModel();
    }
    // One-time MediaRecorder setup on the first selection.
    if (!_microphoneStreamAudio) {
      _microphoneStreamAudio = microphoneStreamAudio;
      stream = _microphoneStreamAudio.srcObject;
      console.log({ stream });
      mediaRecorder = new MediaRecorder(stream);
      mediaRecorder.ondataavailable = (e) => {
        // console.log("ondataavailable", e);
        if (e.data.size > 0) {
          chunks = [...chunks, e.data];

          if (chunks.length > 0) {
            // Generate from data
            // NOTE(review): "wav" is not a full MIME type, and MediaRecorder
            // typically emits webm/ogg — consider mediaRecorder.mimeType here.
            const blob = new Blob(chunks, { type: "wav" });

            const fileReader = new FileReader();

            fileReader.onloadend = async () => {
              const arrayBuffer = fileReader.result;
              // NOTE(review): `audioContext` is not defined in this module; it
              // resolves to the global set by script.js (window.audioContext).
              const decoded = await audioContext.decodeAudioData(arrayBuffer);
              let audio = decoded.getChannelData(0);
              if (audio.length > MAX_SAMPLES) {
                // Get last MAX_SAMPLES
                audio = audio.slice(-MAX_SAMPLES);
              }

              // Drop this pass if a previous transcription is still running.
              if (isProcessing) return;
              isProcessing = true;

              // Tokens-per-second bookkeeping for the streamer callback.
              let startTime;
              let numTokens = 0;
              const callback_function = (output) => {
                startTime ??= performance.now();

                let tps;
                if (numTokens++ > 0) {
                  tps = (numTokens / (performance.now() - startTime)) * 1000;
                }
                //console.log({ output, tps, numTokens });
              };

              const streamer = new TextStreamer(tokenizer, {
                skip_prompt: true,
                skip_special_tokens: true,
                callback_function,
              });

              // Convert raw samples to Whisper input features, then decode.
              const inputs = await processor(audio);

              const outputs = await model.generate({
                ...inputs,
                max_new_tokens: MAX_NEW_TOKENS,
                language: "en",
                streamer,
              });

              const outputText = tokenizer.batch_decode(outputs, {
                skip_special_tokens: true,
              });

              console.log("outputText", outputText);

              modelResultsElement.innerText = outputText;

              isProcessing = false;
            };
            fileReader.readAsArrayBuffer(blob);
          } else {
            mediaRecorder.requestData();
          }
        } else {
          // Empty chunk received, so we request new data after a short timeout
          setTimeout(() => {
            mediaRecorder.requestData();
          }, 25);
        }
      };
      mediaRecorder.onstart = () => {
        isRunning = true;
        console.log({ isRunning });
        chunks = [];
      };
      mediaRecorder.onstop = () => {
        isRunning = false;
        console.log({ isRunning });
      };
      console.log("starting mediaRecorder", mediaRecorder);
    }
    // Emit a data chunk every 500 ms.
    mediaRecorder.start(500);
  },
  () => {
    mediaRecorder.stop();
  },
  // This demo transcribes live audio only; finished recordings are ignored.
  async (microphoneRecordingAudio, mediaResultsElement) => {}
);
|