brilliantsole 0.0.28 → 0.0.29
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- package/build/brilliantsole.cjs +656 -158
- package/build/brilliantsole.cjs.map +1 -1
- package/build/brilliantsole.js +656 -158
- package/build/brilliantsole.js.map +1 -1
- package/build/brilliantsole.ls.js +656 -158
- package/build/brilliantsole.ls.js.map +1 -1
- package/build/brilliantsole.min.js +1 -1
- package/build/brilliantsole.min.js.map +1 -1
- package/build/brilliantsole.module.d.ts +75 -15
- package/build/brilliantsole.module.js +654 -159
- package/build/brilliantsole.module.js.map +1 -1
- package/build/brilliantsole.module.min.d.ts +75 -15
- package/build/brilliantsole.module.min.js +1 -1
- package/build/brilliantsole.module.min.js.map +1 -1
- package/build/brilliantsole.node.module.d.ts +75 -15
- package/build/brilliantsole.node.module.js +654 -159
- package/build/brilliantsole.node.module.js.map +1 -1
- package/build/dts/BS.d.ts +1 -0
- package/build/dts/Device.d.ts +25 -7
- package/build/dts/MicrophoneManager.d.ts +88 -0
- package/build/dts/connection/BaseConnectionManager.d.ts +2 -2
- package/build/dts/devicePair/DevicePair.d.ts +5 -5
- package/build/dts/sensor/SensorDataManager.d.ts +3 -3
- package/build/dts/utils/AudioUtils.d.ts +2 -0
- package/build/index.d.ts +75 -15
- package/build/index.node.d.ts +75 -15
- package/examples/basic/index.html +108 -53
- package/examples/basic/script.js +248 -21
- package/examples/camera/barcode-detector.js +109 -0
- package/examples/camera/depth-estimation.js +71 -0
- package/examples/camera/face-detector.js +119 -0
- package/examples/camera/face-landmark.js +111 -0
- package/examples/camera/gesture-recognition.js +97 -0
- package/examples/camera/hand-landmark.js +74 -0
- package/examples/camera/image-segmentation.js +98 -0
- package/examples/camera/image-to-text.js +43 -0
- package/examples/camera/image-upscale.js +75 -0
- package/examples/camera/index.html +129 -0
- package/examples/camera/object-detection.js +98 -0
- package/examples/camera/pose-landmark.js +60 -0
- package/examples/camera/script.js +316 -0
- package/examples/camera/utils.js +165 -0
- package/examples/camera/yolo-tiny.js +54 -0
- package/examples/camera/yolo.js +119 -0
- package/examples/edge-impulse/script.js +23 -5
- package/examples/glasses-gestures/README.md +11 -0
- package/examples/glasses-gestures/edge-impulse-standalone.js +7228 -0
- package/examples/glasses-gestures/edge-impulse-standalone.wasm +0 -0
- package/examples/glasses-gestures/index.html +69 -0
- package/examples/glasses-gestures/run-impulse.js +135 -0
- package/examples/glasses-gestures/script.js +226 -0
- package/examples/microphone/gender.js +54 -0
- package/examples/microphone/index.html +102 -0
- package/examples/microphone/script.js +394 -0
- package/examples/microphone/utils.js +45 -0
- package/examples/microphone/whisper-realtime.js +166 -0
- package/examples/microphone/whisper.js +132 -0
- package/examples/ukaton-firmware-update/merged-firmware.bin +0 -0
- package/examples/webxr-3/components/bs-camera.js +65 -0
- package/examples/webxr-3/index.html +134 -0
- package/examples/webxr-3/script.js +432 -0
- package/package.json +1 -1
- package/src/BS.ts +9 -0
- package/src/CameraManager.ts +4 -6
- package/src/Device.ts +110 -0
- package/src/MicrophoneManager.ts +599 -0
- package/src/connection/BaseConnectionManager.ts +2 -0
- package/src/sensor/SensorDataManager.ts +5 -0
- package/src/utils/AudioUtils.ts +65 -0
Binary file (contents not shown)
package/examples/glasses-gestures/index.html
@@ -0,0 +1,69 @@
+<!DOCTYPE html>
+<html>
+  <head>
+    <meta charset="utf-8" />
+    <title>Glasses Gestures</title>
+
+    <style>
+      #features {
+        width: 50%;
+        font-size: 18px;
+      }
+
+      #results {
+        font-family: monospace;
+        white-space: pre;
+      }
+    </style>
+  </head>
+  <body>
+    <nav>
+      <a href="../../">home</a>
+    </nav>
+
+    <h1 id="title"></h1>
+
+    <button id="toggleConnection">connect</button>
+    <h1 id="gesture"></h1>
+    <p id="results"></p>
+
+    <script src="edge-impulse-standalone.js"></script>
+    <script src="run-impulse.js"></script>
+    <script src="./script.js" type="module"></script>
+    <script>
+      (async () => {
+        return;
+
+        var classifier = new EdgeImpulseClassifier();
+        await classifier.init();
+
+        let project = classifier.getProjectInfo();
+        document.querySelector("h1").textContent =
+          project.owner +
+          " / " +
+          project.name +
+          " (version " +
+          project.deploy_version +
+          ")";
+
+        document.querySelector("#run-inference").onclick = () => {
+          try {
+            let features = document
+              .querySelector("#features")
+              .value.split(",")
+              .map((x) => Number(x.trim()));
+            console.log("classify", features);
+            let res = classifier.classify(features);
+            document.querySelector("#results").textContent = JSON.stringify(
+              res,
+              null,
+              4
+            );
+          } catch (ex) {
+            alert("Failed to classify: " + (ex.message || ex.toString()));
+          }
+        };
+      })();
+    </script>
+  </body>
+</html>
package/examples/glasses-gestures/run-impulse.js
@@ -0,0 +1,135 @@
+// Classifier module
+let classifierInitialized = false;
+Module.onRuntimeInitialized = function() {
+    classifierInitialized = true;
+};
+
+class EdgeImpulseClassifier {
+    _initialized = false;
+
+    init() {
+        if (classifierInitialized === true) return Promise.resolve();
+
+        return new Promise((resolve) => {
+            Module.onRuntimeInitialized = () => {
+                classifierInitialized = true;
+                Module.init();
+                resolve();
+            };
+        });
+    }
+
+    getProjectInfo() {
+        if (!classifierInitialized) throw new Error('Module is not initialized');
+        return this._convertToOrdinaryJsObject(Module.get_project(), Module.emcc_classification_project_t.prototype);
+    }
+
+    classify(rawData, debug = false) {
+        if (!classifierInitialized) throw new Error('Module is not initialized');
+
+        const obj = this._arrayToHeap(rawData);
+        let ret = Module.run_classifier(obj.buffer.byteOffset, rawData.length, debug);
+        Module._free(obj.ptr);
+
+        if (ret.result !== 0) {
+            throw new Error('Classification failed (err code: ' + ret.result + ')');
+        }
+
+        return this._fillResultStruct(ret);
+    }
+
+    classifyContinuous(rawData, enablePerfCal = true) {
+        if (!classifierInitialized) throw new Error('Module is not initialized');
+
+        const obj = this._arrayToHeap(rawData);
+        let ret = Module.run_classifier_continuous(obj.buffer.byteOffset, rawData.length, false, enablePerfCal);
+        Module._free(obj.ptr);
+
+        if (ret.result !== 0) {
+            throw new Error('Classification failed (err code: ' + ret.result + ')');
+        }
+
+        return this._fillResultStruct(ret);
+    }
+
+    getProperties() {
+        if (!classifierInitialized) throw new Error('Module is not initialized');
+        return this._convertToOrdinaryJsObject(Module.get_properties(), Module.emcc_classification_properties_t.prototype);
+    }
+
+    /**
+     * Override the threshold on a learn block (you can find thresholds via getProperties().thresholds)
+     * @param {*} obj, e.g. { id: 16, min_score: 0.2 } to set min. object detection threshold to 0.2 for block ID 16
+     */
+    setThreshold(obj) {
+        const ret = Module.set_threshold(obj);
+        if (!ret.success) {
+            throw new Error(ret.error);
+        }
+    }
+
+    _arrayToHeap(data) {
+        let typedArray = new Float32Array(data);
+        let numBytes = typedArray.length * typedArray.BYTES_PER_ELEMENT;
+        let ptr = Module._malloc(numBytes);
+        let heapBytes = new Uint8Array(Module.HEAPU8.buffer, ptr, numBytes);
+        heapBytes.set(new Uint8Array(typedArray.buffer));
+        return { ptr: ptr, buffer: heapBytes };
+    }
+
+    _convertToOrdinaryJsObject(emboundObj, prototype) {
+        let newObj = { };
+        for (const key of Object.getOwnPropertyNames(prototype)) {
+            const descriptor = Object.getOwnPropertyDescriptor(prototype, key);
+
+            if (descriptor && typeof descriptor.get === 'function') {
+                newObj[key] = emboundObj[key]; // Evaluates the getter and assigns as an own property.
+            }
+        }
+        return newObj;
+    }
+
+    _fillResultStruct(ret) {
+        let props = Module.get_properties();
+
+        let jsResult = {
+            anomaly: ret.anomaly,
+            results: []
+        };
+
+        for (let cx = 0; cx < ret.size(); cx++) {
+            let c = ret.get(cx);
+            if (props.model_type === 'object_detection' || props.model_type === 'constrained_object_detection') {
+                jsResult.results.push({ label: c.label, value: c.value, x: c.x, y: c.y, width: c.width, height: c.height });
+            }
+            else {
+                jsResult.results.push({ label: c.label, value: c.value });
+            }
+            c.delete();
+        }
+
+        if (props.has_object_tracking) {
+            jsResult.object_tracking_results = [];
+            for (let cx = 0; cx < ret.object_tracking_size(); cx++) {
+                let c = ret.object_tracking_get(cx);
+                jsResult.object_tracking_results.push({ object_id: c.object_id, label: c.label, value: c.value, x: c.x, y: c.y, width: c.width, height: c.height });
+                c.delete();
+            }
+        }
+
+        if (props.has_visual_anomaly_detection) {
+            jsResult.visual_ad_max = ret.visual_ad_max;
+            jsResult.visual_ad_mean = ret.visual_ad_mean;
+            jsResult.visual_ad_grid_cells = [];
+            for (let cx = 0; cx < ret.visual_ad_grid_cells_size(); cx++) {
+                let c = ret.visual_ad_grid_cells_get(cx);
+                jsResult.visual_ad_grid_cells.push({ label: c.label, value: c.value, x: c.x, y: c.y, width: c.width, height: c.height });
+                c.delete();
+            }
+        }
+
+        ret.delete();
+
+        return jsResult;
+    }
+}
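
The wrapper above is the standard Edge Impulse WebAssembly glue. A minimal usage sketch, assuming edge-impulse-standalone.js has already been loaded (so the global Module exists) and that the feature array length matches what the impulse was trained on — the window size of 90 below is illustrative, not fixed by the wrapper:

const classifier = new EdgeImpulseClassifier();
await classifier.init(); // resolves once the WASM runtime is up and Module.init() has run

const project = classifier.getProjectInfo();
console.log(`${project.owner} / ${project.name} (version ${project.deploy_version})`);

// One flattened window of sensor readings; classify() copies it into WASM
// heap memory, runs the impulse, then frees the buffer.
const features = new Array(90).fill(0); // hypothetical window size, for illustration
const result = classifier.classify(features);
result.results.forEach(({ label, value }) => console.log(label, value.toFixed(2)));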
package/examples/glasses-gestures/script.js
@@ -0,0 +1,226 @@
+import * as BS from "../../build/brilliantsole.module.js";
+window.BS = BS;
+console.log(BS);
+
+// MODEL
+var classifier = new EdgeImpulseClassifier();
+await classifier.init();
+window.classifier = classifier;
+
+let project = classifier.getProjectInfo();
+document.querySelector("#title").textContent =
+  project.owner +
+  " / " +
+  project.name +
+  " (version " +
+  project.deploy_version +
+  ")";
+
+const gestureTitle = document.getElementById("gesture");
+
+let threshold = 0.5;
+/** @param {number[]} features */
+function classify(features) {
+  try {
+    let res = classifier.classify(features);
+    document.querySelector("#results").textContent = JSON.stringify(
+      res,
+      null,
+      4
+    );
+    res.results.forEach(({ label, value }, index) => {
+      if (index == 0) {
+        return;
+      }
+      if (value > threshold) {
+        const gesture = label.split("_")[1];
+        console.log({ gesture });
+        if (true) {
+          gestureTitle.innerText = gesture;
+        } else {
+          gestureTitle.innerText = `detected "${gesture}" gesture (score: ${value.toFixed(
+            2
+          )})`;
+        }
+
+        setTimeout(() => {
+          gestureTitle.innerText = "";
+        }, gestureDelay - 100);
+        lastTimeGestureRecognized = Date.now();
+      }
+    });
+  } catch (ex) {
+    alert("Failed to classify: " + (ex.message || ex.toString()));
+  }
+}
+
+// DEVICE
+
+const device = new BS.Device();
+console.log({ device });
+window.device = device;
+
+// CONNECT
+
+const toggleConnectionButton = document.getElementById("toggleConnection");
+toggleConnectionButton.addEventListener("click", () =>
+  device.toggleConnection()
+);
+device.addEventListener("connectionStatus", () => {
+  let disabled = false;
+  let innerText = device.connectionStatus;
+  switch (device.connectionStatus) {
+    case "notConnected":
+      innerText = "connect";
+      break;
+    case "connected":
+      innerText = "disconnect";
+      break;
+  }
+  toggleConnectionButton.disabled = disabled;
+  toggleConnectionButton.innerText = innerText;
+});
+
+// SENSOR CONFIG
+
+const sensorRate = 20;
+/** @type {BS.SensorType[]} */
+const sensorTypes = ["acceleration"];
+/** @type {BS.SensorConfiguration} */
+const sensorConfiguration = {};
+sensorTypes.forEach((sensorType) => {
+  sensorConfiguration[sensorType] = sensorRate;
+
+  device.addEventListener(sensorType, (event) => {
+    let data = [];
+    switch (event.message.sensorType) {
+      case "pressure":
+        data = event.message.pressure.sensors.map((sensor) => sensor.rawValue);
+        break;
+      case "acceleration":
+        {
+          const { x, y, z } = event.message.acceleration;
+          data = [x, y, z];
+        }
+        break;
+      case "linearAcceleration":
+        {
+          const { x, y, z } = event.message.linearAcceleration;
+          data = [x, y, z];
+        }
+        break;
+      case "gyroscope":
+        {
+          const { x, y, z } = event.message.gyroscope;
+          data = [x, y, z];
+        }
+        break;
+      case "magnetometer":
+        {
+          const { x, y, z } = event.message.magnetometer;
+          data = [x, y, z];
+        }
+        break;
+    }
+    data = data.map((value) => value * sensorScalars[sensorType]);
+    appendData(event.message.timestamp, sensorType, data);
+  });
+});
+
+device.addEventListener("connected", () => {
+  device.setSensorConfiguration(sensorConfiguration);
+});
+
+const sensorScalars = {
+  pressure: 1 / (2 ** 16 - 1),
+  acceleration: 1 / 4,
+  linearAcceleration: 1 / 4,
+  gyroscope: 1 / 720,
+  magnetometer: 1 / 2500,
+};
+
+// BUFFER
+const time = 600; // ms
+const numberOfSamples = time / sensorRate;
+const numberOfFeaturesInEachSensorType = {};
+BS.SensorTypes.forEach((sensorType) => {
+  switch (sensorType) {
+    case "pressure":
+      numberOfFeaturesInEachSensorType[sensorType] = 8; // change to 16 for ukaton
+      break;
+    case "linearAcceleration":
+    case "acceleration":
+    case "gyroscope":
+    case "magnetometer":
+      numberOfFeaturesInEachSensorType[sensorType] = 3;
+      break;
+  }
+});
+let numberOfFeaturesInOneSample = 0;
+sensorTypes.forEach((sensorType) => {
+  numberOfFeaturesInOneSample += numberOfFeaturesInEachSensorType[sensorType];
+});
+const numberOfFeatures = numberOfFeaturesInOneSample * numberOfSamples;
+console.log({
+  time,
+  numberOfSamples,
+  numberOfFeaturesInOneSample,
+  numberOfFeatures,
+});
+const samples = [];
+let pendingSample;
+let lastTimeClassified = 0;
+let lastTimeGestureRecognized = 0;
+let classificationDelay = 0;
+let gestureDelay = 1000;
+let isClassifying = false;
+/**
+ * @param {number} timestamp
+ * @param {BS.SensorType} sensorType
+ * @param {number[]} data
+ */
+function appendData(timestamp, sensorType, data) {
+  //console.log({ timestamp, sensorType, data });
+  if (!pendingSample || timestamp != pendingSample.timestamp) {
+    pendingSample = { timestamp };
+    //console.log("pendingSample", pendingSample);
+  }
+  pendingSample[sensorType] = data;
+  const gotAllSensorSamples = sensorTypes.every(
+    (sensorType) => sensorType in pendingSample
+  );
+  if (gotAllSensorSamples) {
+    //console.log("got all samples");
+    samples.push(pendingSample);
+    pendingSample = undefined;
+  }
+
+  //console.log(`collected ${samples.length} samples`);
+
+  while (samples.length > numberOfSamples) {
+    samples.shift();
+  }
+
+  if (!isClassifying && samples.length == numberOfSamples) {
+    const now = Date.now();
+    if (
+      now - lastTimeGestureRecognized < gestureDelay ||
+      now - lastTimeClassified < classificationDelay
+    ) {
+      return;
+    }
+    const features = [];
+    samples.forEach((sample) => {
+      const _features = [];
+      sensorTypes.forEach((sensorType) => {
+        _features.push(...sample[sensorType]);
+        features.push(..._features);
+      });
+    });
+    isClassifying = true;
+    //console.log("classifying", features);
+    classify(features);
+    isClassifying = false;
+    lastTimeClassified = now;
+  }
+}
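
The script keeps a sliding window of samples sized to match the impulse's expected input. A quick sanity check of the arithmetic it logs, using the constants from the script above (note that sensorRate is the sensor period in milliseconds, despite the name):

const sensorRate = 20; // ms between samples
const time = 600; // ms of history per classification window
const numberOfSamples = time / sensorRate; // 30 samples in the window
const featuresPerSample = 3; // "acceleration" contributes x, y, z
console.log(numberOfSamples * featuresPerSample); // 90 features handed to classify()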
package/examples/microphone/gender.js
@@ -0,0 +1,54 @@
+import { pipeline } from "https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.4";
+
+import { registerModel } from "./utils.js";
+
+let classifier = undefined;
+let isRunning = false;
+
+const createClassifier = async () => {
+  console.log("creating classifier");
+  classifier = await pipeline(
+    "audio-classification",
+    "Xenova/wav2vec2-large-xlsr-53-gender-recognition-librispeech"
+  );
+  console.log("created classifier", classifier);
+};
+
+registerModel(
+  "gender",
+  () => {
+    if (!classifier) {
+      createClassifier();
+    }
+  },
+  () => {},
+  async (microphoneRecordingAudio, mediaResultsElement) => {
+    if (!classifier) {
+      console.error("classifier not created yet");
+      return;
+    }
+    if (isRunning) {
+      return;
+    }
+    isRunning = true;
+    const output = await classifier(microphoneRecordingAudio.src);
+    isRunning = false;
+
+    console.log("output", output);
+    let maxLabel, maxScore;
+    output.forEach(({ label, score }) => {
+      maxLabel ??= label;
+      maxScore ??= score;
+      if (score > maxScore) {
+        maxScore = score;
+        maxLabel = label;
+      }
+    });
+    mediaResultsElement.innerText = `${maxLabel}: ${maxScore.toFixed(3)}`;
+  }
+);
+
+const sampleOutput = [
+  { label: "male", score: 0.9976564049720764 },
+  { label: "female", score: 0.002343568252399564 },
+];
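
utils.js is not shown in this diff, but from this call site registerModel appears to take a model name, a "selected" callback, a "deselected" callback, and an async handler that receives the recording's <audio> element and a results element. A hedged sketch of registering a second transformers.js model through that inferred signature (the model name and checkpoint id below are illustrative assumptions, not part of the package):

import { pipeline } from "https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.2.4";
import { registerModel } from "./utils.js";

let recognizer;
registerModel(
  "speech-commands", // hypothetical model name for this example
  async () => {
    // lazily create the pipeline the first time the model is selected
    recognizer ??= await pipeline(
      "audio-classification",
      "Xenova/ast-finetuned-speech-commands-v2" // assumed checkpoint id; substitute any audio-classification model
    );
  },
  () => {},
  async (audioElement, resultsElement) => {
    if (!recognizer) return;
    const output = await recognizer(audioElement.src);
    resultsElement.innerText = output
      .map(({ label, score }) => `${label}: ${score.toFixed(3)}`)
      .join("\n");
  }
);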
package/examples/microphone/index.html
@@ -0,0 +1,102 @@
+<html>
+  <head>
+    <title>Microphone | BrilliantSole JavaScript SDK</title>
+    <meta name="viewport" content="width=device-width, initial-scale=1" />
+    <link rel="shortcut icon" type="image/x-icon" href="/favicon.ico" />
+    <script src="https://unpkg.com/peaks.js/dist/peaks.js"></script>
+    <script type="module" src="./script.js"></script>
+    <script type="module" src="./whisper.js"></script>
+    <script type="module" src="./whisper-realtime.js"></script>
+    <script type="module" src="./gender.js"></script>
+  </head>
+  <style>
+    #zoomview-container,
+    #overview-container {
+      width: 1000px;
+      height: 100px;
+    }
+    .highlighted {
+      color: green;
+    }
+  </style>
+  <body>
+    <nav>
+      <a href="../../">home</a>
+    </nav>
+
+    <h1>Microphone | BrilliantSole JavaScript SDK</h1>
+
+    <button id="toggleConnection">connect</button>
+    <label>
+      model type
+      <select id="modelType">
+        <optgroup label="model type">
+          <option>none</option>
+        </optgroup>
+      </select>
+    </label>
+    <br />
+
+    <label><b>status:</b> <span id="microphoneStatus"></span></label>
+
+    <button disabled hidden id="toggleMicrophone">start microphone</button>
+    <button disabled id="startMicrophone">start microphone</button>
+    <button disabled id="stopMicrophone">stop microphone</button>
+    <button disabled id="enableMicrphoneVad">
+      enable microphone voice activity detection
+    </button>
+    <br />
+    <button disabled id="toggleMicrophoneRecording">start recording</button>
+
+    <br />
+    <label>
+      <b>microphone configuration: </b>
+      <pre id="microphoneConfigurationPre"></pre>
+    </label>
+    <br />
+
+    <div id="microphoneConfiguration">
+      <template id="microphoneConfigurationTypeTemplate">
+        <label class="microphoneConfigurationType">
+          <b class="type"></b> <span hidden></span>
+          <select disabled>
+            <optgroup></optgroup>
+          </select>
+        </label>
+      </template>
+    </div>
+
+    <div>
+      <b>stream</b>
+      <br />
+      <audio id="microphoneStream" controls autoplay></audio>
+    </div>
+
+    <label>
+      visualization type
+      <select id="audioVisualizationType">
+        <optgroup label="type">
+          <option>waveform</option>
+          <option>fft</option>
+        </optgroup>
+      </select>
+    </label>
+    <br />
+    <canvas id="audioVisualizer" width="1000" height="200"></canvas>
+
+    <h1 id="modelResults"></h1>
+
+    <div>
+      <b>recording</b>
+      <label>
+        auto play
+        <input type="checkbox" id="autoPlayMicrphoneRecording" />
+      </label>
+      <br />
+      <audio id="microphoneRecording" controls></audio>
+    </div>
+
+    <div id="zoomview-container"></div>
+    <div id="overview-container"></div>
+  </body>
+</html>
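
The page's script.js is not included in this section, but the <template id="microphoneConfigurationTypeTemplate"> above follows the standard HTML template pattern: clone the content once per configuration type, fill in the label, then append the clone. A minimal sketch of that pattern using only standard DOM APIs (the configuration type names are placeholders, not the SDK's actual values):

const template = document.getElementById("microphoneConfigurationTypeTemplate");
const container = document.getElementById("microphoneConfiguration");

["sampleRate", "bitDepth"].forEach((type) => { // placeholder type names
  const clone = template.content.cloneNode(true); // deep-clone the template content
  clone.querySelector(".type").textContent = type;
  container.appendChild(clone);
});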