brilliantsole 0.0.27 → 0.0.29
This diff shows the publicly released contents of these package versions as they appear in their public registry; it is provided for informational purposes only.
- package/assets/3d/anchor.glb +0 -0
- package/assets/3d/coin.glb +0 -0
- package/assets/3d/glasses.glb +0 -0
- package/assets/audio/bounceMedium.wav +0 -0
- package/assets/audio/bounceStrong.wav +0 -0
- package/assets/audio/bounceWeak.wav +0 -0
- package/assets/audio/coin.wav +0 -0
- package/assets/audio/getUp.wav +0 -0
- package/assets/audio/grab.wav +0 -0
- package/assets/audio/kick.wav +0 -0
- package/assets/audio/platterFadeIn old.wav +0 -0
- package/assets/audio/platterFadeIn.wav +0 -0
- package/assets/audio/platterFadeOut.wav +0 -0
- package/assets/audio/punch.wav +0 -0
- package/assets/audio/punchSqueak.wav +0 -0
- package/assets/audio/purr.wav +0 -0
- package/assets/audio/purrFadeOut.wav +0 -0
- package/assets/audio/release.wav +0 -0
- package/assets/audio/splat.wav +0 -0
- package/assets/audio/stomp.wav +0 -0
- package/build/brilliantsole.cjs +3091 -741
- package/build/brilliantsole.cjs.map +1 -1
- package/build/brilliantsole.js +2759 -709
- package/build/brilliantsole.js.map +1 -1
- package/build/brilliantsole.ls.js +2602 -543
- package/build/brilliantsole.ls.js.map +1 -1
- package/build/brilliantsole.min.js +1 -1
- package/build/brilliantsole.min.js.map +1 -1
- package/build/brilliantsole.module.d.ts +295 -65
- package/build/brilliantsole.module.js +2749 -710
- package/build/brilliantsole.module.js.map +1 -1
- package/build/brilliantsole.module.min.d.ts +295 -65
- package/build/brilliantsole.module.min.js +1 -1
- package/build/brilliantsole.module.min.js.map +1 -1
- package/build/brilliantsole.node.module.d.ts +289 -62
- package/build/brilliantsole.node.module.js +3080 -742
- package/build/brilliantsole.node.module.js.map +1 -1
- package/build/dts/BS-output.d.ts +10 -0
- package/build/dts/BS.d.ts +21 -8
- package/build/dts/CameraManager.d.ts +72 -0
- package/build/dts/Device.d.ts +64 -13
- package/build/dts/DeviceInformationManager.d.ts +4 -4
- package/build/dts/DeviceManager.d.ts +2 -0
- package/build/dts/FileTransferManager.d.ts +18 -8
- package/build/dts/InformationManager.d.ts +2 -0
- package/build/dts/MicrophoneManager.d.ts +88 -0
- package/build/dts/TfliteManager.d.ts +22 -2
- package/build/dts/WifiManager.d.ts +61 -0
- package/build/dts/connection/BaseConnectionManager.d.ts +35 -3
- package/build/dts/connection/ClientConnectionManager.d.ts +7 -2
- package/build/dts/connection/bluetooth/NobleConnectionManager.d.ts +2 -1
- package/build/dts/connection/bluetooth/WebBluetoothConnectionManager.d.ts +1 -0
- package/build/dts/connection/bluetooth/bluetoothUUIDs.d.ts +2 -2
- package/build/dts/connection/udp/UDPConnectionManager.d.ts +28 -0
- package/build/dts/connection/webSocket/WebSocketConnectionManager.d.ts +25 -0
- package/build/dts/devicePair/DevicePair.d.ts +5 -5
- package/build/dts/scanner/BaseScanner.d.ts +4 -1
- package/build/dts/scanner/NobleScanner.d.ts +2 -1
- package/build/dts/sensor/MotionSensorDataManager.d.ts +5 -2
- package/build/dts/sensor/SensorDataManager.d.ts +5 -4
- package/build/dts/server/BaseClient.d.ts +5 -3
- package/build/dts/server/ServerUtils.d.ts +1 -1
- package/build/dts/server/websocket/WebSocketUtils.d.ts +1 -1
- package/build/dts/utils/AudioUtils.d.ts +2 -0
- package/build/dts/utils/Console.d.ts +2 -0
- package/build/dts/utils/ThrottleUtils.d.ts +2 -0
- package/build/dts/vibration/VibrationManager.d.ts +19 -2
- package/build/index.d.ts +292 -62
- package/build/index.node.d.ts +286 -59
- package/examples/3d/scene.html +19 -5
- package/examples/3d-generic/index.html +144 -0
- package/examples/3d-generic/script.js +266 -0
- package/examples/basic/index.html +267 -17
- package/examples/basic/script.js +958 -105
- package/examples/camera/barcode-detector.js +109 -0
- package/examples/camera/depth-estimation.js +71 -0
- package/examples/camera/face-detector.js +119 -0
- package/examples/camera/face-landmark.js +111 -0
- package/examples/camera/gesture-recognition.js +97 -0
- package/examples/camera/hand-landmark.js +74 -0
- package/examples/camera/image-segmentation.js +98 -0
- package/examples/camera/image-to-text.js +43 -0
- package/examples/camera/image-upscale.js +75 -0
- package/examples/camera/index.html +129 -0
- package/examples/camera/object-detection.js +98 -0
- package/examples/camera/pose-landmark.js +60 -0
- package/examples/camera/script.js +316 -0
- package/examples/camera/utils.js +165 -0
- package/examples/camera/yolo-tiny.js +54 -0
- package/examples/camera/yolo.js +119 -0
- package/examples/edge-impulse/script.js +157 -48
- package/examples/edge-impulse-test/README.md +11 -0
- package/examples/edge-impulse-test/edge-impulse-standalone.js +7228 -0
- package/examples/edge-impulse-test/edge-impulse-standalone.wasm +0 -0
- package/examples/edge-impulse-test/index.html +75 -0
- package/examples/edge-impulse-test/run-impulse.js +135 -0
- package/examples/edge-impulse-test/script.js +200 -0
- package/examples/glasses-gestures/README.md +11 -0
- package/examples/glasses-gestures/edge-impulse-standalone.js +7228 -0
- package/examples/glasses-gestures/edge-impulse-standalone.wasm +0 -0
- package/examples/glasses-gestures/index.html +69 -0
- package/examples/glasses-gestures/run-impulse.js +135 -0
- package/examples/glasses-gestures/script.js +226 -0
- package/examples/gloves/edge-impulse-standalone.js +7228 -0
- package/examples/gloves/edge-impulse-standalone.wasm +0 -0
- package/examples/gloves/index.html +4 -1
- package/examples/gloves/run-impulse.js +135 -0
- package/examples/gloves/script.js +367 -51
- package/examples/graph/script.js +94 -37
- package/examples/microphone/gender.js +54 -0
- package/examples/microphone/index.html +102 -0
- package/examples/microphone/script.js +394 -0
- package/examples/microphone/utils.js +45 -0
- package/examples/microphone/whisper-realtime.js +166 -0
- package/examples/microphone/whisper.js +132 -0
- package/examples/punch/index.html +135 -0
- package/examples/punch/punch.tflite +0 -0
- package/examples/punch/script.js +169 -0
- package/examples/server/index.html +98 -22
- package/examples/server/script.js +317 -109
- package/examples/ukaton-firmware-update/merged-firmware.bin +0 -0
- package/examples/utils/aframe/aframe-master.min.js +2 -0
- package/examples/utils/aframe/bs-vibration.js +150 -0
- package/examples/utils/aframe/force-pushable.js +80 -0
- package/examples/utils/aframe/grabbable-anchor.js +46 -0
- package/examples/utils/aframe/grabbable-listener.js +31 -0
- package/examples/utils/aframe/grabbable-physics-body.js +190 -0
- package/examples/utils/aframe/grow-shrink.js +25 -0
- package/examples/utils/aframe/hand-punch.js +119 -0
- package/examples/utils/aframe/my-obb-collider.js +293 -0
- package/examples/utils/aframe/occlude-hand-tracking-controls.js +47 -0
- package/examples/utils/aframe/occlude-mesh.js +42 -0
- package/examples/utils/aframe/palm-up-detector.js +47 -0
- package/examples/utils/aframe/shadow-material.js +20 -0
- package/examples/utils/aframe/soft-shadow-light.js +9 -0
- package/examples/webxr-2/assets/3d/soccerBall.glb +0 -0
- package/examples/webxr-2/assets/audio/shellBounce.wav +0 -0
- package/examples/webxr-2/assets/audio/shellHit.wav +0 -0
- package/examples/webxr-2/assets/audio/shellKick.wav +0 -0
- package/examples/webxr-2/assets/audio/soccerBounce.wav +0 -0
- package/examples/webxr-2/assets/audio/soccerKick.mp3 +0 -0
- package/examples/webxr-2/assets/images/shellTexture.png +0 -0
- package/examples/webxr-2/components/bs-ankle.js +337 -0
- package/examples/webxr-2/components/coin.js +84 -0
- package/examples/webxr-2/components/custom-wrap.js +17 -0
- package/examples/webxr-2/components/goomba.js +3250 -0
- package/examples/webxr-2/components/init-shell-material.js +215 -0
- package/examples/webxr-2/components/platter.js +172 -0
- package/examples/webxr-2/components/shell.js +374 -0
- package/examples/webxr-2/components/soccer-ball.js +250 -0
- package/examples/webxr-2/components/squashed-goomba.js +249 -0
- package/examples/webxr-2/edge-impulse-standalone.js +7228 -0
- package/examples/webxr-2/edge-impulse-standalone.wasm +0 -0
- package/examples/webxr-2/index.html +996 -0
- package/examples/webxr-2/kick.tflite +0 -0
- package/examples/webxr-2/kick2.tflite +0 -0
- package/examples/webxr-2/run-impulse.js +135 -0
- package/examples/webxr-2/script.js +384 -0
- package/examples/webxr-3/components/bs-camera.js +65 -0
- package/examples/webxr-3/index.html +134 -0
- package/examples/webxr-3/script.js +432 -0
- package/package.json +2 -1
- package/src/.prettierrc +4 -0
- package/src/BS.ts +79 -8
- package/src/CameraManager.ts +497 -0
- package/src/Device.ts +691 -86
- package/src/DeviceInformationManager.ts +19 -10
- package/src/DeviceManager.ts +85 -25
- package/src/FileTransferManager.ts +145 -20
- package/src/InformationManager.ts +40 -15
- package/src/MicrophoneManager.ts +599 -0
- package/src/TfliteManager.ts +171 -25
- package/src/WifiManager.ts +323 -0
- package/src/connection/BaseConnectionManager.ts +130 -30
- package/src/connection/ClientConnectionManager.ts +34 -10
- package/src/connection/bluetooth/BluetoothConnectionManager.ts +8 -2
- package/src/connection/bluetooth/NobleConnectionManager.ts +147 -41
- package/src/connection/bluetooth/WebBluetoothConnectionManager.ts +99 -34
- package/src/connection/bluetooth/bluetoothUUIDs.ts +40 -13
- package/src/connection/udp/UDPConnectionManager.ts +356 -0
- package/src/connection/websocket/WebSocketConnectionManager.ts +282 -0
- package/src/devicePair/DevicePair.ts +95 -25
- package/src/devicePair/DevicePairPressureSensorDataManager.ts +27 -7
- package/src/scanner/BaseScanner.ts +49 -11
- package/src/scanner/NobleScanner.ts +76 -14
- package/src/sensor/MotionSensorDataManager.ts +21 -6
- package/src/sensor/PressureSensorDataManager.ts +37 -8
- package/src/sensor/SensorConfigurationManager.ts +73 -22
- package/src/sensor/SensorDataManager.ts +109 -23
- package/src/server/BaseClient.ts +150 -36
- package/src/server/BaseServer.ts +50 -2
- package/src/server/ServerUtils.ts +39 -9
- package/src/server/udp/UDPServer.ts +73 -22
- package/src/server/udp/UDPUtils.ts +9 -2
- package/src/server/websocket/WebSocketClient.ts +27 -7
- package/src/server/websocket/WebSocketUtils.ts +4 -2
- package/src/utils/AudioUtils.ts +65 -0
- package/src/utils/Console.ts +62 -9
- package/src/utils/ParseUtils.ts +24 -5
- package/src/utils/ThrottleUtils.ts +62 -0
- package/src/utils/Timer.ts +1 -1
- package/src/vibration/VibrationManager.ts +166 -40
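
The hunks that follow are the new `package/examples/camera/*.js` model scripts. They all share the same plumbing: each imports `registerModel` from the camera example's `utils.js` (also new in this release, +165 lines, not included in this diff) and passes it a display name, a lazy setup callback, and a per-frame callback. Since `utils.js` is not shown, the following is only a hedged sketch of the contract these scripts appear to rely on; the names `models` and `runSelectedModel` are illustrative guesses, not the package's actual implementation.

// Hypothetical sketch of the registerModel contract the camera examples assume.
// The real examples/camera/utils.js is not part of this diff.
const models = new Map(); // illustrative guess

export function registerModel(name, setup, onFrame) {
  // setup(): invoked when the model is selected; a place to lazy-load weights.
  // onFrame(image, canvas, context, modelResultsElement, ...): invoked per camera frame.
  models.set(name, { setup, onFrame });
}

// Illustrative driver: run the selected model against the latest frame.
export async function runSelectedModel(name, image, canvas, context, resultsElement) {
  const model = models.get(name);
  if (!model) return;
  model.setup();
  await model.onFrame(image, canvas, context, resultsElement);
}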
package/examples/camera/barcode-detector.js
@@ -0,0 +1,109 @@
+import { registerModel } from "./utils.js";
+
+let barcodeDetector;
+let drawBarcode = false;
+
+registerModel(
+  "barcode detector",
+  () => {
+    if (!barcodeDetector) {
+      BarcodeDetector.getSupportedFormats().then((supportedFormats) => {
+        console.log({ supportedFormats });
+        // create new detector
+        barcodeDetector = new BarcodeDetector({
+          formats: supportedFormats,
+        });
+        console.log("barcodeDetector", barcodeDetector);
+      });
+    }
+  },
+  async (image, canvas, context, modelResultsElement) => {
+    if (!barcodeDetector) {
+      console.log("barcodeDetector not created");
+      return;
+    }
+
+    const barcodes = await barcodeDetector.detect(image);
+
+    console.log("barcodes", barcodes);
+
+    const barcode = barcodes[0];
+    if (barcode) {
+      const { boundingBox, cornerPoints, rawValue, format } = barcode;
+
+      modelResultsElement.innerText = `${format}: ${rawValue}`;
+
+      const { x, y, width, height, top, right, bottom, left } = boundingBox;
+
+      const _x = (x / image.naturalWidth) * canvas.width;
+      const _y = (y / image.naturalHeight) * canvas.height;
+
+      const _width = (width / image.naturalWidth) * canvas.width;
+      const _height = (height / image.naturalHeight) * canvas.height;
+
+      if (drawBarcode) {
+        context.fillStyle = "rgba(0, 191, 255, 0.4)";
+        context.fillRect(_x, _y, _width, _height);
+
+        context.strokeStyle = "white";
+        context.lineWidth = 2;
+        context.strokeRect(_x, _y, _width, _height);
+      }
+
+      context.beginPath();
+      cornerPoints.forEach((cornerPoint, index) => {
+        const { x, y } = cornerPoint;
+
+        const _x = (x / image.naturalWidth) * canvas.width;
+        const _y = (y / image.naturalHeight) * canvas.height;
+
+        if (drawBarcode) {
+          if (index == 0) {
+            context.moveTo(_x, _y);
+          } else {
+            context.lineTo(_x, _y);
+          }
+        }
+      });
+
+      context.closePath();
+      context.fillStyle = "rgba(0, 191, 255, 0.4)";
+      context.fill();
+    }
+  }
+);
+
+const sampleBarcodes = [ // sample detect() result, kept for reference
+  {
+    boundingBox: {
+      x: 155.0115203857422,
+      y: 288.3844299316406,
+      width: 301.869873046875,
+      height: 298.4500427246094,
+      top: 288.3844299316406,
+      right: 456.8813934326172,
+      bottom: 586.83447265625,
+      left: 155.0115203857422,
+    },
+    cornerPoints: [
+      {
+        x: 384.11309814453125,
+        y: 288.3844299316406,
+      },
+      {
+        x: 456.8813781738281,
+        y: 512.93701171875,
+      },
+      {
+        x: 213.1260986328125,
+        y: 586.83447265625,
+      },
+      {
+        x: 155.0115203857422,
+        y: 341.5698547363281,
+      },
+    ],
+    format: "qr_code",
+    rawValue: "http://en.m.wikipedia.org",
+  },
+];
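
One caveat worth noting: `barcode-detector.js` uses the global `BarcodeDetector` without a feature check, and the Barcode Detection API is not available in every browser. A minimal guard, sketched here rather than taken from the package, would fail gracefully:

// Sketch: feature-detect the Barcode Detection API before constructing a detector.
if ("BarcodeDetector" in globalThis) {
  BarcodeDetector.getSupportedFormats().then((formats) => {
    console.log("supported formats", formats);
  });
} else {
  console.warn("BarcodeDetector is not supported in this browser");
}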
package/examples/camera/depth-estimation.js
@@ -0,0 +1,71 @@
+import { pipeline } from "https://cdn.jsdelivr.net/npm/@huggingface/transformers@3.5.2";
+
+import {
+  drawBox,
+  drawGreyscaleImage,
+  registerModel,
+  resizeImage,
+} from "./utils.js";
+
+let depthEstimator = undefined;
+let isRunning = false;
+
+const createDepthEstimator = async () => {
+  depthEstimator = await pipeline(
+    "depth-estimation",
+    "onnx-community/depth-anything-v2-small"
+  );
+  console.log("created depthEstimator", depthEstimator);
+};
+
+registerModel(
+  "depth estimator",
+  () => {
+    if (!depthEstimator) {
+      createDepthEstimator();
+    }
+  },
+  async (
+    image,
+    canvas,
+    context,
+    mediaResultsElement,
+    generatedImageCanvas,
+    generatedImageContext
+  ) => {
+    if (!depthEstimator) {
+      console.error("depthEstimator not created yet");
+      return;
+    }
+    if (isRunning) {
+      return;
+    }
+    try {
+      isRunning = true;
+
+      console.log("running depthEstimator");
+      const src = resizeImage(image, 128, 128);
+      const output = await depthEstimator(src);
+      console.log("depthEstimator output", output);
+
+      if (output.depth) {
+        const { channels, data, height, width } = output.depth;
+        console.log({ channels, data, height, width });
+        if (channels == 1) {
+          generatedImageCanvas.style.display = "";
+          drawGreyscaleImage(
+            data,
+            height,
+            width,
+            generatedImageCanvas,
+            generatedImageContext
+          );
+        }
+      }
+    } catch (error) {
+      console.error("error running depthEstimator", error);
+    } finally {
+      isRunning = false;
+    }
+  }
+);
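
`resizeImage` and `drawGreyscaleImage` also come from the unshown `utils.js`. transformers.js returns the depth map as a single-channel image-like object (`data`, `width`, `height`, `channels`), so `drawGreyscaleImage` presumably normalizes that channel into grey RGBA pixels. A sketch under that assumption, with the argument order taken from the call site above:

// Hypothetical drawGreyscaleImage: render a single-channel array as a grey RGBA image.
export function drawGreyscaleImage(data, height, width, canvas, context) {
  canvas.width = width;
  canvas.height = height;
  const rgba = new Uint8ClampedArray(width * height * 4);
  // Normalize to 0..255 in case the depth values are floats outside that range.
  let min = Infinity, max = -Infinity;
  for (const v of data) {
    if (v < min) min = v;
    if (v > max) max = v;
  }
  const scale = max > min ? 255 / (max - min) : 0;
  for (let i = 0; i < data.length; i++) {
    const value = (data[i] - min) * scale;
    rgba[i * 4] = rgba[i * 4 + 1] = rgba[i * 4 + 2] = value;
    rgba[i * 4 + 3] = 255; // fully opaque
  }
  context.putImageData(new ImageData(rgba, width, height), 0, 0);
}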
package/examples/camera/face-detector.js
@@ -0,0 +1,119 @@
+import {
+  FaceDetector,
+  FilesetResolver,
+  DrawingUtils,
+} from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0";
+
+import { drawBox, drawPoint, registerModel } from "./utils.js";
+
+let faceDetector = undefined;
+let runningMode = "VIDEO";
+
+registerModel(
+  "face detector",
+  () => {
+    if (!faceDetector) {
+      createFaceDetector();
+    }
+  },
+  async (image, canvas, context) => {
+    if (!faceDetector) {
+      console.error("faceDetector not created yet");
+      return;
+    }
+
+    const faceDetectorResult = faceDetector.detectForVideo(
+      image,
+      performance.now()
+    );
+
+    //console.log("faceDetectorResult", faceDetectorResult);
+
+    context.save();
+    for (const detection of faceDetectorResult.detections) {
+      const { boundingBox, keypoints } = detection;
+
+      drawBox(boundingBox, image, canvas, context);
+
+      keypoints.forEach((keypoint) => {
+        drawPoint(keypoint, image, canvas, context);
+      });
+    }
+    context.restore();
+  }
+);
+
+const createFaceDetector = async () => {
+  const vision = await FilesetResolver.forVisionTasks(
+    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm"
+  );
+  faceDetector = await FaceDetector.createFromOptions(vision, {
+    baseOptions: {
+      modelAssetPath: `https://storage.googleapis.com/mediapipe-models/face_detector/blaze_face_short_range/float16/1/blaze_face_short_range.tflite`,
+      delegate: "GPU",
+    },
+    runningMode: runningMode,
+    minDetectionConfidence: 0.5,
+    minSuppressionThreshold: 0.3,
+  });
+  console.log("created faceDetector", faceDetector);
+};
+
+const sampleResult = { // sample FaceDetector.detectForVideo() result, kept for reference
+  detections: [
+    {
+      categories: [
+        {
+          score: 0.9460012316703796,
+          index: 0,
+          categoryName: "",
+          displayName: "",
+        },
+      ],
+      boundingBox: {
+        originX: 196,
+        originY: 284,
+        width: 290,
+        height: 290,
+      },
+      keypoints: [
+        {
+          x: 0.3776719570159912,
+          y: 0.48935234546661377,
+          score: 0,
+          label: "",
+        },
+        {
+          x: 0.5415752530097961,
+          y: 0.48879799246788025,
+          score: 0,
+          label: "",
+        },
+        {
+          x: 0.4468824565410614,
+          y: 0.5775673985481262,
+          score: 0,
+          label: "",
+        },
+        {
+          x: 0.4519948363304138,
+          y: 0.6720198392868042,
+          score: 0,
+          label: "",
+        },
+        {
+          x: 0.3088231682777405,
+          y: 0.5479761958122253,
+          score: 0,
+          label: "",
+        },
+        {
+          x: 0.6484290957450867,
+          y: 0.549033522605896,
+          score: 0,
+          label: "",
+        },
+      ],
+    },
+  ],
+};
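
`drawBox` and `drawPoint` are likewise `utils.js` helpers. The `sampleResult` kept at the bottom of the file shows the two coordinate systems involved: `boundingBox` is in input-image pixels (`originX: 196`), while `keypoints` are normalized to 0..1, so the helpers presumably scale each accordingly. A hypothetical sketch:

// Hypothetical drawBox: MediaPipe's boundingBox is in image pixels (originX/originY).
export function drawBox(boundingBox, image, canvas, context) {
  const scaleX = canvas.width / image.naturalWidth;
  const scaleY = canvas.height / image.naturalHeight;
  context.strokeStyle = "white";
  context.lineWidth = 2;
  context.strokeRect(
    boundingBox.originX * scaleX,
    boundingBox.originY * scaleY,
    boundingBox.width * scaleX,
    boundingBox.height * scaleY
  );
}

// Hypothetical drawPoint: keypoints are normalized (0..1), per the sampleResult above.
export function drawPoint(keypoint, image, canvas, context) {
  context.fillStyle = "red";
  context.beginPath();
  context.arc(keypoint.x * canvas.width, keypoint.y * canvas.height, 3, 0, 2 * Math.PI);
  context.fill();
}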
package/examples/camera/face-landmark.js
@@ -0,0 +1,111 @@
+import vision from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.3";
+const { FaceLandmarker, FilesetResolver, DrawingUtils } = vision;
+
+import { registerModel } from "./utils.js";
+
+let faceLandmarker = undefined;
+let runningMode = "VIDEO";
+let drawingUtils;
+
+registerModel(
+  "face landmark",
+  () => {
+    if (!faceLandmarker) {
+      createFaceLandmarker();
+    }
+  },
+  async (image, canvas, context, mediaResultsElement) => {
+    if (!faceLandmarker) {
+      console.error("faceLandmarker not created yet");
+      return;
+    }
+
+    drawingUtils = drawingUtils || new DrawingUtils(context);
+
+    const results = faceLandmarker.detectForVideo(image, performance.now());
+
+    console.log("faceLandmarkerResult", results);
+    const blendShapeCategories = results?.faceBlendshapes?.[0]?.categories;
+    if (blendShapeCategories) {
+      const blendShapeCategoriesString = {};
+      blendShapeCategories.forEach(({ categoryName, score }) => {
+        blendShapeCategoriesString[categoryName] = score;
+      });
+      mediaResultsElement.textContent = JSON.stringify(
+        blendShapeCategoriesString,
+        null,
+        2
+      );
+    }
+
+    if (results.faceLandmarks) {
+      for (const landmarks of results.faceLandmarks) {
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_TESSELATION,
+          { color: "#C0C0C070", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_RIGHT_EYE,
+          { color: "#FF3030", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_RIGHT_EYEBROW,
+          { color: "#FF3030", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_LEFT_EYE,
+          { color: "#30FF30", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_LEFT_EYEBROW,
+          { color: "#30FF30", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_FACE_OVAL,
+          { color: "#E0E0E0", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_LIPS,
+          { color: "#E0E0E0", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_RIGHT_IRIS,
+          { color: "#FF3030", lineWidth: 1 }
+        );
+        drawingUtils.drawConnectors(
+          landmarks,
+          FaceLandmarker.FACE_LANDMARKS_LEFT_IRIS,
+          { color: "#30FF30", lineWidth: 1 }
+        );
+      }
+    }
+  }
+);
+
+const createFaceLandmarker = async () => {
+  const filesetResolver = await FilesetResolver.forVisionTasks(
+    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.3/wasm"
+  );
+  faceLandmarker = await FaceLandmarker.createFromOptions(filesetResolver, {
+    baseOptions: {
+      modelAssetPath: `https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/1/face_landmarker.task`,
+      delegate: "GPU",
+    },
+    outputFaceBlendshapes: true,
+    runningMode,
+    numFaces: 1,
+    minFaceDetectionConfidence: 0.5,
+    minFacePresenceConfidence: 0.5,
+    minTrackingConfidence: 0.5,
+    outputFacialTransformationMatrixes: false,
+  });
+  console.log("created faceLandmarker", faceLandmarker);
+};
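
The nine `drawConnectors` calls differ only in landmark set and color; a data-driven loop over the same pairs would behave identically. This is a refactoring sketch, not what the package ships:

// Equivalent, data-driven version of the drawConnectors calls above.
const CONNECTOR_STYLES = [
  [FaceLandmarker.FACE_LANDMARKS_TESSELATION, "#C0C0C070"],
  [FaceLandmarker.FACE_LANDMARKS_RIGHT_EYE, "#FF3030"],
  [FaceLandmarker.FACE_LANDMARKS_RIGHT_EYEBROW, "#FF3030"],
  [FaceLandmarker.FACE_LANDMARKS_LEFT_EYE, "#30FF30"],
  [FaceLandmarker.FACE_LANDMARKS_LEFT_EYEBROW, "#30FF30"],
  [FaceLandmarker.FACE_LANDMARKS_FACE_OVAL, "#E0E0E0"],
  [FaceLandmarker.FACE_LANDMARKS_LIPS, "#E0E0E0"],
  [FaceLandmarker.FACE_LANDMARKS_RIGHT_IRIS, "#FF3030"],
  [FaceLandmarker.FACE_LANDMARKS_LEFT_IRIS, "#30FF30"],
];
for (const landmarks of results.faceLandmarks) {
  for (const [connections, color] of CONNECTOR_STYLES) {
    drawingUtils.drawConnectors(landmarks, connections, { color, lineWidth: 1 });
  }
}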
package/examples/camera/gesture-recognition.js
@@ -0,0 +1,97 @@
+import {
+  GestureRecognizer,
+  FilesetResolver,
+  DrawingUtils,
+} from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.3";
+
+import { registerModel } from "./utils.js";
+
+let gestureRecognizer = undefined;
+let drawingUtils;
+let runningMode = "VIDEO";
+
+registerModel(
+  "gesture recognition",
+  () => {
+    if (!gestureRecognizer) {
+      createGestureRecognizer();
+    }
+  },
+  async (image, canvas, context, modelResultsElement) => {
+    if (!gestureRecognizer) {
+      console.error("gestureRecognizer not created yet");
+      return;
+    }
+
+    const gestureRecognizerResult = gestureRecognizer.recognizeForVideo(
+      image,
+      performance.now()
+    );
+    console.log("gestureRecognizerResult", gestureRecognizerResult);
+
+    if (gestureRecognizerResult.landmarks) {
+      drawingUtils = drawingUtils || new DrawingUtils(context);
+
+      for (const landmarks of gestureRecognizerResult.landmarks) {
+        drawingUtils.drawConnectors(
+          landmarks,
+          GestureRecognizer.HAND_CONNECTIONS,
+          {
+            color: "#00FF00",
+            lineWidth: 3,
+          }
+        );
+        drawingUtils.drawLandmarks(landmarks, {
+          color: "#FF0000",
+          lineWidth: 0,
+          radius: 3,
+        });
+      }
+    }
+    if (gestureRecognizerResult.gestures.length > 0) {
+      const categoryName = gestureRecognizerResult.gestures[0][0].categoryName;
+      const categoryScore = (
+        gestureRecognizerResult.gestures[0][0].score * 100
+      ).toFixed(2);
+      const handedness = gestureRecognizerResult.handednesses[0][0].displayName;
+      console.log(
+        `GestureRecognizer: ${categoryName}\n Confidence: ${categoryScore} %\n Handedness: ${handedness}`
+      );
+      modelResultsElement.innerText = `GestureRecognizer: ${categoryName}\n Confidence: ${categoryScore} %\n Handedness: ${handedness}`;
+    }
+  }
+);
+
+const createGestureRecognizer = async () => {
+  const vision = await FilesetResolver.forVisionTasks(
+    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.3/wasm"
+  );
+  gestureRecognizer = await GestureRecognizer.createFromOptions(vision, {
+    baseOptions: {
+      modelAssetPath:
+        "https://storage.googleapis.com/mediapipe-models/gesture_recognizer/gesture_recognizer/float16/1/gesture_recognizer.task",
+      delegate: "GPU",
+    },
+    runningMode: runningMode,
+    numHands: 1,
+
+    /**
+     * The minimum confidence score for the hand detection to be considered
+     * successful. Defaults to 0.5.
+     */
+    minHandDetectionConfidence: 0.5,
+
+    /**
+     * The minimum confidence score of hand presence in the hand landmark
+     * detection.
+     */
+    minHandPresenceConfidence: 0.5,
+
+    /**
+     * The minimum confidence score for the hand tracking to be considered
+     * successful.
+     */
+    minTrackingConfidence: 0.5,
+  });
+  console.log("created gestureRecognizer", gestureRecognizer);
+};
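
In the result object, `gestures` is indexed per hand and then per ranked category, so `gestures[0][0]` is the top gesture of the first hand only; with `numHands: 1` that is all there is, but the same shape generalizes. A usage sketch over the same result object:

// Sketch: report the top gesture for every detected hand, not just the first.
gestureRecognizerResult.gestures.forEach((categories, handIndex) => {
  const top = categories[0];
  const hand = gestureRecognizerResult.handednesses[handIndex][0].displayName;
  console.log(`${hand}: ${top.categoryName} (${(top.score * 100).toFixed(2)}%)`);
});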
package/examples/camera/hand-landmark.js
@@ -0,0 +1,74 @@
+import {
+  HandLandmarker,
+  FilesetResolver,
+  DrawingUtils,
+} from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0";
+
+import { registerModel } from "./utils.js";
+
+let handLandmarker = undefined;
+let runningMode = "VIDEO";
+let drawingUtils;
+
+registerModel(
+  "hand landmark",
+  () => {
+    if (!handLandmarker) {
+      createHandLandmarker();
+    }
+  },
+  async (image, canvas, context) => {
+    if (!handLandmarker) {
+      console.error("handLandmarker not created yet");
+      return;
+    }
+
+    drawingUtils = drawingUtils || new DrawingUtils(context);
+
+    const handLandmarkerResult = handLandmarker.detectForVideo(
+      image,
+      performance.now()
+    );
+
+    for (const landmarks of handLandmarkerResult.landmarks) {
+      drawingUtils.drawConnectors(landmarks, HandLandmarker.HAND_CONNECTIONS, {
+        color: "#00FF00",
+        lineWidth: 3,
+      });
+      drawingUtils.drawLandmarks(landmarks, {
+        color: "#FF0000",
+        lineWidth: 0,
+        radius: 3,
+      });
+    }
+  }
+);
+
+const createHandLandmarker = async () => {
+  const vision = await FilesetResolver.forVisionTasks(
+    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.0/wasm"
+  );
+  handLandmarker = await HandLandmarker.createFromOptions(vision, {
+    baseOptions: {
+      modelAssetPath: `https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task`,
+      delegate: "GPU",
+    },
+    runningMode: runningMode,
+    numHands: 1,
+    /**
+     * The minimum confidence score for the hand detection to be considered successful in the palm detection model.
+     */
+    minHandDetectionConfidence: 0.5,
+    /**
+     * The minimum confidence score of hand presence in the hand landmark detection model.
+     * In Video mode and Live stream mode, if the hand presence confidence score from the hand landmark model is below this threshold,
+     * Hand Landmarker triggers the palm detection model.
+     * Otherwise, a lightweight hand tracking algorithm determines the location of the hand(s) for subsequent landmark detections.
+     */
+    minHandPresenceConfidence: 0.5,
+    /**
+     * The minimum confidence score for the hand tracking to be considered successful.
+     * This is the bounding box IoU threshold between hands in the current frame and the last frame.
+     * In Video mode and Stream mode of Hand Landmarker, if the tracking fails, Hand Landmarker triggers hand detection.
+     * Otherwise, it skips the hand detection.
+     */
+    minTrackingConfidence: 0.5,
+  });
+  console.log("created handLandmarker", handLandmarker);
+};
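
Beyond drawing, each entry in `landmarks` is MediaPipe's 21-point hand model with coordinates normalized to 0..1. A usage sketch (not part of the example) that reads one named point:

// Sketch: landmark 8 is the index fingertip in MediaPipe's 21-point hand model;
// scale the normalized coordinates by the canvas size to get pixels.
for (const landmarks of handLandmarkerResult.landmarks) {
  const tip = landmarks[8];
  const px = (tip.x * canvas.width).toFixed(0);
  const py = (tip.y * canvas.height).toFixed(0);
  console.log(`index fingertip at (${px}, ${py})`);
}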
package/examples/camera/image-segmentation.js
@@ -0,0 +1,98 @@
+import {
+  ImageSegmenter,
+  FilesetResolver,
+} from "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.2";
+
+import { registerModel } from "./utils.js";
+
+let imageSegmenter = undefined;
+let runningMode = "LIVE_STREAM";
+let labels;
+
+const legendColors = [
+  [255, 197, 0, 255], // Vivid Yellow
+  [128, 62, 117, 255], // Strong Purple
+  [255, 104, 0, 255], // Vivid Orange
+  [166, 189, 215, 255], // Very Light Blue
+  [193, 0, 32, 255], // Vivid Red
+  [206, 162, 98, 255], // Grayish Yellow
+  [129, 112, 102, 255], // Medium Gray
+  [0, 125, 52, 255], // Vivid Green
+  [246, 118, 142, 255], // Strong Purplish Pink
+  [0, 83, 138, 255], // Strong Blue
+  [255, 112, 92, 255], // Strong Yellowish Pink
+  [83, 55, 112, 255], // Strong Violet
+  [255, 142, 0, 255], // Vivid Orange Yellow
+  [179, 40, 81, 255], // Strong Purplish Red
+  [244, 200, 0, 255], // Vivid Greenish Yellow
+  [127, 24, 13, 255], // Strong Reddish Brown
+  [147, 170, 0, 255], // Vivid Yellowish Green
+  [89, 51, 21, 255], // Deep Yellowish Brown
+  [241, 58, 19, 255], // Vivid Reddish Orange
+  [35, 44, 22, 255], // Dark Olive Green
+  [0, 161, 194, 255], // Vivid Blue
+];
+
+registerModel(
+  "image segmenter",
+  () => {
+    if (!imageSegmenter) {
+      createImageSegmenter();
+    }
+  },
+  async (image, canvas, context, mediaResultsElement) => {
+    if (!imageSegmenter) {
+      console.error("imageSegmenter not created yet");
+      return;
+    }
+
+    const result = imageSegmenter.segmentForVideo(image, performance.now());
+
+    console.log("imageSegmenter result", result);
+
+    const { width, height } = result.categoryMask;
+    let imageData = context.getImageData(0, 0, width, height).data;
+    canvas.width = width;
+    canvas.height = height;
+    const mask = result.categoryMask.getAsUint8Array();
+    for (let i = 0; i < mask.length; i++) {
+      const legendColor = legendColors[mask[i] % legendColors.length];
+      imageData[i * 4] = (legendColor[0] + imageData[i * 4]) / 2;
+      imageData[i * 4 + 1] = (legendColor[1] + imageData[i * 4 + 1]) / 2;
+      imageData[i * 4 + 2] = (legendColor[2] + imageData[i * 4 + 2]) / 2;
+      imageData[i * 4 + 3] = (legendColor[3] + imageData[i * 4 + 3]) / 2;
+    }
+    const uint8Array = new Uint8ClampedArray(imageData.buffer);
+    const dataNew = new ImageData(uint8Array, width, height);
+    context.putImageData(dataNew, 0, 0);
+  }
+);
+
+const modelAssetPaths = {
+  selfieMulticlass:
+    "https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_multiclass_256x256/float32/latest/selfie_multiclass_256x256.tflite",
+  hairSegmenter:
+    "https://storage.googleapis.com/mediapipe-models/image_segmenter/hair_segmenter/float32/latest/hair_segmenter.tflite",
+  selfieSegmenter:
+    "https://storage.googleapis.com/mediapipe-models/image_segmenter/selfie_segmenter/float16/latest/selfie_segmenter.tflite",
+  deeplab:
+    "https://storage.googleapis.com/mediapipe-models/image_segmenter/deeplab_v3/float32/latest/deeplab_v3.tflite",
+};
+const createImageSegmenter = async () => {
+  const vision = await FilesetResolver.forVisionTasks(
+    "https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.2/wasm"
+  );
+
+  imageSegmenter = await ImageSegmenter.createFromOptions(vision, {
+    baseOptions: {
+      modelAssetPath: modelAssetPaths.selfieMulticlass,
+      delegate: "GPU",
+    },
+
+    runningMode: runningMode,
+    outputCategoryMask: true,
+    outputConfidenceMasks: false,
+  });
+  labels = imageSegmenter.getLabels();
+  console.log("created imageSegmenter", imageSegmenter, labels);
+};
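
The category mask holds one class index per pixel, and `labels` (fetched via `imageSegmenter.getLabels()`) maps those indices to class names. A usage sketch over the same `mask` and `labels` variables from the hunk above:

// Sketch: list which segmentation classes appear in the current frame's mask.
const presentIndices = new Set(mask);
console.log("classes in frame:", [...presentIndices].map((i) => labels[i]));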