handle-and-face 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,109 @@
1
+ # handle-and-face 🦾👤
2
+
3
+ **Improved accessibility and precise tracking for modern web applications.**
4
+
5
+ `handle-and-face` is a React library that provides highly calibrated hooks for **Hand Tracking** (index finger pointer + pinch click) and **Head Tracking** (nose-driven pointer + blink click). Built on top of MediaPipe, it simplifies the integration of advanced computer vision for accessibility and interactive web experiences.
6
+
7
+ ---
8
+
9
+ ## 🇪🇸 Documentación en Español
10
+
11
+ ### Instalación
12
+
13
+ ```bash
14
+ npm install handle-and-face
15
+ ```
16
+
17
+ ### Hooks Disponibles
18
+
19
+ #### 1. `useHandTracking`
20
+
21
+ Ideal para navegar usando el dedo índice como puntero.
22
+
23
+ - **Gesto**: Junta el pulgar y el índice para hacer click.
24
+ - **Implementación**:
25
+
26
+ ```javascript
27
+ const { videoRef, canvasRef, pointer, isClicking } = useHandTracking({
28
+ sensitivityX: 2.5,
29
+ onPinch: (x, y) => console.log("Click en", x, y),
30
+ });
31
+ ```
32
+
33
+ #### 2. `useHeadTracking`
34
+
35
+ Perfecto para el control manos libres y la accesibilidad.
36
+
37
+ - **Gesto**: Parpadea una vez para click, dos veces rápido para doble click.
38
+ - **Implementación**:
39
+
40
+ ```javascript
41
+ const { videoRef, canvasRef, pointer, isClicking } = useHeadTracking({
42
+ sensitivityX: 10.0,
43
+ onClick: (x, y) => console.log("Click"),
44
+ onDoubleClick: (x, y) => console.log("Doble Click"),
45
+ });
46
+ ```
47
+
48
+ ---
49
+
50
+ ## 🇺🇸 Documentation in English
51
+
52
+ ### Installation
53
+
54
+ ```bash
55
+ npm install handle-and-face
56
+ ```
57
+
58
+ ### Available Hooks
59
+
60
+ #### 1. `useHandTracking`
61
+
62
+ Use your index finger as a virtual mouse.
63
+
64
+ - **Gesture**: Pinch thumb and index to click.
65
+ - **Usage**:
66
+
67
+ ```javascript
68
+ const { videoRef, canvasRef, pointer, isClicking } = useHandTracking({
69
+ sensitivityX: 2.0,
70
+ onPinch: (x, y) => console.log("Pinch click at", x, y),
71
+ });
72
+ ```
73
+
74
+ #### 2. `useHeadTracking`
75
+
76
+ Touch-free navigation using head movement.
77
+
78
+ - **Gesture**: Blink to click, rapid double-blink for double-click.
79
+ - **Usage**:
80
+
81
+ ```javascript
82
+ const { videoRef, canvasRef, pointer, isClicking } = useHeadTracking({
83
+ sensitivityX: 10.0,
84
+ onDoubleClick: (x, y) => console.log("Double blink detected"),
85
+ });
86
+ ```
87
+
88
+ ---
89
+
90
+ ## 🛡️ Stability & Precision
91
+
92
+ The library includes custom-built algorithms for:
93
+
94
+ - **Dynamic Smoothing**: Reduces lag during fast movements and adds weight for precision when aiming at small targets.
95
+ - **Blink-Freeze**: Temporarily anchors the pointer during blinks/clicks to prevent unintended cursor drift.
96
+ - **Auto-Calibration**: Pre-tuned sensitivity values for standard laptop cameras.
97
+
98
+ ## ⚙️ Configuration Parameters
99
+
100
+ | Parameter | Type | Default | Description |
101
+ | :-------------------- | :--------- | :----------- | :------------------------------------------------- |
102
+ | `sensitivityX/Y` | `Number` | `2.0 - 10.0` | Pointer speed multiplier. |
103
+ | `jitterThreshold` | `Number` | `1.5` | Minimum movement pixels to ignore (reduces noise). |
104
+ | `onPinch` / `onClick` | `Function` | `null` | Single interaction callback. |
105
+ | `onDoubleClick` | `Function` | `null` | Double interaction callback (Head Tracking only). |
106
+
107
+ ## 📄 License
108
+
109
+ MIT © Starla.io
package/package.json ADDED
@@ -0,0 +1,30 @@
1
+ {
2
+ "name": "handle-and-face",
3
+ "version": "1.0.0",
4
+ "description": "Custom React hooks for hand and head tracking using MediaPipe. Improved accessibility and calibrated for precise web interactions.",
5
+ "type": "module",
6
+ "main": "src/index.js",
7
+ "files": [
8
+ "src",
9
+ "README.md"
10
+ ],
11
+ "scripts": {
12
+ "test": "echo \"Error: no test specified\" && exit 1"
13
+ },
14
+ "keywords": [
15
+ "react",
16
+ "mediapipe",
17
+ "hand-tracking",
18
+ "head-tracking",
19
+ "accessibility",
20
+ "face-mesh",
21
+ "pinch-click",
22
+ "blink-detection"
23
+ ],
24
+ "author": "fernando.robles",
25
+ "license": "MIT",
26
+ "peerDependencies": {
27
+ "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0",
28
+ "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0"
29
+ }
30
+ }
package/src/hooks/useHandTracking.js ADDED
@@ -0,0 +1,157 @@
1
'use client';
import { useRef, useEffect, useState, useCallback } from 'react';

/**
 * Tracks a single hand with MediaPipe Hands and exposes the index fingertip
 * as a smoothed, screen-space pointer. Pinching the thumb and index tips
 * together (landmarks 4 and 8 closer than `pinchThreshold` in normalized
 * coordinates) registers a click.
 *
 * @param {Object}    [options]
 * @param {number}    [options.sensitivityX=2.0]    Horizontal pointer-speed multiplier.
 * @param {number}    [options.sensitivityY=2.0]    Vertical pointer-speed multiplier.
 * @param {number}    [options.smoothing=0.3]       Per-frame lerp factor (lower = smoother/slower).
 * @param {number}    [options.pinchThreshold=0.05] Max normalized thumb–index distance that counts as a pinch.
 * @param {?Function} [options.onPinch=null]        Called once with (x, y) on the pinch's rising edge.
 * @returns {{videoRef: Object, canvasRef: Object, pointer: {x: number, y: number},
 *            isClicking: boolean, isTracking: boolean,
 *            startTracking: Function, stopTracking: Function}}
 */
export function useHandTracking({
  sensitivityX = 2.0,
  sensitivityY = 2.0,
  smoothing = 0.3,
  pinchThreshold = 0.05,
  onPinch = null
} = {}) {
  const videoRef = useRef(null);
  const canvasRef = useRef(null);
  const handsRef = useRef(null);
  const cameraRef = useRef(null);
  // Smoothed pointer kept in a ref so the per-frame callback can mutate it
  // without forcing a render per mutation.
  const smoothedRef = useRef({ x: 0, y: 0, initialized: false });

  const [isTracking, setIsTracking] = useState(false);
  const [pointer, setPointer] = useState({ x: 0, y: 0 });
  const [isClicking, setIsClicking] = useState(false);

  // Load the MediaPipe browser bundles once, sequentially, skipping any
  // script tag already present on the page.
  useEffect(() => {
    const sources = [
      'https://cdn.jsdelivr.net/npm/@mediapipe/hands/hands.js',
      'https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js',
      'https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js'
    ];
    const loadAll = async () => {
      for (const src of sources) {
        if (!document.querySelector(`script[src="${src}"]`)) {
          await new Promise((resolve) => {
            const tag = document.createElement('script');
            tag.src = src;
            tag.onload = resolve;
            // FIX: without an error handler, one failed CDN fetch left this
            // promise pending forever and blocked the remaining scripts.
            tag.onerror = () => {
              console.error(`handle-and-face: failed to load ${src}`);
              resolve();
            };
            document.body.appendChild(tag);
          });
        }
      }
    };
    loadAll();
  }, []);

  // Per-frame MediaPipe callback: draws the camera frame plus hand overlay
  // and updates pointer/click state from the first detected hand.
  const onResults = useCallback((results) => {
    if (!canvasRef.current || !results.image) return;
    const canvas = canvasRef.current;
    if (canvas.width !== results.image.width || canvas.height !== results.image.height) {
      canvas.width = results.image.width;
      canvas.height = results.image.height;
    }
    const ctx = canvas.getContext('2d');
    ctx.save();
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.drawImage(results.image, 0, 0, canvas.width, canvas.height);

    if (results.multiHandLandmarks && results.multiHandLandmarks.length > 0) {
      const landmarks = results.multiHandLandmarks[0];
      if (window.drawConnectors && window.HAND_CONNECTIONS) {
        window.drawConnectors(ctx, landmarks, window.HAND_CONNECTIONS, { color: '#00FF00', lineWidth: 2 });
        window.drawLandmarks(ctx, landmarks, { color: '#FF0000', lineWidth: 1, radius: 3 });
      }

      // Landmark 8 = index fingertip, 4 = thumb tip. X is mirrored (1 - x)
      // so the pointer follows the user's motion like a mirror image.
      const indexTip = landmarks[8];
      const thumbTip = landmarks[4];
      const nx = Math.max(0, Math.min(1, ((1 - indexTip.x) - 0.5) * sensitivityX + 0.5));
      const ny = Math.max(0, Math.min(1, (indexTip.y - 0.5) * sensitivityY + 0.5));
      const rawX = nx * window.innerWidth;
      const rawY = ny * window.innerHeight;

      const pinchDistance = Math.hypot(indexTip.x - thumbTip.x, indexTip.y - thumbTip.y);
      const pinching = pinchDistance < pinchThreshold;

      const s = smoothedRef.current;
      if (!s.initialized) {
        s.x = rawX;
        s.y = rawY;
        s.initialized = true;
      }
      // Near-freeze the pointer while pinching so a click doesn't drag it.
      const factor = pinching ? 0.05 : smoothing;
      s.x += (rawX - s.x) * factor;
      s.y += (rawY - s.y) * factor;

      setPointer({ x: s.x, y: s.y });

      if (pinching) {
        // Fire onPinch only on the rising edge of the gesture.
        setIsClicking((wasClicking) => {
          if (!wasClicking && onPinch) onPinch(s.x, s.y);
          return true;
        });
      } else {
        setIsClicking(false);
      }
    }
    ctx.restore();
  }, [sensitivityX, sensitivityY, smoothing, pinchThreshold, onPinch]);

  // Create the model and camera loop when tracking starts; tear both down
  // when tracking stops, `onResults` changes, or the component unmounts.
  useEffect(() => {
    if (isTracking && !handsRef.current && window.Hands) {
      const hands = new window.Hands({
        locateFile: (file) => `https://cdn.jsdelivr.net/npm/@mediapipe/hands/${file}`
      });

      hands.setOptions({
        maxNumHands: 1,
        modelComplexity: 1,
        minDetectionConfidence: 0.5,
        minTrackingConfidence: 0.5
      });

      hands.onResults(onResults);
      handsRef.current = hands;

      if (videoRef.current) {
        cameraRef.current = new window.Camera(videoRef.current, {
          onFrame: async () => {
            if (handsRef.current && videoRef.current) {
              await handsRef.current.send({ image: videoRef.current });
            }
          },
          width: 640,
          height: 480
        });
        cameraRef.current.start();
      }
    }
    // NOTE(review): if the CDN scripts have not finished loading when
    // `isTracking` flips to true, `window.Hands` is undefined and tracking
    // silently never starts until something re-runs this effect. Consider
    // gating startTracking on a "scripts ready" flag.

    return () => {
      // BUG FIX: the previous cleanup was gated on `!isTracking`, but the
      // closure created while tracking captured `isTracking === true`, so
      // unmounting mid-session never stopped the camera loop or closed the
      // model (leak), and a changed `onResults` stayed bound to a stale
      // callback. Always tear down; the setup branch above recreates
      // everything on the next run while still tracking.
      if (cameraRef.current) {
        cameraRef.current.stop();
        cameraRef.current = null;
      }
      if (handsRef.current) {
        handsRef.current.close();
        handsRef.current = null;
      }
    };
  }, [isTracking, onResults]);

  /** Requests camera access, attaches the stream, and begins tracking. */
  const startTracking = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ video: true });
      if (videoRef.current) {
        videoRef.current.srcObject = stream;
        setIsTracking(true);
      }
    } catch (err) {
      console.error('Camera access error:', err);
    }
  };

  /** Stops the camera stream and resets tracking state. */
  const stopTracking = () => {
    if (videoRef.current && videoRef.current.srcObject) {
      videoRef.current.srcObject.getTracks().forEach((track) => track.stop());
      videoRef.current.srcObject = null;
    }
    setIsTracking(false);
    setIsClicking(false); // FIX: don't leave a click latched after stopping.
    smoothedRef.current.initialized = false;
  };

  return {
    videoRef,
    canvasRef,
    pointer,
    isClicking,
    isTracking,
    startTracking,
    stopTracking
  };
}
package/src/hooks/useHeadTracking.js ADDED
@@ -0,0 +1,178 @@
1
'use client';
import { useRef, useEffect, useState, useCallback } from 'react';

/**
 * Tracks the user's face with MediaPipe Face Mesh and maps the nose tip to a
 * smoothed, screen-space pointer. Closing both eyes (blink) clicks; two
 * blinks within `doubleBlinkWindow` milliseconds fire a double click.
 *
 * @param {Object}    [options]
 * @param {number}    [options.sensitivityX=10.0]     Horizontal pointer-speed multiplier.
 * @param {number}    [options.sensitivityY=10.0]     Vertical pointer-speed multiplier.
 * @param {number}    [options.jitterThreshold=1.5]   Minimum pixel movement to react to (noise gate).
 * @param {number}    [options.blinkThreshold=0.15]   Eye-openness ratio below which an eye counts as closed.
 * @param {number}    [options.doubleBlinkWindow=500] Max ms between blinks for a double click.
 * @param {?Function} [options.onDoubleClick=null]    Called with (x, y) on a double blink.
 * @param {?Function} [options.onClick=null]          Called with (x, y) on a single blink.
 * @returns {{videoRef: Object, canvasRef: Object, pointer: {x: number, y: number},
 *            isClicking: boolean, isTracking: boolean,
 *            startTracking: Function, stopTracking: Function}}
 */
export function useHeadTracking({
  sensitivityX = 10.0,
  sensitivityY = 10.0,
  jitterThreshold = 1.5,
  blinkThreshold = 0.15,
  doubleBlinkWindow = 500,
  onDoubleClick = null,
  onClick = null
} = {}) {
  const videoRef = useRef(null);
  const canvasRef = useRef(null);
  const meshRef = useRef(null);
  const cameraRef = useRef(null);

  // Pointer state plus a short history used as a moving-average denoiser.
  const pointerRef = useRef({ x: 0, y: 0, initialized: false, history: [] });
  // Blink edge detection: `last` = timestamp of the previous blink,
  // `active` = whether the eyes were closed on the previous frame.
  const blinkRef = useRef({ last: 0, active: false });

  const [isTracking, setIsTracking] = useState(false);
  const [pointer, setPointer] = useState({ x: 0, y: 0 });
  const [isClicking, setIsClicking] = useState(false);

  // Load the MediaPipe browser bundles once, sequentially, skipping any
  // script tag already present on the page.
  useEffect(() => {
    const sources = [
      'https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/face_mesh.js',
      'https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js',
      'https://cdn.jsdelivr.net/npm/@mediapipe/drawing_utils/drawing_utils.js'
    ];
    const loadAll = async () => {
      for (const src of sources) {
        if (!document.querySelector(`script[src="${src}"]`)) {
          await new Promise((resolve) => {
            const tag = document.createElement('script');
            tag.src = src;
            tag.onload = resolve;
            // FIX: without an error handler, one failed CDN fetch left this
            // promise pending forever and blocked the remaining scripts.
            tag.onerror = () => {
              console.error(`handle-and-face: failed to load ${src}`);
              resolve();
            };
            document.body.appendChild(tag);
          });
        }
      }
    };
    loadAll();
  }, []);

  // Per-frame MediaPipe callback: draws the camera frame plus face overlay,
  // detects blinks, and updates the smoothed pointer.
  const onResults = useCallback((results) => {
    if (!canvasRef.current || !results.image) return;
    const canvas = canvasRef.current;
    if (canvas.width !== results.image.width || canvas.height !== results.image.height) {
      canvas.width = results.image.width;
      canvas.height = results.image.height;
    }
    const ctx = canvas.getContext('2d');
    ctx.save();
    ctx.clearRect(0, 0, canvas.width, canvas.height);
    ctx.drawImage(results.image, 0, 0, canvas.width, canvas.height);

    if (results.multiFaceLandmarks && results.multiFaceLandmarks.length > 0) {
      const face = results.multiFaceLandmarks[0];
      if (window.drawConnectors && window.FACEMESH_TESSELATION) {
        window.drawConnectors(ctx, face, window.FACEMESH_TESSELATION, { color: '#C0C0C040', lineWidth: 1 });
        window.drawConnectors(ctx, face, window.FACEMESH_RIGHT_EYE, { color: '#FF3030' });
        window.drawConnectors(ctx, face, window.FACEMESH_LEFT_EYE, { color: '#30FF30' });
      }

      // Landmark 4 = nose tip; X is mirrored so the pointer follows head
      // motion like a mirror image.
      const nose = face[4];
      const nx = Math.max(0, Math.min(1, 1 - ((nose.x - 0.5) * sensitivityX + 0.5)));
      const ny = Math.max(0, Math.min(1, (nose.y - 0.5) * sensitivityY + 0.5));
      const rawX = nx * window.innerWidth;
      const rawY = ny * window.innerHeight;

      // Eye-aspect-ratio-style openness: vertical lid gap divided by
      // horizontal eye width. Small values mean the eye is closed.
      const openness = (top, bottom, outer, inner) =>
        Math.abs(face[top].y - face[bottom].y) / Math.abs(face[outer].x - face[inner].x);
      const isBlinking =
        openness(159, 145, 33, 133) < blinkThreshold &&
        openness(386, 374, 362, 263) < blinkThreshold;

      // Rising edge of a blink: single vs. double click decided by the time
      // elapsed since the previous blink.
      const now = Date.now();
      if (isBlinking && !blinkRef.current.active) {
        const elapsed = now - blinkRef.current.last;
        if (elapsed < doubleBlinkWindow) {
          if (onDoubleClick) onDoubleClick(pointerRef.current.x, pointerRef.current.y);
          setIsClicking(true);
          setTimeout(() => setIsClicking(false), 200);
        } else {
          if (onClick) onClick(pointerRef.current.x, pointerRef.current.y);
          setIsClicking(true);
          setTimeout(() => setIsClicking(false), 100);
        }
        blinkRef.current.last = now;
      }
      blinkRef.current.active = isBlinking;

      // Moving average over the last 5 raw positions to cut sensor noise.
      const history = pointerRef.current.history;
      history.push({ x: rawX, y: rawY });
      if (history.length > 5) history.shift();
      const avgX = history.reduce((sum, p) => sum + p.x, 0) / history.length;
      const avgY = history.reduce((sum, p) => sum + p.y, 0) / history.length;

      const state = pointerRef.current;
      if (!state.initialized) {
        state.x = avgX;
        state.y = avgY;
        state.initialized = true;
      }

      const dx = avgX - state.x;
      const dy = avgY - state.y;
      const speed = Math.hypot(dx, dy);

      // Ignore sub-jitter motion; while blinking, near-freeze the pointer
      // ("blink-freeze") so clicks don't drag the cursor.
      if (speed > jitterThreshold || isBlinking) {
        const factor = isBlinking ? 0.02 : speed < 20 ? 0.15 : 0.35;
        state.x += dx * factor;
        state.y += dy * factor;
      }
      setPointer({ x: state.x, y: state.y });
    }
    ctx.restore();
  }, [sensitivityX, sensitivityY, jitterThreshold, blinkThreshold, doubleBlinkWindow, onDoubleClick, onClick]);

  // Create the model and camera loop when tracking starts; tear both down
  // when tracking stops, `onResults` changes, or the component unmounts.
  useEffect(() => {
    if (isTracking && !meshRef.current && window.FaceMesh) {
      const faceMesh = new window.FaceMesh({
        locateFile: (file) => `https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/${file}`
      });

      faceMesh.setOptions({
        maxNumFaces: 1,
        refineLandmarks: true,
        minDetectionConfidence: 0.5,
        minTrackingConfidence: 0.5
      });

      faceMesh.onResults(onResults);
      meshRef.current = faceMesh;

      if (videoRef.current) {
        cameraRef.current = new window.Camera(videoRef.current, {
          onFrame: async () => {
            if (meshRef.current && videoRef.current) {
              await meshRef.current.send({ image: videoRef.current });
            }
          },
          width: 640,
          height: 480
        });
        cameraRef.current.start();
      }
    }
    // NOTE(review): if the CDN scripts have not finished loading when
    // `isTracking` flips to true, `window.FaceMesh` is undefined and
    // tracking silently never starts until something re-runs this effect.

    return () => {
      // BUG FIX: the previous cleanup was gated on `!isTracking`, but the
      // closure created while tracking captured `isTracking === true`, so
      // unmounting mid-session never stopped the camera loop or closed the
      // model (leak), and a changed `onResults` stayed bound to a stale
      // callback. Always tear down; the setup branch above recreates
      // everything on the next run while still tracking.
      if (cameraRef.current) {
        cameraRef.current.stop();
        cameraRef.current = null;
      }
      if (meshRef.current) {
        meshRef.current.close();
        meshRef.current = null;
      }
    };
  }, [isTracking, onResults]);

  /** Requests camera access, attaches the stream, and begins tracking. */
  const startTracking = async () => {
    try {
      const stream = await navigator.mediaDevices.getUserMedia({ video: true });
      if (videoRef.current) {
        videoRef.current.srcObject = stream;
        setIsTracking(true);
      }
    } catch (err) {
      console.error('Camera access error:', err);
    }
  };

  /** Stops the camera stream and resets tracking state. */
  const stopTracking = () => {
    if (videoRef.current && videoRef.current.srcObject) {
      videoRef.current.srcObject.getTracks().forEach((track) => track.stop());
      videoRef.current.srcObject = null;
    }
    setIsTracking(false);
    setIsClicking(false); // FIX: don't leave a click latched after stopping.
    pointerRef.current.initialized = false;
    pointerRef.current.history = [];
  };

  return {
    videoRef,
    canvasRef,
    pointer,
    isClicking,
    isTracking,
    startTracking,
    stopTracking
  };
}
package/src/index.js ADDED
@@ -0,0 +1,2 @@
1
// Package entry point (referenced by package.json "main"): re-exports the
// public hooks so consumers can import them from 'handle-and-face' directly.
export { useHandTracking } from './hooks/useHandTracking';
export { useHeadTracking } from './hooks/useHeadTracking';