@arfuhad/react-native-smart-camera 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +341 -0
- package/README.md +154 -0
- package/android/build.gradle +89 -0
- package/android/src/main/AndroidManifest.xml +2 -0
- package/android/src/main/java/expo/modules/smartcamera/ImageLoader.kt +106 -0
- package/android/src/main/java/expo/modules/smartcamera/MLKitFaceDetector.kt +273 -0
- package/android/src/main/java/expo/modules/smartcamera/SmartCameraModule.kt +205 -0
- package/android/src/main/java/expo/modules/smartcamera/SmartCameraView.kt +153 -0
- package/android/src/main/java/expo/modules/smartcamera/WebRTCFrameBridge.kt +184 -0
- package/app.plugin.js +17 -0
- package/build/SmartCamera.d.ts +17 -0
- package/build/SmartCamera.d.ts.map +1 -0
- package/build/SmartCamera.js +270 -0
- package/build/SmartCamera.js.map +1 -0
- package/build/SmartCameraModule.d.ts +112 -0
- package/build/SmartCameraModule.d.ts.map +1 -0
- package/build/SmartCameraModule.js +121 -0
- package/build/SmartCameraModule.js.map +1 -0
- package/build/SmartCameraView.d.ts +8 -0
- package/build/SmartCameraView.d.ts.map +1 -0
- package/build/SmartCameraView.js +7 -0
- package/build/SmartCameraView.js.map +1 -0
- package/build/detection/blinkProcessor.d.ts +23 -0
- package/build/detection/blinkProcessor.d.ts.map +1 -0
- package/build/detection/blinkProcessor.js +90 -0
- package/build/detection/blinkProcessor.js.map +1 -0
- package/build/detection/faceDetector.d.ts +16 -0
- package/build/detection/faceDetector.d.ts.map +1 -0
- package/build/detection/faceDetector.js +46 -0
- package/build/detection/faceDetector.js.map +1 -0
- package/build/detection/index.d.ts +4 -0
- package/build/detection/index.d.ts.map +1 -0
- package/build/detection/index.js +4 -0
- package/build/detection/index.js.map +1 -0
- package/build/detection/staticImageDetector.d.ts +25 -0
- package/build/detection/staticImageDetector.d.ts.map +1 -0
- package/build/detection/staticImageDetector.js +48 -0
- package/build/detection/staticImageDetector.js.map +1 -0
- package/build/hooks/index.d.ts +5 -0
- package/build/hooks/index.d.ts.map +1 -0
- package/build/hooks/index.js +5 -0
- package/build/hooks/index.js.map +1 -0
- package/build/hooks/useBlinkDetection.d.ts +39 -0
- package/build/hooks/useBlinkDetection.d.ts.map +1 -0
- package/build/hooks/useBlinkDetection.js +67 -0
- package/build/hooks/useBlinkDetection.js.map +1 -0
- package/build/hooks/useFaceDetection.d.ts +46 -0
- package/build/hooks/useFaceDetection.d.ts.map +1 -0
- package/build/hooks/useFaceDetection.js +80 -0
- package/build/hooks/useFaceDetection.js.map +1 -0
- package/build/hooks/useSmartCamera.d.ts +31 -0
- package/build/hooks/useSmartCamera.d.ts.map +1 -0
- package/build/hooks/useSmartCamera.js +75 -0
- package/build/hooks/useSmartCamera.js.map +1 -0
- package/build/hooks/useSmartCameraWebRTC.d.ts +58 -0
- package/build/hooks/useSmartCameraWebRTC.d.ts.map +1 -0
- package/build/hooks/useSmartCameraWebRTC.js +160 -0
- package/build/hooks/useSmartCameraWebRTC.js.map +1 -0
- package/build/index.d.ts +14 -0
- package/build/index.d.ts.map +1 -0
- package/build/index.js +20 -0
- package/build/index.js.map +1 -0
- package/build/types.d.ts +478 -0
- package/build/types.d.ts.map +1 -0
- package/build/types.js +2 -0
- package/build/types.js.map +1 -0
- package/build/utils/index.d.ts +98 -0
- package/build/utils/index.d.ts.map +1 -0
- package/build/utils/index.js +276 -0
- package/build/utils/index.js.map +1 -0
- package/build/webrtc/WebRTCBridge.d.ts +55 -0
- package/build/webrtc/WebRTCBridge.d.ts.map +1 -0
- package/build/webrtc/WebRTCBridge.js +113 -0
- package/build/webrtc/WebRTCBridge.js.map +1 -0
- package/build/webrtc/index.d.ts +3 -0
- package/build/webrtc/index.d.ts.map +1 -0
- package/build/webrtc/index.js +2 -0
- package/build/webrtc/index.js.map +1 -0
- package/build/webrtc/types.d.ts +64 -0
- package/build/webrtc/types.d.ts.map +1 -0
- package/build/webrtc/types.js +5 -0
- package/build/webrtc/types.js.map +1 -0
- package/expo-module.config.json +9 -0
- package/ios/MLKitFaceDetector.swift +310 -0
- package/ios/SmartCamera.podspec +33 -0
- package/ios/SmartCameraModule.swift +225 -0
- package/ios/SmartCameraView.swift +146 -0
- package/ios/WebRTCFrameBridge.swift +150 -0
- package/package.json +91 -0
- package/plugin/build/index.d.ts +28 -0
- package/plugin/build/index.js +33 -0
- package/plugin/build/withSmartCameraAndroid.d.ts +9 -0
- package/plugin/build/withSmartCameraAndroid.js +108 -0
- package/plugin/build/withSmartCameraIOS.d.ts +11 -0
- package/plugin/build/withSmartCameraIOS.js +92 -0
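
The build output reproduced in the hunks below exposes three layers: direct native-module wrappers (`detectFacesInImage`, the WebRTC helpers, event listeners), worklet utilities for camera frames (`detectFaces`, `processBlinkFromFaces`), and React hooks (`useSmartCamera`, `useFaceDetection`, `useBlinkDetection`, `useSmartCameraWebRTC`). As orientation before the hunks, here is a minimal usage sketch. It assumes the package's root entry re-exports these symbols (the root `index.js` is listed above but its hunk is not shown here), and the option and result shapes are abbreviated because `build/types.d.ts` is not reproduced in this excerpt.

```tsx
import {
  detectFacesInImage,
  addBlinkDetectionListener,
  Constants,
} from '@arfuhad/react-native-smart-camera';

// One-shot face detection on a static image (wraps the native ML Kit call).
async function countFaces(uri: string): Promise<number> {
  const faces = await detectFacesInImage({
    image: { uri },
    performanceMode: 'accurate',
    classificationMode: 'all', // needed for eye-open probabilities
  });
  return faces.length;
}

// Blink events emitted by the native module while the camera is running.
const subscription = addBlinkDetectionListener((event) => {
  console.log('blink', event.leftEyeOpen, event.rightEyeOpen);
});
// Later, e.g. on unmount:
subscription.remove();

console.log('eye-closed threshold', Constants.EYE_CLOSED_THRESHOLD);
```
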
@@ -0,0 +1,121 @@
import { requireNativeModule, EventEmitter } from 'expo-modules-core';
// Require the native module
const SmartCameraModule = requireNativeModule('SmartCameraModule');
// Create an event emitter for native events
const emitter = new EventEmitter(SmartCameraModule);
// ============================================================================
// Face Detection Functions
// ============================================================================
/**
 * Detect faces in a static image
 * @param options - Static image options including the image source and detection settings
 * @returns Promise resolving to an array of detected faces
 *
 * @example
 * ```tsx
 * const faces = await detectFacesInImage({
 *   image: { uri: 'https://example.com/photo.jpg' },
 *   performanceMode: 'accurate',
 *   landmarkMode: 'all',
 * });
 * console.log(`Detected ${faces.length} faces`);
 * ```
 */
export async function detectFacesInImage(options) {
    return SmartCameraModule.detectFacesInImage(options);
}
/**
 * Update face detection options at runtime
 * @param options - New face detection options
 */
export function updateFaceDetectionOptions(options) {
    SmartCameraModule.updateFaceDetectionOptions(options);
}
// ============================================================================
// WebRTC Functions
// ============================================================================
/**
 * Initialize WebRTC for streaming
 * Must be called before startWebRTCStream
 * @returns Promise resolving to true if initialized successfully
 */
export async function initializeWebRTC() {
    return SmartCameraModule.initializeWebRTC();
}
/**
 * Start WebRTC video streaming
 * @param constraints - Video constraints (width, height, frameRate)
 * @returns Promise resolving to true if started successfully
 */
export async function startWebRTCStream(constraints) {
    return SmartCameraModule.startWebRTCStream(constraints);
}
/**
 * Stop WebRTC video streaming
 */
export function stopWebRTCStream() {
    SmartCameraModule.stopWebRTCStream();
}
/**
 * Push a frame to the WebRTC stream
 * @param frameData - Frame data to push
 */
export function pushWebRTCFrame(frameData) {
    SmartCameraModule.pushWebRTCFrame(frameData);
}
/**
 * Check if WebRTC is currently streaming
 * @returns true if streaming, false otherwise
 */
export function isWebRTCStreaming() {
    return SmartCameraModule.isWebRTCStreaming();
}
// ============================================================================
// Event Listeners
// ============================================================================
/**
 * Subscribe to face detection events from native module
 * @param listener - Callback function for face detection events
 * @returns Subscription object with remove method
 */
export function addFaceDetectionListener(listener) {
    return emitter.addListener('onFacesDetected', listener);
}
/**
 * Subscribe to blink detection events from native module
 * @param listener - Callback function for blink events
 * @returns Subscription object with remove method
 */
export function addBlinkDetectionListener(listener) {
    return emitter.addListener('onBlinkDetected', listener);
}
/**
 * Subscribe to error events from native module
 * @param listener - Callback function for error events
 * @returns Subscription object with remove method
 */
export function addErrorListener(listener) {
    return emitter.addListener('onError', listener);
}
/**
 * Subscribe to WebRTC state change events
 * @param listener - Callback function for WebRTC state changes
 * @returns Subscription object with remove method
 */
export function addWebRTCStateChangeListener(listener) {
    return emitter.addListener('onWebRTCStateChange', listener);
}
// ============================================================================
// Constants
// ============================================================================
/**
 * Module constants
 */
export const Constants = {
    PI: SmartCameraModule.PI,
    DEFAULT_MIN_FACE_SIZE: SmartCameraModule.DEFAULT_MIN_FACE_SIZE ?? 0.15,
    EYE_CLOSED_THRESHOLD: SmartCameraModule.EYE_CLOSED_THRESHOLD ?? 0.3,
    EYE_OPEN_THRESHOLD: SmartCameraModule.EYE_OPEN_THRESHOLD ?? 0.7,
};
export default SmartCameraModule;
//# sourceMappingURL=SmartCameraModule.js.map
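
The wrapper above documents an ordering constraint: `initializeWebRTC()` has to resolve before `startWebRTCStream()` is called, and every `add*Listener` call returns a subscription whose `remove()` stops delivery. A minimal lifecycle sketch follows; it assumes the package root re-exports these functions, and it assumes `VideoConstraints` uses the `width`/`height`/`frameRate` fields named in the JSDoc above rather than the exact shape in `types.d.ts`, which this excerpt does not show.

```ts
import {
  initializeWebRTC,
  startWebRTCStream,
  stopWebRTCStream,
  addWebRTCStateChangeListener,
} from '@arfuhad/react-native-smart-camera';

async function runStreamingSession(): Promise<void> {
  const sub = addWebRTCStateChangeListener(({ isStreaming }) => {
    console.log('WebRTC streaming:', isStreaming);
  });

  const ready = await initializeWebRTC(); // must succeed before starting the stream
  if (!ready) {
    sub.remove();
    throw new Error('WebRTC initialization failed');
  }

  // Field names taken from the JSDoc above (assumed shape of VideoConstraints).
  await startWebRTCStream({ width: 640, height: 480, frameRate: 30 });

  // ...later, on teardown:
  stopWebRTCStream();
  sub.remove();
}
```
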
@@ -0,0 +1 @@
{"version":3,"file":"SmartCameraModule.js","sourceRoot":"","sources":["../src/SmartCameraModule.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,mBAAmB,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AAkCtE,4BAA4B;AAC5B,MAAM,iBAAiB,GAAG,mBAAmB,CAA6B,mBAAmB,CAAC,CAAC;AAE/F,4CAA4C;AAC5C,MAAM,OAAO,GAAG,IAAI,YAAY,CAAC,iBAAiB,CAAC,CAAC;AAEpD,+EAA+E;AAC/E,2BAA2B;AAC3B,+EAA+E;AAE/E;;;;;;;;;;;;;;GAcG;AACH,MAAM,CAAC,KAAK,UAAU,kBAAkB,CAAC,OAA2B;IAClE,OAAO,iBAAiB,CAAC,kBAAkB,CAAC,OAAO,CAAC,CAAC;AACvD,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,0BAA0B,CAAC,OAAgC;IACzE,iBAAiB,CAAC,0BAA0B,CAAC,OAAO,CAAC,CAAC;AACxD,CAAC;AAED,+EAA+E;AAC/E,mBAAmB;AACnB,+EAA+E;AAE/E;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,gBAAgB;IACpC,OAAO,iBAAiB,CAAC,gBAAgB,EAAE,CAAC;AAC9C,CAAC;AAED;;;;GAIG;AACH,MAAM,CAAC,KAAK,UAAU,iBAAiB,CAAC,WAA6B;IACnE,OAAO,iBAAiB,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAC;AAC1D,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,gBAAgB;IAC9B,iBAAiB,CAAC,gBAAgB,EAAE,CAAC;AACvC,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,eAAe,CAAC,SAAkC;IAChE,iBAAiB,CAAC,eAAe,CAAC,SAAS,CAAC,CAAC;AAC/C,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,iBAAiB;IAC/B,OAAO,iBAAiB,CAAC,iBAAiB,EAAE,CAAC;AAC/C,CAAC;AAED,+EAA+E;AAC/E,kBAAkB;AAClB,+EAA+E;AAE/E;;;;GAIG;AACH,MAAM,UAAU,wBAAwB,CAAC,QAAiC;IACxE,OAAO,OAAO,CAAC,WAAW,CAAC,iBAAiB,EAAE,QAAQ,CAAC,CAAC;AAC1D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,yBAAyB,CACvC,QAA0F;IAE1F,OAAO,OAAO,CAAC,WAAW,CAAC,iBAAiB,EAAE,QAAQ,CAAC,CAAC;AAC1D,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,gBAAgB,CAAC,QAA4D;IAC3F,OAAO,OAAO,CAAC,WAAW,CAAC,SAAS,EAAE,QAAQ,CAAC,CAAC;AAClD,CAAC;AAED;;;;GAIG;AACH,MAAM,UAAU,4BAA4B,CAAC,QAAmD;IAC9F,OAAO,OAAO,CAAC,WAAW,CAAC,qBAAqB,EAAE,QAAQ,CAAC,CAAC;AAC9D,CAAC;AAED,+EAA+E;AAC/E,YAAY;AACZ,+EAA+E;AAE/E;;GAEG;AACH,MAAM,CAAC,MAAM,SAAS,GAAG;IACvB,EAAE,EAAE,iBAAiB,CAAC,EAAE;IACxB,qBAAqB,EAAE,iBAAiB,CAAC,qBAAqB,IAAI,IAAI;IACtE,oBAAoB,EAAE,iBAAiB,CAAC,oBAAoB,IAAI,GAAG;IACnE,kBAAkB,EAAE,iBAAiB,CAAC,kBAAkB,IAAI,GAAG;CAChE,CAAC;AAEF,eAAe,iBAAiB,CAAC","sourcesContent":["import { requireNativeModule, EventEmitter } from 'expo-modules-core';\n\nimport type { Face, StaticImageOptions, VideoConstraints } from './types';\n\n// Define the native module interface\ninterface SmartCameraModuleInterface {\n // Face detection in static images\n detectFacesInImage(options: StaticImageOptions): Promise<Face[]>;\n\n // Update face detection options\n updateFaceDetectionOptions(options: Record<string, unknown>): void;\n\n // WebRTC functions\n initializeWebRTC(): Promise<boolean>;\n startWebRTCStream(constraints: VideoConstraints): Promise<boolean>;\n stopWebRTCStream(): void;\n pushWebRTCFrame(frameData: Record<string, unknown>): void;\n isWebRTCStreaming(): boolean;\n\n // Native module constants\n readonly PI: number;\n readonly DEFAULT_MIN_FACE_SIZE: number;\n readonly EYE_CLOSED_THRESHOLD: number;\n readonly EYE_OPEN_THRESHOLD: number;\n\n // Allow additional properties from NativeModule\n __expo_module_name__?: string;\n startObserving?: () => void;\n stopObserving?: () => void;\n addListener?: unknown;\n removeListeners?: unknown;\n [key: string]: unknown;\n}\n\n// Require the native module\nconst SmartCameraModule = requireNativeModule<SmartCameraModuleInterface>('SmartCameraModule');\n\n// Create an event emitter for native events\nconst emitter = new EventEmitter(SmartCameraModule);\n\n// ============================================================================\n// Face Detection Functions\n// ============================================================================\n\n/**\n * Detect faces in a static image\n * @param options - Static image options including the image source and detection 
settings\n * @returns Promise resolving to an array of detected faces\n * \n * @example\n * ```tsx\n * const faces = await detectFacesInImage({\n * image: { uri: 'https://example.com/photo.jpg' },\n * performanceMode: 'accurate',\n * landmarkMode: 'all',\n * });\n * console.log(`Detected ${faces.length} faces`);\n * ```\n */\nexport async function detectFacesInImage(options: StaticImageOptions): Promise<Face[]> {\n return SmartCameraModule.detectFacesInImage(options);\n}\n\n/**\n * Update face detection options at runtime\n * @param options - New face detection options\n */\nexport function updateFaceDetectionOptions(options: Record<string, unknown>): void {\n SmartCameraModule.updateFaceDetectionOptions(options);\n}\n\n// ============================================================================\n// WebRTC Functions\n// ============================================================================\n\n/**\n * Initialize WebRTC for streaming\n * Must be called before startWebRTCStream\n * @returns Promise resolving to true if initialized successfully\n */\nexport async function initializeWebRTC(): Promise<boolean> {\n return SmartCameraModule.initializeWebRTC();\n}\n\n/**\n * Start WebRTC video streaming\n * @param constraints - Video constraints (width, height, frameRate)\n * @returns Promise resolving to true if started successfully\n */\nexport async function startWebRTCStream(constraints: VideoConstraints): Promise<boolean> {\n return SmartCameraModule.startWebRTCStream(constraints);\n}\n\n/**\n * Stop WebRTC video streaming\n */\nexport function stopWebRTCStream(): void {\n SmartCameraModule.stopWebRTCStream();\n}\n\n/**\n * Push a frame to the WebRTC stream\n * @param frameData - Frame data to push\n */\nexport function pushWebRTCFrame(frameData: Record<string, unknown>): void {\n SmartCameraModule.pushWebRTCFrame(frameData);\n}\n\n/**\n * Check if WebRTC is currently streaming\n * @returns true if streaming, false otherwise\n */\nexport function isWebRTCStreaming(): boolean {\n return SmartCameraModule.isWebRTCStreaming();\n}\n\n// ============================================================================\n// Event Listeners\n// ============================================================================\n\n/**\n * Subscribe to face detection events from native module\n * @param listener - Callback function for face detection events\n * @returns Subscription object with remove method\n */\nexport function addFaceDetectionListener(listener: (faces: Face[]) => void) {\n return emitter.addListener('onFacesDetected', listener);\n}\n\n/**\n * Subscribe to blink detection events from native module\n * @param listener - Callback function for blink events\n * @returns Subscription object with remove method\n */\nexport function addBlinkDetectionListener(\n listener: (event: { leftEyeOpen: number; rightEyeOpen: number; isBlink: boolean }) => void\n) {\n return emitter.addListener('onBlinkDetected', listener);\n}\n\n/**\n * Subscribe to error events from native module\n * @param listener - Callback function for error events\n * @returns Subscription object with remove method\n */\nexport function addErrorListener(listener: (error: { code: string; message: string }) => void) {\n return emitter.addListener('onError', listener);\n}\n\n/**\n * Subscribe to WebRTC state change events\n * @param listener - Callback function for WebRTC state changes\n * @returns Subscription object with remove method\n */\nexport function addWebRTCStateChangeListener(listener: (state: { isStreaming: boolean }) => 
void) {\n return emitter.addListener('onWebRTCStateChange', listener);\n}\n\n// ============================================================================\n// Constants\n// ============================================================================\n\n/**\n * Module constants\n */\nexport const Constants = {\n PI: SmartCameraModule.PI,\n DEFAULT_MIN_FACE_SIZE: SmartCameraModule.DEFAULT_MIN_FACE_SIZE ?? 0.15,\n EYE_CLOSED_THRESHOLD: SmartCameraModule.EYE_CLOSED_THRESHOLD ?? 0.3,\n EYE_OPEN_THRESHOLD: SmartCameraModule.EYE_OPEN_THRESHOLD ?? 0.7,\n};\n\nexport default SmartCameraModule;\n"]}
@@ -0,0 +1,8 @@
import * as React from 'react';
import type { ViewProps } from 'react-native';
export interface SmartCameraViewProps extends ViewProps {
    cameraFacing?: 'front' | 'back';
    isActive?: boolean;
}
export declare function SmartCameraView(props: SmartCameraViewProps): React.JSX.Element;
//# sourceMappingURL=SmartCameraView.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"SmartCameraView.d.ts","sourceRoot":"","sources":["../src/SmartCameraView.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,cAAc,CAAC;AAE9C,MAAM,WAAW,oBAAqB,SAAQ,SAAS;IACrD,YAAY,CAAC,EAAE,OAAO,GAAG,MAAM,CAAC;IAChC,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAID,wBAAgB,eAAe,CAAC,KAAK,EAAE,oBAAoB,qBAE1D"}
@@ -0,0 +1,7 @@
import { requireNativeViewManager } from 'expo-modules-core';
import * as React from 'react';
const NativeView = requireNativeViewManager('SmartCameraView');
export function SmartCameraView(props) {
    return <NativeView {...props}/>;
}
//# sourceMappingURL=SmartCameraView.js.map
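
`SmartCameraView` above is a thin `requireNativeViewManager` wrapper, so the only props it adds to `ViewProps` are `cameraFacing` and `isActive` (declared in the `.d.ts` hunk earlier). A minimal render sketch, assuming the component is re-exported from the package root:

```tsx
import * as React from 'react';
import { StyleSheet } from 'react-native';
import { SmartCameraView } from '@arfuhad/react-native-smart-camera';

export function SelfieScreen() {
  // Front camera, active as long as the screen is mounted.
  return (
    <SmartCameraView
      cameraFacing="front"
      isActive
      style={StyleSheet.absoluteFill}
    />
  );
}
```
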
@@ -0,0 +1 @@
{"version":3,"file":"SmartCameraView.js","sourceRoot":"","sources":["../src/SmartCameraView.tsx"],"names":[],"mappings":"AAAA,OAAO,EAAE,wBAAwB,EAAE,MAAM,mBAAmB,CAAC;AAC7D,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAQ/B,MAAM,UAAU,GAAG,wBAAwB,CAAC,iBAAiB,CAAC,CAAC;AAE/D,MAAM,UAAU,eAAe,CAAC,KAA2B;IACzD,OAAO,CAAC,UAAU,CAAC,IAAI,KAAK,CAAC,EAAG,CAAC;AACnC,CAAC","sourcesContent":["import { requireNativeViewManager } from 'expo-modules-core';\nimport * as React from 'react';\nimport type { ViewProps } from 'react-native';\n\nexport interface SmartCameraViewProps extends ViewProps {\n cameraFacing?: 'front' | 'back';\n isActive?: boolean;\n}\n\nconst NativeView = requireNativeViewManager('SmartCameraView');\n\nexport function SmartCameraView(props: SmartCameraViewProps) {\n return <NativeView {...props} />;\n}\n\n"]}
@@ -0,0 +1,23 @@
import type { Face, BlinkEvent } from '../types';
/**
 * Process faces to detect blinks
 *
 * @param faces - Array of detected faces
 * @param lastBlinkTimestamp - Timestamp of the last detected blink
 * @param debounceMs - Minimum time between blinks in milliseconds
 * @returns BlinkEvent if a blink was detected, null otherwise
 */
export declare function processBlinkFromFaces(faces: Face[], lastBlinkTimestamp: number, debounceMs?: number): BlinkEvent | null;
/**
 * Reset blink state for all faces
 */
export declare function resetBlinkStates(): void;
/**
 * Get current eye state without blink detection
 * Useful for real-time eye tracking UI
 */
export declare function getEyeState(face: Face): {
    leftOpen: number;
    rightOpen: number;
} | null;
//# sourceMappingURL=blinkProcessor.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"blinkProcessor.d.ts","sourceRoot":"","sources":["../../src/detection/blinkProcessor.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,UAAU,CAAC;AAuBjD;;;;;;;GAOG;AACH,wBAAgB,qBAAqB,CACnC,KAAK,EAAE,IAAI,EAAE,EACb,kBAAkB,EAAE,MAAM,EAC1B,UAAU,GAAE,MAAY,GACvB,UAAU,GAAG,IAAI,CA+DnB;AAED;;GAEG;AACH,wBAAgB,gBAAgB,IAAI,IAAI,CAEvC;AAED;;;GAGG;AACH,wBAAgB,WAAW,CAAC,IAAI,EAAE,IAAI,GAAG;IAAE,QAAQ,EAAE,MAAM,CAAC;IAAC,SAAS,EAAE,MAAM,CAAA;CAAE,GAAG,IAAI,CActF"}
@@ -0,0 +1,90 @@
/**
 * Threshold for considering an eye as closed
 */
const EYE_CLOSED_THRESHOLD = 0.4;
/**
 * Threshold for considering an eye as open
 */
const EYE_OPEN_THRESHOLD = 0.6;
// Global blink state per face tracking ID
const blinkStates = new Map();
/**
 * Process faces to detect blinks
 *
 * @param faces - Array of detected faces
 * @param lastBlinkTimestamp - Timestamp of the last detected blink
 * @param debounceMs - Minimum time between blinks in milliseconds
 * @returns BlinkEvent if a blink was detected, null otherwise
 */
export function processBlinkFromFaces(faces, lastBlinkTimestamp, debounceMs = 300) {
    'worklet';
    if (faces.length === 0) {
        return null;
    }
    // Use the first face (most prominent)
    const face = faces[0];
    // Ensure we have eye classification data
    if (face.leftEyeOpenProbability === undefined ||
        face.rightEyeOpenProbability === undefined) {
        return null;
    }
    const leftEyeOpen = face.leftEyeOpenProbability;
    const rightEyeOpen = face.rightEyeOpenProbability;
    const now = Date.now();
    // Get or create blink state for this face
    const faceId = face.trackingId ?? 0;
    let state = blinkStates.get(faceId);
    if (!state) {
        state = {
            wasEyesClosed: false,
            lastBlinkTimestamp: 0,
        };
        blinkStates.set(faceId, state);
    }
    // Check if eyes are currently closed
    const eyesClosed = leftEyeOpen < EYE_CLOSED_THRESHOLD && rightEyeOpen < EYE_CLOSED_THRESHOLD;
    // Check if eyes are currently open
    const eyesOpen = leftEyeOpen > EYE_OPEN_THRESHOLD && rightEyeOpen > EYE_OPEN_THRESHOLD;
    // Detect blink: transition from closed to open
    const isBlink = state.wasEyesClosed && eyesOpen && (now - state.lastBlinkTimestamp) > debounceMs;
    // Update state
    if (eyesClosed) {
        state.wasEyesClosed = true;
    }
    else if (eyesOpen) {
        state.wasEyesClosed = false;
    }
    if (isBlink) {
        state.lastBlinkTimestamp = now;
        return {
            timestamp: now,
            leftEyeOpen,
            rightEyeOpen,
            isBlink: true,
            faceId: face.trackingId,
        };
    }
    return null;
}
/**
 * Reset blink state for all faces
 */
export function resetBlinkStates() {
    blinkStates.clear();
}
/**
 * Get current eye state without blink detection
 * Useful for real-time eye tracking UI
 */
export function getEyeState(face) {
    'worklet';
    if (face.leftEyeOpenProbability === undefined ||
        face.rightEyeOpenProbability === undefined) {
        return null;
    }
    return {
        leftOpen: face.leftEyeOpenProbability,
        rightOpen: face.rightEyeOpenProbability,
    };
}
//# sourceMappingURL=blinkProcessor.js.map
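
The processor above is a small state machine: a blink is reported only when both eye-open probabilities fall below 0.4 and later rise above 0.6, and only if more than `debounceMs` has elapsed since the last blink recorded for that `trackingId`. The synthetic-frame sketch below exercises that closed-to-open transition; it assumes the root entry re-exports the detection helpers, and the `Face` objects carry only the fields the processor actually reads, so treat it as a test-style illustration rather than real camera data.

```ts
import { processBlinkFromFaces, resetBlinkStates } from '@arfuhad/react-native-smart-camera';

// Minimal stand-in for a detected face; real Face objects carry bounds, angles, etc.
const frameWith = (open: number) =>
  [{ trackingId: 1, leftEyeOpenProbability: open, rightEyeOpenProbability: open }] as unknown as
    Parameters<typeof processBlinkFromFaces>[0];

resetBlinkStates();
processBlinkFromFaces(frameWith(0.9), 0); // eyes open   -> null
processBlinkFromFaces(frameWith(0.1), 0); // eyes closed -> null, closed state recorded
const blink = processBlinkFromFaces(frameWith(0.9), 0); // closed -> open transition
console.log(blink?.isBlink); // true; subsequent blinks within 300 ms are suppressed
```
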
@@ -0,0 +1 @@
{"version":3,"file":"blinkProcessor.js","sourceRoot":"","sources":["../../src/detection/blinkProcessor.ts"],"names":[],"mappings":"AAEA;;GAEG;AACH,MAAM,oBAAoB,GAAG,GAAG,CAAC;AAEjC;;GAEG;AACH,MAAM,kBAAkB,GAAG,GAAG,CAAC;AAU/B,0CAA0C;AAC1C,MAAM,WAAW,GAAG,IAAI,GAAG,EAAsB,CAAC;AAElD;;;;;;;GAOG;AACH,MAAM,UAAU,qBAAqB,CACnC,KAAa,EACb,kBAA0B,EAC1B,aAAqB,GAAG;IAExB,SAAS,CAAC;IAEV,IAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;QACvB,OAAO,IAAI,CAAC;IACd,CAAC;IAED,sCAAsC;IACtC,MAAM,IAAI,GAAG,KAAK,CAAC,CAAC,CAAC,CAAC;IAEtB,yCAAyC;IACzC,IACE,IAAI,CAAC,sBAAsB,KAAK,SAAS;QACzC,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAC1C,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,WAAW,GAAG,IAAI,CAAC,sBAAsB,CAAC;IAChD,MAAM,YAAY,GAAG,IAAI,CAAC,uBAAuB,CAAC;IAClD,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;IAEvB,0CAA0C;IAC1C,MAAM,MAAM,GAAG,IAAI,CAAC,UAAU,IAAI,CAAC,CAAC;IACpC,IAAI,KAAK,GAAG,WAAW,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;IAEpC,IAAI,CAAC,KAAK,EAAE,CAAC;QACX,KAAK,GAAG;YACN,aAAa,EAAE,KAAK;YACpB,kBAAkB,EAAE,CAAC;SACtB,CAAC;QACF,WAAW,CAAC,GAAG,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;IACjC,CAAC;IAED,qCAAqC;IACrC,MAAM,UAAU,GAAG,WAAW,GAAG,oBAAoB,IAAI,YAAY,GAAG,oBAAoB,CAAC;IAE7F,mCAAmC;IACnC,MAAM,QAAQ,GAAG,WAAW,GAAG,kBAAkB,IAAI,YAAY,GAAG,kBAAkB,CAAC;IAEvF,+CAA+C;IAC/C,MAAM,OAAO,GAAG,KAAK,CAAC,aAAa,IAAI,QAAQ,IAAI,CAAC,GAAG,GAAG,KAAK,CAAC,kBAAkB,CAAC,GAAG,UAAU,CAAC;IAEjG,eAAe;IACf,IAAI,UAAU,EAAE,CAAC;QACf,KAAK,CAAC,aAAa,GAAG,IAAI,CAAC;IAC7B,CAAC;SAAM,IAAI,QAAQ,EAAE,CAAC;QACpB,KAAK,CAAC,aAAa,GAAG,KAAK,CAAC;IAC9B,CAAC;IAED,IAAI,OAAO,EAAE,CAAC;QACZ,KAAK,CAAC,kBAAkB,GAAG,GAAG,CAAC;QAE/B,OAAO;YACL,SAAS,EAAE,GAAG;YACd,WAAW;YACX,YAAY;YACZ,OAAO,EAAE,IAAI;YACb,MAAM,EAAE,IAAI,CAAC,UAAU;SACxB,CAAC;IACJ,CAAC;IAED,OAAO,IAAI,CAAC;AACd,CAAC;AAED;;GAEG;AACH,MAAM,UAAU,gBAAgB;IAC9B,WAAW,CAAC,KAAK,EAAE,CAAC;AACtB,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,WAAW,CAAC,IAAU;IACpC,SAAS,CAAC;IAEV,IACE,IAAI,CAAC,sBAAsB,KAAK,SAAS;QACzC,IAAI,CAAC,uBAAuB,KAAK,SAAS,EAC1C,CAAC;QACD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,QAAQ,EAAE,IAAI,CAAC,sBAAsB;QACrC,SAAS,EAAE,IAAI,CAAC,uBAAuB;KACxC,CAAC;AACJ,CAAC","sourcesContent":["import type { Face, BlinkEvent } from '../types';\n\n/**\n * Threshold for considering an eye as closed\n */\nconst EYE_CLOSED_THRESHOLD = 0.4;\n\n/**\n * Threshold for considering an eye as open\n */\nconst EYE_OPEN_THRESHOLD = 0.6;\n\n/**\n * State for tracking blink across frames\n */\ninterface BlinkState {\n wasEyesClosed: boolean;\n lastBlinkTimestamp: number;\n}\n\n// Global blink state per face tracking ID\nconst blinkStates = new Map<number, BlinkState>();\n\n/**\n * Process faces to detect blinks\n * \n * @param faces - Array of detected faces\n * @param lastBlinkTimestamp - Timestamp of the last detected blink\n * @param debounceMs - Minimum time between blinks in milliseconds\n * @returns BlinkEvent if a blink was detected, null otherwise\n */\nexport function processBlinkFromFaces(\n faces: Face[],\n lastBlinkTimestamp: number,\n debounceMs: number = 300\n): BlinkEvent | null {\n 'worklet';\n\n if (faces.length === 0) {\n return null;\n }\n\n // Use the first face (most prominent)\n const face = faces[0];\n\n // Ensure we have eye classification data\n if (\n face.leftEyeOpenProbability === undefined ||\n face.rightEyeOpenProbability === undefined\n ) {\n return null;\n }\n\n const leftEyeOpen = face.leftEyeOpenProbability;\n const rightEyeOpen = face.rightEyeOpenProbability;\n const now = Date.now();\n\n // Get or create blink state for this face\n const faceId = face.trackingId ?? 
0;\n let state = blinkStates.get(faceId);\n\n if (!state) {\n state = {\n wasEyesClosed: false,\n lastBlinkTimestamp: 0,\n };\n blinkStates.set(faceId, state);\n }\n\n // Check if eyes are currently closed\n const eyesClosed = leftEyeOpen < EYE_CLOSED_THRESHOLD && rightEyeOpen < EYE_CLOSED_THRESHOLD;\n\n // Check if eyes are currently open\n const eyesOpen = leftEyeOpen > EYE_OPEN_THRESHOLD && rightEyeOpen > EYE_OPEN_THRESHOLD;\n\n // Detect blink: transition from closed to open\n const isBlink = state.wasEyesClosed && eyesOpen && (now - state.lastBlinkTimestamp) > debounceMs;\n\n // Update state\n if (eyesClosed) {\n state.wasEyesClosed = true;\n } else if (eyesOpen) {\n state.wasEyesClosed = false;\n }\n\n if (isBlink) {\n state.lastBlinkTimestamp = now;\n\n return {\n timestamp: now,\n leftEyeOpen,\n rightEyeOpen,\n isBlink: true,\n faceId: face.trackingId,\n };\n }\n\n return null;\n}\n\n/**\n * Reset blink state for all faces\n */\nexport function resetBlinkStates(): void {\n blinkStates.clear();\n}\n\n/**\n * Get current eye state without blink detection\n * Useful for real-time eye tracking UI\n */\nexport function getEyeState(face: Face): { leftOpen: number; rightOpen: number } | null {\n 'worklet';\n\n if (\n face.leftEyeOpenProbability === undefined ||\n face.rightEyeOpenProbability === undefined\n ) {\n return null;\n }\n\n return {\n leftOpen: face.leftEyeOpenProbability,\n rightOpen: face.rightEyeOpenProbability,\n };\n}\n\n"]}
@@ -0,0 +1,16 @@
import type { Frame } from 'react-native-vision-camera';
import type { Face, FrameProcessorOptions } from '../types';
/**
 * Detect faces in a camera frame
 *
 * @param frame - The camera frame from VisionCamera
 * @param options - Face detection options
 * @returns Array of detected faces
 */
export declare function detectFaces(frame: Frame, options?: Partial<FrameProcessorOptions>): Face[];
/**
 * Register the face detection frame processor plugin
 * This is called during module initialization
 */
export declare function registerFaceDetectorPlugin(): void;
//# sourceMappingURL=faceDetector.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"faceDetector.d.ts","sourceRoot":"","sources":["../../src/detection/faceDetector.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,KAAK,EAAE,MAAM,4BAA4B,CAAC;AACxD,OAAO,KAAK,EAAE,IAAI,EAAE,qBAAqB,EAAE,MAAM,UAAU,CAAC;AAwB5D;;;;;;GAMG;AACH,wBAAgB,WAAW,CAAC,KAAK,EAAE,KAAK,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,qBAAqB,CAAC,GAAG,IAAI,EAAE,CAc1F;AAED;;;GAGG;AACH,wBAAgB,0BAA0B,IAAI,IAAI,CAIjD"}
@@ -0,0 +1,46 @@
/**
 * Default face detection options
 */
const DEFAULT_OPTIONS = {
    performanceMode: 'fast',
    landmarkMode: 'none',
    contourMode: 'none',
    classificationMode: 'none',
    minFaceSize: 0.15,
    trackingEnabled: false,
    cameraFacing: 'front',
    autoMode: false,
    windowWidth: 1.0,
    windowHeight: 1.0,
};
/**
 * Detect faces in a camera frame
 *
 * @param frame - The camera frame from VisionCamera
 * @param options - Face detection options
 * @returns Array of detected faces
 */
export function detectFaces(frame, options) {
    'worklet';
    const mergedOptions = {
        ...DEFAULT_OPTIONS,
        ...options,
    };
    try {
        return __detectFaces(frame, mergedOptions);
    }
    catch (error) {
        // Return empty array on error in worklet context
        return [];
    }
}
/**
 * Register the face detection frame processor plugin
 * This is called during module initialization
 */
export function registerFaceDetectorPlugin() {
    // The native plugin registration happens automatically
    // through the Expo module system
    console.log('[SmartCamera] Face detector plugin registered');
}
//# sourceMappingURL=faceDetector.js.map
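
`detectFaces` is written as a worklet and delegates to a native `__detectFaces` binding that this diff does not show, so the sketch below only illustrates the intended call site: a `react-native-vision-camera` frame processor feeding detected faces into the blink processor. Treat it as an assumption about usage, not documentation of a tested path.

```tsx
import { useFrameProcessor } from 'react-native-vision-camera';
import { detectFaces, processBlinkFromFaces } from '@arfuhad/react-native-smart-camera';

export function useBlinkFrameProcessor() {
  return useFrameProcessor((frame) => {
    'worklet';
    // Classification must be enabled for eye-open probabilities to be populated.
    const faces = detectFaces(frame, { classificationMode: 'all', performanceMode: 'fast' });
    const blink = processBlinkFromFaces(faces, 0, 300);
    if (blink) {
      console.log('blink at', blink.timestamp);
    }
  }, []);
}
```
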
@@ -0,0 +1 @@
{"version":3,"file":"faceDetector.js","sourceRoot":"","sources":["../../src/detection/faceDetector.ts"],"names":[],"mappings":"AASA;;GAEG;AACH,MAAM,eAAe,GAA0B;IAC7C,eAAe,EAAE,MAAM;IACvB,YAAY,EAAE,MAAM;IACpB,WAAW,EAAE,MAAM;IACnB,kBAAkB,EAAE,MAAM;IAC1B,WAAW,EAAE,IAAI;IACjB,eAAe,EAAE,KAAK;IACtB,YAAY,EAAE,OAAO;IACrB,QAAQ,EAAE,KAAK;IACf,WAAW,EAAE,GAAG;IAChB,YAAY,EAAE,GAAG;CAClB,CAAC;AAEF;;;;;;GAMG;AACH,MAAM,UAAU,WAAW,CAAC,KAAY,EAAE,OAAwC;IAChF,SAAS,CAAC;IAEV,MAAM,aAAa,GAA0B;QAC3C,GAAG,eAAe;QAClB,GAAG,OAAO;KACX,CAAC;IAEF,IAAI,CAAC;QACH,OAAO,aAAa,CAAC,KAAK,EAAE,aAAa,CAAC,CAAC;IAC7C,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,iDAAiD;QACjD,OAAO,EAAE,CAAC;IACZ,CAAC;AACH,CAAC;AAED;;;GAGG;AACH,MAAM,UAAU,0BAA0B;IACxC,uDAAuD;IACvD,iCAAiC;IACjC,OAAO,CAAC,GAAG,CAAC,+CAA+C,CAAC,CAAC;AAC/D,CAAC","sourcesContent":["import type { Frame } from 'react-native-vision-camera';\nimport type { Face, FrameProcessorOptions } from '../types';\n\n/**\n * Native face detection function placeholder\n * This will be implemented as a VisionCamera frame processor plugin\n */\ndeclare function __detectFaces(frame: Frame, options: FrameProcessorOptions): Face[];\n\n/**\n * Default face detection options\n */\nconst DEFAULT_OPTIONS: FrameProcessorOptions = {\n performanceMode: 'fast',\n landmarkMode: 'none',\n contourMode: 'none',\n classificationMode: 'none',\n minFaceSize: 0.15,\n trackingEnabled: false,\n cameraFacing: 'front',\n autoMode: false,\n windowWidth: 1.0,\n windowHeight: 1.0,\n};\n\n/**\n * Detect faces in a camera frame\n * \n * @param frame - The camera frame from VisionCamera\n * @param options - Face detection options\n * @returns Array of detected faces\n */\nexport function detectFaces(frame: Frame, options?: Partial<FrameProcessorOptions>): Face[] {\n 'worklet';\n\n const mergedOptions: FrameProcessorOptions = {\n ...DEFAULT_OPTIONS,\n ...options,\n };\n\n try {\n return __detectFaces(frame, mergedOptions);\n } catch (error) {\n // Return empty array on error in worklet context\n return [];\n }\n}\n\n/**\n * Register the face detection frame processor plugin\n * This is called during module initialization\n */\nexport function registerFaceDetectorPlugin(): void {\n // The native plugin registration happens automatically\n // through the Expo module system\n console.log('[SmartCamera] Face detector plugin registered');\n}\n\n"]}
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/detection/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,0BAA0B,EAAE,MAAM,gBAAgB,CAAC;AACzE,OAAO,EAAE,qBAAqB,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AACxF,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC"}
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/detection/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,WAAW,EAAE,0BAA0B,EAAE,MAAM,gBAAgB,CAAC;AACzE,OAAO,EAAE,qBAAqB,EAAE,gBAAgB,EAAE,WAAW,EAAE,MAAM,kBAAkB,CAAC;AACxF,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC","sourcesContent":["export { detectFaces, registerFaceDetectorPlugin } from './faceDetector';\nexport { processBlinkFromFaces, resetBlinkStates, getEyeState } from './blinkProcessor';\nexport { detectFacesInImage } from './staticImageDetector';\n"]}
@@ -0,0 +1,25 @@
import type { Face, StaticImageOptions } from '../types';
/**
 * Detect faces in a static image
 *
 * @param options - Image source and detection options
 * @returns Promise resolving to array of detected faces
 *
 * @example
 * ```tsx
 * // Using require
 * const faces = await detectFacesInImage({
 *   image: require('./photo.jpg'),
 *   performanceMode: 'accurate',
 *   landmarkMode: 'all',
 * });
 *
 * // Using URI
 * const faces = await detectFacesInImage({
 *   image: { uri: 'https://example.com/photo.jpg' },
 *   classificationMode: 'all',
 * });
 * ```
 */
export declare function detectFacesInImage(options: StaticImageOptions): Promise<Face[]>;
//# sourceMappingURL=staticImageDetector.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"staticImageDetector.d.ts","sourceRoot":"","sources":["../../src/detection/staticImageDetector.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,IAAI,EAAE,kBAAkB,EAAE,MAAM,UAAU,CAAC;AAczD;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,wBAAsB,kBAAkB,CAAC,OAAO,EAAE,kBAAkB,GAAG,OAAO,CAAC,IAAI,EAAE,CAAC,CAYrF"}
@@ -0,0 +1,48 @@
import SmartCameraModule from '../SmartCameraModule';
/**
 * Default options for static image face detection
 */
const DEFAULT_OPTIONS = {
    performanceMode: 'accurate',
    landmarkMode: 'none',
    contourMode: 'none',
    classificationMode: 'none',
    minFaceSize: 0.15,
    trackingEnabled: false,
};
/**
 * Detect faces in a static image
 *
 * @param options - Image source and detection options
 * @returns Promise resolving to array of detected faces
 *
 * @example
 * ```tsx
 * // Using require
 * const faces = await detectFacesInImage({
 *   image: require('./photo.jpg'),
 *   performanceMode: 'accurate',
 *   landmarkMode: 'all',
 * });
 *
 * // Using URI
 * const faces = await detectFacesInImage({
 *   image: { uri: 'https://example.com/photo.jpg' },
 *   classificationMode: 'all',
 * });
 * ```
 */
export async function detectFacesInImage(options) {
    const mergedOptions = {
        ...DEFAULT_OPTIONS,
        ...options,
    };
    try {
        return await SmartCameraModule.detectFacesInImage(mergedOptions);
    }
    catch (error) {
        console.error('[SmartCamera] Error detecting faces in image:', error);
        throw error;
    }
}
//# sourceMappingURL=staticImageDetector.js.map
@@ -0,0 +1 @@
{"version":3,"file":"staticImageDetector.js","sourceRoot":"","sources":["../../src/detection/staticImageDetector.ts"],"names":[],"mappings":"AAAA,OAAO,iBAAiB,MAAM,sBAAsB,CAAC;AAGrD;;GAEG;AACH,MAAM,eAAe,GAAgC;IACnD,eAAe,EAAE,UAAU;IAC3B,YAAY,EAAE,MAAM;IACpB,WAAW,EAAE,MAAM;IACnB,kBAAkB,EAAE,MAAM;IAC1B,WAAW,EAAE,IAAI;IACjB,eAAe,EAAE,KAAK;CACvB,CAAC;AAEF;;;;;;;;;;;;;;;;;;;;;GAqBG;AACH,MAAM,CAAC,KAAK,UAAU,kBAAkB,CAAC,OAA2B;IAClE,MAAM,aAAa,GAAuB;QACxC,GAAG,eAAe;QAClB,GAAG,OAAO;KACX,CAAC;IAEF,IAAI,CAAC;QACH,OAAO,MAAM,iBAAiB,CAAC,kBAAkB,CAAC,aAAa,CAAC,CAAC;IACnE,CAAC;IAAC,OAAO,KAAK,EAAE,CAAC;QACf,OAAO,CAAC,KAAK,CAAC,+CAA+C,EAAE,KAAK,CAAC,CAAC;QACtE,MAAM,KAAK,CAAC;IACd,CAAC;AACH,CAAC","sourcesContent":["import SmartCameraModule from '../SmartCameraModule';\nimport type { Face, StaticImageOptions } from '../types';\n\n/**\n * Default options for static image face detection\n */\nconst DEFAULT_OPTIONS: Partial<StaticImageOptions> = {\n performanceMode: 'accurate',\n landmarkMode: 'none',\n contourMode: 'none',\n classificationMode: 'none',\n minFaceSize: 0.15,\n trackingEnabled: false,\n};\n\n/**\n * Detect faces in a static image\n * \n * @param options - Image source and detection options\n * @returns Promise resolving to array of detected faces\n * \n * @example\n * ```tsx\n * // Using require\n * const faces = await detectFacesInImage({\n * image: require('./photo.jpg'),\n * performanceMode: 'accurate',\n * landmarkMode: 'all',\n * });\n * \n * // Using URI\n * const faces = await detectFacesInImage({\n * image: { uri: 'https://example.com/photo.jpg' },\n * classificationMode: 'all',\n * });\n * ```\n */\nexport async function detectFacesInImage(options: StaticImageOptions): Promise<Face[]> {\n const mergedOptions: StaticImageOptions = {\n ...DEFAULT_OPTIONS,\n ...options,\n };\n\n try {\n return await SmartCameraModule.detectFacesInImage(mergedOptions);\n } catch (error) {\n console.error('[SmartCamera] Error detecting faces in image:', error);\n throw error;\n }\n}\n\n"]}
@@ -0,0 +1,5 @@
export { useSmartCamera, getAvailableCameras } from './useSmartCamera';
export { useFaceDetection, type UseFaceDetectionOptions } from './useFaceDetection';
export { useBlinkDetection, type UseBlinkDetectionOptions } from './useBlinkDetection';
export { useSmartCameraWebRTC, type UseSmartCameraWebRTCOptions } from './useSmartCameraWebRTC';
//# sourceMappingURL=index.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/hooks/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,kBAAkB,CAAC;AACvE,OAAO,EAAE,gBAAgB,EAAE,KAAK,uBAAuB,EAAE,MAAM,oBAAoB,CAAC;AACpF,OAAO,EAAE,iBAAiB,EAAE,KAAK,wBAAwB,EAAE,MAAM,qBAAqB,CAAC;AACvF,OAAO,EAAE,oBAAoB,EAAE,KAAK,2BAA2B,EAAE,MAAM,wBAAwB,CAAC"}
@@ -0,0 +1,5 @@
export { useSmartCamera, getAvailableCameras } from './useSmartCamera';
export { useFaceDetection } from './useFaceDetection';
export { useBlinkDetection } from './useBlinkDetection';
export { useSmartCameraWebRTC } from './useSmartCameraWebRTC';
//# sourceMappingURL=index.js.map
@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/hooks/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,mBAAmB,EAAE,MAAM,kBAAkB,CAAC;AACvE,OAAO,EAAE,gBAAgB,EAAgC,MAAM,oBAAoB,CAAC;AACpF,OAAO,EAAE,iBAAiB,EAAiC,MAAM,qBAAqB,CAAC;AACvF,OAAO,EAAE,oBAAoB,EAAoC,MAAM,wBAAwB,CAAC","sourcesContent":["export { useSmartCamera, getAvailableCameras } from './useSmartCamera';\nexport { useFaceDetection, type UseFaceDetectionOptions } from './useFaceDetection';\nexport { useBlinkDetection, type UseBlinkDetectionOptions } from './useBlinkDetection';\nexport { useSmartCameraWebRTC, type UseSmartCameraWebRTCOptions } from './useSmartCameraWebRTC';\n"]}
@@ -0,0 +1,39 @@
import type { BlinkEvent, UseBlinkDetectionResult } from '../types';
/**
 * Options for useBlinkDetection hook
 */
export interface UseBlinkDetectionOptions {
    /** Whether blink detection is enabled. Default: true */
    enabled?: boolean;
    /** Minimum time between blinks in milliseconds. Default: 300 */
    debounceMs?: number;
    /** Callback when a blink is detected */
    onBlink?: (event: BlinkEvent) => void;
}
/**
 * Hook for managing blink detection state
 *
 * @param options - Blink detection options
 * @returns Blink detection state and controls
 *
 * @example
 * ```tsx
 * function BlinkCounter() {
 *   const { lastBlink, blinkCount, resetCount } = useBlinkDetection({
 *     debounceMs: 300,
 *     onBlink: (event) => {
 *       console.log('Blink detected at', event.timestamp);
 *     },
 *   });
 *
 *   return (
 *     <View>
 *       <Text>Blinks: {blinkCount}</Text>
 *       <Button onPress={resetCount} title="Reset" />
 *     </View>
 *   );
 * }
 * ```
 */
export declare function useBlinkDetection(options?: UseBlinkDetectionOptions): UseBlinkDetectionResult;
//# sourceMappingURL=useBlinkDetection.d.ts.map
@@ -0,0 +1 @@
{"version":3,"file":"useBlinkDetection.d.ts","sourceRoot":"","sources":["../../src/hooks/useBlinkDetection.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,UAAU,EAAE,uBAAuB,EAAE,MAAM,UAAU,CAAC;AAEpE;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,wDAAwD;IACxD,OAAO,CAAC,EAAE,OAAO,CAAC;IAElB,gEAAgE;IAChE,UAAU,CAAC,EAAE,MAAM,CAAC;IAEpB,wCAAwC;IACxC,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,UAAU,KAAK,IAAI,CAAC;CACvC;AAED;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAgB,iBAAiB,CAC/B,OAAO,GAAE,wBAA6B,GACrC,uBAAuB,CA+CzB"}
@@ -0,0 +1,67 @@
import { useState, useCallback, useRef, useEffect } from 'react';
/**
 * Hook for managing blink detection state
 *
 * @param options - Blink detection options
 * @returns Blink detection state and controls
 *
 * @example
 * ```tsx
 * function BlinkCounter() {
 *   const { lastBlink, blinkCount, resetCount } = useBlinkDetection({
 *     debounceMs: 300,
 *     onBlink: (event) => {
 *       console.log('Blink detected at', event.timestamp);
 *     },
 *   });
 *
 *   return (
 *     <View>
 *       <Text>Blinks: {blinkCount}</Text>
 *       <Button onPress={resetCount} title="Reset" />
 *     </View>
 *   );
 * }
 * ```
 */
export function useBlinkDetection(options = {}) {
    const { enabled = true, debounceMs = 300, onBlink } = options;
    const [lastBlink, setLastBlink] = useState(null);
    const [blinkCount, setBlinkCount] = useState(0);
    const callbackRef = useRef(onBlink);
    const lastBlinkTimeRef = useRef(0);
    // Keep callback ref updated
    useEffect(() => {
        callbackRef.current = onBlink;
    }, [onBlink]);
    // Handle blink event (called from frame processor)
    const handleBlink = useCallback((event) => {
        const now = Date.now();
        // Apply debounce
        if (now - lastBlinkTimeRef.current < debounceMs) {
            return;
        }
        lastBlinkTimeRef.current = now;
        setLastBlink(event);
        setBlinkCount((prev) => prev + 1);
        callbackRef.current?.(event);
    }, [debounceMs]);
    // Reset blink count
    const resetCount = useCallback(() => {
        setBlinkCount(0);
        setLastBlink(null);
        lastBlinkTimeRef.current = 0;
    }, []);
    // Reset on disable
    useEffect(() => {
        if (!enabled) {
            resetCount();
        }
    }, [enabled, resetCount]);
    return {
        lastBlink,
        blinkCount,
        resetCount,
    };
}
//# sourceMappingURL=useBlinkDetection.js.map
@@ -0,0 +1 @@
{"version":3,"file":"useBlinkDetection.js","sourceRoot":"","sources":["../../src/hooks/useBlinkDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,OAAO,CAAC;AAiBjE;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,MAAM,UAAU,iBAAiB,CAC/B,UAAoC,EAAE;IAEtC,MAAM,EAAE,OAAO,GAAG,IAAI,EAAE,UAAU,GAAG,GAAG,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC;IAE9D,MAAM,CAAC,SAAS,EAAE,YAAY,CAAC,GAAG,QAAQ,CAAoB,IAAI,CAAC,CAAC;IACpE,MAAM,CAAC,UAAU,EAAE,aAAa,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,CAAC;IAChD,MAAM,WAAW,GAAG,MAAM,CAAC,OAAO,CAAC,CAAC;IACpC,MAAM,gBAAgB,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;IAEnC,4BAA4B;IAC5B,SAAS,CAAC,GAAG,EAAE;QACb,WAAW,CAAC,OAAO,GAAG,OAAO,CAAC;IAChC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC;IAEd,mDAAmD;IACnD,MAAM,WAAW,GAAG,WAAW,CAAC,CAAC,KAAiB,EAAE,EAAE;QACpD,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,EAAE,CAAC;QAEvB,iBAAiB;QACjB,IAAI,GAAG,GAAG,gBAAgB,CAAC,OAAO,GAAG,UAAU,EAAE,CAAC;YAChD,OAAO;QACT,CAAC;QAED,gBAAgB,CAAC,OAAO,GAAG,GAAG,CAAC;QAC/B,YAAY,CAAC,KAAK,CAAC,CAAC;QACpB,aAAa,CAAC,CAAC,IAAI,EAAE,EAAE,CAAC,IAAI,GAAG,CAAC,CAAC,CAAC;QAClC,WAAW,CAAC,OAAO,EAAE,CAAC,KAAK,CAAC,CAAC;IAC/B,CAAC,EAAE,CAAC,UAAU,CAAC,CAAC,CAAC;IAEjB,oBAAoB;IACpB,MAAM,UAAU,GAAG,WAAW,CAAC,GAAG,EAAE;QAClC,aAAa,CAAC,CAAC,CAAC,CAAC;QACjB,YAAY,CAAC,IAAI,CAAC,CAAC;QACnB,gBAAgB,CAAC,OAAO,GAAG,CAAC,CAAC;IAC/B,CAAC,EAAE,EAAE,CAAC,CAAC;IAEP,mBAAmB;IACnB,SAAS,CAAC,GAAG,EAAE;QACb,IAAI,CAAC,OAAO,EAAE,CAAC;YACb,UAAU,EAAE,CAAC;QACf,CAAC;IACH,CAAC,EAAE,CAAC,OAAO,EAAE,UAAU,CAAC,CAAC,CAAC;IAE1B,OAAO;QACL,SAAS;QACT,UAAU;QACV,UAAU;KACX,CAAC;AACJ,CAAC","sourcesContent":["import { useState, useCallback, useRef, useEffect } from 'react';\nimport type { BlinkEvent, UseBlinkDetectionResult } from '../types';\n\n/**\n * Options for useBlinkDetection hook\n */\nexport interface UseBlinkDetectionOptions {\n /** Whether blink detection is enabled. Default: true */\n enabled?: boolean;\n \n /** Minimum time between blinks in milliseconds. 
Default: 300 */\n debounceMs?: number;\n \n /** Callback when a blink is detected */\n onBlink?: (event: BlinkEvent) => void;\n}\n\n/**\n * Hook for managing blink detection state\n * \n * @param options - Blink detection options\n * @returns Blink detection state and controls\n * \n * @example\n * ```tsx\n * function BlinkCounter() {\n * const { lastBlink, blinkCount, resetCount } = useBlinkDetection({\n * debounceMs: 300,\n * onBlink: (event) => {\n * console.log('Blink detected at', event.timestamp);\n * },\n * });\n * \n * return (\n * <View>\n * <Text>Blinks: {blinkCount}</Text>\n * <Button onPress={resetCount} title=\"Reset\" />\n * </View>\n * );\n * }\n * ```\n */\nexport function useBlinkDetection(\n options: UseBlinkDetectionOptions = {}\n): UseBlinkDetectionResult {\n const { enabled = true, debounceMs = 300, onBlink } = options;\n \n const [lastBlink, setLastBlink] = useState<BlinkEvent | null>(null);\n const [blinkCount, setBlinkCount] = useState(0);\n const callbackRef = useRef(onBlink);\n const lastBlinkTimeRef = useRef(0);\n\n // Keep callback ref updated\n useEffect(() => {\n callbackRef.current = onBlink;\n }, [onBlink]);\n\n // Handle blink event (called from frame processor)\n const handleBlink = useCallback((event: BlinkEvent) => {\n const now = Date.now();\n \n // Apply debounce\n if (now - lastBlinkTimeRef.current < debounceMs) {\n return;\n }\n \n lastBlinkTimeRef.current = now;\n setLastBlink(event);\n setBlinkCount((prev) => prev + 1);\n callbackRef.current?.(event);\n }, [debounceMs]);\n\n // Reset blink count\n const resetCount = useCallback(() => {\n setBlinkCount(0);\n setLastBlink(null);\n lastBlinkTimeRef.current = 0;\n }, []);\n\n // Reset on disable\n useEffect(() => {\n if (!enabled) {\n resetCount();\n }\n }, [enabled, resetCount]);\n\n return {\n lastBlink,\n blinkCount,\n resetCount,\n };\n}\n\n"]}