idmission-web-sdk 2.3.162 → 2.3.163-use-doc-detect-for-face-tracking-new-model-cfd6ae1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/common/debug.d.ts +1 -1
- package/dist/components/common/debug.d.ts.map +1 -1
- package/dist/components/selfie_capture/SelfieGuidanceModelsProvider.d.ts.map +1 -1
- package/dist/components/video_signature_capture/VideoSignatureCapture.d.ts.map +1 -1
- package/dist/lib/models/DocumentDetection.d.ts +2 -1
- package/dist/lib/models/DocumentDetection.d.ts.map +1 -1
- package/dist/lib/models/FaceDetection.d.ts +5 -4
- package/dist/lib/models/FaceDetection.d.ts.map +1 -1
- package/dist/lib/models/defaults/DocumentDetector.d.ts +2 -2
- package/dist/sdk2.cjs.development.js +105 -200
- package/dist/sdk2.cjs.development.js.map +1 -1
- package/dist/sdk2.cjs.production.js +1 -1
- package/dist/sdk2.cjs.production.js.map +1 -1
- package/dist/sdk2.esm.js +106 -201
- package/dist/sdk2.esm.js.map +1 -1
- package/dist/sdk2.umd.development.js +105 -200
- package/dist/sdk2.umd.development.js.map +1 -1
- package/dist/sdk2.umd.production.js +1 -1
- package/dist/sdk2.umd.production.js.map +1 -1
- package/package.json +1 -1

package/dist/components/common/debug.d.ts
@@ -52,7 +52,7 @@ export type SelfieCaptureFaceDebugBoxProps = {
 };
 export declare function SelfieCaptureFaceDebugBox({ face, flipX, color, scaling, }: SelfieCaptureFaceDebugBoxProps): ReactElement;
 type SelfieCaptureFaceKeypointProps = {
-point: FaceKeypoint;
+point: FaceKeypoint | null;
 scaling: DebugScalingDetails;
 flipX?: boolean;
 color?: string;

package/dist/components/common/debug.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../../src/components/common/debug.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,YAAY,EAAE,SAAS,EAAW,MAAM,OAAO,CAAA;AAE/D,OAAO,EAAE,cAAc,EAAE,MAAM,oCAAoC,CAAA;AACnE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,MAAM,gCAAgC,CAAA;AAKnE,wBAAgB,cAAc,CAAC,EAAE,QAAQ,EAAE,EAAE;IAAE,QAAQ,EAAE,SAAS,CAAA;CAAE,qBAOnE;AAED,eAAO,MAAM,iBAAiB,2MAY7B,CAAA;AAED,eAAO,MAAM,8BAA8B;aAAyB,OAAO;YAa1E,CAAA;AAED,eAAO,MAAM,uBAAuB;aACzB,MAAM;aACN,OAAO;YAQjB,CAAA;AAED,eAAO,MAAM,2BAA2B;aAC7B,MAAM;aACN,OAAO;YAWjB,CAAA;AAED,MAAM,MAAM,mBAAmB,GAAG;IAChC,UAAU,EAAE,OAAO,CAAA;IACnB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,wBAAgB,sBAAsB,CAAC,EACrC,OAAc,EACd,SAAS,EACT,UAAU,GACX,EAAE;IACD,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;CACnB,GAAG,mBAAmB,CAsCtB;AAED,eAAO,MAAM,uBAAuB,mCAIjC;IACD,OAAO,CAAC,EAAE,mBAAmB,CAAA;IAC7B,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,QAAQ,EAAE,SAAS,CAAA;CACpB,sBAkBA,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,cAAc,CAAA;IACnB,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,+BAA+B,CAAC,EAC9C,GAAG,EACH,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,2BAA2B,GAAG,YAAY,CAuC5C;AAED,MAAM,MAAM,8BAA8B,GAAG;IAC3C,IAAI,EAAE,IAAI,CAAA;IACV,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,yBAAyB,CAAC,EACxC,IAAI,EACJ,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,8BAA8B,GAAG,YAAY,CAkD/C;AAED,KAAK,8BAA8B,GAAG;IACpC,KAAK,EAAE,YAAY,CAAA;
+
{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../../src/components/common/debug.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,YAAY,EAAE,SAAS,EAAW,MAAM,OAAO,CAAA;AAE/D,OAAO,EAAE,cAAc,EAAE,MAAM,oCAAoC,CAAA;AACnE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,MAAM,gCAAgC,CAAA;AAKnE,wBAAgB,cAAc,CAAC,EAAE,QAAQ,EAAE,EAAE;IAAE,QAAQ,EAAE,SAAS,CAAA;CAAE,qBAOnE;AAED,eAAO,MAAM,iBAAiB,2MAY7B,CAAA;AAED,eAAO,MAAM,8BAA8B;aAAyB,OAAO;YAa1E,CAAA;AAED,eAAO,MAAM,uBAAuB;aACzB,MAAM;aACN,OAAO;YAQjB,CAAA;AAED,eAAO,MAAM,2BAA2B;aAC7B,MAAM;aACN,OAAO;YAWjB,CAAA;AAED,MAAM,MAAM,mBAAmB,GAAG;IAChC,UAAU,EAAE,OAAO,CAAA;IACnB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,wBAAgB,sBAAsB,CAAC,EACrC,OAAc,EACd,SAAS,EACT,UAAU,GACX,EAAE;IACD,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;CACnB,GAAG,mBAAmB,CAsCtB;AAED,eAAO,MAAM,uBAAuB,mCAIjC;IACD,OAAO,CAAC,EAAE,mBAAmB,CAAA;IAC7B,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,QAAQ,EAAE,SAAS,CAAA;CACpB,sBAkBA,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,cAAc,CAAA;IACnB,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,+BAA+B,CAAC,EAC9C,GAAG,EACH,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,2BAA2B,GAAG,YAAY,CAuC5C;AAED,MAAM,MAAM,8BAA8B,GAAG;IAC3C,IAAI,EAAE,IAAI,CAAA;IACV,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,yBAAyB,CAAC,EACxC,IAAI,EACJ,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,8BAA8B,GAAG,YAAY,CAkD/C;AAED,KAAK,8BAA8B,GAAG;IACpC,KAAK,EAAE,YAAY,GAAG,IAAI,CAAA;IAC1B,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,yBAAyB,CAAC,EACxC,KAAK,EACL,KAAK,EACL,KAAa,EACb,OAAO,EAAE,EACP,UAAU,EACV,SAAS,EACT,UAAU,EACV,UAAU,EACV,WAAW,EACX,WAAW,EACX,YAAY,EACZ,OAAO,EACP,OAAO,GACR,GACF,EAAE,8BAA8B,GAAG,YAAY,CAsB/C"}

package/dist/components/selfie_capture/SelfieGuidanceModelsProvider.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"SelfieGuidanceModelsProvider.d.ts","sourceRoot":"","sources":["../../../src/components/selfie_capture/SelfieGuidanceModelsProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAA;AAC9B,OAAO,EAEL,gBAAgB,EAChB,YAAY,EACZ,SAAS,EAKV,MAAM,OAAO,CAAA;AAGd,OAAO,EAEL,uBAAuB,
+
{"version":3,"file":"SelfieGuidanceModelsProvider.d.ts","sourceRoot":"","sources":["../../../src/components/selfie_capture/SelfieGuidanceModelsProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAA;AAC9B,OAAO,EAEL,gBAAgB,EAChB,YAAY,EACZ,SAAS,EAKV,MAAM,OAAO,CAAA;AAGd,OAAO,EAEL,uBAAuB,EAIxB,MAAM,gCAAgC,CAAA;AAIvC,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAA;AAOzD,MAAM,MAAM,+BAA+B,GAAG,CAC5C,UAAU,EAAE,uBAAuB,KAChC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAA;AAEzB,KAAK,yBAAyB,GAAG;IAC/B,KAAK,EAAE,MAAM,IAAI,CAAA;IACjB,IAAI,EAAE,MAAM,IAAI,CAAA;IAChB,gBAAgB,EAAE,CAAC,OAAO,EAAE,+BAA+B,KAAK,IAAI,CAAA;IACpE,SAAS,EAAE,gBAAgB,CAAC,iBAAiB,GAAG,IAAI,CAAC,CAAA;IACrD,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,KAAK,GAAG,IAAI,CAAA;IACnB,cAAc,EAAE,cAAc,CAAA;IAC9B,qBAAqB,EAAE,MAAM,CAAA;IAC7B,qBAAqB,EAAE,MAAM,GAAG,IAAI,CAAA;CACrC,CAAA;AAED,eAAO,MAAM,2BAA2B,0CAWpC,CAAA;AAEJ,MAAM,MAAM,iCAAiC,GAAG;IAC9C,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,QAAQ,EAAE,SAAS,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,4BAA4B,CAAC,EAAE,OAAO,CAAA;IACtC,6BAA6B,CAAC,EAAE,MAAM,CAAA;IACtC,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,2BAA2B,CAAC,EAAE,MAAM,CAAA;CACrC,CAAA;AAED,wBAAgB,4BAA4B,CAAC,EAC3C,SAAgB,EAChB,QAAQ,EACR,UAAU,EACV,YAAY,EACZ,kBAA2D,EAC3D,4BAAmC,EACnC,6BAA6B,EAC7B,wBAAwB,EACxB,2BAA2B,GAC5B,EAAE,iCAAiC,GAAG,YAAY,CAmJlD;AAED,wBAAgB,8BAA8B,IAAI,yBAAyB,CAQ1E"}

package/dist/components/video_signature_capture/VideoSignatureCapture.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"VideoSignatureCapture.d.ts","sourceRoot":"","sources":["../../../src/components/video_signature_capture/VideoSignatureCapture.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,EACZ,aAAa,EAMd,MAAM,OAAO,CAAA;AAad,OAAO,EAAE,wBAAwB,EAAmB,MAAM,mBAAmB,CAAA;AAC7E,OAAO,EAAE,4BAA4B,EAAE,MAAM,0CAA0C,CAAA;AACvF,OAA6B,EAC3B,8BAA8B,EAC9B,4BAA4B,EAC7B,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EACL,wBAAwB,EAEzB,MAAM,yBAAyB,CAAA;AAQhC,eAAO,MAAM,gCAAgC,KAAK,CAAA;AAClD,eAAO,MAAM,yCAAyC,OAAO,CAAA;AAC7D,eAAO,MAAM,mCAAmC,SAAS,CAAA;AAEzD,MAAM,MAAM,wBAAwB,GAAG;IACrC,iBAAiB,EAAE,MAAM,IAAI,CAAA;CAC9B,CAAA;AAED,MAAM,MAAM,+BAA+B,GAAG;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,MAAM,CAAC,EAAE,8BAA8B,CAAA;CACxC,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,CAAA;CAClC,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG;IAC1C,mBAAmB,CAAC,EAAE,wBAAwB,CAAA;IAC9C,MAAM,CAAC,EAAE,4BAA4B,CAAA;CACtC,CAAA;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,wBAAwB,CAAC,EAAE,wBAAwB,CAAA;IACnD,iBAAiB,CAAC,EAAE,MAAM,IAAI,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,IAAI,CAAA;IACnB,iCAAiC,CAAC,EAAE,OAAO,CAAA;IAC3C,eAAe,CAAC,EAAE,aAAa,CAAC,4BAA4B,CAAC,CAAA;IAC7D,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IAC1C,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,UAAU,CAAC,EAAE,+BAA+B,CAAA;IAC5C,MAAM,CAAC,EAAE,2BAA2B,CAAA;IACpC,QAAQ,CAAC,EAAE,6BAA6B,CAAA;IACxC,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;
+
{"version":3,"file":"VideoSignatureCapture.d.ts","sourceRoot":"","sources":["../../../src/components/video_signature_capture/VideoSignatureCapture.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,EACZ,aAAa,EAMd,MAAM,OAAO,CAAA;AAad,OAAO,EAAE,wBAAwB,EAAmB,MAAM,mBAAmB,CAAA;AAC7E,OAAO,EAAE,4BAA4B,EAAE,MAAM,0CAA0C,CAAA;AACvF,OAA6B,EAC3B,8BAA8B,EAC9B,4BAA4B,EAC7B,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EACL,wBAAwB,EAEzB,MAAM,yBAAyB,CAAA;AAQhC,eAAO,MAAM,gCAAgC,KAAK,CAAA;AAClD,eAAO,MAAM,yCAAyC,OAAO,CAAA;AAC7D,eAAO,MAAM,mCAAmC,SAAS,CAAA;AAEzD,MAAM,MAAM,wBAAwB,GAAG;IACrC,iBAAiB,EAAE,MAAM,IAAI,CAAA;CAC9B,CAAA;AAED,MAAM,MAAM,+BAA+B,GAAG;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,MAAM,CAAC,EAAE,8BAA8B,CAAA;CACxC,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,CAAA;CAClC,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG;IAC1C,mBAAmB,CAAC,EAAE,wBAAwB,CAAA;IAC9C,MAAM,CAAC,EAAE,4BAA4B,CAAA;CACtC,CAAA;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,wBAAwB,CAAC,EAAE,wBAAwB,CAAA;IACnD,iBAAiB,CAAC,EAAE,MAAM,IAAI,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,IAAI,CAAA;IACnB,iCAAiC,CAAC,EAAE,OAAO,CAAA;IAC3C,eAAe,CAAC,EAAE,aAAa,CAAC,4BAA4B,CAAC,CAAA;IAC7D,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IAC1C,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,UAAU,CAAC,EAAE,+BAA+B,CAAA;IAC5C,MAAM,CAAC,EAAE,2BAA2B,CAAA;IACpC,QAAQ,CAAC,EAAE,6BAA6B,CAAA;IACxC,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;AAuMD,eAAO,MAAM,qBAAqB,6GAGD,CAAA"}

package/dist/lib/models/DocumentDetection.d.ts
@@ -13,7 +13,7 @@ export declare const documentTypeDisplayNames: {
 singlePage: string;
 none: string;
 };
-export type Label = 'Document' | 'Document back' | 'MRZ' | 'PDF417' | 'Primary face' | 'Secondary face' | 'Glare' | 'Punch Hole' | 'Passport page' | 'Single page';
+export type Label = 'Document' | 'Document back' | 'MRZ' | 'PDF417' | 'Primary face' | 'Secondary face' | 'Nose' | 'Glare' | 'Punch Hole' | 'Passport page' | 'Single page';
 export type DetectedObjectBox = {
 xMin: number;
 xMax: number;
@@ -36,6 +36,7 @@ export type DocumentThresholds = {
 export type DocumentDetectionThresholds = DocumentThresholds & {
 stability?: DocumentThresholds;
 };
+export declare function getDocumentDetector(): ObjectDetector | null;
 export declare function loadDocumentDetector(modelAssetPath?: string, scoreThreshold?: number): Promise<ObjectDetector>;
 export declare function closeDocumentDetector(): void;
 export declare function useLoadDocumentDetector({ shouldLoadModels, modelPath, modelLoadTimeoutMs, scoreThreshold, onModelError, videoRef, }: {

package/dist/lib/models/DocumentDetection.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"DocumentDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/DocumentDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,cAAc,EACd,oBAAoB,EACrB,MAAM,yBAAyB,CAAA;AAQhC,OAAO,EAIL,cAAc,EAEf,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,gBAAgB,EAAgC,MAAM,OAAO,CAAA;AAItE,cAAc,6BAA6B,CAAA;AAC3C,eAAO,MAAM,sCAAsC,MAAM,CAAA;AACzD,eAAO,MAAM,0CAA0C,QAAQ,CAAA;AAC/D,eAAO,MAAM,kCAAkC,EAAE,2BAWhD,CAAA;AAED,MAAM,MAAM,YAAY,GACpB,MAAM,GACN,aAAa,GACb,YAAY,GACZ,UAAU,GACV,YAAY,CAAA;AAEhB,eAAO,MAAM,wBAAwB;;;;;;CAMpC,CAAA;AAED,MAAM,MAAM,KAAK,GACb,UAAU,GACV,eAAe,GACf,KAAK,GACL,QAAQ,GACR,cAAc,GACd,gBAAgB,GAChB,OAAO,GACP,YAAY,GACZ,eAAe,GACf,aAAa,CAAA;AAEjB,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf,CAAA;AAED,MAAM,MAAM,cAAc,GAAG;IAC3B,GAAG,EAAE,iBAAiB,CAAA;IACtB,KAAK,EAAE,KAAK,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;CACd,CAAA;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG,kBAAkB,GAAG;IAC7D,SAAS,CAAC,EAAE,kBAAkB,CAAA;CAC/B,CAAA;AAQD,wBAAsB,oBAAoB,CACxC,cAAc,SAAmC,EACjD,cAAc,SAAyC,GACtD,OAAO,CAAC,cAAc,CAAC,CA4BzB;AAED,wBAAgB,qBAAqB,SAIpC;AAED,wBAAgB,uBAAuB,CAAC,EACtC,gBAAuB,EACvB,SAA4C,EAC5C,kBAA+D,EAC/D,cAAuD,EACvD,YAAY,EACZ,QAAQ,GACT,EAAE;IACD,gBAAgB,CAAC,EAAE,OAAO,CAAA;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,QAAQ,EAAE,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAA;CACpD;;;;;;;EA4FA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,UAAU,EAAE,wBAAwB,CAAA;IAEpC,eAAe,EAAE,cAAc,EAAE,CAAA;IACjC,aAAa,EAAE,MAAM,CAAA;IACrB,cAAc,EAAE,MAAM,CAAA;IACtB,qBAAqB,EAAE,OAAO,CAAA;IAC9B,oBAAoB,EAAE,YAAY,CAAA;IAElC,yBAAyB,EAAE,MAAM,CAAA;IACjC,gCAAgC,EAAE,OAAO,CAAA;IAEzC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,sBAAsB,EAAE,MAAM,CAAA;IAC9B,6BAA6B,EAAE,OAAO,CAAA;IAEtC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,YAAY,EAAE,cAAc,GAAG,SAAS,CAAA;IACxC,UAAU,EAAE,cAAc,GAAG,SAAS,CAAA;IAEtC,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IAEzB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IAEnB,OAAO,EAAE,OAAO,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;CACjB,CAAA;AAED,MAAM,MAAM,wBAAwB,GAAG,oBAAoB,GAAG;IAC5D,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,wBAAsB,8BAA8B,CAClD,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,wBAAwB,GAAG,IAAI,CAAC,CAiB1C;AAED,eAAO,IAAI,eAAe,QAAI,CAAA;AAC9B,eAAO,IAAI,iBAAiB,QAAI,CAAA;AAEhC,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,MAAM,QAG9C;AA8BD,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,MAAM,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb;;;;;OAKG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,eAAO,MAAM,kCAAkC,EAAE,2BAMhD,CAAA;AAED,wBAAgB,iCAAiC,CAC/C,UAAU,EAAE,wBAAwB,EACpC,UAAU,EAAE,2BAA2B,EACvC,UAAU,GAAE,2BAAgE,GAC3E,2BAA2B,CAgK7B"}
+
{"version":3,"file":"DocumentDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/DocumentDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,cAAc,EACd,oBAAoB,EACrB,MAAM,yBAAyB,CAAA;AAQhC,OAAO,EAIL,cAAc,EAEf,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,gBAAgB,EAAgC,MAAM,OAAO,CAAA;AAItE,cAAc,6BAA6B,CAAA;AAC3C,eAAO,MAAM,sCAAsC,MAAM,CAAA;AACzD,eAAO,MAAM,0CAA0C,QAAQ,CAAA;AAC/D,eAAO,MAAM,kCAAkC,EAAE,2BAWhD,CAAA;AAED,MAAM,MAAM,YAAY,GACpB,MAAM,GACN,aAAa,GACb,YAAY,GACZ,UAAU,GACV,YAAY,CAAA;AAEhB,eAAO,MAAM,wBAAwB;;;;;;CAMpC,CAAA;AAED,MAAM,MAAM,KAAK,GACb,UAAU,GACV,eAAe,GACf,KAAK,GACL,QAAQ,GACR,cAAc,GACd,gBAAgB,GAChB,MAAM,GACN,OAAO,GACP,YAAY,GACZ,eAAe,GACf,aAAa,CAAA;AAEjB,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf,CAAA;AAED,MAAM,MAAM,cAAc,GAAG;IAC3B,GAAG,EAAE,iBAAiB,CAAA;IACtB,KAAK,EAAE,KAAK,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;CACd,CAAA;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG,kBAAkB,GAAG;IAC7D,SAAS,CAAC,EAAE,kBAAkB,CAAA;CAC/B,CAAA;AAQD,wBAAgB,mBAAmB,IAAI,cAAc,GAAG,IAAI,CAE3D;AAED,wBAAsB,oBAAoB,CACxC,cAAc,SAAmC,EACjD,cAAc,SAAyC,GACtD,OAAO,CAAC,cAAc,CAAC,CA4BzB;AAED,wBAAgB,qBAAqB,SAIpC;AAED,wBAAgB,uBAAuB,CAAC,EACtC,gBAAuB,EACvB,SAA4C,EAC5C,kBAA+D,EAC/D,cAAuD,EACvD,YAAY,EACZ,QAAQ,GACT,EAAE;IACD,gBAAgB,CAAC,EAAE,OAAO,CAAA;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,QAAQ,EAAE,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAA;CACpD;;;;;;;EA4FA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,UAAU,EAAE,wBAAwB,CAAA;IAEpC,eAAe,EAAE,cAAc,EAAE,CAAA;IACjC,aAAa,EAAE,MAAM,CAAA;IACrB,cAAc,EAAE,MAAM,CAAA;IACtB,qBAAqB,EAAE,OAAO,CAAA;IAC9B,oBAAoB,EAAE,YAAY,CAAA;IAElC,yBAAyB,EAAE,MAAM,CAAA;IACjC,gCAAgC,EAAE,OAAO,CAAA;IAEzC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,sBAAsB,EAAE,MAAM,CAAA;IAC9B,6BAA6B,EAAE,OAAO,CAAA;IAEtC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,YAAY,EAAE,cAAc,GAAG,SAAS,CAAA;IACxC,UAAU,EAAE,cAAc,GAAG,SAAS,CAAA;IAEtC,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IAEzB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IAEnB,OAAO,EAAE,OAAO,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;CACjB,CAAA;AAED,MAAM,MAAM,wBAAwB,GAAG,oBAAoB,GAAG;IAC5D,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,wBAAsB,8BAA8B,CAClD,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,wBAAwB,GAAG,IAAI,CAAC,CAiB1C;AAED,eAAO,IAAI,eAAe,QAAI,CAAA;AAC9B,eAAO,IAAI,iBAAiB,QAAI,CAAA;AAEhC,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,MAAM,QAG9C;AA8BD,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,MAAM,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb;;;;;OAKG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,eAAO,MAAM,kCAAkC,EAAE,2BAMhD,CAAA;AAED,wBAAgB,iCAAiC,CAC/C,UAAU,EAAE,wBAAwB,EACpC,UAAU,EAAE,2BAA2B,EACvC,UAAU,GAAE,2BAAgE,GAC3E,2BAA2B,CAgK7B"}

package/dist/lib/models/FaceDetection.d.ts
@@ -1,4 +1,4 @@
-import { FaceDetector, FaceDetectorResult } from '@mediapipe/tasks-vision';
+import { FaceDetector, FaceDetectorResult, ObjectDetectorResult } from '@mediapipe/tasks-vision';
 import { MutableRefObject } from 'react';
 import { Frame } from '../utils/getFrameDimensions';
 import { ModelLoadState } from './helpers';
@@ -10,7 +10,7 @@ export type FaceKeypoint = {
 name: string;
 };
 export type Face = {
-keypoints: FaceKeypoint[];
+keypoints: (FaceKeypoint | null)[];
 box: {
 xMin: number;
 xMax: number;
@@ -52,11 +52,13 @@ export declare function setLastFaceDetectionAt(time: number): void;
 export declare function makeFaceDetectorPrediction(imageData: Frame): (FaceDetectorResult & {
 faces: Face[];
 }) | null;
+export declare function makeFacePredictionWithDocumentDetector(frame: HTMLCanvasElement): Promise<(ObjectDetectorResult & {
+faces: Face[];
+}) | null>;
 export declare function processFaceDetectorPrediction({ faces, videoWidth, videoHeight, requireVerticalFaceCentering, stabilityThreshold, noseDistanceThreshold, xBoundary, // this represents the edge that the sides of the face box should not cross -- 1% of video width
 yBoundary, // this represents the edge that the top or bottom of the face box should not cross -- 1% of video height
 xCentroidBoundary, // this represents the edge that the centroid of the face should not cross -- 12.5% of video width
 yCentroidBoundary, // this represents the edge that the centroid of the face should not cross -- 12.5% of video height
-foreheadRatio, // we found that the bounding box ends at the brow and misses the forehead. this ratio represents how much we should extend the box to include the forehead.
 noseTrackingThreshold, // this represents the maximum distance that the nose can be from the center of the face box -- 20% of the face box width or height
 minCaptureBrightnessThreshold, minCaptureRangeThreshold, minCaptureVarianceThreshold, brightness, range, variance, }: {
 faces: Face[];
@@ -69,7 +71,6 @@ minCaptureBrightnessThreshold, minCaptureRangeThreshold, minCaptureVarianceThres
 yBoundary?: number;
 xCentroidBoundary?: number;
 yCentroidBoundary?: number;
-foreheadRatio?: number;
 noseTrackingThreshold?: number;
 minCaptureBrightnessThreshold?: number;
 minCaptureRangeThreshold?: number;

package/dist/lib/models/FaceDetection.d.ts.map
@@ -1 +1 @@
-
{"version":3,"file":"FaceDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/FaceDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,kBAAkB,
+
{"version":3,"file":"FaceDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/FaceDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,kBAAkB,EAElB,oBAAoB,EACrB,MAAM,yBAAyB,CAAA;AAOhC,OAAO,EAAE,gBAAgB,EAAgC,MAAM,OAAO,CAAA;AACtE,OAAO,EAAE,KAAK,EAAE,MAAM,6BAA6B,CAAA;AACnD,OAAO,EAIL,cAAc,EAEf,MAAM,WAAW,CAAA;AAMlB,cAAc,yBAAyB,CAAA;AACvC,eAAO,MAAM,sCAAsC,QAAQ,CAAA;AAE3D,MAAM,MAAM,YAAY,GAAG;IAAE,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AACjE,MAAM,MAAM,IAAI,GAAG;IACjB,SAAS,EAAE,CAAC,YAAY,GAAG,IAAI,CAAC,EAAE,CAAA;IAClC,GAAG,EAAE;QACH,IAAI,EAAE,MAAM,CAAA;QACZ,IAAI,EAAE,MAAM,CAAA;QACZ,IAAI,EAAE,MAAM,CAAA;QACZ,IAAI,EAAE,MAAM,CAAA;QACZ,KAAK,EAAE,MAAM,CAAA;QACb,MAAM,EAAE,MAAM,CAAA;KACf,CAAA;CACF,CAAA;AAED,MAAM,MAAM,uBAAuB,GAAG;IACpC,IAAI,EAAE,IAAI,GAAG,IAAI,CAAA;IACjB,eAAe,EAAE,OAAO,CAAA;IACxB,eAAe,EAAE,OAAO,CAAA;IACxB,eAAe,EAAE,OAAO,CAAA;IACxB,YAAY,EAAE,OAAO,CAAA;IACrB,UAAU,EAAE,OAAO,CAAA;IACnB,SAAS,EAAE,OAAO,CAAA;IAClB,WAAW,EAAE,IAAI,GAAG,IAAI,CAAA;IACxB,YAAY,EAAE,OAAO,CAAA;IACrB,YAAY,EAAE,OAAO,CAAA;IACrB,oBAAoB,EAAE,OAAO,CAAA;CAC9B,CAAA;AAKD,wBAAsB,gBAAgB,CACpC,cAAc,SAA+B,GAC5C,OAAO,CAAC,YAAY,CAAC,CA0BvB;AAED,wBAAgB,iBAAiB,SAIhC;AAED,wBAAgB,mBAAmB,CAAC,EAClC,YAAY,EACZ,kBAA2D,EAC3D,QAAQ,GACT,EAAE;IACD,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,QAAQ,EAAE,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAA;CACpD;;;;;;EAwFA;AAED,eAAO,IAAI,mBAAmB,QAAI,CAAA;AAClC,eAAO,IAAI,qBAAqB,QAAI,CAAA;AAEpC,wBAAgB,sBAAsB,CAAC,IAAI,EAAE,MAAM,QAGlD;AA0DD,wBAAgB,0BAA0B,CACxC,SAAS,EAAE,KAAK,GACf,CAAC,kBAAkB,GAAG;IAAE,KAAK,EAAE,IAAI,EAAE,CAAA;CAAE,CAAC,GAAG,IAAI,CAgBjD;AAqBD,wBAAsB,sCAAsC,CAC1D,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,CAAC,oBAAoB,GAAG;IAAE,KAAK,EAAE,IAAI,EAAE,CAAA;CAAE,CAAC,GAAG,IAAI,CAAC,CA+C5D;AAED,wBAAgB,6BAA6B,CAAC,EAC5C,KAAK,EACL,UAAU,EACV,WAAW,EACX,4BAAmC,EACnC,kBAAwB,EACxB,qBAA6B,EAC7B,SAAgB,EAAE,gGAAgG;AAClH,SAAgB,EAAE,yGAAyG;AAC3H,iBAAyB,EAAE,kGAAkG;AAC7H,iBAAyB,EAAE,mGAAmG;AAC9H,qBAA2B,EAAE,mIAAmI;AAChK,6BAA6B,EAC7B,wBAAwB,EACxB,2BAA2B,EAC3B,UAAU,EACV,KAAK,EACL,QAAQ,GACT,EAAE;IACD,KAAK,EAAE,IAAI,EAAE,CAAA;IACb,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IACnB,4BAA4B,CAAC,EAAE,OAAO,CAAA;IACtC,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,6BAA6B,CAAC,EAAE,MAAM,CAAA;IACtC,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,2BAA2B,CAAC,EAAE,MAAM,CAAA;IACpC,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,QAAQ,CAAC,EAAE,MAAM,CAAA;CAClB,GAAG,uBAAuB,CAyH1B"}

package/dist/lib/models/defaults/DocumentDetector.d.ts
@@ -1,3 +1,3 @@
-export declare const defaultDocumentDetectorModelPath = "https://websdk-cdn-dev.idmission.com/assets/models/DocumentDetector/DocumentDetector-
-export declare const defaultDocumentDetectorModelHash = "
+export declare const defaultDocumentDetectorModelPath = "https://websdk-cdn-dev.idmission.com/assets/models/DocumentDetector/DocumentDetector-20251123_125827.tflite";
+export declare const defaultDocumentDetectorModelHash = "SP321/MyOR20Dfxe8u42WCYXx+e+RxzET83f73Z8yM4n5FTrZfODY9zkt3h9z21Y";
 //# sourceMappingURL=DocumentDetector.d.ts.map

@@ -2590,8 +2590,8 @@ function giveUpAfter(maxTime) {
 
 var DEFAULT_CDN_URL = 'https://websdk-cdn-dev.idmission.com/assets';
 
-var defaultDocumentDetectorModelPath = "".concat(DEFAULT_CDN_URL, "/models/DocumentDetector/DocumentDetector-
-var defaultDocumentDetectorModelHash = '
+var defaultDocumentDetectorModelPath = "".concat(DEFAULT_CDN_URL, "/models/DocumentDetector/DocumentDetector-20251123_125827.tflite");
+var defaultDocumentDetectorModelHash = 'SP321/MyOR20Dfxe8u42WCYXx+e+RxzET83f73Z8yM4n5FTrZfODY9zkt3h9z21Y';
 
 var defaultFocusModelPath = "".concat(DEFAULT_CDN_URL, "/models/Focus/Focus-20241008_102708.tflite");
 var defaultFocusModelHash = 'HTJNLB7QybtPXIIyUI9oNqW40vE5bgSV5V24R1wXLQHknNEVOSyDQDo/QzRgJ8jb';
@@ -2908,8 +2908,8 @@ var documentTypeDisplayNames = {
 singlePage: 'Single page',
 none: 'None'
 };
-var detector
-var detectorSettings
+var detector = null;
+var detectorSettings = null;
 function loadDocumentDetector() {
 return __awaiter(this, arguments, void 0, function (modelAssetPath, scoreThreshold) {
 var _a, _b;
@@ -2922,7 +2922,7 @@ function loadDocumentDetector() {
 return __generator(this, function (_c) {
 switch (_c.label) {
 case 0:
-if (detector
+if (detector && (detectorSettings === null || detectorSettings === void 0 ? void 0 : detectorSettings.modelAssetPath) === modelAssetPath && (detectorSettings === null || detectorSettings === void 0 ? void 0 : detectorSettings.scoreThreshold) === scoreThreshold) return [2 /*return*/, detector];
 closeDocumentDetector();
 return [4 /*yield*/, preloadDocumentDetectorDependencies()];
 case 1:
@@ -2943,20 +2943,20 @@ function loadDocumentDetector() {
 runningMode: 'VIDEO'
 }])];
 case 3:
-detector
-detectorSettings
+detector = _c.sent();
+detectorSettings = {
 modelAssetPath: modelAssetPath,
 scoreThreshold: scoreThreshold
 };
-return [2 /*return*/, detector
+return [2 /*return*/, detector];
 }
 });
 });
 }
 function closeDocumentDetector() {
-detector
-detector
-detectorSettings
+detector === null || detector === void 0 ? void 0 : detector.close();
+detector = null;
+detectorSettings = null;
 }
 function useLoadDocumentDetector(_a) {
 var _b = _a.shouldLoadModels,
@@ -3057,12 +3057,12 @@ function makeDocumentDetectorPrediction(frame) {
 return __awaiter(this, void 0, void 0, function () {
 var startedAt, prediction, time, frameWidth, frameHeight;
 return __generator(this, function (_a) {
-if (!detector
+if (!detector) return [2 /*return*/, null];
 startedAt = performance.now();
 // Detectors can throw errors, for example when using custom URLs that
 // contain a model that doesn't provide the expected output.
 try {
-prediction = detector
+prediction = detector.detectForVideo(frame, performance.now());
 time = performance.now() - startedAt;
 frameWidth = frame.width;
 frameHeight = frame.height;
@@ -7062,6 +7062,7 @@ function SelfieCaptureFaceKeypoint(_a) {
 scaledHeight = _c.scaledHeight,
 xOffset = _c.xOffset,
 yOffset = _c.yOffset;
+if (!point) return /*#__PURE__*/React__namespace.default.createElement(React__namespace.default.Fragment, null);
 var left, top;
 if (horizontal) {
 left = point.x / videoWidth * scaledWidth - xOffset;
@@ -12153,136 +12154,6 @@ var FaceCaptureGuideOverlay = function FaceCaptureGuideOverlay(_a) {
 var templateObject_1$n, templateObject_2$m;
 
 var defaultSelfieCaptureModelLoadTimeoutMs = 45000;
-var detector = null;
-var detectorSettings = null;
-function loadFaceDetector() {
-return __awaiter(this, arguments, void 0, function (modelAssetPath) {
-var _a, _b;
-if (modelAssetPath === void 0) {
-modelAssetPath = defaultFaceDetectorModelPath;
-}
-return __generator(this, function (_c) {
-switch (_c.label) {
-case 0:
-if (detector && (detectorSettings === null || detectorSettings === void 0 ? void 0 : detectorSettings.modelAssetPath) === modelAssetPath) return [2 /*return*/, detector];
-closeFaceDetector();
-return [4 /*yield*/, preloadFaceDetectorDependencies()];
-case 1:
-_c.sent();
-if (modelCapabilities.delegate === 'NONE') {
-throw new Error('No available delegate for face detector.');
-}
-_b = (_a = tasksVision.FaceDetector).createFromOptions;
-return [4 /*yield*/, tasksVision.FilesetResolver.forVisionTasks(visionTasksBasePath)];
-case 2:
-return [4 /*yield*/, _b.apply(_a, [_c.sent(), {
-// canvas: document.createElement('canvas'),
-baseOptions: {
-modelAssetPath: modelAssetPath,
-delegate: modelCapabilities.delegate
-},
-runningMode: 'VIDEO'
-}])];
-case 3:
-detector = _c.sent();
-detectorSettings = {
-modelAssetPath: modelAssetPath
-};
-return [2 /*return*/, detector];
-}
-});
-});
-}
-function closeFaceDetector() {
-detector === null || detector === void 0 ? void 0 : detector.close();
-detector = null;
-detectorSettings = null;
-}
-function useLoadFaceDetector(_a) {
-var onModelError = _a.onModelError,
-_b = _a.modelLoadTimeoutMs,
-modelLoadTimeoutMs = _b === void 0 ? defaultSelfieCaptureModelLoadTimeoutMs : _b,
-videoRef = _a.videoRef;
-var _c = React.useState('not-started'),
-modelLoadState = _c[0],
-setModelLoadState = _c[1];
-var _d = React.useState(0),
-modelDownloadProgress = _d[0],
-setModelDownloadProgress = _d[1];
-var _e = React.useState(null),
-modelWarmingStartedAt = _e[0],
-setModelWarmingStartedAt = _e[1];
-var _f = React.useState(null),
-modelError = _f[0],
-setModelError = _f[1];
-React.useEffect(function loadModel() {
-var _this = this;
-setModelLoadState('downloading');
-setModelWarmingStartedAt(null);
-var modelLoadTimeout = setTimeout(function () {
-setModelError(new Error('Model loading time limit exceeded.'));
-}, modelLoadTimeoutMs);
-function handleDownloadProgress(event) {
-setModelDownloadProgress(progressToPercentage(event.detail));
-}
-document.addEventListener('idmission.preloadProgress.faceDetection', handleDownloadProgress);
-var cancelVideoReady = function cancelVideoReady() {};
-loadFaceDetector().then(function (model) {
-return __awaiter(_this, void 0, void 0, function () {
-var _a, videoReady, cancel, cancelled;
-return __generator(this, function (_b) {
-switch (_b.label) {
-case 0:
-setModelDownloadProgress(100);
-clearTimeout(modelLoadTimeout);
-setModelLoadState('warming');
-setModelWarmingStartedAt(performance.now());
-return [4 /*yield*/, testFaceDetectionAgainstKnownImage(model)];
-case 1:
-_b.sent();
-_a = waitForVideoReady(videoRef), videoReady = _a[0], cancel = _a[1];
-cancelled = false;
-cancelVideoReady = function cancelVideoReady() {
-cancelled = true;
-cancel();
-};
-return [4 /*yield*/, videoReady];
-case 2:
-_b.sent();
-if (cancelled) return [2 /*return*/];
-model.detectForVideo(videoRef.current, performance.now());
-setModelLoadState('ready');
-return [2 /*return*/];
-}
-});
-});
-})["catch"](function (e) {
-setModelError(e);
-setModelLoadState('error');
-})["finally"](function () {
-clearTimeout(modelLoadTimeout);
-});
-return function () {
-log('unloading face detection model');
-cancelVideoReady();
-closeFaceDetector();
-clearTimeout(modelLoadTimeout);
-document.removeEventListener('idmission.preloadProgress.faceDetection', handleDownloadProgress);
-};
-}, [modelLoadTimeoutMs, videoRef]);
-React.useEffect(function handleModelError() {
-if (modelError) onModelError === null || onModelError === void 0 ? void 0 : onModelError(modelError);
-}, [modelError, onModelError]);
-return React.useMemo(function () {
-return {
-ready: modelLoadState === 'ready',
-modelLoadState: modelLoadState,
-modelDownloadProgress: modelDownloadProgress,
-modelWarmingStartedAt: modelWarmingStartedAt,
-modelError: modelError
-};
-}, [modelLoadState, modelDownloadProgress, modelWarmingStartedAt, modelError]);
-}
 var lastFaceDetectionAt = 0;
 var lastFaceDetectionTime = 0;
 function setLastFaceDetectionAt(time) {
@@ -12332,24 +12203,68 @@ function trackFace(face, framesNeeded, frameWidth, frameHeight) {
 if (lastNNosePairs.length > framesNeeded - 1) lastNNosePairs.length = framesNeeded - 1;
 }
 }
-function
-
-
-var faces = prediction.detections.map(function (d) {
-return {
-box: convertBoundingBox(d.boundingBox),
-keypoints: d.keypoints.map(function (k) {
-var _a;
-return _assign(_assign({}, k), {
-x: k.x * imageData.width,
-y: k.y * imageData.height,
-name: (_a = k.label) !== null && _a !== void 0 ? _a : ''
-});
-})
-};
+function isFaceDetection(detection) {
+return detection.boundingBox && detection.categories.some(function (c) {
+return c.categoryName === 'Primary face' || c.categoryName === 'Secondary face';
 });
-
-
+}
+function isNoseDetection(detection) {
+return detection.categories.some(function (c) {
+return c.categoryName === 'Nose';
+});
+}
+function makeFacePredictionWithDocumentDetector(frame) {
+return __awaiter(this, void 0, void 0, function () {
+var prediction, faceDetections, noseDetections, _i, _a, detection, faces, _b, faceDetections_1, d, faceBox, nose, _c, noseDetections_1, n, noseBox, cX, cY;
+return __generator(this, function (_d) {
+switch (_d.label) {
+case 0:
+return [4 /*yield*/, makeDocumentDetectorPrediction(frame)];
+case 1:
+prediction = _d.sent();
+if (!prediction) return [2 /*return*/, null];
+faceDetections = [];
+noseDetections = [];
+for (_i = 0, _a = prediction.detections; _i < _a.length; _i++) {
+detection = _a[_i];
+if (isNoseDetection(detection)) {
+noseDetections.push(detection);
+} else if (isFaceDetection(detection)) {
+faceDetections.push(detection);
+}
+}
+faces = [];
+for (_b = 0, faceDetections_1 = faceDetections; _b < faceDetections_1.length; _b++) {
+d = faceDetections_1[_b];
+faceBox = d.boundingBox;
+if (!faceBox) continue;
+nose = null;
+for (_c = 0, noseDetections_1 = noseDetections; _c < noseDetections_1.length; _c++) {
+n = noseDetections_1[_c];
+noseBox = n.boundingBox;
+if (!noseBox) continue;
+cX = noseBox.originX + noseBox.width / 2;
+cY = noseBox.originY + noseBox.height / 2;
+if (cX >= faceBox.originX && cX <= faceBox.originX + faceBox.width && cY >= faceBox.originY && cY <= faceBox.originY + faceBox.height) {
+nose = {
+x: cX,
+y: cY,
+name: 'nose'
+};
+break;
+}
+}
+faces.push({
+box: convertBoundingBox(d.boundingBox),
+keypoints: [null, null, nose, null, null]
+});
+}
+debug('faces', faces);
+return [2 /*return*/, _assign(_assign({}, prediction), {
+faces: faces
+})];
+}
+});
 });
 }
 function processFaceDetectorPrediction(_a) {
@@ -12377,13 +12292,9 @@ function processFaceDetectorPrediction(_a) {
 // this represents the edge that the centroid of the face should not cross -- 12.5% of video width
 yCentroidBoundary = _h === void 0 ? 0.125 : _h,
 // this represents the edge that the centroid of the face should not cross -- 12.5% of video height
-_j = _a.
+_j = _a.noseTrackingThreshold,
 // this represents the edge that the centroid of the face should not cross -- 12.5% of video height
-
-// we found that the bounding box ends at the brow and misses the forehead. this ratio represents how much we should extend the box to include the forehead.
-_k = _a.noseTrackingThreshold,
-// we found that the bounding box ends at the brow and misses the forehead. this ratio represents how much we should extend the box to include the forehead.
-noseTrackingThreshold = _k === void 0 ? 0.2 : _k,
+noseTrackingThreshold = _j === void 0 ? 0.2 : _j,
 // this represents the maximum distance that the nose can be from the center of the face box -- 20% of the face box width or height
 minCaptureBrightnessThreshold = _a.minCaptureBrightnessThreshold,
 minCaptureRangeThreshold = _a.minCaptureRangeThreshold,
@@ -12410,10 +12321,10 @@ function processFaceDetectorPrediction(_a) {
 var frameCX = videoWidth / 2;
 var frameCY = videoHeight / 2;
 // calculate head bounding box, with forehead extension
-
+// const foreheadSize = face.box.height * foreheadRatio
 var headXMin = face.box.xMin;
 var headXMax = face.box.xMax;
-var headYMin = face.box.yMin - foreheadSize
+var headYMin = face.box.yMin; // - foreheadSize
 var headYMax = face.box.yMax;
 // calculate head centroids
 var headCX = (headXMin + headXMax) / 2;
@@ -12469,26 +12380,6 @@ function processFaceDetectorPrediction(_a) {
 faceVisibilityTooLow: faceVisibilityTooLow
 };
 }
-function testFaceDetectionAgainstKnownImage(detector) {
-return new Promise(function (resolve, reject) {
-var img = new Image();
-img.crossOrigin = 'anonymous';
-img.onload = function () {
-var prediction = detector.detectForVideo(img, performance.now());
-if (prediction.detections.length > 0) {
-debug('face detection test result', prediction.detections);
-resolve(void 0);
-} else {
-warn('face detection test failed');
-reject(new Error('testFaceDetectionAgainstKnownImage failed to predict'));
-}
-};
-img.onerror = function () {
-return reject(new Error('testFaceDetectionAgainstKnownImage failed to load image'));
-};
-img.src = "".concat(DEFAULT_CDN_URL, "/head-test.jpg");
-});
-}
 
 function detectBrightnessAndContrast(frame, brightnessAverager) {
 var ctx = frame.getContext('2d');
@@ -12640,16 +12531,27 @@ function SelfieGuidanceModelsProvider(_a) {
 var canvasRef = React.useRef(null);
 var onPredictionHandler = React.useRef();
 var addToAverage = useRunningAvg(5).addToAverage;
-var _f =
+var _f = useLoadDocumentDetector({
+videoRef: videoRef,
 onModelError: onModelError,
-modelLoadTimeoutMs: modelLoadTimeoutMs
-videoRef: videoRef
+modelLoadTimeoutMs: modelLoadTimeoutMs
 }),
 ready = _f.ready,
 modelLoadState = _f.modelLoadState,
 modelDownloadProgress = _f.modelDownloadProgress,
 modelWarmingStartedAt = _f.modelWarmingStartedAt,
 modelError = _f.modelError;
+// const {
+// ready,
+// modelLoadState,
+// modelDownloadProgress,
+// modelWarmingStartedAt,
+// modelError,
+// } = useLoadFaceDetector({
+// onModelError,
+// modelLoadTimeoutMs,
+// videoRef,
+// })
 var _g = useFrameLoop(React.useCallback(function () {
 return __awaiter(_this, void 0, void 0, function () {
 var vw, vh, ctx, thresholdsProvided, brightnessResults, brightness, range, variance, prediction, processed, e_1;
@@ -12663,19 +12565,21 @@ function SelfieGuidanceModelsProvider(_a) {
 ctx = canvasRef.current.getContext('2d');
 canvasRef.current.width = vw;
 canvasRef.current.height = vh;
-if (!(ctx && videoRef.current.readyState === 4)) return [3 /*break*/,
+if (!(ctx && videoRef.current.readyState === 4)) return [3 /*break*/, 5];
 ctx.translate(vw, 0);
 ctx.scale(-1, 1);
 ctx.drawImage(videoRef.current, 0, 0, vw, vh);
 _c.label = 1;
 case 1:
-_c.trys.push([1,
+_c.trys.push([1, 4,, 5]);
 thresholdsProvided = minCaptureBrightnessThreshold !== undefined || minCaptureRangeThreshold !== undefined || minCaptureVarianceThreshold !== undefined;
 brightnessResults = thresholdsProvided ? detectBrightnessAndContrast(canvasRef.current, addToAverage) : undefined;
 brightness = brightnessResults === null || brightnessResults === void 0 ? void 0 : brightnessResults.brightness;
 range = brightnessResults === null || brightnessResults === void 0 ? void 0 : brightnessResults.range;
 variance = brightnessResults === null || brightnessResults === void 0 ? void 0 : brightnessResults.variance;
-
+return [4 /*yield*/, makeFacePredictionWithDocumentDetector(canvasRef.current)];
+case 2:
+prediction = _c.sent();
 processed = processFaceDetectorPrediction({
 faces: (_a = prediction === null || prediction === void 0 ? void 0 : prediction.faces) !== null && _a !== void 0 ? _a : [],
 videoWidth: vw,
@@ -12691,15 +12595,15 @@ function SelfieGuidanceModelsProvider(_a) {
 setLastFaceDetectionAt(new Date().getTime());
 // setLastPrediction(processed)
 return [4 /*yield*/, (_b = onPredictionHandler.current) === null || _b === void 0 ? void 0 : _b.call(onPredictionHandler, processed)];
-case
+case 3:
 // setLastPrediction(processed)
 _c.sent();
-return [3 /*break*/,
-case
+return [3 /*break*/, 5];
+case 4:
 e_1 = _c.sent();
 error('caught face detection error', e_1);
-return [3 /*break*/,
-case
+return [3 /*break*/, 5];
+case 5:
 return [2 /*return*/];
 }
 });
@@ -15511,7 +15415,8 @@ function VideoSignatureCaptureComponent(_a, ref) {
 var rightEdge = videoWidth * (1 - headTrackingBoundaryPercentage);
 var topEdge = videoHeight * headTrackingBoundaryPercentage;
 var bottomEdge = videoHeight * (1 - headTrackingBoundaryPercentage);
-var
+var nose = face === null || face === void 0 ? void 0 : face.keypoints[2];
+var nearBoundary = !!face && !!nose && (headTrackingBoundaryType === 'nose' ? nose.x < leftEdge || nose.x > rightEdge || nose.y < topEdge || nose.y > bottomEdge : (face === null || face === void 0 ? void 0 : face.box.xMin) < leftEdge || (face === null || face === void 0 ? void 0 : face.box.xMax) > rightEdge || (face === null || face === void 0 ? void 0 : face.box.yMin) < topEdge || (face === null || face === void 0 ? void 0 : face.box.yMax) > bottomEdge);
 setLastFace(face);
 setHeadTrackingSatisfied(!!face && !nearBoundary);
 setNumFramesWithoutFaces(face ? 0 : function (n) {