idmission-web-sdk 2.3.135-dockerize-builds-0d61b07 → 2.3.135-use-doc-detect-for-face-tracking-new-model-ee6bb63
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/components/common/debug.d.ts +1 -1
- package/dist/components/common/debug.d.ts.map +1 -1
- package/dist/components/selfie_capture/SelfieGuidanceModelsProvider.d.ts.map +1 -1
- package/dist/components/video_signature_capture/VideoSignatureCapture.d.ts.map +1 -1
- package/dist/lib/models/DocumentDetection.d.ts +2 -1
- package/dist/lib/models/DocumentDetection.d.ts.map +1 -1
- package/dist/lib/models/FaceDetection.d.ts +5 -4
- package/dist/lib/models/FaceDetection.d.ts.map +1 -1
- package/dist/lib/models/defaults/DocumentDetector.d.ts +2 -2
- package/dist/sdk2.cjs.development.js +106 -201
- package/dist/sdk2.cjs.development.js.map +1 -1
- package/dist/sdk2.cjs.production.js +1 -1
- package/dist/sdk2.cjs.production.js.map +1 -1
- package/dist/sdk2.esm.js +107 -202
- package/dist/sdk2.esm.js.map +1 -1
- package/dist/sdk2.umd.development.js +106 -201
- package/dist/sdk2.umd.development.js.map +1 -1
- package/dist/sdk2.umd.production.js +1 -1
- package/dist/sdk2.umd.production.js.map +1 -1
- package/dist/version.d.ts +1 -1
- package/dist/version.d.ts.map +1 -1
- package/package.json +1 -1
@@ -52,7 +52,7 @@ export type SelfieCaptureFaceDebugBoxProps = {
 };
 export declare function SelfieCaptureFaceDebugBox({ face, flipX, color, scaling, }: SelfieCaptureFaceDebugBoxProps): ReactElement;
 type SelfieCaptureFaceKeypointProps = {
-point: FaceKeypoint;
+point: FaceKeypoint | null;
 scaling: DebugScalingDetails;
 flipX?: boolean;
 color?: string;
@@ -1 +1 @@
-
{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../../src/components/common/debug.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,YAAY,EAAE,SAAS,EAAW,MAAM,OAAO,CAAA;AAE/D,OAAO,EAAE,cAAc,EAAE,MAAM,oCAAoC,CAAA;AACnE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,MAAM,gCAAgC,CAAA;AAKnE,wBAAgB,cAAc,CAAC,EAAE,QAAQ,EAAE,EAAE;IAAE,QAAQ,EAAE,SAAS,CAAA;CAAE,qBAOnE;AAED,eAAO,MAAM,iBAAiB,2MAY7B,CAAA;AAED,eAAO,MAAM,8BAA8B;aAAyB,OAAO;YAa1E,CAAA;AAED,eAAO,MAAM,uBAAuB;aACzB,MAAM;aACN,OAAO;YAQjB,CAAA;AAED,eAAO,MAAM,2BAA2B;aAC7B,MAAM;aACN,OAAO;YAWjB,CAAA;AAED,MAAM,MAAM,mBAAmB,GAAG;IAChC,UAAU,EAAE,OAAO,CAAA;IACnB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,wBAAgB,sBAAsB,CAAC,EACrC,OAAc,EACd,SAAS,EACT,UAAU,GACX,EAAE;IACD,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;CACnB,GAAG,mBAAmB,CAsCtB;AAED,eAAO,MAAM,uBAAuB,mCAIjC;IACD,OAAO,CAAC,EAAE,mBAAmB,CAAA;IAC7B,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,QAAQ,EAAE,SAAS,CAAA;CACpB,sBAkBA,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,cAAc,CAAA;IACnB,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,+BAA+B,CAAC,EAC9C,GAAG,EACH,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,2BAA2B,GAAG,YAAY,CAuC5C;AAED,MAAM,MAAM,8BAA8B,GAAG;IAC3C,IAAI,EAAE,IAAI,CAAA;IACV,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,yBAAyB,CAAC,EACxC,IAAI,EACJ,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,8BAA8B,GAAG,YAAY,CAkD/C;AAED,KAAK,8BAA8B,GAAG;IACpC,KAAK,EAAE,YAAY,CAAA;
+
{"version":3,"file":"debug.d.ts","sourceRoot":"","sources":["../../../src/components/common/debug.tsx"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,EAAE,YAAY,EAAE,SAAS,EAAW,MAAM,OAAO,CAAA;AAE/D,OAAO,EAAE,cAAc,EAAE,MAAM,oCAAoC,CAAA;AACnE,OAAO,EAAE,IAAI,EAAE,YAAY,EAAE,MAAM,gCAAgC,CAAA;AAKnE,wBAAgB,cAAc,CAAC,EAAE,QAAQ,EAAE,EAAE;IAAE,QAAQ,EAAE,SAAS,CAAA;CAAE,qBAOnE;AAED,eAAO,MAAM,iBAAiB,2MAY7B,CAAA;AAED,eAAO,MAAM,8BAA8B;aAAyB,OAAO;YAa1E,CAAA;AAED,eAAO,MAAM,uBAAuB;aACzB,MAAM;aACN,OAAO;YAQjB,CAAA;AAED,eAAO,MAAM,2BAA2B;aAC7B,MAAM;aACN,OAAO;YAWjB,CAAA;AAED,MAAM,MAAM,mBAAmB,GAAG;IAChC,UAAU,EAAE,OAAO,CAAA;IACnB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;IAClB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IACnB,WAAW,EAAE,MAAM,CAAA;IACnB,YAAY,EAAE,MAAM,CAAA;IACpB,OAAO,EAAE,MAAM,CAAA;IACf,OAAO,EAAE,MAAM,CAAA;CAChB,CAAA;AAED,wBAAgB,sBAAsB,CAAC,EACrC,OAAc,EACd,SAAS,EACT,UAAU,GACX,EAAE;IACD,OAAO,CAAC,EAAE,OAAO,CAAA;IACjB,SAAS,EAAE,MAAM,CAAA;IACjB,UAAU,EAAE,MAAM,CAAA;CACnB,GAAG,mBAAmB,CAsCtB;AAED,eAAO,MAAM,uBAAuB,mCAIjC;IACD,OAAO,CAAC,EAAE,mBAAmB,CAAA;IAC7B,MAAM,CAAC,EAAE,OAAO,CAAA;IAChB,QAAQ,EAAE,SAAS,CAAA;CACpB,sBAkBA,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,cAAc,CAAA;IACnB,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,+BAA+B,CAAC,EAC9C,GAAG,EACH,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,2BAA2B,GAAG,YAAY,CAuC5C;AAED,MAAM,MAAM,8BAA8B,GAAG;IAC3C,IAAI,EAAE,IAAI,CAAA;IACV,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,yBAAyB,CAAC,EACxC,IAAI,EACJ,KAAK,EACL,KAAe,EACf,OAAO,GACR,EAAE,8BAA8B,GAAG,YAAY,CAkD/C;AAED,KAAK,8BAA8B,GAAG;IACpC,KAAK,EAAE,YAAY,GAAG,IAAI,CAAA;IAC1B,OAAO,EAAE,mBAAmB,CAAA;IAC5B,KAAK,CAAC,EAAE,OAAO,CAAA;IACf,KAAK,CAAC,EAAE,MAAM,CAAA;CACf,CAAA;AAED,wBAAgB,yBAAyB,CAAC,EACxC,KAAK,EACL,KAAK,EACL,KAAa,EACb,OAAO,EAAE,EACP,UAAU,EACV,SAAS,EACT,UAAU,EACV,UAAU,EACV,WAAW,EACX,WAAW,EACX,YAAY,EACZ,OAAO,EACP,OAAO,GACR,GACF,EAAE,8BAA8B,GAAG,YAAY,CAsB/C"}
@@ -1 +1 @@
-
{"version":3,"file":"SelfieGuidanceModelsProvider.d.ts","sourceRoot":"","sources":["../../../src/components/selfie_capture/SelfieGuidanceModelsProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAA;AAC9B,OAAO,EAEL,gBAAgB,EAChB,YAAY,EACZ,SAAS,EAKV,MAAM,OAAO,CAAA;AAGd,OAAO,EAEL,uBAAuB,
+
{"version":3,"file":"SelfieGuidanceModelsProvider.d.ts","sourceRoot":"","sources":["../../../src/components/selfie_capture/SelfieGuidanceModelsProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,KAAK,MAAM,OAAO,CAAA;AAC9B,OAAO,EAEL,gBAAgB,EAChB,YAAY,EACZ,SAAS,EAKV,MAAM,OAAO,CAAA;AAGd,OAAO,EAEL,uBAAuB,EAIxB,MAAM,gCAAgC,CAAA;AAIvC,OAAO,EAAE,cAAc,EAAE,MAAM,0BAA0B,CAAA;AAOzD,MAAM,MAAM,+BAA+B,GAAG,CAC5C,UAAU,EAAE,uBAAuB,KAChC,OAAO,CAAC,IAAI,CAAC,GAAG,IAAI,CAAA;AAEzB,KAAK,yBAAyB,GAAG;IAC/B,KAAK,EAAE,MAAM,IAAI,CAAA;IACjB,IAAI,EAAE,MAAM,IAAI,CAAA;IAChB,gBAAgB,EAAE,CAAC,OAAO,EAAE,+BAA+B,KAAK,IAAI,CAAA;IACpE,SAAS,EAAE,gBAAgB,CAAC,iBAAiB,GAAG,IAAI,CAAC,CAAA;IACrD,KAAK,EAAE,OAAO,CAAA;IACd,KAAK,EAAE,KAAK,GAAG,IAAI,CAAA;IACnB,cAAc,EAAE,cAAc,CAAA;IAC9B,qBAAqB,EAAE,MAAM,CAAA;IAC7B,qBAAqB,EAAE,MAAM,GAAG,IAAI,CAAA;CACrC,CAAA;AAED,eAAO,MAAM,2BAA2B,0CAWpC,CAAA;AAEJ,MAAM,MAAM,iCAAiC,GAAG;IAC9C,SAAS,CAAC,EAAE,OAAO,CAAA;IACnB,QAAQ,EAAE,SAAS,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,4BAA4B,CAAC,EAAE,OAAO,CAAA;IACtC,6BAA6B,CAAC,EAAE,MAAM,CAAA;IACtC,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,2BAA2B,CAAC,EAAE,MAAM,CAAA;CACrC,CAAA;AAED,wBAAgB,4BAA4B,CAAC,EAC3C,SAAgB,EAChB,QAAQ,EACR,UAAU,EACV,YAAY,EACZ,kBAA2D,EAC3D,4BAAmC,EACnC,6BAA6B,EAC7B,wBAAwB,EACxB,2BAA2B,GAC5B,EAAE,iCAAiC,GAAG,YAAY,CAmJlD;AAED,wBAAgB,8BAA8B,IAAI,yBAAyB,CAQ1E"}
@@ -1 +1 @@
-
{"version":3,"file":"VideoSignatureCapture.d.ts","sourceRoot":"","sources":["../../../src/components/video_signature_capture/VideoSignatureCapture.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,EACZ,aAAa,EAMd,MAAM,OAAO,CAAA;AAad,OAAO,EAAE,wBAAwB,EAAmB,MAAM,mBAAmB,CAAA;AAC7E,OAAO,EAAE,4BAA4B,EAAE,MAAM,0CAA0C,CAAA;AACvF,OAA6B,EAC3B,8BAA8B,EAC9B,4BAA4B,EAC7B,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EACL,wBAAwB,EAEzB,MAAM,yBAAyB,CAAA;AAQhC,eAAO,MAAM,gCAAgC,KAAK,CAAA;AAClD,eAAO,MAAM,yCAAyC,OAAO,CAAA;AAC7D,eAAO,MAAM,mCAAmC,SAAS,CAAA;AAEzD,MAAM,MAAM,wBAAwB,GAAG;IACrC,iBAAiB,EAAE,MAAM,IAAI,CAAA;CAC9B,CAAA;AAED,MAAM,MAAM,+BAA+B,GAAG;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,MAAM,CAAC,EAAE,8BAA8B,CAAA;CACxC,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,CAAA;CAClC,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG;IAC1C,mBAAmB,CAAC,EAAE,wBAAwB,CAAA;IAC9C,MAAM,CAAC,EAAE,4BAA4B,CAAA;CACtC,CAAA;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,wBAAwB,CAAC,EAAE,wBAAwB,CAAA;IACnD,iBAAiB,CAAC,EAAE,MAAM,IAAI,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,IAAI,CAAA;IACnB,iCAAiC,CAAC,EAAE,OAAO,CAAA;IAC3C,eAAe,CAAC,EAAE,aAAa,CAAC,4BAA4B,CAAC,CAAA;IAC7D,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IAC1C,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,UAAU,CAAC,EAAE,+BAA+B,CAAA;IAC5C,MAAM,CAAC,EAAE,2BAA2B,CAAA;IACpC,QAAQ,CAAC,EAAE,6BAA6B,CAAA;IACxC,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;
+
{"version":3,"file":"VideoSignatureCapture.d.ts","sourceRoot":"","sources":["../../../src/components/video_signature_capture/VideoSignatureCapture.tsx"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,EACZ,aAAa,EAMd,MAAM,OAAO,CAAA;AAad,OAAO,EAAE,wBAAwB,EAAmB,MAAM,mBAAmB,CAAA;AAC7E,OAAO,EAAE,4BAA4B,EAAE,MAAM,0CAA0C,CAAA;AACvF,OAA6B,EAC3B,8BAA8B,EAC9B,4BAA4B,EAC7B,MAAM,wBAAwB,CAAA;AAC/B,OAAO,EACL,wBAAwB,EAEzB,MAAM,yBAAyB,CAAA;AAQhC,eAAO,MAAM,gCAAgC,KAAK,CAAA;AAClD,eAAO,MAAM,yCAAyC,OAAO,CAAA;AAC7D,eAAO,MAAM,mCAAmC,SAAS,CAAA;AAEzD,MAAM,MAAM,wBAAwB,GAAG;IACrC,iBAAiB,EAAE,MAAM,IAAI,CAAA;CAC9B,CAAA;AAED,MAAM,MAAM,+BAA+B,GAAG;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,eAAe,CAAC,EAAE,MAAM,CAAA;IACxB,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,MAAM,CAAC,EAAE,8BAA8B,CAAA;CACxC,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,CAAA;CAClC,CAAA;AAED,MAAM,MAAM,6BAA6B,GAAG;IAC1C,mBAAmB,CAAC,EAAE,wBAAwB,CAAA;IAC9C,MAAM,CAAC,EAAE,4BAA4B,CAAA;CACtC,CAAA;AAED,MAAM,MAAM,0BAA0B,GAAG;IACvC,wBAAwB,CAAC,EAAE,wBAAwB,CAAA;IACnD,iBAAiB,CAAC,EAAE,MAAM,IAAI,CAAA;IAC9B,MAAM,CAAC,EAAE,MAAM,IAAI,CAAA;IACnB,iCAAiC,CAAC,EAAE,OAAO,CAAA;IAC3C,eAAe,CAAC,EAAE,aAAa,CAAC,4BAA4B,CAAC,CAAA;IAC7D,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,oBAAoB,CAAC,EAAE,OAAO,CAAA;IAC9B,8BAA8B,CAAC,EAAE,MAAM,CAAA;IACvC,wBAAwB,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IAC1C,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,UAAU,CAAC,EAAE,+BAA+B,CAAA;IAC5C,MAAM,CAAC,EAAE,2BAA2B,CAAA;IACpC,QAAQ,CAAC,EAAE,6BAA6B,CAAA;IACxC,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB,CAAA;AAuMD,eAAO,MAAM,qBAAqB,6GAGD,CAAA"}
@@ -13,7 +13,7 @@ export declare const documentTypeDisplayNames: {
 singlePage: string;
 none: string;
 };
-export type Label = 'Document' | 'Document back' | 'MRZ' | 'PDF417' | 'Primary face' | 'Secondary face' | 'Glare' | 'Punch Hole' | 'Passport page' | 'Single page';
+export type Label = 'Document' | 'Document back' | 'MRZ' | 'PDF417' | 'Primary face' | 'Secondary face' | 'Nose' | 'Glare' | 'Punch Hole' | 'Passport page' | 'Single page';
 export type DetectedObjectBox = {
 xMin: number;
 xMax: number;
@@ -36,6 +36,7 @@ export type DocumentThresholds = {
 export type DocumentDetectionThresholds = DocumentThresholds & {
 stability?: DocumentThresholds;
 };
+export declare function getDocumentDetector(): ObjectDetector | null;
 export declare function loadDocumentDetector(modelAssetPath?: string, scoreThreshold?: number): Promise<ObjectDetector>;
 export declare function closeDocumentDetector(): void;
 export declare function useLoadDocumentDetector({ shouldLoadModels, modelPath, modelLoadTimeoutMs, scoreThreshold, onModelError, videoRef, }: {
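The hunk above adds a synchronous getDocumentDetector() accessor alongside the existing async loader. A minimal consumer-side sketch of how the three helpers fit together, assuming they are re-exported from the package entry point (the import path and guard logic are illustrative, not the SDK's internal code):

import {
  getDocumentDetector,
  loadDocumentDetector,
  closeDocumentDetector,
} from 'idmission-web-sdk' // assumed entry point; the declarations live in dist/lib/models/DocumentDetection.d.ts

// Reuse an already-loaded detector when one exists; otherwise load one
// with the SDK's default model path and score threshold.
async function ensureDocumentDetector() {
  return getDocumentDetector() ?? loadDocumentDetector()
}

// Dispose of the detector and clear the module-level cache when capture ends.
function teardownDocumentDetector(): void {
  closeDocumentDetector()
}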
@@ -1 +1 @@
-
{"version":3,"file":"DocumentDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/DocumentDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,cAAc,EACd,oBAAoB,EACrB,MAAM,yBAAyB,CAAA;AAQhC,OAAO,EAIL,cAAc,EAEf,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,gBAAgB,EAAgC,MAAM,OAAO,CAAA;AAItE,cAAc,6BAA6B,CAAA;AAC3C,eAAO,MAAM,sCAAsC,MAAM,CAAA;AACzD,eAAO,MAAM,0CAA0C,QAAQ,CAAA;AAC/D,eAAO,MAAM,kCAAkC,EAAE,2BAWhD,CAAA;AAED,MAAM,MAAM,YAAY,GACpB,MAAM,GACN,aAAa,GACb,YAAY,GACZ,UAAU,GACV,YAAY,CAAA;AAEhB,eAAO,MAAM,wBAAwB;;;;;;CAMpC,CAAA;AAED,MAAM,MAAM,KAAK,GACb,UAAU,GACV,eAAe,GACf,KAAK,GACL,QAAQ,GACR,cAAc,GACd,gBAAgB,GAChB,OAAO,GACP,YAAY,GACZ,eAAe,GACf,aAAa,CAAA;AAEjB,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf,CAAA;AAED,MAAM,MAAM,cAAc,GAAG;IAC3B,GAAG,EAAE,iBAAiB,CAAA;IACtB,KAAK,EAAE,KAAK,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;CACd,CAAA;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG,kBAAkB,GAAG;IAC7D,SAAS,CAAC,EAAE,kBAAkB,CAAA;CAC/B,CAAA;AAQD,wBAAsB,oBAAoB,CACxC,cAAc,SAAmC,EACjD,cAAc,SAAyC,GACtD,OAAO,CAAC,cAAc,CAAC,CA4BzB;AAED,wBAAgB,qBAAqB,SAIpC;AAED,wBAAgB,uBAAuB,CAAC,EACtC,gBAAuB,EACvB,SAA4C,EAC5C,kBAA+D,EAC/D,cAAuD,EACvD,YAAY,EACZ,QAAQ,GACT,EAAE;IACD,gBAAgB,CAAC,EAAE,OAAO,CAAA;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,QAAQ,EAAE,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAA;CACpD;;;;;;;EA4FA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,UAAU,EAAE,wBAAwB,CAAA;IAEpC,eAAe,EAAE,cAAc,EAAE,CAAA;IACjC,aAAa,EAAE,MAAM,CAAA;IACrB,cAAc,EAAE,MAAM,CAAA;IACtB,qBAAqB,EAAE,OAAO,CAAA;IAC9B,oBAAoB,EAAE,YAAY,CAAA;IAElC,yBAAyB,EAAE,MAAM,CAAA;IACjC,gCAAgC,EAAE,OAAO,CAAA;IAEzC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,sBAAsB,EAAE,MAAM,CAAA;IAC9B,6BAA6B,EAAE,OAAO,CAAA;IAEtC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,YAAY,EAAE,cAAc,GAAG,SAAS,CAAA;IACxC,UAAU,EAAE,cAAc,GAAG,SAAS,CAAA;IAEtC,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IAEzB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IAEnB,OAAO,EAAE,OAAO,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;CACjB,CAAA;AAED,MAAM,MAAM,wBAAwB,GAAG,oBAAoB,GAAG;IAC5D,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,wBAAsB,8BAA8B,CAClD,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,wBAAwB,GAAG,IAAI,CAAC,CAiB1C;AAED,eAAO,IAAI,eAAe,QAAI,CAAA;AAC9B,eAAO,IAAI,iBAAiB,QAAI,CAAA;AAEhC,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,MAAM,QAG9C;AA8BD,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,MAAM,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb;;;;;OAKG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,eAAO,MAAM,kCAAkC,EAAE,2BAMhD,CAAA;AAED,wBAAgB,iCAAiC,CAC/C,UAAU,EAAE,wBAAwB,EACpC,UAAU,EAAE,2BAA2B,EACvC,UAAU,GAAE,2BAAgE,GAC3E,2BAA2B,CAgK7B"}
+
{"version":3,"file":"DocumentDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/DocumentDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,cAAc,EACd,oBAAoB,EACrB,MAAM,yBAAyB,CAAA;AAQhC,OAAO,EAIL,cAAc,EAEf,MAAM,WAAW,CAAA;AAClB,OAAO,EAAE,gBAAgB,EAAgC,MAAM,OAAO,CAAA;AAItE,cAAc,6BAA6B,CAAA;AAC3C,eAAO,MAAM,sCAAsC,MAAM,CAAA;AACzD,eAAO,MAAM,0CAA0C,QAAQ,CAAA;AAC/D,eAAO,MAAM,kCAAkC,EAAE,2BAWhD,CAAA;AAED,MAAM,MAAM,YAAY,GACpB,MAAM,GACN,aAAa,GACb,YAAY,GACZ,UAAU,GACV,YAAY,CAAA;AAEhB,eAAO,MAAM,wBAAwB;;;;;;CAMpC,CAAA;AAED,MAAM,MAAM,KAAK,GACb,UAAU,GACV,eAAe,GACf,KAAK,GACL,QAAQ,GACR,cAAc,GACd,gBAAgB,GAChB,MAAM,GACN,OAAO,GACP,YAAY,GACZ,eAAe,GACf,aAAa,CAAA;AAEjB,MAAM,MAAM,iBAAiB,GAAG;IAC9B,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf,CAAA;AAED,MAAM,MAAM,cAAc,GAAG;IAC3B,GAAG,EAAE,iBAAiB,CAAA;IACtB,KAAK,EAAE,KAAK,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;CACd,CAAA;AAED,MAAM,MAAM,kBAAkB,GAAG;IAC/B,WAAW,CAAC,EAAE,MAAM,CAAA;IACpB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAA;IACjB,UAAU,CAAC,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,MAAM,MAAM,2BAA2B,GAAG,kBAAkB,GAAG;IAC7D,SAAS,CAAC,EAAE,kBAAkB,CAAA;CAC/B,CAAA;AAQD,wBAAgB,mBAAmB,IAAI,cAAc,GAAG,IAAI,CAE3D;AAED,wBAAsB,oBAAoB,CACxC,cAAc,SAAmC,EACjD,cAAc,SAAyC,GACtD,OAAO,CAAC,cAAc,CAAC,CA4BzB;AAED,wBAAgB,qBAAqB,SAIpC;AAED,wBAAgB,uBAAuB,CAAC,EACtC,gBAAuB,EACvB,SAA4C,EAC5C,kBAA+D,EAC/D,cAAuD,EACvD,YAAY,EACZ,QAAQ,GACT,EAAE;IACD,gBAAgB,CAAC,EAAE,OAAO,CAAA;IAC1B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,cAAc,CAAC,EAAE,MAAM,CAAA;IACvB,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,QAAQ,EAAE,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAA;CACpD;;;;;;;EA4FA;AAED,MAAM,MAAM,2BAA2B,GAAG;IACxC,UAAU,EAAE,wBAAwB,CAAA;IAEpC,eAAe,EAAE,cAAc,EAAE,CAAA;IACjC,aAAa,EAAE,MAAM,CAAA;IACrB,cAAc,EAAE,MAAM,CAAA;IACtB,qBAAqB,EAAE,OAAO,CAAA;IAC9B,oBAAoB,EAAE,YAAY,CAAA;IAElC,yBAAyB,EAAE,MAAM,CAAA;IACjC,gCAAgC,EAAE,OAAO,CAAA;IAEzC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,sBAAsB,EAAE,MAAM,CAAA;IAC9B,6BAA6B,EAAE,OAAO,CAAA;IAEtC,wBAAwB,EAAE,MAAM,CAAA;IAChC,+BAA+B,EAAE,OAAO,CAAA;IAExC,YAAY,EAAE,cAAc,GAAG,SAAS,CAAA;IACxC,UAAU,EAAE,cAAc,GAAG,SAAS,CAAA;IAEtC,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IACzB,gBAAgB,EAAE,OAAO,CAAA;IAEzB,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IAEnB,OAAO,EAAE,OAAO,CAAA;IAChB,OAAO,CAAC,EAAE,MAAM,CAAA;CACjB,CAAA;AAED,MAAM,MAAM,wBAAwB,GAAG,oBAAoB,GAAG;IAC5D,IAAI,EAAE,MAAM,CAAA;IACZ,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;CACpB,CAAA;AAED,wBAAsB,8BAA8B,CAClD,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,wBAAwB,GAAG,IAAI,CAAC,CAiB1C;AAED,eAAO,IAAI,eAAe,QAAI,CAAA;AAC9B,eAAO,IAAI,iBAAiB,QAAI,CAAA;AAEhC,wBAAgB,kBAAkB,CAAC,IAAI,EAAE,MAAM,QAG9C;AA8BD,MAAM,MAAM,2BAA2B,GAAG;IACxC,GAAG,EAAE,MAAM,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;IACd,IAAI,EAAE,MAAM,CAAA;IACZ,KAAK,EAAE,MAAM,CAAA;IACb;;;;;OAKG;IACH,aAAa,CAAC,EAAE,MAAM,CAAA;CACvB,CAAA;AAED,eAAO,MAAM,kCAAkC,EAAE,2BAMhD,CAAA;AAED,wBAAgB,iCAAiC,CAC/C,UAAU,EAAE,wBAAwB,EACpC,UAAU,EAAE,2BAA2B,EACvC,UAAU,GAAE,2BAAgE,GAC3E,2BAA2B,CAgK7B"}
@@ -1,4 +1,4 @@
-import { FaceDetector, FaceDetectorResult } from '@mediapipe/tasks-vision';
+import { FaceDetector, FaceDetectorResult, ObjectDetectorResult } from '@mediapipe/tasks-vision';
 import { MutableRefObject } from 'react';
 import { Frame } from '../utils/getFrameDimensions';
 import { ModelLoadState } from './helpers';
@@ -10,7 +10,7 @@ export type FaceKeypoint = {
 name: string;
 };
 export type Face = {
-keypoints: FaceKeypoint[];
+keypoints: (FaceKeypoint | null)[];
 box: {
 xMin: number;
 xMax: number;
@@ -52,11 +52,13 @@ export declare function setLastFaceDetectionAt(time: number): void;
 export declare function makeFaceDetectorPrediction(imageData: Frame): (FaceDetectorResult & {
 faces: Face[];
 }) | null;
+export declare function makeFacePredictionWithDocumentDetector(frame: HTMLCanvasElement): Promise<(ObjectDetectorResult & {
+faces: Face[];
+}) | null>;
 export declare function processFaceDetectorPrediction({ faces, videoWidth, videoHeight, requireVerticalFaceCentering, stabilityThreshold, noseDistanceThreshold, xBoundary, // this represents the edge that the sides of the face box should not cross -- 1% of video width
 yBoundary, // this represents the edge that the top or bottom of the face box should not cross -- 1% of video height
 xCentroidBoundary, // this represents the edge that the centroid of the face should not cross -- 12.5% of video width
 yCentroidBoundary, // this represents the edge that the centroid of the face should not cross -- 12.5% of video height
-foreheadRatio, // we found that the bounding box ends at the brow and misses the forehead. this ratio represents how much we should extend the box to include the forehead.
 noseTrackingThreshold, // this represents the maximum distance that the nose can be from the center of the face box -- 20% of the face box width or height
 minCaptureBrightnessThreshold, minCaptureRangeThreshold, minCaptureVarianceThreshold, brightness, range, variance, }: {
 faces: Face[];
@@ -69,7 +71,6 @@ minCaptureBrightnessThreshold, minCaptureRangeThreshold, minCaptureVarianceThres
 yBoundary?: number;
 xCentroidBoundary?: number;
 yCentroidBoundary?: number;
-foreheadRatio?: number;
 noseTrackingThreshold?: number;
 minCaptureBrightnessThreshold?: number;
 minCaptureRangeThreshold?: number;
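Together these declaration changes describe the new face-tracking path: makeFacePredictionWithDocumentDetector runs the document detector on a canvas frame and returns Face objects whose keypoints may now be null, and its result feeds processFaceDetectorPrediction (which no longer takes foreheadRatio). A hedged usage sketch, assuming the two functions are re-exported from the package entry point; SelfieGuidanceModelsProvider performs the equivalent steps internally:

import {
  makeFacePredictionWithDocumentDetector,
  processFaceDetectorPrediction,
} from 'idmission-web-sdk' // assumed entry point

// `canvas` is expected to already contain the current (mirrored) video frame.
async function analyzeFrame(canvas: HTMLCanvasElement) {
  const prediction = await makeFacePredictionWithDocumentDetector(canvas)
  // `faces` may be empty, and individual keypoints may be null with the new model.
  return processFaceDetectorPrediction({
    faces: prediction?.faces ?? [],
    videoWidth: canvas.width,
    videoHeight: canvas.height,
  })
}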
@@ -1 +1 @@
-
{"version":3,"file":"FaceDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/FaceDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,kBAAkB,
+
{"version":3,"file":"FaceDetection.d.ts","sourceRoot":"","sources":["../../../src/lib/models/FaceDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EACL,YAAY,EACZ,kBAAkB,EAElB,oBAAoB,EACrB,MAAM,yBAAyB,CAAA;AAOhC,OAAO,EAAE,gBAAgB,EAAgC,MAAM,OAAO,CAAA;AACtE,OAAO,EAAE,KAAK,EAAE,MAAM,6BAA6B,CAAA;AACnD,OAAO,EAIL,cAAc,EAEf,MAAM,WAAW,CAAA;AAMlB,cAAc,yBAAyB,CAAA;AACvC,eAAO,MAAM,sCAAsC,QAAQ,CAAA;AAE3D,MAAM,MAAM,YAAY,GAAG;IAAE,CAAC,EAAE,MAAM,CAAC;IAAC,CAAC,EAAE,MAAM,CAAC;IAAC,IAAI,EAAE,MAAM,CAAA;CAAE,CAAA;AACjE,MAAM,MAAM,IAAI,GAAG;IACjB,SAAS,EAAE,CAAC,YAAY,GAAG,IAAI,CAAC,EAAE,CAAA;IAClC,GAAG,EAAE;QACH,IAAI,EAAE,MAAM,CAAA;QACZ,IAAI,EAAE,MAAM,CAAA;QACZ,IAAI,EAAE,MAAM,CAAA;QACZ,IAAI,EAAE,MAAM,CAAA;QACZ,KAAK,EAAE,MAAM,CAAA;QACb,MAAM,EAAE,MAAM,CAAA;KACf,CAAA;CACF,CAAA;AAED,MAAM,MAAM,uBAAuB,GAAG;IACpC,IAAI,EAAE,IAAI,GAAG,IAAI,CAAA;IACjB,eAAe,EAAE,OAAO,CAAA;IACxB,eAAe,EAAE,OAAO,CAAA;IACxB,eAAe,EAAE,OAAO,CAAA;IACxB,YAAY,EAAE,OAAO,CAAA;IACrB,UAAU,EAAE,OAAO,CAAA;IACnB,SAAS,EAAE,OAAO,CAAA;IAClB,WAAW,EAAE,IAAI,GAAG,IAAI,CAAA;IACxB,YAAY,EAAE,OAAO,CAAA;IACrB,YAAY,EAAE,OAAO,CAAA;IACrB,oBAAoB,EAAE,OAAO,CAAA;CAC9B,CAAA;AAKD,wBAAsB,gBAAgB,CACpC,cAAc,SAA+B,GAC5C,OAAO,CAAC,YAAY,CAAC,CA0BvB;AAED,wBAAgB,iBAAiB,SAIhC;AAED,wBAAgB,mBAAmB,CAAC,EAClC,YAAY,EACZ,kBAA2D,EAC3D,QAAQ,GACT,EAAE;IACD,YAAY,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,KAAK,IAAI,CAAA;IACrC,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,QAAQ,EAAE,gBAAgB,CAAC,gBAAgB,GAAG,IAAI,CAAC,CAAA;CACpD;;;;;;EAwFA;AAED,eAAO,IAAI,mBAAmB,QAAI,CAAA;AAClC,eAAO,IAAI,qBAAqB,QAAI,CAAA;AAEpC,wBAAgB,sBAAsB,CAAC,IAAI,EAAE,MAAM,QAGlD;AA0DD,wBAAgB,0BAA0B,CACxC,SAAS,EAAE,KAAK,GACf,CAAC,kBAAkB,GAAG;IAAE,KAAK,EAAE,IAAI,EAAE,CAAA;CAAE,CAAC,GAAG,IAAI,CAgBjD;AAqBD,wBAAsB,sCAAsC,CAC1D,KAAK,EAAE,iBAAiB,GACvB,OAAO,CAAC,CAAC,oBAAoB,GAAG;IAAE,KAAK,EAAE,IAAI,EAAE,CAAA;CAAE,CAAC,GAAG,IAAI,CAAC,CA+C5D;AAED,wBAAgB,6BAA6B,CAAC,EAC5C,KAAK,EACL,UAAU,EACV,WAAW,EACX,4BAAmC,EACnC,kBAAwB,EACxB,qBAA6B,EAC7B,SAAgB,EAAE,gGAAgG;AAClH,SAAgB,EAAE,yGAAyG;AAC3H,iBAAyB,EAAE,kGAAkG;AAC7H,iBAAyB,EAAE,mGAAmG;AAC9H,qBAA2B,EAAE,mIAAmI;AAChK,6BAA6B,EAC7B,wBAAwB,EACxB,2BAA2B,EAC3B,UAAU,EACV,KAAK,EACL,QAAQ,GACT,EAAE;IACD,KAAK,EAAE,IAAI,EAAE,CAAA;IACb,UAAU,EAAE,MAAM,CAAA;IAClB,WAAW,EAAE,MAAM,CAAA;IACnB,4BAA4B,CAAC,EAAE,OAAO,CAAA;IACtC,kBAAkB,CAAC,EAAE,MAAM,CAAA;IAC3B,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,SAAS,CAAC,EAAE,MAAM,CAAA;IAClB,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAA;IAC1B,qBAAqB,CAAC,EAAE,MAAM,CAAA;IAC9B,6BAA6B,CAAC,EAAE,MAAM,CAAA;IACtC,wBAAwB,CAAC,EAAE,MAAM,CAAA;IACjC,2BAA2B,CAAC,EAAE,MAAM,CAAA;IACpC,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,KAAK,CAAC,EAAE,MAAM,CAAA;IACd,QAAQ,CAAC,EAAE,MAAM,CAAA;CAClB,GAAG,uBAAuB,CAyH1B"}
@@ -1,3 +1,3 @@
-export declare const defaultDocumentDetectorModelPath = "https://websdk-cdn-dev.idmission.com/assets/models/DocumentDetector/DocumentDetector-
-export declare const defaultDocumentDetectorModelHash = "
+export declare const defaultDocumentDetectorModelPath = "https://websdk-cdn-dev.idmission.com/assets/models/DocumentDetector/DocumentDetector-20251123_125827.tflite";
+export declare const defaultDocumentDetectorModelHash = "SP321/MyOR20Dfxe8u42WCYXx+e+RxzET83f73Z8yM4n5FTrZfODY9zkt3h9z21Y";
 //# sourceMappingURL=DocumentDetector.d.ts.map
@@ -235,7 +235,7 @@ typeof SuppressedError === "function" ? SuppressedError : function (error, suppr
 return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
 };
 
-var webSdkVersion = '2.3.
+var webSdkVersion = '2.3.159';
 
 function getPlatform() {
 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
@@ -2589,8 +2589,8 @@ function giveUpAfter(maxTime) {
 
 var DEFAULT_CDN_URL = 'https://websdk-cdn-dev.idmission.com/assets';
 
-var defaultDocumentDetectorModelPath = "".concat(DEFAULT_CDN_URL, "/models/DocumentDetector/DocumentDetector-
-var defaultDocumentDetectorModelHash = '
+var defaultDocumentDetectorModelPath = "".concat(DEFAULT_CDN_URL, "/models/DocumentDetector/DocumentDetector-20251123_125827.tflite");
+var defaultDocumentDetectorModelHash = 'SP321/MyOR20Dfxe8u42WCYXx+e+RxzET83f73Z8yM4n5FTrZfODY9zkt3h9z21Y';
 
 var defaultFocusModelPath = "".concat(DEFAULT_CDN_URL, "/models/Focus/Focus-20241008_102708.tflite");
 var defaultFocusModelHash = 'HTJNLB7QybtPXIIyUI9oNqW40vE5bgSV5V24R1wXLQHknNEVOSyDQDo/QzRgJ8jb';
@@ -2907,8 +2907,8 @@ var documentTypeDisplayNames = {
 singlePage: 'Single page',
 none: 'None'
 };
-var detector
-var detectorSettings
+var detector = null;
+var detectorSettings = null;
 function loadDocumentDetector() {
 return __awaiter(this, arguments, void 0, function (modelAssetPath, scoreThreshold) {
 var _a, _b;
@@ -2921,7 +2921,7 @@ function loadDocumentDetector() {
 return __generator(this, function (_c) {
 switch (_c.label) {
 case 0:
-if (detector
+if (detector && (detectorSettings === null || detectorSettings === void 0 ? void 0 : detectorSettings.modelAssetPath) === modelAssetPath && (detectorSettings === null || detectorSettings === void 0 ? void 0 : detectorSettings.scoreThreshold) === scoreThreshold) return [2 /*return*/, detector];
 closeDocumentDetector();
 return [4 /*yield*/, preloadDocumentDetectorDependencies()];
 case 1:
@@ -2942,20 +2942,20 @@ function loadDocumentDetector() {
 runningMode: 'VIDEO'
 }])];
 case 3:
-detector
-detectorSettings
+detector = _c.sent();
+detectorSettings = {
 modelAssetPath: modelAssetPath,
 scoreThreshold: scoreThreshold
 };
-return [2 /*return*/, detector
+return [2 /*return*/, detector];
 }
 });
 });
 }
 function closeDocumentDetector() {
-detector
-detector
-detectorSettings
+detector === null || detector === void 0 ? void 0 : detector.close();
+detector = null;
+detectorSettings = null;
 }
 function useLoadDocumentDetector(_a) {
 var _b = _a.shouldLoadModels,
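The transpiled code above memoizes the detector at module scope: loadDocumentDetector returns the cached ObjectDetector when the requested modelAssetPath and scoreThreshold match the previous call, and closeDocumentDetector disposes it and clears the cache. A rough TypeScript equivalent of that pattern (simplified: the WASM base path is a placeholder, and the real loader also preloads dependencies and picks a delegate):

import { FilesetResolver, ObjectDetector } from '@mediapipe/tasks-vision'

let detector: ObjectDetector | null = null
let detectorSettings: { modelAssetPath: string; scoreThreshold: number } | null = null

export async function loadDocumentDetector(
  modelAssetPath: string,
  scoreThreshold: number,
): Promise<ObjectDetector> {
  // Reuse the cached instance when the settings are unchanged.
  if (
    detector &&
    detectorSettings?.modelAssetPath === modelAssetPath &&
    detectorSettings?.scoreThreshold === scoreThreshold
  ) {
    return detector
  }
  closeDocumentDetector()
  const vision = await FilesetResolver.forVisionTasks('/mediapipe/wasm') // placeholder base path
  detector = await ObjectDetector.createFromOptions(vision, {
    baseOptions: { modelAssetPath },
    scoreThreshold,
    runningMode: 'VIDEO',
  })
  detectorSettings = { modelAssetPath, scoreThreshold }
  return detector
}

export function closeDocumentDetector(): void {
  detector?.close()
  detector = null
  detectorSettings = null
}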
@@ -3056,12 +3056,12 @@ function makeDocumentDetectorPrediction(frame) {
 return __awaiter(this, void 0, void 0, function () {
 var startedAt, prediction, time, frameWidth, frameHeight;
 return __generator(this, function (_a) {
-if (!detector
+if (!detector) return [2 /*return*/, null];
 startedAt = performance.now();
 // Detectors can throw errors, for example when using custom URLs that
 // contain a model that doesn't provide the expected output.
 try {
-prediction = detector
+prediction = detector.detectForVideo(frame, performance.now());
 time = performance.now() - startedAt;
 frameWidth = frame.width;
 frameHeight = frame.height;
@@ -7061,6 +7061,7 @@ function SelfieCaptureFaceKeypoint(_a) {
 scaledHeight = _c.scaledHeight,
 xOffset = _c.xOffset,
 yOffset = _c.yOffset;
+if (!point) return /*#__PURE__*/React__namespace.default.createElement(React__namespace.default.Fragment, null);
 var left, top;
 if (horizontal) {
 left = point.x / videoWidth * scaledWidth - xOffset;
@@ -12152,136 +12153,6 @@ var FaceCaptureGuideOverlay = function FaceCaptureGuideOverlay(_a) {
 var templateObject_1$n, templateObject_2$m;
 
 var defaultSelfieCaptureModelLoadTimeoutMs = 45000;
-var detector = null;
-var detectorSettings = null;
-function loadFaceDetector() {
-return __awaiter(this, arguments, void 0, function (modelAssetPath) {
-var _a, _b;
-if (modelAssetPath === void 0) {
-modelAssetPath = defaultFaceDetectorModelPath;
-}
-return __generator(this, function (_c) {
-switch (_c.label) {
-case 0:
-if (detector && (detectorSettings === null || detectorSettings === void 0 ? void 0 : detectorSettings.modelAssetPath) === modelAssetPath) return [2 /*return*/, detector];
-closeFaceDetector();
-return [4 /*yield*/, preloadFaceDetectorDependencies()];
-case 1:
-_c.sent();
-if (modelCapabilities.delegate === 'NONE') {
-throw new Error('No available delegate for face detector.');
-}
-_b = (_a = tasksVision.FaceDetector).createFromOptions;
-return [4 /*yield*/, tasksVision.FilesetResolver.forVisionTasks(visionTasksBasePath)];
-case 2:
-return [4 /*yield*/, _b.apply(_a, [_c.sent(), {
-// canvas: document.createElement('canvas'),
-baseOptions: {
-modelAssetPath: modelAssetPath,
-delegate: modelCapabilities.delegate
-},
-runningMode: 'VIDEO'
-}])];
-case 3:
-detector = _c.sent();
-detectorSettings = {
-modelAssetPath: modelAssetPath
-};
-return [2 /*return*/, detector];
-}
-});
-});
-}
-function closeFaceDetector() {
-detector === null || detector === void 0 ? void 0 : detector.close();
-detector = null;
-detectorSettings = null;
-}
-function useLoadFaceDetector(_a) {
-var onModelError = _a.onModelError,
-_b = _a.modelLoadTimeoutMs,
-modelLoadTimeoutMs = _b === void 0 ? defaultSelfieCaptureModelLoadTimeoutMs : _b,
-videoRef = _a.videoRef;
-var _c = React.useState('not-started'),
-modelLoadState = _c[0],
-setModelLoadState = _c[1];
-var _d = React.useState(0),
-modelDownloadProgress = _d[0],
-setModelDownloadProgress = _d[1];
-var _e = React.useState(null),
-modelWarmingStartedAt = _e[0],
-setModelWarmingStartedAt = _e[1];
-var _f = React.useState(null),
-modelError = _f[0],
-setModelError = _f[1];
-React.useEffect(function loadModel() {
-var _this = this;
-setModelLoadState('downloading');
-setModelWarmingStartedAt(null);
-var modelLoadTimeout = setTimeout(function () {
-setModelError(new Error('Model loading time limit exceeded.'));
-}, modelLoadTimeoutMs);
-function handleDownloadProgress(event) {
-setModelDownloadProgress(progressToPercentage(event.detail));
-}
-document.addEventListener('idmission.preloadProgress.faceDetection', handleDownloadProgress);
-var cancelVideoReady = function cancelVideoReady() {};
-loadFaceDetector().then(function (model) {
-return __awaiter(_this, void 0, void 0, function () {
-var _a, videoReady, cancel, cancelled;
-return __generator(this, function (_b) {
-switch (_b.label) {
-case 0:
-setModelDownloadProgress(100);
-clearTimeout(modelLoadTimeout);
-setModelLoadState('warming');
-setModelWarmingStartedAt(performance.now());
-return [4 /*yield*/, testFaceDetectionAgainstKnownImage(model)];
-case 1:
-_b.sent();
-_a = waitForVideoReady(videoRef), videoReady = _a[0], cancel = _a[1];
-cancelled = false;
-cancelVideoReady = function cancelVideoReady() {
-cancelled = true;
-cancel();
-};
-return [4 /*yield*/, videoReady];
-case 2:
-_b.sent();
-if (cancelled) return [2 /*return*/];
-model.detectForVideo(videoRef.current, performance.now());
-setModelLoadState('ready');
-return [2 /*return*/];
-}
-});
-});
-})["catch"](function (e) {
-setModelError(e);
-setModelLoadState('error');
-})["finally"](function () {
-clearTimeout(modelLoadTimeout);
-});
-return function () {
-log('unloading face detection model');
-cancelVideoReady();
-closeFaceDetector();
-clearTimeout(modelLoadTimeout);
-document.removeEventListener('idmission.preloadProgress.faceDetection', handleDownloadProgress);
-};
-}, [modelLoadTimeoutMs, videoRef]);
-React.useEffect(function handleModelError() {
-if (modelError) onModelError === null || onModelError === void 0 ? void 0 : onModelError(modelError);
-}, [modelError, onModelError]);
-return React.useMemo(function () {
-return {
-ready: modelLoadState === 'ready',
-modelLoadState: modelLoadState,
-modelDownloadProgress: modelDownloadProgress,
-modelWarmingStartedAt: modelWarmingStartedAt,
-modelError: modelError
-};
-}, [modelLoadState, modelDownloadProgress, modelWarmingStartedAt, modelError]);
-}
 var lastFaceDetectionAt = 0;
 var lastFaceDetectionTime = 0;
 function setLastFaceDetectionAt(time) {
@@ -12331,24 +12202,68 @@ function trackFace(face, framesNeeded, frameWidth, frameHeight) {
 if (lastNNosePairs.length > framesNeeded - 1) lastNNosePairs.length = framesNeeded - 1;
 }
 }
-function
-
-
-var faces = prediction.detections.map(function (d) {
-return {
-box: convertBoundingBox(d.boundingBox),
-keypoints: d.keypoints.map(function (k) {
-var _a;
-return _assign(_assign({}, k), {
-x: k.x * imageData.width,
-y: k.y * imageData.height,
-name: (_a = k.label) !== null && _a !== void 0 ? _a : ''
-});
-})
-};
+function isFaceDetection(detection) {
+return detection.boundingBox && detection.categories.some(function (c) {
+return c.categoryName === 'Primary face' || c.categoryName === 'Secondary face';
 });
-
-
+}
+function isNoseDetection(detection) {
+return detection.categories.some(function (c) {
+return c.categoryName === 'Nose';
+});
+}
+function makeFacePredictionWithDocumentDetector(frame) {
+return __awaiter(this, void 0, void 0, function () {
+var prediction, faceDetections, noseDetections, _i, _a, detection, faces, _b, faceDetections_1, d, faceBox, nose, _c, noseDetections_1, n, noseBox, cX, cY;
+return __generator(this, function (_d) {
+switch (_d.label) {
+case 0:
+return [4 /*yield*/, makeDocumentDetectorPrediction(frame)];
+case 1:
+prediction = _d.sent();
+if (!prediction) return [2 /*return*/, null];
+faceDetections = [];
+noseDetections = [];
+for (_i = 0, _a = prediction.detections; _i < _a.length; _i++) {
+detection = _a[_i];
+if (isNoseDetection(detection)) {
+noseDetections.push(detection);
+} else if (isFaceDetection(detection)) {
+faceDetections.push(detection);
+}
+}
+faces = [];
+for (_b = 0, faceDetections_1 = faceDetections; _b < faceDetections_1.length; _b++) {
+d = faceDetections_1[_b];
+faceBox = d.boundingBox;
+if (!faceBox) continue;
+nose = null;
+for (_c = 0, noseDetections_1 = noseDetections; _c < noseDetections_1.length; _c++) {
+n = noseDetections_1[_c];
+noseBox = n.boundingBox;
+if (!noseBox) continue;
+cX = noseBox.originX + noseBox.width / 2;
+cY = noseBox.originY + noseBox.height / 2;
+if (cX >= faceBox.originX && cX <= faceBox.originX + faceBox.width && cY >= faceBox.originY && cY <= faceBox.originY + faceBox.height) {
+nose = {
+x: cX,
+y: cY,
+name: 'nose'
+};
+break;
+}
+}
+faces.push({
+box: convertBoundingBox(d.boundingBox),
+keypoints: [null, null, nose, null, null]
+});
+}
+debug('faces', faces);
+return [2 /*return*/, _assign(_assign({}, prediction), {
+faces: faces
+})];
+}
+});
 });
 }
 function processFaceDetectorPrediction(_a) {
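The added bundle code replaces the removed MediaPipe face-detector mapping: 'Primary face' and 'Secondary face' detections from the document model become face boxes, and a 'Nose' detection whose centre falls inside a face box becomes that face's only keypoint, stored in slot 2 of the keypoints array (the index the nose-tracking code reads). A de-transpiled sketch of just that association step, with types simplified from @mediapipe/tasks-vision and the SDK's convertBoundingBox step omitted:

import type { Detection } from '@mediapipe/tasks-vision'

type FaceKeypoint = { x: number; y: number; name: string }

// Pair each face detection with the first nose detection whose centre
// lies inside the face's bounding box.
function attachNose(faceDetections: Detection[], noseDetections: Detection[]) {
  return faceDetections.flatMap((face) => {
    const box = face.boundingBox
    if (!box) return []
    let nose: FaceKeypoint | null = null
    for (const n of noseDetections) {
      const nb = n.boundingBox
      if (!nb) continue
      const cX = nb.originX + nb.width / 2
      const cY = nb.originY + nb.height / 2
      if (
        cX >= box.originX && cX <= box.originX + box.width &&
        cY >= box.originY && cY <= box.originY + box.height
      ) {
        nose = { x: cX, y: cY, name: 'nose' }
        break
      }
    }
    // Only the nose slot (index 2) is populated; the other keypoints stay null.
    return [{ box, keypoints: [null, null, nose, null, null] }]
  })
}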
@@ -12376,13 +12291,9 @@ function processFaceDetectorPrediction(_a) {
 // this represents the edge that the centroid of the face should not cross -- 12.5% of video width
 yCentroidBoundary = _h === void 0 ? 0.125 : _h,
 // this represents the edge that the centroid of the face should not cross -- 12.5% of video height
-_j = _a.
+_j = _a.noseTrackingThreshold,
 // this represents the edge that the centroid of the face should not cross -- 12.5% of video height
-
-// we found that the bounding box ends at the brow and misses the forehead. this ratio represents how much we should extend the box to include the forehead.
-_k = _a.noseTrackingThreshold,
-// we found that the bounding box ends at the brow and misses the forehead. this ratio represents how much we should extend the box to include the forehead.
-noseTrackingThreshold = _k === void 0 ? 0.2 : _k,
+noseTrackingThreshold = _j === void 0 ? 0.2 : _j,
 // this represents the maximum distance that the nose can be from the center of the face box -- 20% of the face box width or height
 minCaptureBrightnessThreshold = _a.minCaptureBrightnessThreshold,
 minCaptureRangeThreshold = _a.minCaptureRangeThreshold,
@@ -12409,10 +12320,10 @@ function processFaceDetectorPrediction(_a) {
 var frameCX = videoWidth / 2;
 var frameCY = videoHeight / 2;
 // calculate head bounding box, with forehead extension
-
+// const foreheadSize = face.box.height * foreheadRatio
 var headXMin = face.box.xMin;
 var headXMax = face.box.xMax;
-var headYMin = face.box.yMin - foreheadSize
+var headYMin = face.box.yMin; // - foreheadSize
 var headYMax = face.box.yMax;
 // calculate head centroids
 var headCX = (headXMin + headXMax) / 2;
@@ -12468,26 +12379,6 @@ function processFaceDetectorPrediction(_a) {
 faceVisibilityTooLow: faceVisibilityTooLow
 };
 }
-function testFaceDetectionAgainstKnownImage(detector) {
-return new Promise(function (resolve, reject) {
-var img = new Image();
-img.crossOrigin = 'anonymous';
-img.onload = function () {
-var prediction = detector.detectForVideo(img, performance.now());
-if (prediction.detections.length > 0) {
-debug('face detection test result', prediction.detections);
-resolve(void 0);
-} else {
-warn('face detection test failed');
-reject(new Error('testFaceDetectionAgainstKnownImage failed to predict'));
-}
-};
-img.onerror = function () {
-return reject(new Error('testFaceDetectionAgainstKnownImage failed to load image'));
-};
-img.src = "".concat(DEFAULT_CDN_URL, "/head-test.jpg");
-});
-}
 
 function detectBrightnessAndContrast(frame, brightnessAverager) {
 var ctx = frame.getContext('2d');
@@ -12639,16 +12530,27 @@ function SelfieGuidanceModelsProvider(_a) {
 var canvasRef = React.useRef(null);
 var onPredictionHandler = React.useRef();
 var addToAverage = useRunningAvg(5).addToAverage;
-var _f =
+var _f = useLoadDocumentDetector({
+videoRef: videoRef,
 onModelError: onModelError,
-modelLoadTimeoutMs: modelLoadTimeoutMs
-videoRef: videoRef
+modelLoadTimeoutMs: modelLoadTimeoutMs
 }),
 ready = _f.ready,
 modelLoadState = _f.modelLoadState,
 modelDownloadProgress = _f.modelDownloadProgress,
 modelWarmingStartedAt = _f.modelWarmingStartedAt,
 modelError = _f.modelError;
+// const {
+// ready,
+// modelLoadState,
+// modelDownloadProgress,
+// modelWarmingStartedAt,
+// modelError,
+// } = useLoadFaceDetector({
+// onModelError,
+// modelLoadTimeoutMs,
+// videoRef,
+// })
 var _g = useFrameLoop(React.useCallback(function () {
 return __awaiter(_this, void 0, void 0, function () {
 var vw, vh, ctx, thresholdsProvided, brightnessResults, brightness, range, variance, prediction, processed, e_1;
@@ -12662,19 +12564,21 @@ function SelfieGuidanceModelsProvider(_a) {
 ctx = canvasRef.current.getContext('2d');
 canvasRef.current.width = vw;
 canvasRef.current.height = vh;
-if (!(ctx && videoRef.current.readyState === 4)) return [3 /*break*/,
+if (!(ctx && videoRef.current.readyState === 4)) return [3 /*break*/, 5];
 ctx.translate(vw, 0);
 ctx.scale(-1, 1);
 ctx.drawImage(videoRef.current, 0, 0, vw, vh);
 _c.label = 1;
 case 1:
-_c.trys.push([1,
+_c.trys.push([1, 4,, 5]);
 thresholdsProvided = minCaptureBrightnessThreshold !== undefined || minCaptureRangeThreshold !== undefined || minCaptureVarianceThreshold !== undefined;
 brightnessResults = thresholdsProvided ? detectBrightnessAndContrast(canvasRef.current, addToAverage) : undefined;
 brightness = brightnessResults === null || brightnessResults === void 0 ? void 0 : brightnessResults.brightness;
 range = brightnessResults === null || brightnessResults === void 0 ? void 0 : brightnessResults.range;
 variance = brightnessResults === null || brightnessResults === void 0 ? void 0 : brightnessResults.variance;
-
+return [4 /*yield*/, makeFacePredictionWithDocumentDetector(canvasRef.current)];
+case 2:
+prediction = _c.sent();
 processed = processFaceDetectorPrediction({
 faces: (_a = prediction === null || prediction === void 0 ? void 0 : prediction.faces) !== null && _a !== void 0 ? _a : [],
 videoWidth: vw,
@@ -12690,15 +12594,15 @@ function SelfieGuidanceModelsProvider(_a) {
 setLastFaceDetectionAt(new Date().getTime());
 // setLastPrediction(processed)
 return [4 /*yield*/, (_b = onPredictionHandler.current) === null || _b === void 0 ? void 0 : _b.call(onPredictionHandler, processed)];
-case
+case 3:
 // setLastPrediction(processed)
 _c.sent();
-return [3 /*break*/,
-case
+return [3 /*break*/, 5];
+case 4:
 e_1 = _c.sent();
 error('caught face detection error', e_1);
-return [3 /*break*/,
-case
+return [3 /*break*/, 5];
+case 5:
 return [2 /*return*/];
 }
 });
@@ -15510,7 +15414,8 @@ function VideoSignatureCaptureComponent(_a, ref) {
 var rightEdge = videoWidth * (1 - headTrackingBoundaryPercentage);
 var topEdge = videoHeight * headTrackingBoundaryPercentage;
 var bottomEdge = videoHeight * (1 - headTrackingBoundaryPercentage);
-var
+var nose = face === null || face === void 0 ? void 0 : face.keypoints[2];
+var nearBoundary = !!face && !!nose && (headTrackingBoundaryType === 'nose' ? nose.x < leftEdge || nose.x > rightEdge || nose.y < topEdge || nose.y > bottomEdge : (face === null || face === void 0 ? void 0 : face.box.xMin) < leftEdge || (face === null || face === void 0 ? void 0 : face.box.xMax) > rightEdge || (face === null || face === void 0 ? void 0 : face.box.yMin) < topEdge || (face === null || face === void 0 ? void 0 : face.box.yMax) > bottomEdge);
 setLastFace(face);
 setHeadTrackingSatisfied(!!face && !nearBoundary);
 setNumFramesWithoutFaces(face ? 0 : function (n) {