@aws-amplify/ui-react-liveness 2.0.10 → 3.0.0

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
Files changed (66)
  1. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetector.mjs +17 -1
  2. package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +42 -1
  3. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +199 -1
  4. package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +97 -1
  5. package/dist/esm/components/FaceLivenessDetector/displayText.mjs +50 -1
  6. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessActor.mjs +13 -1
  7. package/dist/esm/components/FaceLivenessDetector/hooks/useLivenessSelector.mjs +12 -1
  8. package/dist/esm/components/FaceLivenessDetector/hooks/useMediaStreamInVideo.mjs +38 -1
  9. package/dist/esm/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.mjs +15 -1
  10. package/dist/esm/components/FaceLivenessDetector/service/machine/index.mjs +1130 -1
  11. package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs +16 -1
  12. package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs +15 -1
  13. package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs +23 -1
  14. package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs +200 -1
  15. package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs +102 -1
  16. package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +18 -1
  17. package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs +30 -1
  18. package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +131 -1
  19. package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +462 -1
  20. package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +144 -1
  21. package/dist/esm/components/FaceLivenessDetector/service/utils/support.mjs +14 -1
  22. package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +98 -1
  23. package/dist/esm/components/FaceLivenessDetector/shared/CancelButton.mjs +24 -1
  24. package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +41 -1
  25. package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +88 -1
  26. package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +114 -1
  27. package/dist/esm/components/FaceLivenessDetector/shared/LandscapeErrorModal.mjs +30 -1
  28. package/dist/esm/components/FaceLivenessDetector/shared/LivenessIconWithPopover.mjs +37 -1
  29. package/dist/esm/components/FaceLivenessDetector/shared/MatchIndicator.mjs +24 -1
  30. package/dist/esm/components/FaceLivenessDetector/shared/Overlay.mjs +9 -1
  31. package/dist/esm/components/FaceLivenessDetector/shared/RecordingIcon.mjs +13 -1
  32. package/dist/esm/components/FaceLivenessDetector/shared/Toast.mjs +12 -1
  33. package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +54 -1
  34. package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +24 -1
  35. package/dist/esm/components/FaceLivenessDetector/utils/getDisplayText.mjs +78 -1
  36. package/dist/esm/components/FaceLivenessDetector/utils/helpers.mjs +14 -0
  37. package/dist/esm/components/FaceLivenessDetector/utils/platform.mjs +8 -1
  38. package/dist/esm/index.mjs +2 -1
  39. package/dist/esm/version.mjs +3 -1
  40. package/dist/index.js +3208 -1
  41. package/dist/styles.css +343 -680
  42. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +1 -1
  43. package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +1 -3
  44. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +7 -3
  45. package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +5 -3
  46. package/dist/types/components/FaceLivenessDetector/displayText.d.ts +3 -10
  47. package/dist/types/components/FaceLivenessDetector/service/machine/index.d.ts +1 -1
  48. package/dist/types/components/FaceLivenessDetector/service/types/faceDetection.d.ts +2 -0
  49. package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +1 -1
  50. package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +3 -1
  51. package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +4 -3
  52. package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +5 -2
  53. package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +9 -15
  54. package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +2 -5
  55. package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -0
  56. package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
  57. package/dist/types/version.d.ts +1 -1
  58. package/package.json +16 -37
  59. package/dist/esm/components/FaceLivenessDetector/StartLiveness/StartLiveness.mjs +0 -1
  60. package/dist/esm/components/FaceLivenessDetector/StartLiveness/helpers.mjs +0 -1
  61. package/dist/esm/components/FaceLivenessDetector/shared/GoodFitIllustration.mjs +0 -1
  62. package/dist/esm/components/FaceLivenessDetector/shared/StartScreenFigure.mjs +0 -1
  63. package/dist/esm/components/FaceLivenessDetector/shared/TooFarIllustration.mjs +0 -1
  64. package/dist/types/components/FaceLivenessDetector/StartLiveness/StartLiveness.d.ts +0 -9
  65. package/dist/types/components/FaceLivenessDetector/StartLiveness/index.d.ts +0 -1
  66. /package/dist/types/components/FaceLivenessDetector/{StartLiveness → utils}/helpers.d.ts +0 -0
package/dist/esm/components/FaceLivenessDetector/service/types/error.mjs
@@ -1 +1,16 @@
- const R={TIMEOUT:"TIMEOUT",RUNTIME_ERROR:"RUNTIME_ERROR",FRESHNESS_TIMEOUT:"FRESHNESS_TIMEOUT",SERVER_ERROR:"SERVER_ERROR",CAMERA_FRAMERATE_ERROR:"CAMERA_FRAMERATE_ERROR",CAMERA_ACCESS_ERROR:"CAMERA_ACCESS_ERROR",FACE_DISTANCE_ERROR:"FACE_DISTANCE_ERROR",MOBILE_LANDSCAPE_ERROR:"MOBILE_LANDSCAPE_ERROR",MULTIPLE_FACES_ERROR:"MULTIPLE_FACES_ERROR"};export{R as LivenessErrorState};
+ /**
+  * The liveness error states
+  */
+ const LivenessErrorState = {
+     TIMEOUT: 'TIMEOUT',
+     RUNTIME_ERROR: 'RUNTIME_ERROR',
+     FRESHNESS_TIMEOUT: 'FRESHNESS_TIMEOUT',
+     SERVER_ERROR: 'SERVER_ERROR',
+     CAMERA_FRAMERATE_ERROR: 'CAMERA_FRAMERATE_ERROR',
+     CAMERA_ACCESS_ERROR: 'CAMERA_ACCESS_ERROR',
+     FACE_DISTANCE_ERROR: 'FACE_DISTANCE_ERROR',
+     MOBILE_LANDSCAPE_ERROR: 'MOBILE_LANDSCAPE_ERROR',
+     MULTIPLE_FACES_ERROR: 'MULTIPLE_FACES_ERROR',
+ };
+
+ export { LivenessErrorState };
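For consumers, these states typically surface through the detector's `onError` callback. A minimal sketch of branching on them follows; the `LivenessError` shape (`state` plus the underlying `error`) and the root re-export of `LivenessErrorState` are assumptions, since this diff only shows the module itself.

```ts
import { LivenessErrorState } from '@aws-amplify/ui-react-liveness'; // assumed re-export

// Assumed shape of the object passed to onError; not taken from this diff.
interface LivenessError {
  state: string;
  error: Error;
}

function describeLivenessError({ state, error }: LivenessError): string {
  switch (state) {
    case LivenessErrorState.TIMEOUT:
      return 'The check timed out before your face fit the oval.';
    case LivenessErrorState.CAMERA_ACCESS_ERROR:
      return 'Camera permission was denied or no camera is available.';
    case LivenessErrorState.MOBILE_LANDSCAPE_ERROR:
      return 'Rotate your device to portrait and try again.';
    default:
      return `Liveness check failed: ${error.message}`;
  }
}
```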
package/dist/esm/components/FaceLivenessDetector/service/types/faceDetection.mjs
@@ -1 +1,15 @@
- class o{triggerModelLoading(){this.modelLoadingPromise=this.loadModels()}}export{o as FaceDetection};
+ /**
+  * The abstract class representing FaceDetection
+  * to be implemented for different libraries.
+  */
+ class FaceDetection {
+     /**
+      * Triggers the `loadModels` method and stores
+      * the corresponding promise to be awaited later.
+      */
+     triggerModelLoading() {
+         this.modelLoadingPromise = this.loadModels();
+     }
+ }
+
+ export { FaceDetection };
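The class above is the extension point the detector uses for pluggable face detectors. The emitted .mjs is untyped, so the sketch below re-declares the base-class shape locally for illustration; the `Face` fields mirror what `BlazeFaceFaceDetection.detectFaces` returns later in this diff.

```ts
// Local re-declaration of the base class for the sketch; the real one lives in
// service/types/faceDetection.mjs and is not a documented public export.
abstract class FaceDetection {
  modelLoadingPromise?: Promise<void>;
  abstract loadModels(): Promise<void>;
  triggerModelLoading(): void {
    this.modelLoadingPromise = this.loadModels();
  }
}

interface Face {
  top: number;
  left: number;
  width: number;
  height: number;
  timestampMs: number;
  probability: number;
}

class StubFaceDetection extends FaceDetection {
  async loadModels(): Promise<void> {
    // a real implementation would download model weights here
  }
  async detectFaces(_video: HTMLVideoElement): Promise<Face[]> {
    return []; // a real implementation would run per-frame inference
  }
}

const detection = new StubFaceDetection();
detection.triggerModelLoading();     // kicks off loadModels() and stores its promise
await detection.modelLoadingPromise; // await it before calling detectFaces()
```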
package/dist/esm/components/FaceLivenessDetector/service/types/liveness.mjs
@@ -1 +1,23 @@
- var O,T;!function(O){O.DARK="dark",O.BRIGHT="bright",O.NORMAL="normal"}(O||(O={})),function(O){O.MATCHED="MATCHED",O.TOO_FAR="TOO FAR",O.TOO_CLOSE="TOO CLOSE",O.CANT_IDENTIFY="CANNOT IDENTIFY",O.FACE_IDENTIFIED="ONE FACE IDENTIFIED",O.TOO_MANY="TOO MANY FACES"}(T||(T={}));export{T as FaceMatchState,O as IlluminationState};
+ /**
+  * The illumination states
+  */
+ var IlluminationState;
+ (function (IlluminationState) {
+     IlluminationState["DARK"] = "dark";
+     IlluminationState["BRIGHT"] = "bright";
+     IlluminationState["NORMAL"] = "normal";
+ })(IlluminationState || (IlluminationState = {}));
+ /**
+  * The detected face states with respect to the liveness oval
+  */
+ var FaceMatchState;
+ (function (FaceMatchState) {
+     FaceMatchState["MATCHED"] = "MATCHED";
+     FaceMatchState["TOO_FAR"] = "TOO FAR";
+     FaceMatchState["TOO_CLOSE"] = "TOO CLOSE";
+     FaceMatchState["CANT_IDENTIFY"] = "CANNOT IDENTIFY";
+     FaceMatchState["FACE_IDENTIFIED"] = "ONE FACE IDENTIFIED";
+     FaceMatchState["TOO_MANY"] = "TOO MANY FACES";
+ })(FaceMatchState || (FaceMatchState = {}));
+
+ export { FaceMatchState, IlluminationState };
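These string values are what the hint UI keys off. A small illustrative mapping (the hint copy here is made up, not the package's `displayText` defaults):

```ts
// Keys are the FaceMatchState string values defined above.
const faceMatchHints: Record<string, string> = {
  'MATCHED': 'Hold still',
  'TOO FAR': 'Move closer',
  'TOO CLOSE': 'Move back',
  'CANNOT IDENTIFY': 'Center your face in the camera view',
  'ONE FACE IDENTIFIED': 'Move your face into the oval',
  'TOO MANY FACES': 'Make sure only one face is visible',
};
```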
package/dist/esm/components/FaceLivenessDetector/service/utils/CustomWebSocketFetchHandler.mjs
@@ -1 +1,200 @@
- import{__awaiter as t,__asyncGenerator as e,__await as o,__asyncValues as s}from"tslib";import{formatUrl as r}from"@aws-sdk/util-format-url";import{readableStreamtoIterable as n,iterableToReadableStream as i}from"@smithy/eventstream-serde-browser";import{FetchHttpHandler as c}from"@smithy/fetch-http-handler";import{HttpResponse as a}from"@smithy/protocol-http";import{WS_CLOSURE_CODE as l}from"./constants.mjs";const d=2e3,h=t=>{return t[Symbol.asyncIterator]?t:(s=t,"function"==typeof ReadableStream&&s instanceof ReadableStream?n(t):{[Symbol.asyncIterator]:function(){return e(this,arguments,(function*(){yield yield o(t)}))}});var s};class f{constructor(t,e=new c){this.metadata={handlerProtocol:"websocket/h1.1"},this.sockets={},this.utf8decoder=new TextDecoder,this.httpHandler=e,this.configPromise="function"==typeof t?t().then((t=>null!=t?t:{})):Promise.resolve(null!=t?t:{})}destroy(){for(const[t,e]of Object.entries(this.sockets)){for(const t of e)t.close(1e3,"Socket closed through destroy() call");delete this.sockets[t]}}handle(e){return t(this,void 0,void 0,(function*(){if(!(t=>"ws:"===t.protocol||"wss:"===t.protocol)(e))return this.httpHandler.handle(e);const t=r(e),o=new WebSocket(t);this.sockets[t]||(this.sockets[t]=[]),this.sockets[t].push(o),o.binaryType="arraybuffer";const{connectionTimeout:s=d}=yield this.configPromise;yield this.waitForReady(o,s);const{body:n}=e,c=h(n),l=(t=>"function"==typeof ReadableStream?i(t):t)(this.connect(o,c));return{response:new a({statusCode:200,body:l})}}))}removeNotUsableSockets(t){var e;this.sockets[t]=(null!==(e=this.sockets[t])&&void 0!==e?e:[]).filter((t=>![WebSocket.CLOSING,WebSocket.CLOSED].includes(t.readyState)))}waitForReady(t,e){return new Promise(((o,s)=>{const r=setTimeout((()=>{this.removeNotUsableSockets(t.url),s({$metadata:{httpStatusCode:500}})}),e);t.onopen=()=>{clearTimeout(r),o()}}))}connect(e,o){let r,n=!1,i=()=>{},c=()=>{};e.onmessage=t=>{c({done:!1,value:new Uint8Array(t.data)})},e.onerror=t=>{n=!0,e.close(),i(t)},e.onclose=()=>{this.removeNotUsableSockets(e.url),n||(r?i(r):c({done:!0,value:void 0}))};const a={[Symbol.asyncIterator]:()=>({next:()=>new Promise(((t,e)=>{c=t,i=e}))})};return(()=>{t(this,void 0,void 0,(function*(){var t,n,i,c;try{try{for(var a,d=!0,h=s(o);!(t=(a=yield h.next()).done);){c=a.value,d=!1;try{const t=c,o=this.utf8decoder.decode(t);if(o.includes("closeCode")){const t=o.match(/"closeCode":([0-9]*)/);if(t){const o=t[1];e.close(parseInt(o))}continue}e.send(t)}finally{d=!0}}}catch(t){n={error:t}}finally{try{d||t||!(i=h.return)||(yield i.call(h))}finally{if(n)throw n.error}}}catch(t){r=t}finally{e.close(l.SUCCESS_CODE)}}))})(),a}}export{f as CustomWebSocketFetchHandler};
+ import { formatUrl } from '@aws-sdk/util-format-url';
+ import { readableStreamtoIterable, iterableToReadableStream } from '@smithy/eventstream-serde-browser';
+ import { FetchHttpHandler } from '@smithy/fetch-http-handler';
+ import { HttpResponse } from '@smithy/protocol-http';
+ import { WS_CLOSURE_CODE } from './constants.mjs';
+
+ /**
+  * Note: This file was copied from https://github.com/aws/aws-sdk-js-v3/blob/main/packages/middleware-websocket/src/websocket-fetch-handler.ts#L176
+  * Because of this the file is not fully typed at this time but we should eventually work on fully typing this file.
+  */
+ const DEFAULT_WS_CONNECTION_TIMEOUT_MS = 2000;
+ const isWebSocketRequest = (request) => request.protocol === 'ws:' || request.protocol === 'wss:';
+ const isReadableStream = (payload) => typeof ReadableStream === 'function' && payload instanceof ReadableStream;
+ /**
+  * Transfer payload data to an AsyncIterable.
+  * When the ReadableStream API is available in the runtime(e.g. browser), and
+  * the request body is ReadableStream, so we need to transfer it to AsyncIterable
+  * to make the stream consumable by WebSocket.
+  */
+ const getIterator = (stream) => {
+     // Noop if stream is already an async iterable
+     if (stream[Symbol.asyncIterator]) {
+         return stream;
+     }
+     if (isReadableStream(stream)) {
+         //If stream is a ReadableStream, transfer the ReadableStream to async iterable.
+         return readableStreamtoIterable(stream);
+     }
+     //For other types, just wrap them with an async iterable.
+     return {
+         [Symbol.asyncIterator]: async function* () {
+             yield stream;
+         },
+     };
+ };
+ /**
+  * Convert async iterable to a ReadableStream when ReadableStream API
+  * is available(browsers). Otherwise, leave as it is(ReactNative).
+  */
+ const toReadableStream = (asyncIterable) => typeof ReadableStream === 'function'
+     ? iterableToReadableStream(asyncIterable)
+     : asyncIterable;
+ /**
+  * Base handler for websocket requests and HTTP request. By default, the request input and output
+  * body will be in a ReadableStream, because of interface consistency among middleware.
+  * If ReadableStream is not available, like in React-Native, the response body
+  * will be an async iterable.
+  */
+ class CustomWebSocketFetchHandler {
+     constructor(options, httpHandler = new FetchHttpHandler()) {
+         this.metadata = {
+             handlerProtocol: 'websocket/h1.1',
+         };
+         this.sockets = {};
+         this.utf8decoder = new TextDecoder(); // default 'utf-8' or 'utf8'
+         this.httpHandler = httpHandler;
+         if (typeof options === 'function') {
+             this.configPromise = options().then((opts) => opts ?? {});
+         }
+         else {
+             this.configPromise = Promise.resolve(options ?? {});
+         }
+     }
+     /**
+      * Destroys the WebSocketHandler.
+      * Closes all sockets from the socket pool.
+      */
+     destroy() {
+         for (const [key, sockets] of Object.entries(this.sockets)) {
+             for (const socket of sockets) {
+                 socket.close(1000, `Socket closed through destroy() call`);
+             }
+             delete this.sockets[key];
+         }
+     }
+     async handle(request) {
+         if (!isWebSocketRequest(request)) {
+             return this.httpHandler.handle(request);
+         }
+         const url = formatUrl(request);
+         const socket = new WebSocket(url);
+         // Add socket to sockets pool
+         if (!this.sockets[url]) {
+             this.sockets[url] = [];
+         }
+         this.sockets[url].push(socket);
+         socket.binaryType = 'arraybuffer';
+         const { connectionTimeout = DEFAULT_WS_CONNECTION_TIMEOUT_MS } = await this
+             .configPromise;
+         await this.waitForReady(socket, connectionTimeout);
+         const { body } = request;
+         const bodyStream = getIterator(body);
+         const asyncIterable = this.connect(socket, bodyStream);
+         const outputPayload = toReadableStream(asyncIterable);
+         return {
+             response: new HttpResponse({
+                 statusCode: 200,
+                 body: outputPayload,
+             }),
+         };
+     }
+     /**
+      * Removes all closing/closed sockets from the socket pool for URL.
+      */
+     removeNotUsableSockets(url) {
+         this.sockets[url] = (this.sockets[url] ?? []).filter((socket) => ![WebSocket.CLOSING, WebSocket.CLOSED].includes(socket.readyState));
+     }
+     waitForReady(socket, connectionTimeout) {
+         return new Promise((resolve, reject) => {
+             const timeout = setTimeout(() => {
+                 this.removeNotUsableSockets(socket.url);
+                 reject({
+                     $metadata: {
+                         httpStatusCode: 500,
+                     },
+                 });
+             }, connectionTimeout);
+             socket.onopen = () => {
+                 clearTimeout(timeout);
+                 resolve();
+             };
+         });
+     }
+     connect(socket, data) {
+         // To notify output stream any error thrown after response
+         // is returned while data keeps streaming.
+         let streamError = undefined;
+         // To notify onclose event that error has occurred.
+         let socketErrorOccurred = false;
+         // initialize as no-op.
+         let reject = () => { };
+         let resolve = () => { };
+         socket.onmessage = (event) => {
+             resolve({
+                 done: false,
+                 value: new Uint8Array(event.data),
+             });
+         };
+         socket.onerror = (error) => {
+             socketErrorOccurred = true;
+             socket.close();
+             reject(error);
+         };
+         socket.onclose = () => {
+             this.removeNotUsableSockets(socket.url);
+             if (socketErrorOccurred)
+                 return;
+             if (streamError) {
+                 reject(streamError);
+             }
+             else {
+                 resolve({
+                     done: true,
+                     value: undefined, // unchecked because done=true.
+                 });
+             }
+         };
+         const outputStream = {
+             [Symbol.asyncIterator]: () => ({
+                 next: () => {
+                     return new Promise((_resolve, _reject) => {
+                         resolve = _resolve;
+                         reject = _reject;
+                     });
+                 },
+             }),
+         };
+         const send = async () => {
+             try {
+                 for await (const inputChunk of data) {
+                     const decodedString = this.utf8decoder.decode(inputChunk);
+                     if (decodedString.includes('closeCode')) {
+                         const match = decodedString.match(/"closeCode":([0-9]*)/);
+                         if (match) {
+                             const closeCode = match[1];
+                             socket.close(parseInt(closeCode));
+                         }
+                         continue;
+                     }
+                     socket.send(inputChunk);
+                 }
+             }
+             catch (err) {
+                 // We don't throw the error here because the send()'s returned
+                 // would already be settled by the time sending chunk throws error.
+                 // Instead, the notify the output stream to throw if there's
+                 // exceptions
+                 streamError = err;
+             }
+             finally {
+                 // WS status code: https://tools.ietf.org/html/rfc6455#section-7.4
+                 socket.close(WS_CLOSURE_CODE.SUCCESS_CODE);
+             }
+         };
+         send();
+         return outputStream;
+     }
+ }
+
+ export { CustomWebSocketFetchHandler };
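Construction is the only knob this handler exposes: a config object or an async config provider, with `connectionTimeout` (default 2000 ms via `DEFAULT_WS_CONNECTION_TIMEOUT_MS`) governing how long `waitForReady` waits for the socket to open. A sketch, assuming `CustomWebSocketFetchHandler` is in scope; it is an internal module of the dist output, not a documented entry point.

```ts
// Lazy config: resolved once, the first time a request is handled.
const handler = new CustomWebSocketFetchHandler(async () => ({
  connectionTimeout: 5000, // illustrative override of the 2000 ms default
}));

// ws:/wss: requests are upgraded to a WebSocket; anything else falls through
// to the wrapped FetchHttpHandler:
// const { response } = await handler.handle(request);

// destroy() closes every socket still tracked in the per-URL pool.
handler.destroy();
```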
package/dist/esm/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.mjs
@@ -1 +1,102 @@
- import{__awaiter as e}from"tslib";import*as t from"@tensorflow/tfjs-core";import*as o from"@tensorflow-models/blazeface";import*as s from"@tensorflow/tfjs-backend-wasm";import"@tensorflow/tfjs-backend-cpu";import{jitteredExponentialRetry as a}from"@aws-amplify/core";import{isWebAssemblySupported as i}from"./support.mjs";import{FaceDetection as r}from"../types/faceDetection.mjs";import"../types/liveness.mjs";const n="0.0.7",l=`https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow-models/blazeface/${n}/model/model.json`,d=`https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow/tfjs-backend-wasm/${s.version_wasm}/`;class m extends r{constructor(e,t){super(),this.faceModelUrl=null!=t?t:l,this.binaryPath=null!=e?e:d}loadModels(){return e(this,void 0,void 0,(function*(){i()?yield this._loadWebAssemblyBackend():yield this._loadCPUBackend();try{yield t.ready(),this._model=yield a(o.load,[{modelUrl:this.faceModelUrl}])}catch(e){throw new Error("There was an error loading the blazeface model. If you are using a custom blazeface model url ensure that it is a fully qualified url that returns a json file.")}}))}detectFaces(t){return e(this,void 0,void 0,(function*(){const e=yield this._model.estimateFaces(t,!1,!0,!0),o=Date.now();return e.filter((e=>!!e.landmarks)).map((e=>{const{topLeft:t,bottomRight:s,probability:a,landmarks:i}=e,[r,n]=t,[l,d]=s,m=Math.abs(r-l),c=Math.abs(d-n),f=i[0],h=i[1],u=i[2],w=i[3];return{top:n,left:l,width:m,height:c,timestampMs:o,probability:a[0],rightEye:f,leftEye:h,mouth:w,nose:u}}))}))}_loadWebAssemblyBackend(){return e(this,void 0,void 0,(function*(){try{s.setWasmPaths(this.binaryPath),yield a((()=>e(this,void 0,void 0,(function*(){if(!(yield t.setBackend("wasm")))throw new Error("Initialization of backend wasm failed")}))),[]),this.modelBackend="wasm"}catch(e){throw new Error('There was an error loading the TFJS WASM backend. If you are using a custom WASM path ensure that it ends with "/" and that it is not the full URL as @tensorflow/tfjs-backend-wasm will append the wasm binary file name. Read more: https://github.com/tensorflow/tfjs/blob/master/tfjs-backend-wasm/src/backend_wasm.ts#L475.')}}))}_loadCPUBackend(){return e(this,void 0,void 0,(function*(){yield t.setBackend("cpu"),this.modelBackend="cpu"}))}}export{n as BLAZEFACE_VERSION,m as BlazeFaceFaceDetection,l as DEFAULT_BLAZEFACE_URL,d as DEFAULT_TFJS_WASM_URL};
+ import { ready, setBackend } from '@tensorflow/tfjs-core';
+ import { createDetector, SupportedModels } from '@tensorflow-models/face-detection';
+ import { setWasmPaths, version_wasm } from '@tensorflow/tfjs-backend-wasm';
+ import '@tensorflow/tfjs-backend-cpu';
+ import { jitteredExponentialRetry } from '@aws-amplify/core/internals/utils';
+ import { isWebAssemblySupported } from './support.mjs';
+ import { FaceDetection } from '../types/faceDetection.mjs';
+ import '../types/liveness.mjs';
+
+ const BLAZEFACE_VERSION = '1.0.2';
+ /**
+  * WARNING: When updating these links,
+  * also make sure to update documentation and the link in the canary/e2e test "canary/e2e/features/liveness/face-detect.feature"
+  */
+ const DEFAULT_BLAZEFACE_URL = `https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow-models/blazeface/${BLAZEFACE_VERSION}/model/model.json`;
+ const DEFAULT_TFJS_WASM_URL = `https://cdn.liveness.rekognition.amazonaws.com/face-detection/tensorflow/tfjs-backend-wasm/${version_wasm}/`;
+ /**
+  * The BlazeFace implementation of the FaceDetection interface.
+  */
+ class BlazeFaceFaceDetection extends FaceDetection {
+     constructor(binaryPath, faceModelUrl) {
+         super();
+         this.faceModelUrl = faceModelUrl ?? DEFAULT_BLAZEFACE_URL;
+         this.binaryPath = binaryPath ?? DEFAULT_TFJS_WASM_URL;
+     }
+     async loadModels() {
+         if (isWebAssemblySupported()) {
+             await this._loadWebAssemblyBackend();
+         }
+         else {
+             await this._loadCPUBackend();
+         }
+         try {
+             await ready();
+             this._model = await createDetector(SupportedModels.MediaPipeFaceDetector, {
+                 runtime: 'tfjs',
+                 detectorModelUrl: this.faceModelUrl,
+             });
+         }
+         catch (e) {
+             throw new Error('There was an error loading the blazeface model. If you are using a custom blazeface model url ensure that it is a fully qualified url that returns a json file.');
+         }
+     }
+     async detectFaces(videoEl) {
+         const flipHorizontal = true;
+         const predictions = await this._model.estimateFaces(videoEl, {
+             flipHorizontal,
+         });
+         const timestampMs = Date.now();
+         const faces = predictions.map((prediction) => {
+             const { box, keypoints } = prediction;
+             const { xMin: left, yMin: top, width, height } = box;
+             const rightEye = this._getCoordinate(keypoints, 'rightEye');
+             const leftEye = this._getCoordinate(keypoints, 'leftEye');
+             const nose = this._getCoordinate(keypoints, 'noseTip');
+             const mouth = this._getCoordinate(keypoints, 'mouthCenter');
+             const rightEar = this._getCoordinate(keypoints, 'rightEarTragion');
+             const leftEar = this._getCoordinate(keypoints, 'leftEarTragion');
+             const probability = [90];
+             return {
+                 top,
+                 left,
+                 width,
+                 height,
+                 timestampMs,
+                 probability: probability[0],
+                 rightEye,
+                 leftEye,
+                 mouth,
+                 nose,
+                 rightEar,
+                 leftEar,
+             };
+         });
+         return faces;
+     }
+     _getCoordinate(keypoints, name) {
+         const keypoint = keypoints.find((k) => k.name === name);
+         return [keypoint.x, keypoint.y];
+     }
+     async _loadWebAssemblyBackend() {
+         try {
+             setWasmPaths(this.binaryPath);
+             await jitteredExponentialRetry(async () => {
+                 const success = await setBackend('wasm');
+                 if (!success) {
+                     throw new Error(`Initialization of backend wasm failed`);
+                 }
+             }, []);
+             this.modelBackend = 'wasm';
+         }
+         catch (e) {
+             throw new Error('There was an error loading the TFJS WASM backend. If you are using a custom WASM path ensure that it ends with "/" and that it is not the full URL as @tensorflow/tfjs-backend-wasm will append the wasm binary file name. Read more: https://github.com/tensorflow/tfjs/blob/master/tfjs-backend-wasm/src/backend_wasm.ts#L475.');
+         }
+     }
+     async _loadCPUBackend() {
+         await setBackend('cpu');
+         this.modelBackend = 'cpu';
+     }
+ }
+
+ export { BLAZEFACE_VERSION, BlazeFaceFaceDetection, DEFAULT_BLAZEFACE_URL, DEFAULT_TFJS_WASM_URL };
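The constructor arguments are the hook for self-hosting the model assets instead of pulling them from the Rekognition CDN defaults above. A sketch, assuming the class is in scope (like the handler above, it is internal dist output); the example.com URLs are placeholders. Note the wasm path must end with `/`, and the model URL must resolve to a `model.json`.

```ts
// Placeholder asset locations -- substitute your own hosting.
const binaryPath = 'https://assets.example.com/tfjs-backend-wasm/';
const faceModelUrl = 'https://assets.example.com/blazeface/1.0.2/model/model.json';

const videoElement = document.querySelector('video')!;

const detector = new BlazeFaceFaceDetection(binaryPath, faceModelUrl);
await detector.loadModels();                             // wasm backend when supported, else cpu
const faces = await detector.detectFaces(videoElement);  // one Face entry per detected face
```

In normal usage the same two values are passed through the component-level `config` prop rather than by constructing the detector directly.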
package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs
@@ -1 +1,18 @@
- const E=.32,R=.4,_=.37,C={SUCCESS_CODE:1e3,DEFAULT_ERROR_CODE:4e3,FACE_FIT_TIMEOUT:4001,USER_CANCEL:4003,RUNTIME_ERROR:4005,USER_ERROR_DURING_CONNECTION:4007};export{E as FACE_DISTANCE_THRESHOLD,R as REDUCED_THRESHOLD,_ as REDUCED_THRESHOLD_MOBILE,C as WS_CLOSURE_CODE};
+ // Face distance is calculated as pupilDistance / ovalWidth.
+ // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
+ // These FACE_DISTNACE_THRESHOLD values are determined by the science team and should only be changed with their approval.
+ // We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
+ // a certain distance away from the camera.
+ const FACE_DISTANCE_THRESHOLD = 0.32;
+ const REDUCED_THRESHOLD = 0.4;
+ const REDUCED_THRESHOLD_MOBILE = 0.37;
+ const WS_CLOSURE_CODE = {
+     SUCCESS_CODE: 1000,
+     DEFAULT_ERROR_CODE: 4000,
+     FACE_FIT_TIMEOUT: 4001,
+     USER_CANCEL: 4003,
+     RUNTIME_ERROR: 4005,
+     USER_ERROR_DURING_CONNECTION: 4007,
+ };
+
+ export { FACE_DISTANCE_THRESHOLD, REDUCED_THRESHOLD, REDUCED_THRESHOLD_MOBILE, WS_CLOSURE_CODE };
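The comment above describes a simple ratio test. A worked sketch follows; only the 0.32 threshold comes from this file, while the helper name and the Euclidean-distance math are illustrative.

```ts
const FACE_DISTANCE_THRESHOLD = 0.32;

type Point = [number, number];

// True when the face starts far enough from the camera:
// the pupil distance relative to the oval width is below the threshold.
function isFarEnoughToStart(leftEye: Point, rightEye: Point, ovalWidth: number): boolean {
  const pupilDistance = Math.hypot(rightEye[0] - leftEye[0], rightEye[1] - leftEye[1]);
  return pupilDistance / ovalWidth < FACE_DISTANCE_THRESHOLD;
}

// e.g. pupils 60 px apart inside a 240 px-wide oval: 60 / 240 = 0.25 < 0.32, so far enough.
```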
package/dist/esm/components/FaceLivenessDetector/service/utils/eventUtils.mjs
@@ -1 +1,30 @@
- const n=n=>!!(null==n?void 0:n.ServerSessionInformationEvent),e=n=>!!(null==n?void 0:n.DisconnectionEvent),i=n=>!!(null==n?void 0:n.ValidationException),o=n=>!!(null==n?void 0:n.InternalServerException),t=n=>!!(null==n?void 0:n.ThrottlingException),l=n=>!!(null==n?void 0:n.ServiceQuotaExceededException),r=n=>{const{message:e,name:i}=n;return"InvalidSignatureException"===i&&e.includes("valid region")};export{e as isDisconnectionEvent,o as isInternalServerExceptionEvent,r as isInvalidSignatureRegionException,n as isServerSesssionInformationEvent,l as isServiceQuotaExceededExceptionEvent,t as isThrottlingExceptionEvent,i as isValidationExceptionEvent};
+ const isServerSesssionInformationEvent = (value) => {
+     return !!value
+         ?.ServerSessionInformationEvent;
+ };
+ const isDisconnectionEvent = (value) => {
+     return !!value
+         ?.DisconnectionEvent;
+ };
+ const isValidationExceptionEvent = (value) => {
+     return !!value
+         ?.ValidationException;
+ };
+ const isInternalServerExceptionEvent = (value) => {
+     return !!value
+         ?.InternalServerException;
+ };
+ const isThrottlingExceptionEvent = (value) => {
+     return !!value
+         ?.ThrottlingException;
+ };
+ const isServiceQuotaExceededExceptionEvent = (value) => {
+     return !!value
+         ?.ServiceQuotaExceededException;
+ };
+ const isInvalidSignatureRegionException = (error) => {
+     const { message, name } = error;
+     return (name === 'InvalidSignatureException' && message.includes('valid region'));
+ };
+
+ export { isDisconnectionEvent, isInternalServerExceptionEvent, isInvalidSignatureRegionException, isServerSesssionInformationEvent, isServiceQuotaExceededExceptionEvent, isThrottlingExceptionEvent, isValidationExceptionEvent };
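These guards narrow the untyped events coming off the liveness WebSocket stream. A sketch of dispatching on them, assuming the functions are in scope (they are internal helpers, not public exports); the actions in the comments are illustrative.

```ts
function handleStreamEvent(event: unknown): void {
  if (isServerSesssionInformationEvent(event)) {
    // note the triple-s spelling of the exported name
    console.log('server session info received'); // e.g. start the challenge
  } else if (isDisconnectionEvent(event)) {
    console.log('server disconnected');          // e.g. stop sending frames
  } else if (
    isThrottlingExceptionEvent(event) ||
    isServiceQuotaExceededExceptionEvent(event)
  ) {
    console.log('request throttled');            // e.g. surface a retryable error
  }
}
```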
package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs
@@ -1 +1,131 @@
- import{__awaiter as e}from"tslib";import{fillOverlayCanvasFractional as r,getRGBArrayFromColorString as o}from"./liveness.mjs";var t;!function(e){e.SCROLLING="SCROLLING",e.FLAT="FLAT"}(t||(t={}));class s{constructor(e,r){this.context=e,this.freshnessColorsSequence=r,this.isFirstTick=!0}displayColorTick(){return e(this,void 0,void 0,(function*(){return new Promise(((e,r)=>{setTimeout((()=>{this.displayNextColorTick(e,r)}),Math.min(10))}))}))}init(){this.stageIndex=0,this.currColorIndex=0,this.currColorSequence=this.freshnessColorsSequence[0],this.prevColorSequence=this.freshnessColorsSequence[0],this.stage=t.FLAT,this.timeLastFlatOrScrollChange=Date.now(),this.timeLastFaceMatchChecked=Date.now()}displayNextColorTick(e,o){const{freshnessColorEl:s}=this.context.freshnessColorAssociatedParams,{ovalDetails:i,scaleFactor:l}=this.context.ovalAssociatedParams,{videoEl:n}=this.context.videoAssociatedParams,c=Date.now();this.isFirstTick&&(this.init(),this.isFirstTick=!1,this.sendColorStartTime({tickStartTime:c,currColor:this.currColorSequence.color,prevColor:this.currColorSequence.color,currColorIndex:this.stageIndex}));let a=c-this.timeLastFlatOrScrollChange;if(s.style.display="block",(this.stage===t.FLAT&&a>=this.currColorSequence.flatDisplayDuration||this.stage===t.SCROLLING&&a>=this.currColorSequence.downscrollDuration)&&(this.incrementStageIndex(c),a=0),this.currColorIndex<this.freshnessColorsSequence.length){const o=a/(this.stage===t.SCROLLING?this.currColorSequence.downscrollDuration:this.currColorSequence.flatDisplayDuration);r({overlayCanvas:s,prevColor:this.prevColorSequence.color,nextColor:this.currColorSequence.color,videoEl:n,ovalDetails:i,heightFraction:o,scaleFactor:l}),e(!1)}else s.style.display="none",e(!0)}incrementStageIndex(e){if(this.stageIndex+=1,this.prevColorSequence=this.freshnessColorsSequence[this.currColorIndex],this.stage===t.FLAT)this.currColorIndex+=1,this.stage=t.SCROLLING;else if(this.stage===t.SCROLLING){this.freshnessColorsSequence[this.currColorIndex].flatDisplayDuration>0?this.stage=t.FLAT:(this.stage=t.SCROLLING,this.currColorIndex+=1)}this.currColorSequence=this.freshnessColorsSequence[this.currColorIndex],this.timeLastFlatOrScrollChange=Date.now(),this.currColorSequence&&this.sendColorStartTime({tickStartTime:e,currColor:this.currColorSequence.color,prevColor:this.prevColorSequence.color,currColorIndex:this.stageIndex})}sendColorStartTime({tickStartTime:e,currColor:r,prevColor:t,currColorIndex:s}){const{livenessStreamProvider:i,challengeId:l}=this.context;i.sendClientInfo({Challenge:{FaceMovementAndLightChallenge:{ChallengeId:l,ColorDisplayed:{CurrentColor:{RGB:o(r)},PreviousColor:{RGB:o(t)},SequenceNumber:s,CurrentColorStartTimestamp:e}}}})}}export{s as FreshnessColorDisplay};
+ import { fillOverlayCanvasFractional, getRGBArrayFromColorString } from './liveness.mjs';
+
+ const TICK_RATE = 10; // ms -- the rate at which we will render/check colors
+ var COLOR_STAGE;
+ (function (COLOR_STAGE) {
+     COLOR_STAGE["SCROLLING"] = "SCROLLING";
+     COLOR_STAGE["FLAT"] = "FLAT";
+ })(COLOR_STAGE || (COLOR_STAGE = {}));
+ class FreshnessColorDisplay {
+     constructor(context, freshnessColorsSequence) {
+         this.context = context;
+         this.freshnessColorsSequence = freshnessColorsSequence;
+         this.isFirstTick = true;
+     }
+     async displayColorTick() {
+         return new Promise((resolve, reject) => {
+             setTimeout(() => {
+                 this.displayNextColorTick(resolve, reject);
+             }, Math.min(TICK_RATE));
+         });
+     }
+     init() {
+         this.stageIndex = 0;
+         this.currColorIndex = 0;
+         this.currColorSequence = this.freshnessColorsSequence[0];
+         this.prevColorSequence = this.freshnessColorsSequence[0];
+         this.stage = COLOR_STAGE.FLAT;
+         this.timeLastFlatOrScrollChange = Date.now();
+         this.timeLastFaceMatchChecked = Date.now();
+     }
+     displayNextColorTick(resolve, _) {
+         const { freshnessColorEl } = this.context.freshnessColorAssociatedParams;
+         const { ovalDetails, scaleFactor } = this.context.ovalAssociatedParams;
+         const { videoEl } = this.context.videoAssociatedParams;
+         const tickStartTime = Date.now();
+         // Send a colorStart time only for the first tick of the first color
+         if (this.isFirstTick) {
+             this.init();
+             this.isFirstTick = false;
+             this.sendColorStartTime({
+                 tickStartTime: tickStartTime,
+                 currColor: this.currColorSequence.color,
+                 prevColor: this.currColorSequence.color,
+                 currColorIndex: this.stageIndex,
+             });
+         }
+         let timeSinceLastColorChange = tickStartTime - this.timeLastFlatOrScrollChange;
+         freshnessColorEl.style.display = 'block';
+         // Every 10 ms tick we will check if the threshold for flat or scrolling, if so we will try to go to the next stage
+         if ((this.stage === COLOR_STAGE.FLAT &&
+             timeSinceLastColorChange >=
+                 this.currColorSequence.flatDisplayDuration) ||
+             (this.stage === COLOR_STAGE.SCROLLING &&
+                 timeSinceLastColorChange >= this.currColorSequence.downscrollDuration)) {
+             this.incrementStageIndex(tickStartTime);
+             timeSinceLastColorChange = 0;
+         }
+         // Every 10 ms tick we will update the colors displayed
+         if (this.currColorIndex < this.freshnessColorsSequence.length) {
+             const heightFraction = timeSinceLastColorChange /
+                 (this.stage === COLOR_STAGE.SCROLLING
+                     ? this.currColorSequence.downscrollDuration
+                     : this.currColorSequence.flatDisplayDuration);
+             fillOverlayCanvasFractional({
+                 overlayCanvas: freshnessColorEl,
+                 prevColor: this.prevColorSequence.color,
+                 nextColor: this.currColorSequence.color,
+                 videoEl: videoEl,
+                 ovalDetails: ovalDetails,
+                 heightFraction,
+                 scaleFactor: scaleFactor,
+             });
+             resolve(false);
+         }
+         else {
+             freshnessColorEl.style.display = 'none';
+             resolve(true);
+         }
+     }
+     // FLAT - prev = 0, curr = 0
+     // SCROLL - prev = 0, curr = 1
+     // FLAT - prev = 1, curr = 1
+     // SCROLL - prev = 1, curr = 2
+     // SCROLL - prev = 2, curr = 3
+     incrementStageIndex(tickStartTime) {
+         this.stageIndex += 1;
+         this.prevColorSequence = this.freshnessColorsSequence[this.currColorIndex];
+         if (this.stage === COLOR_STAGE.FLAT) {
+             this.currColorIndex += 1;
+             this.stage = COLOR_STAGE.SCROLLING;
+         }
+         else if (this.stage === COLOR_STAGE.SCROLLING) {
+             const nextFlatColor = this.freshnessColorsSequence[this.currColorIndex];
+             if (nextFlatColor.flatDisplayDuration > 0) {
+                 this.stage = COLOR_STAGE.FLAT;
+             }
+             else {
+                 this.stage = COLOR_STAGE.SCROLLING;
+                 this.currColorIndex += 1;
+             }
+         }
+         this.currColorSequence = this.freshnessColorsSequence[this.currColorIndex];
+         this.timeLastFlatOrScrollChange = Date.now();
+         if (this.currColorSequence) {
+             this.sendColorStartTime({
+                 tickStartTime: tickStartTime,
+                 currColor: this.currColorSequence.color,
+                 prevColor: this.prevColorSequence.color,
+                 currColorIndex: this.stageIndex,
+             });
+         }
+     }
+     sendColorStartTime({ tickStartTime, currColor, prevColor, currColorIndex, }) {
+         const { livenessStreamProvider, challengeId } = this.context;
+         livenessStreamProvider.sendClientInfo({
+             Challenge: {
+                 FaceMovementAndLightChallenge: {
+                     ChallengeId: challengeId,
+                     ColorDisplayed: {
+                         CurrentColor: { RGB: getRGBArrayFromColorString(currColor) },
+                         PreviousColor: { RGB: getRGBArrayFromColorString(prevColor) },
+                         SequenceNumber: currColorIndex,
+                         CurrentColorStartTimestamp: tickStartTime,
+                     },
+                 },
+             },
+         });
+     }
+ }
+
+ export { FreshnessColorDisplay };
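`displayColorTick()` resolves `false` on every ~10 ms tick while colors are still being painted and `true` once the whole sequence has finished, so a caller drives it in a loop. A minimal sketch, with construction of the instance (the machine context plus the server-provided color sequence) left out:

```ts
async function runFreshnessSequence(display: {
  displayColorTick(): Promise<boolean>;
}): Promise<void> {
  let sequenceComplete = false;
  while (!sequenceComplete) {
    // each call schedules one tick, paints the overlay canvas,
    // and reports whether the full color sequence has finished
    sequenceComplete = await display.displayColorTick();
  }
}
```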