@aws-amplify/ui-react-liveness 3.3.8 → 3.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -15
- package/dist/esm/components/FaceLivenessDetector/FaceLivenessDetectorCore.mjs +4 -2
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/CameraSelector.mjs +13 -0
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.mjs +50 -28
- package/dist/esm/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.mjs +5 -4
- package/dist/esm/components/FaceLivenessDetector/service/machine/machine.mjs +247 -314
- package/dist/esm/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.mjs +140 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.mjs +171 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.mjs +27 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs +30 -7
- package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.mjs +32 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.mjs +148 -0
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs +2 -3
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs +36 -6
- package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.mjs +7 -6
- package/dist/esm/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.mjs +9 -5
- package/dist/esm/components/FaceLivenessDetector/service/utils/liveness.mjs +19 -34
- package/dist/esm/components/FaceLivenessDetector/service/utils/{eventUtils.mjs → responseStreamEvent.mjs} +2 -2
- package/dist/esm/components/FaceLivenessDetector/service/utils/sessionInformation.mjs +45 -0
- package/dist/esm/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.mjs +3 -2
- package/dist/esm/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.mjs +4 -2
- package/dist/esm/components/FaceLivenessDetector/shared/Hint.mjs +4 -7
- package/dist/esm/components/FaceLivenessDetector/types/classNames.mjs +3 -0
- package/dist/esm/components/FaceLivenessDetector/utils/device.mjs +12 -12
- package/dist/esm/index.mjs +12 -0
- package/dist/esm/version.mjs +1 -1
- package/dist/index.js +956 -775
- package/dist/styles.css +17 -2
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetector.d.ts +3 -3
- package/dist/types/components/FaceLivenessDetector/FaceLivenessDetectorCore.d.ts +3 -3
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/CameraSelector.d.ts +8 -0
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCameraModule.d.ts +3 -2
- package/dist/types/components/FaceLivenessDetector/LivenessCheck/LivenessCheck.d.ts +2 -2
- package/dist/types/components/FaceLivenessDetector/displayText.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/hooks/useLivenessSelector.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/index.d.ts +6 -3
- package/dist/types/components/FaceLivenessDetector/providers/FaceLivenessDetectorProvider.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/machine/machine.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/credentials.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/types/liveness.d.ts +2 -2
- package/dist/types/components/FaceLivenessDetector/service/types/machine.d.ts +40 -27
- package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.d.ts +55 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.d.ts +15 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/StreamRecorder/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.d.ts +8 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/TelemetryReporter/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/blazefaceFaceDetection.d.ts +2 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/constants.d.ts +27 -3
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.d.ts +15 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/index.d.ts +2 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.d.ts +30 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/CustomWebSocketFetchHandler.d.ts +3 -2
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.d.ts +1 -2
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.d.ts +28 -6
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/index.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/resolveCredentials.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/createStreamingClient/types.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/service/utils/getFaceMatchStateInLivenessOval.d.ts +4 -4
- package/dist/types/components/FaceLivenessDetector/service/utils/index.d.ts +7 -4
- package/dist/types/components/FaceLivenessDetector/service/utils/liveness.d.ts +16 -26
- package/dist/types/components/FaceLivenessDetector/service/utils/{eventUtils.d.ts → responseStreamEvent.d.ts} +2 -2
- package/dist/types/components/FaceLivenessDetector/service/utils/sessionInformation.d.ts +7 -0
- package/dist/types/components/FaceLivenessDetector/service/utils/types.d.ts +21 -0
- package/dist/types/components/FaceLivenessDetector/shared/DefaultStartScreenComponents.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/shared/FaceLivenessErrorModal.d.ts +2 -2
- package/dist/types/components/FaceLivenessDetector/shared/Hint.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/shared/Overlay.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/shared/Toast.d.ts +1 -1
- package/dist/types/components/FaceLivenessDetector/types/classNames.d.ts +3 -0
- package/dist/types/components/FaceLivenessDetector/utils/device.d.ts +1 -0
- package/dist/types/components/FaceLivenessDetector/utils/getDisplayText.d.ts +1 -1
- package/dist/types/index.d.ts +2 -1
- package/dist/types/version.d.ts +1 -1
- package/package.json +10 -10
- package/dist/esm/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.mjs +0 -131
- package/dist/esm/components/FaceLivenessDetector/service/utils/streamProvider.mjs +0 -126
- package/dist/esm/components/FaceLivenessDetector/service/utils/videoRecorder.mjs +0 -108
- package/dist/types/components/FaceLivenessDetector/service/types/service.d.ts +0 -5
- package/dist/types/components/FaceLivenessDetector/service/utils/freshnessColorDisplay.d.ts +0 -21
- package/dist/types/components/FaceLivenessDetector/service/utils/streamProvider.d.ts +0 -42
- package/dist/types/components/FaceLivenessDetector/service/utils/videoRecorder.d.ts +0 -27
package/dist/esm/components/FaceLivenessDetector/service/utils/ColorSequenceDisplay/ColorSequenceDisplay.mjs
ADDED

```diff
@@ -0,0 +1,140 @@
+import { __classPrivateFieldSet, __classPrivateFieldGet } from 'tslib';
+import { isFunction } from '@aws-amplify/ui';
+import { TICK_RATE } from '../constants.mjs';
+
+var _ColorSequenceDisplay_instances, _ColorSequenceDisplay_sequence, _ColorSequenceDisplay_previousSequence, _ColorSequenceDisplay_colorStage, _ColorSequenceDisplay_sequenceIndex, _ColorSequenceDisplay_colorSequences, _ColorSequenceDisplay_isFirstTick, _ColorSequenceDisplay_lastColorStageChangeTimestamp, _ColorSequenceDisplay_isFlatStage, _ColorSequenceDisplay_isScrollingStage, _ColorSequenceDisplay_startColorSequence, _ColorSequenceDisplay_handleSequenceChange;
+var ColorStageType;
+(function (ColorStageType) {
+    ColorStageType[ColorStageType["Scrolling"] = 0] = "Scrolling";
+    ColorStageType[ColorStageType["Flat"] = 1] = "Flat";
+})(ColorStageType || (ColorStageType = {}));
+class ColorSequenceDisplay {
+    /**
+     * Iterates over provided color sequences and executes sequence event callbacks
+     *
+     * @param {ColorSequences} colorSequences array of color sequences to iterate over
+     */
+    constructor(colorSequences) {
+        _ColorSequenceDisplay_instances.add(this);
+        /**
+         * the current color sequence used for flat display and the prev color when scrolling
+         */
+        _ColorSequenceDisplay_sequence.set(this, void 0);
+        /**
+         * previous color sequence, during flat display curr === prev and during scroll it is the prev indexed color
+         */
+        _ColorSequenceDisplay_previousSequence.set(this, void 0);
+        /**
+         * current ColorStage, initialize to 'FLAT'
+         */
+        _ColorSequenceDisplay_colorStage.set(this, ColorStageType.Flat);
+        /**
+         * current color sequence index (black flat, red scrolling, etc)
+         */
+        _ColorSequenceDisplay_sequenceIndex.set(this, 0);
+        _ColorSequenceDisplay_colorSequences.set(this, void 0);
+        _ColorSequenceDisplay_isFirstTick.set(this, true);
+        _ColorSequenceDisplay_lastColorStageChangeTimestamp.set(this, 0);
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_colorSequences, colorSequences, "f");
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_sequence, colorSequences[0], "f");
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_previousSequence, colorSequences[0], "f");
+    }
+    /**
+     * Start sequence iteration and execute event callbacks
+     *
+     * @async
+     * @param {StartSequencesParams} params Sequence event handlers
+     * @returns {Promise<boolean>} Resolves to true when complete
+     */
+    async startSequences(params) {
+        return new Promise((resolve) => {
+            setTimeout(() => {
+                __classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_startColorSequence).call(this, { ...params, resolve });
+            }, Math.min(TICK_RATE));
+        });
+    }
+}
+_ColorSequenceDisplay_sequence = new WeakMap(), _ColorSequenceDisplay_previousSequence = new WeakMap(), _ColorSequenceDisplay_colorStage = new WeakMap(), _ColorSequenceDisplay_sequenceIndex = new WeakMap(), _ColorSequenceDisplay_colorSequences = new WeakMap(), _ColorSequenceDisplay_isFirstTick = new WeakMap(), _ColorSequenceDisplay_lastColorStageChangeTimestamp = new WeakMap(), _ColorSequenceDisplay_instances = new WeakSet(), _ColorSequenceDisplay_isFlatStage = function _ColorSequenceDisplay_isFlatStage() {
+    return __classPrivateFieldGet(this, _ColorSequenceDisplay_colorStage, "f") === ColorStageType.Flat;
+}, _ColorSequenceDisplay_isScrollingStage = function _ColorSequenceDisplay_isScrollingStage() {
+    return __classPrivateFieldGet(this, _ColorSequenceDisplay_colorStage, "f") === ColorStageType.Scrolling;
+}, _ColorSequenceDisplay_startColorSequence = function _ColorSequenceDisplay_startColorSequence({ onSequenceChange, onSequenceColorChange, onSequenceStart, onSequencesComplete, resolve, }) {
+    if (isFunction(onSequenceStart)) {
+        onSequenceStart();
+    }
+    const sequenceStartTime = Date.now();
+    let timeSinceLastColorStageChange = sequenceStartTime - __classPrivateFieldGet(this, _ColorSequenceDisplay_lastColorStageChangeTimestamp, "f");
+    // Send a colorStart time only for the first tick of the first color
+    if (__classPrivateFieldGet(this, _ColorSequenceDisplay_isFirstTick, "f")) {
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_lastColorStageChangeTimestamp, Date.now(), "f");
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_isFirstTick, false, "f");
+        // initial sequence change
+        if (isFunction(onSequenceChange)) {
+            onSequenceChange({
+                prevSequenceColor: __classPrivateFieldGet(this, _ColorSequenceDisplay_previousSequence, "f").color,
+                sequenceColor: __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").color,
+                sequenceIndex: __classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f"),
+                sequenceStartTime,
+            });
+        }
+    }
+    // Every 10 ms tick we will check if the threshold for flat or scrolling, if so we will try to go to the next stage
+    if ((__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isFlatStage).call(this) &&
+        timeSinceLastColorStageChange >= __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").flatDisplayDuration) ||
+        (__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isScrollingStage).call(this) &&
+            timeSinceLastColorStageChange >= __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").downscrollDuration)) {
+        __classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_handleSequenceChange).call(this, { sequenceStartTime, onSequenceChange });
+        timeSinceLastColorStageChange = 0;
+    }
+    const hasRemainingSequences = __classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f") < __classPrivateFieldGet(this, _ColorSequenceDisplay_colorSequences, "f").length;
+    // Every 10 ms tick we will update the colors displayed
+    if (hasRemainingSequences) {
+        const heightFraction = timeSinceLastColorStageChange /
+            (__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isScrollingStage).call(this)
+                ? __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").downscrollDuration
+                : __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").flatDisplayDuration);
+        if (isFunction(onSequenceColorChange)) {
+            onSequenceColorChange({
+                sequenceColor: __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").color,
+                heightFraction,
+                prevSequenceColor: __classPrivateFieldGet(this, _ColorSequenceDisplay_previousSequence, "f").color,
+            });
+        }
+        resolve(false);
+    }
+    else {
+        if (isFunction(onSequencesComplete)) {
+            onSequencesComplete();
+        }
+        resolve(true);
+    }
+}, _ColorSequenceDisplay_handleSequenceChange = function _ColorSequenceDisplay_handleSequenceChange({ sequenceStartTime, onSequenceChange, }) {
+    __classPrivateFieldSet(this, _ColorSequenceDisplay_previousSequence, __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f"), "f");
+    if (__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isFlatStage).call(this)) {
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_sequenceIndex, __classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f") + 1, "f");
+        __classPrivateFieldSet(this, _ColorSequenceDisplay_colorStage, ColorStageType.Scrolling, "f");
+    }
+    else if (__classPrivateFieldGet(this, _ColorSequenceDisplay_instances, "m", _ColorSequenceDisplay_isScrollingStage).call(this)) {
+        const nextColorSequence = __classPrivateFieldGet(this, _ColorSequenceDisplay_colorSequences, "f")[__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f")];
+        if (nextColorSequence.flatDisplayDuration > 0) {
+            __classPrivateFieldSet(this, _ColorSequenceDisplay_colorStage, ColorStageType.Flat, "f");
+        }
+        else {
+            __classPrivateFieldSet(this, _ColorSequenceDisplay_sequenceIndex, __classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f") + 1, "f");
+        }
+    }
+    __classPrivateFieldSet(this, _ColorSequenceDisplay_sequence, __classPrivateFieldGet(this, _ColorSequenceDisplay_colorSequences, "f")[__classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f")], "f");
+    __classPrivateFieldSet(this, _ColorSequenceDisplay_lastColorStageChangeTimestamp, Date.now(), "f");
+    if (__classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f")) {
+        if (isFunction(onSequenceChange)) {
+            onSequenceChange({
+                prevSequenceColor: __classPrivateFieldGet(this, _ColorSequenceDisplay_previousSequence, "f").color,
+                sequenceColor: __classPrivateFieldGet(this, _ColorSequenceDisplay_sequence, "f").color,
+                sequenceIndex: __classPrivateFieldGet(this, _ColorSequenceDisplay_sequenceIndex, "f"),
+                sequenceStartTime: sequenceStartTime,
+            });
+        }
+    }
+};
+
+export { ColorSequenceDisplay };
```
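For orientation, here is a minimal usage sketch of the new class. The sequence shape (`color`, `flatDisplayDuration`, `downscrollDuration`) and callback names are read off the compiled code above; the concrete colors, durations, and the `runToCompletion` wrapper are illustrative assumptions, not values or APIs shipped by the package.

```js
import { ColorSequenceDisplay } from './ColorSequenceDisplay.mjs';

// Hypothetical sequences: each entry pairs a color with how long it is held
// flat and how long its downward scroll takes (both in ms).
const display = new ColorSequenceDisplay([
  { color: 'rgb(0,0,0)', flatDisplayDuration: 75, downscrollDuration: 0 },
  { color: 'rgb(255,0,0)', flatDisplayDuration: 0, downscrollDuration: 475 },
]);

// startSequences schedules a single TICK_RATE (10 ms) tick and resolves
// `false` while sequences remain, `true` once all have been displayed,
// so a caller drives it in a loop until completion.
async function runToCompletion() {
  let isComplete = false;
  while (!isComplete) {
    isComplete = await display.startSequences({
      onSequenceColorChange: ({ prevSequenceColor, sequenceColor, heightFraction }) => {
        // paint sequenceColor over heightFraction of the freshness overlay
      },
      onSequencesComplete: () => {
        // stop painting
      },
    });
  }
}
```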
package/dist/esm/components/FaceLivenessDetector/service/utils/StreamRecorder/StreamRecorder.mjs
ADDED

```diff
@@ -0,0 +1,171 @@
+import { __classPrivateFieldSet, __classPrivateFieldGet } from 'tslib';
+import { isUndefined } from '@aws-amplify/ui';
+import { TIME_SLICE } from '../constants.mjs';
+
+var _StreamRecorder_instances, _StreamRecorder_chunks, _StreamRecorder_recorder, _StreamRecorder_initialRecorder, _StreamRecorder_recordingStarted, _StreamRecorder_firstChunkTimestamp, _StreamRecorder_recorderEndTimestamp, _StreamRecorder_recorderStartTimestamp, _StreamRecorder_recordingStartTimestamp, _StreamRecorder_recorderStopped, _StreamRecorder_videoStream, _StreamRecorder_eventListeners, _StreamRecorder_clearRecordedChunks, _StreamRecorder_createReadableStream, _StreamRecorder_attachHandlers, _StreamRecorder_setupCallbacks, _StreamRecorder_cleanUpEventListeners;
+class StreamRecorder {
+    constructor(stream) {
+        _StreamRecorder_instances.add(this);
+        _StreamRecorder_chunks.set(this, void 0);
+        _StreamRecorder_recorder.set(this, void 0);
+        _StreamRecorder_initialRecorder.set(this, void 0);
+        _StreamRecorder_recordingStarted.set(this, false);
+        _StreamRecorder_firstChunkTimestamp.set(this, void 0);
+        _StreamRecorder_recorderEndTimestamp.set(this, void 0);
+        _StreamRecorder_recorderStartTimestamp.set(this, void 0);
+        _StreamRecorder_recordingStartTimestamp.set(this, void 0);
+        _StreamRecorder_recorderStopped.set(this, void 0);
+        _StreamRecorder_videoStream.set(this, void 0);
+        _StreamRecorder_eventListeners.set(this, void 0);
+        if (typeof MediaRecorder === 'undefined') {
+            throw Error('MediaRecorder is not supported by this browser');
+        }
+        __classPrivateFieldSet(this, _StreamRecorder_chunks, [], "f");
+        __classPrivateFieldSet(this, _StreamRecorder_recorder, new MediaRecorder(stream, { bitsPerSecond: 1000000 }), "f");
+        __classPrivateFieldSet(this, _StreamRecorder_initialRecorder, __classPrivateFieldGet(this, _StreamRecorder_recorder, "f"), "f");
+        __classPrivateFieldSet(this, _StreamRecorder_videoStream, __classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_createReadableStream).call(this), "f");
+        __classPrivateFieldSet(this, _StreamRecorder_eventListeners, {}, "f");
+    }
+    getVideoStream() {
+        return __classPrivateFieldGet(this, _StreamRecorder_videoStream, "f");
+    }
+    setNewVideoStream(stream) {
+        __classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_cleanUpEventListeners).call(this);
+        __classPrivateFieldSet(this, _StreamRecorder_recorder, new MediaRecorder(stream, { bitsPerSecond: 1000000 }), "f");
+        __classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_attachHandlers).call(this, __classPrivateFieldGet(this, _StreamRecorder_recorder, "f"));
+    }
+    dispatchStreamEvent(event) {
+        const { type } = event;
+        const data = type === 'streamStop' ? undefined : event.data;
+        __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").dispatchEvent(new MessageEvent(type, { data }));
+    }
+    getRecordingStartTimestamp() {
+        if (isUndefined(__classPrivateFieldGet(this, _StreamRecorder_recorderStartTimestamp, "f")) ||
+            isUndefined(__classPrivateFieldGet(this, _StreamRecorder_recordingStartTimestamp, "f"))) {
+            throw new Error('Recording has not started');
+        }
+        /**
+         * This calculation is provided by Science team after doing analysis
+         * of unreliable .onstart() (this.#recorderStartTimestamp) timestamp that is
+         * returned from mediaRecorder.
+         */
+        return Math.round(0.73 * (__classPrivateFieldGet(this, _StreamRecorder_recorderStartTimestamp, "f") - __classPrivateFieldGet(this, _StreamRecorder_recordingStartTimestamp, "f")) +
+            __classPrivateFieldGet(this, _StreamRecorder_recordingStartTimestamp, "f"));
+    }
+    getRecordingEndedTimestamp() {
+        if (isUndefined(__classPrivateFieldGet(this, _StreamRecorder_recorderEndTimestamp, "f"))) {
+            throw new Error('Recording has not ended');
+        }
+        return __classPrivateFieldGet(this, _StreamRecorder_recorderEndTimestamp, "f");
+    }
+    startRecording() {
+        __classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_clearRecordedChunks).call(this);
+        __classPrivateFieldSet(this, _StreamRecorder_recordingStartTimestamp, Date.now(), "f");
+        __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").start(TIME_SLICE);
+    }
+    isRecording() {
+        return __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").state === 'recording';
+    }
+    getChunksLength() {
+        return __classPrivateFieldGet(this, _StreamRecorder_chunks, "f").length;
+    }
+    hasRecordingStarted() {
+        return __classPrivateFieldGet(this, _StreamRecorder_recordingStarted, "f") && __classPrivateFieldGet(this, _StreamRecorder_firstChunkTimestamp, "f") !== undefined;
+    }
+    async stopRecording() {
+        if (this.isRecording()) {
+            __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").stop();
+        }
+        return __classPrivateFieldGet(this, _StreamRecorder_recorderStopped, "f");
+    }
+}
+_StreamRecorder_chunks = new WeakMap(), _StreamRecorder_recorder = new WeakMap(), _StreamRecorder_initialRecorder = new WeakMap(), _StreamRecorder_recordingStarted = new WeakMap(), _StreamRecorder_firstChunkTimestamp = new WeakMap(), _StreamRecorder_recorderEndTimestamp = new WeakMap(), _StreamRecorder_recorderStartTimestamp = new WeakMap(), _StreamRecorder_recordingStartTimestamp = new WeakMap(), _StreamRecorder_recorderStopped = new WeakMap(), _StreamRecorder_videoStream = new WeakMap(), _StreamRecorder_eventListeners = new WeakMap(), _StreamRecorder_instances = new WeakSet(), _StreamRecorder_clearRecordedChunks = function _StreamRecorder_clearRecordedChunks() {
+    __classPrivateFieldSet(this, _StreamRecorder_chunks, [], "f");
+}, _StreamRecorder_createReadableStream = function _StreamRecorder_createReadableStream() {
+    return new ReadableStream({
+        start: (controller) => {
+            __classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_attachHandlers).call(this, __classPrivateFieldGet(this, _StreamRecorder_recorder, "f"), controller);
+        },
+    });
+}, _StreamRecorder_attachHandlers = function _StreamRecorder_attachHandlers(recorder, controller) {
+    const onDataAvailableHandler = controller
+        ? ({ data }) => {
+            if (data && data.size > 0) {
+                if (__classPrivateFieldGet(this, _StreamRecorder_chunks, "f").length === 0) {
+                    __classPrivateFieldSet(this, _StreamRecorder_firstChunkTimestamp, Date.now(), "f");
+                }
+                __classPrivateFieldGet(this, _StreamRecorder_chunks, "f").push(data);
+                controller.enqueue({ type: 'streamVideo', data });
+            }
+        }
+        : ({ data }) => {
+            __classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('dataavailable', { data }));
+        };
+    recorder.ondataavailable = onDataAvailableHandler;
+    const onSessionInfoHandler = controller
+        ? (e) => {
+            const { data } = e;
+            controller.enqueue({ type: 'sessionInfo', data });
+        }
+        : (e) => {
+            const { data } = e;
+            __classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('sessionInfo', { data }));
+        };
+    recorder.addEventListener('sessionInfo', onSessionInfoHandler);
+    const onStreamStopHandler = controller
+        ? () => {
+            controller.enqueue({ type: 'streamStop' });
+        }
+        : () => {
+            __classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('streamStop'));
+        };
+    recorder.addEventListener('streamStop', onStreamStopHandler);
+    const onCloseCodeHandler = controller
+        ? (e) => {
+            const { data } = e;
+            controller.enqueue({ type: 'closeCode', data });
+        }
+        : (e) => {
+            const { data } = e;
+            __classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('closeCode', { data }));
+        };
+    recorder.addEventListener('closeCode', onCloseCodeHandler);
+    const onEndStreamHandler = controller
+        ? () => {
+            controller.close();
+        }
+        : () => {
+            __classPrivateFieldGet(this, _StreamRecorder_initialRecorder, "f").dispatchEvent(new MessageEvent('endStream'));
+        };
+    recorder.addEventListener('endStream', onEndStreamHandler);
+    __classPrivateFieldGet(this, _StreamRecorder_instances, "m", _StreamRecorder_setupCallbacks).call(this);
+    __classPrivateFieldSet(this, _StreamRecorder_eventListeners, {
+        endStream: onEndStreamHandler,
+        closeCode: onCloseCodeHandler,
+        streamStop: onStreamStopHandler,
+        sessionInfo: onSessionInfoHandler,
+        dataavailable: onDataAvailableHandler,
+    }, "f");
+}, _StreamRecorder_setupCallbacks = function _StreamRecorder_setupCallbacks() {
+    __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").onstart = () => {
+        __classPrivateFieldSet(this, _StreamRecorder_recordingStarted, true, "f");
+        __classPrivateFieldSet(this, _StreamRecorder_recorderStartTimestamp, Date.now(), "f");
+    };
+    __classPrivateFieldSet(this, _StreamRecorder_recorderStopped, new Promise((resolve) => {
+        __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").onstop = () => {
+            __classPrivateFieldSet(this, _StreamRecorder_recorderEndTimestamp, Date.now(), "f");
+            resolve();
+        };
+    }), "f");
+    __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").onerror = () => {
+        this.stopRecording();
+    };
+}, _StreamRecorder_cleanUpEventListeners = function _StreamRecorder_cleanUpEventListeners() {
+    const eventNames = Object.keys(__classPrivateFieldGet(this, _StreamRecorder_eventListeners, "f"));
+    eventNames.forEach((name) => {
+        __classPrivateFieldGet(this, _StreamRecorder_recorder, "f").removeEventListener(name, __classPrivateFieldGet(this, _StreamRecorder_eventListeners, "f")[name]);
+    });
+    __classPrivateFieldSet(this, _StreamRecorder_eventListeners, {}, "f");
+};
+
+export { StreamRecorder };
```
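Two details worth calling out. First, the recorder wraps MediaRecorder output (plus `sessionInfo`/`streamStop`/`closeCode` control events) in a single ReadableStream of tagged events. Second, `getRecordingStartTimestamp` does not trust MediaRecorder's `onstart` timing; it blends the `start()` call time and the `onstart` time using the 0.73 factor noted in the in-code comment. A hedged lifecycle sketch, assuming a camera `MediaStream` obtained directly here (in the component it comes from the state machine):

```js
import { StreamRecorder } from './StreamRecorder.mjs';

const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true });
const recorder = new StreamRecorder(mediaStream); // throws if MediaRecorder is unsupported

recorder.startRecording(); // emits a chunk every TIME_SLICE (1000 ms)

// Worked example of the weighted start estimate: if start() was called at
// t = 1000 ms and onstart fired at t = 1100 ms, the reported start is
// round(0.73 * (1100 - 1000) + 1000) = 1073 ms.
// (Calling getRecordingStartTimestamp() before onstart fires throws.)

await recorder.stopRecording(); // resolves once MediaRecorder fires onstop
console.log(recorder.getRecordingEndedTimestamp());
```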
package/dist/esm/components/FaceLivenessDetector/service/utils/TelemetryReporter/TelemetryReporter.mjs
ADDED

```diff
@@ -0,0 +1,27 @@
+const DEFAULT_ATTEMPT_COUNT_TIMEOUT = 300000; // 5 minutes / 300000 ms
+// Telemetry data is for internal use only and should not be depended upon or used by the customer
+class TelemetryReporter {
+    static getAttemptCountAndUpdateTimestamp() {
+        const timeSinceLastAttempt = Date.now() - TelemetryReporter.timestamp;
+        if (timeSinceLastAttempt > DEFAULT_ATTEMPT_COUNT_TIMEOUT) {
+            TelemetryReporter.attemptCount = 1;
+        }
+        else {
+            TelemetryReporter.attemptCount += 1;
+        }
+        TelemetryReporter.timestamp = Date.now();
+        return TelemetryReporter.attemptCount;
+    }
+}
+TelemetryReporter.attemptCount = 0;
+TelemetryReporter.timestamp = Date.now();
+const createTelemetryReporterMiddleware = (attemptCount, preCheckViewEnabled) => (next) => async (args) => {
+    args.request.query['attempt-count'] =
+        attemptCount.toString();
+    args.request.query['precheck-view-enabled'] =
+        preCheckViewEnabled ? '1' : '0';
+    const result = await next(args);
+    return result;
+};
+
+export { TelemetryReporter, createTelemetryReporterMiddleware };
```
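The reporter keeps a static attempt counter with a five-minute inactivity reset, and the middleware serializes it (plus whether the start screen was shown) onto the streaming request as query parameters. A small worked example of the counter behavior, following the code above:

```js
import { TelemetryReporter } from './TelemetryReporter.mjs';

// Module load initializes the timestamp, so a first check within the
// 5-minute window yields 1, and each later check within the window adds 1.
TelemetryReporter.getAttemptCountAndUpdateTimestamp(); // -> 1
TelemetryReporter.getAttemptCountAndUpdateTimestamp(); // within 5 min -> 2
// After more than 300000 ms without a call, the next call resets to 1.
// The middleware then sends these as `attempt-count=2` and
// `precheck-view-enabled=1` (or `0`) on the signed request.
```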
package/dist/esm/components/FaceLivenessDetector/service/utils/constants.mjs
CHANGED

```diff
@@ -1,11 +1,5 @@
 // Face distance is calculated as pupilDistance / ovalWidth.
 // The further away you are from the camera the distance between your pupils will decrease, thus lowering the threshold values.
-// These FACE_DISTANCE_THRESHOLD values are determined by the science team and should only be changed with their approval.
-// We want to ensure at the start of a check that the user's pupilDistance/ovalWidth is below FACE_DISTANCE_THRESHOLD to ensure that they are starting
-// a certain distance away from the camera.
-const FACE_DISTANCE_THRESHOLD = 0.32;
-const REDUCED_THRESHOLD = 0.4;
-const REDUCED_THRESHOLD_MOBILE = 0.37;
 // Constants from science team to determine ocular distance (space between eyes)
 const PUPIL_DISTANCE_WEIGHT = 2.0;
 const FACE_HEIGHT_WEIGHT = 1.8;
@@ -14,6 +8,7 @@ const FACE_MATCH_RANGE_MIN = 0;
 const FACE_MATCH_RANGE_MAX = 1;
 const FACE_MATCH_WEIGHT_MIN = 0.25;
 const FACE_MATCH_WEIGHT_MAX = 0.75;
+const OVAL_HEIGHT_WIDTH_RATIO = 1.618;
 const WS_CLOSURE_CODE = {
     SUCCESS_CODE: 1000,
     DEFAULT_ERROR_CODE: 4000,
@@ -22,5 +17,33 @@ const WS_CLOSURE_CODE = {
     RUNTIME_ERROR: 4005,
     USER_ERROR_DURING_CONNECTION: 4007,
 };
+// number in milliseconds to record into each video chunk.
+// see https://developer.mozilla.org/en-US/docs/Web/API/MediaRecorder/start#timeslice
+const TIME_SLICE = 1000;
+// in MS, the rate at which colors are rendered/checked
+const TICK_RATE = 10;
+/**
+ * The number of seconds before the presigned URL expires.
+ * Used to override aws sdk default value of 60
+ */
+const REQUEST_EXPIRY = 299;
+/**
+ * The maximum time in milliseconds that the connection phase of a request
+ * may take before the connection attempt is abandoned.
+ */
+const CONNECTION_TIMEOUT = 10000;
+const FACE_MOVEMENT_AND_LIGHT_CHALLENGE = {
+    type: 'FaceMovementAndLightChallenge',
+    version: '1.0.0',
+};
+const FACE_MOVEMENT_CHALLENGE = {
+    type: 'FaceMovementChallenge',
+    version: '1.0.0',
+};
+const SUPPORTED_CHALLENGES = [
+    FACE_MOVEMENT_AND_LIGHT_CHALLENGE,
+    FACE_MOVEMENT_CHALLENGE,
+];
+const queryParameterString = SUPPORTED_CHALLENGES.map((challenge) => `${challenge.type}_${challenge.version}`).join(',');
 
-export {
+export { CONNECTION_TIMEOUT, FACE_HEIGHT_WEIGHT, FACE_MATCH_RANGE_MAX, FACE_MATCH_RANGE_MIN, FACE_MATCH_WEIGHT_MAX, FACE_MATCH_WEIGHT_MIN, FACE_MOVEMENT_AND_LIGHT_CHALLENGE, FACE_MOVEMENT_CHALLENGE, OVAL_HEIGHT_WIDTH_RATIO, PUPIL_DISTANCE_WEIGHT, REQUEST_EXPIRY, SUPPORTED_CHALLENGES, TICK_RATE, TIME_SLICE, WS_CLOSURE_CODE, queryParameterString };
```
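One consequence of the new constants: the supported-challenge list is serialized once at module load, and that string is what the streaming client later sends as `ChallengeVersions`. A quick check of the value it evaluates to:

```js
import { queryParameterString } from './constants.mjs';

// map(`${type}_${version}`).join(',') over SUPPORTED_CHALLENGES:
console.log(queryParameterString);
// -> 'FaceMovementAndLightChallenge_1.0.0,FaceMovementChallenge_1.0.0'
```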
package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/createRequestStreamGenerator.mjs
ADDED

```diff
@@ -0,0 +1,32 @@
+import { createVideoEvent } from './utils.mjs';
+
+/**
+ * Creates an async generator that reads over the provided stream and yielding stream results
+ *
+ * @param {VideoStream} stream target video stream
+ * @returns {GetRequestStream} async request stream generator
+ */
+function createRequestStreamGenerator(stream) {
+    const reader = stream.getReader();
+    return {
+        getRequestStream: async function* () {
+            while (true) {
+                const { done, value } = await reader.read();
+                if (done) {
+                    return;
+                }
+                if (value.type === 'sessionInfo') {
+                    yield { ClientSessionInformationEvent: value.data };
+                }
+                else {
+                    // Unless value.type is closeCode we never want to send a 0 size video event as it signals end of stream
+                    if (value.type === 'streamVideo' && value.data.size < 1)
+                        continue;
+                    yield { VideoEvent: await createVideoEvent(value) };
+                }
+            }
+        },
+    };
+}
+
+export { createRequestStreamGenerator };
```
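The generator is the bridge between `StreamRecorder`'s tagged ReadableStream and the async iterable the Rekognition streaming SDK consumes. A sketch of the wiring (the `pump` wrapper is an illustrative assumption; in the component the state machine owns both ends):

```js
import { StreamRecorder } from '../StreamRecorder/StreamRecorder.mjs';
import { createRequestStreamGenerator } from './createRequestStreamGenerator.mjs';

async function pump(mediaStream) {
  const recorder = new StreamRecorder(mediaStream); // assumes an existing camera MediaStream
  const { getRequestStream } = createRequestStreamGenerator(recorder.getVideoStream());
  recorder.startRecording();
  for await (const event of getRequestStream()) {
    // Each yield is either { ClientSessionInformationEvent: ... } or
    // { VideoEvent: { VideoChunk, TimestampMillis } }; zero-byte video
    // chunks are skipped except the deliberate empty end-of-stream chunk.
  }
}
```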
package/dist/esm/components/FaceLivenessDetector/service/utils/createRequestStreamGenerator/utils.mjs
ADDED

```diff
@@ -0,0 +1,148 @@
+import { isUndefined } from '@aws-amplify/ui';
+import { isFaceMovementChallenge, isFaceMovementAndLightChallenge } from '../sessionInformation.mjs';
+
+const createVideoEvent = async (result) => {
+    const { data, type } = result;
+    return {
+        VideoChunk: new Uint8Array(
+        // server expects an empty chunk on 'stopStream' event
+        type === 'streamVideo' ? await data.arrayBuffer() : []),
+        // @ts-expect-error for 'closeCode' event, `data` is an object which is
+        // unexpected by `VideoEvent` but is expected by the streaming service
+        TimestampMillis: type === 'closeCode' ? data : Date.now(),
+    };
+};
+const getTrackDimensions = (stream) => {
+    const { height: trackHeight, width: trackWidth } = stream
+        .getVideoTracks()[0]
+        .getSettings();
+    if (isUndefined(trackHeight) || isUndefined(trackWidth)) {
+        throw new Error(`Invalid Track Dimensions. height: ${trackHeight}, width: ${trackWidth} `);
+    }
+    return { trackHeight, trackWidth };
+};
+function getBoundingBox({ trackHeight, trackWidth, height, width, top, left, }) {
+    return {
+        Height: height / trackHeight,
+        Width: width / trackWidth,
+        Top: top / trackHeight,
+        Left: left / trackWidth,
+    };
+}
+const getFlippedInitialFaceLeft = (trackWidth, faceLeft, faceWidth) => trackWidth - faceLeft - faceWidth;
+const getInitialFaceBoundingBox = (params) => {
+    const { trackWidth, left, width } = params;
+    return getBoundingBox({
+        ...params,
+        left: getFlippedInitialFaceLeft(trackWidth, left, width),
+    });
+};
+const getTargetFaceBoundingBox = (params) => {
+    const { height, width, centerX, centerY } = params;
+    return getBoundingBox({
+        ...params,
+        top: centerY - height / 2,
+        left: centerX - width / 2,
+    });
+};
+function createClientSessionInformationEvent({ parsedSessionInformation, clientChallenge, }) {
+    if (isFaceMovementChallenge(parsedSessionInformation)) {
+        return {
+            Challenge: {
+                FaceMovementChallenge: clientChallenge,
+            },
+        };
+    }
+    if (isFaceMovementAndLightChallenge(parsedSessionInformation)) {
+        return {
+            Challenge: {
+                FaceMovementAndLightChallenge: clientChallenge,
+            },
+        };
+    }
+    throw new Error('Unable to create ClientSessionInformationEvent');
+}
+function createSessionEndEvent({ parsedSessionInformation, challengeId, faceMatchAssociatedParams, ovalAssociatedParams, recordingEndedTimestamp, trackHeight, trackWidth, }) {
+    const { initialFace, ovalDetails } = ovalAssociatedParams;
+    const { startFace, endFace } = faceMatchAssociatedParams;
+    const initialFaceBoundingBox = getInitialFaceBoundingBox({
+        trackHeight,
+        trackWidth,
+        ...initialFace,
+    });
+    const targetFaceBoundingBox = getTargetFaceBoundingBox({
+        trackHeight,
+        trackWidth,
+        ...ovalDetails,
+    });
+    const clientChallenge = {
+        ChallengeId: challengeId,
+        InitialFace: {
+            InitialFaceDetectedTimestamp: initialFace.timestampMs,
+            BoundingBox: initialFaceBoundingBox,
+        },
+        TargetFace: {
+            FaceDetectedInTargetPositionStartTimestamp: startFace.timestampMs,
+            FaceDetectedInTargetPositionEndTimestamp: endFace.timestampMs,
+            BoundingBox: targetFaceBoundingBox,
+        },
+        VideoEndTimestamp: recordingEndedTimestamp,
+    };
+    return createClientSessionInformationEvent({
+        parsedSessionInformation,
+        clientChallenge,
+    });
+}
+function createSessionStartEvent({ parsedSessionInformation, challengeId, ovalAssociatedParams, recordingStartedTimestamp, trackHeight, trackWidth, }) {
+    const { initialFace } = ovalAssociatedParams;
+    const initialFaceBoundingBox = getInitialFaceBoundingBox({
+        trackHeight,
+        trackWidth,
+        ...initialFace,
+    });
+    const clientChallenge = {
+        ChallengeId: challengeId,
+        VideoStartTimestamp: recordingStartedTimestamp,
+        InitialFace: {
+            InitialFaceDetectedTimestamp: initialFace.timestampMs,
+            BoundingBox: initialFaceBoundingBox,
+        },
+    };
+    return createClientSessionInformationEvent({
+        parsedSessionInformation,
+        clientChallenge,
+    });
+}
+/**
+ * Translates provided sequence color string to an RGB array
+ *
+ * @param {SequenceColorValue} color
+ * @returns {number[]}
+ */
+const colorToRgb = (color) => {
+    return color
+        .slice(color.indexOf('(') + 1, color.indexOf(')'))
+        .split(',')
+        .map((str) => parseInt(str));
+};
+function createColorDisplayEvent({ challengeId, sequenceStartTime, sequenceIndex, sequenceColor, prevSequenceColor, }) {
+    const CurrentColor = { RGB: colorToRgb(sequenceColor) };
+    const PreviousColor = {
+        RGB: colorToRgb(prevSequenceColor),
+    };
+    return {
+        Challenge: {
+            FaceMovementAndLightChallenge: {
+                ChallengeId: challengeId,
+                ColorDisplayed: {
+                    CurrentColor,
+                    PreviousColor,
+                    SequenceNumber: sequenceIndex,
+                    CurrentColorStartTimestamp: sequenceStartTime,
+                },
+            },
+        },
+    };
+}
+
+export { createColorDisplayEvent, createSessionEndEvent, createSessionStartEvent, createVideoEvent, getTrackDimensions };
```
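To make the bounding-box normalization concrete, a worked example with illustrative numbers (the helpers are module-internal and not exported, so the arithmetic is repeated inline). The initial face's left edge is flipped first, presumably because the selfie preview is mirrored:

```js
// 640x480 track; face box at left=100, top=120, width=200, height=240.
const trackWidth = 640;
const trackHeight = 480;

// getFlippedInitialFaceLeft(trackWidth, left, width):
const flippedLeft = 640 - 100 - 200; // 340

// getBoundingBox normalizes everything to [0, 1] fractions of the track:
const boundingBox = {
  Height: 240 / 480, // 0.5
  Width: 200 / 640,  // 0.3125
  Top: 120 / 480,    // 0.25
  Left: 340 / 640,   // 0.53125
};

// colorToRgb('rgb(255, 0, 0)') -> [255, 0, 0], used for ColorDisplayed events.
```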
package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/Signer.mjs
CHANGED

```diff
@@ -1,7 +1,6 @@
 import { SignatureV4 } from '@smithy/signature-v4';
+import { REQUEST_EXPIRY } from '../constants.mjs';
 
-// override aws sdk default value of 60
-const REQUEST_EXPIRY = 299;
 class Signer extends SignatureV4 {
     presign(request, options) {
         return super.presign(request, {
@@ -15,4 +14,4 @@ class Signer extends SignatureV4 {
     }
 }
 
-export {
+export { Signer };
```
package/dist/esm/components/FaceLivenessDetector/service/utils/createStreamingClient/createStreamingClient.mjs
CHANGED

```diff
@@ -1,16 +1,17 @@
-import { RekognitionStreamingClient } from '@aws-sdk/client-rekognitionstreaming';
+import { StartFaceLivenessSessionCommand, RekognitionStreamingClient } from '@aws-sdk/client-rekognitionstreaming';
 import { getAmplifyUserAgent } from '@aws-amplify/core/internals/utils';
+import { isString } from '@aws-amplify/ui';
 import { getLivenessUserAgent } from '../../../utils/platform.mjs';
+import { queryParameterString, CONNECTION_TIMEOUT } from '../constants.mjs';
 import { CustomWebSocketFetchHandler } from './CustomWebSocketFetchHandler.mjs';
 import { resolveCredentials } from './resolveCredentials.mjs';
 import { Signer } from './Signer.mjs';
+import { createTelemetryReporterMiddleware } from '../TelemetryReporter/TelemetryReporter.mjs';
 
-const CONNECTION_TIMEOUT = 10000;
 const CUSTOM_USER_AGENT = `${getAmplifyUserAgent()} ${getLivenessUserAgent()}`;
-async function createStreamingClient({ credentialsProvider, endpointOverride, region, systemClockOffset, }) {
-    const credentials = await resolveCredentials(credentialsProvider);
+async function getStreamingClient({ credentialsProvider, endpointOverride, region, systemClockOffset, }) {
     const clientconfig = {
-        credentials,
+        credentials: await resolveCredentials(credentialsProvider),
         customUserAgent: CUSTOM_USER_AGENT,
         region,
         requestHandler: new CustomWebSocketFetchHandler({
@@ -19,10 +20,39 @@ async function createStreamingClient({ credentialsProvider, endpointOverride, re
         signerConstructor: Signer,
         systemClockOffset,
     };
-    if (endpointOverride) {
+    if (isString(endpointOverride)) {
        clientconfig.endpointProvider = () => ({ url: new URL(endpointOverride) });
     }
     return new RekognitionStreamingClient(clientconfig);
 }
+const createCommandInput = ({ requestStream, sessionId, videoWidth, videoHeight, }) => ({
+    ChallengeVersions: queryParameterString,
+    SessionId: sessionId,
+    LivenessRequestStream: requestStream,
+    VideoWidth: videoWidth,
+    VideoHeight: videoHeight,
+});
+/**
+ * Initializes an instance of the Rekognition streaming client, returns `getResponseStream`
+ *
+ * @async
+ * @param clientConfig configuration fpr the client
+ * @returns {Promise<{ getResponseStream: GetReponseStream }>}
+ */
+async function createStreamingClient(clientConfig) {
+    const client = await getStreamingClient(clientConfig);
+    client.middlewareStack.add(createTelemetryReporterMiddleware(clientConfig.attemptCount, clientConfig.preCheckViewEnabled), {
+        step: 'build',
+        name: 'telemetryMiddleware',
+        tags: ['liveness', 'amplify-ui'],
+    });
+    return {
+        async getResponseStream(input) {
+            const command = new StartFaceLivenessSessionCommand(createCommandInput(input));
+            const { LivenessResponseStream } = await client.send(command);
+            return LivenessResponseStream;
+        },
+    };
+}
 
 export { createStreamingClient };
```
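Putting the pieces together, a hedged call-site sketch. The config fields follow the code above (`attemptCount` and `preCheckViewEnabled` feed the telemetry middleware); the region, the string-typed dimensions (assumed to match the SDK's StartFaceLivenessSession request shape), and the assumption that omitting `credentialsProvider` makes `resolveCredentials` fall back to the configured Amplify credentials are all illustrative:

```js
import { createStreamingClient } from './createStreamingClient.mjs';
import { TelemetryReporter } from '../TelemetryReporter/TelemetryReporter.mjs';

async function startLivenessStream({ requestStream, sessionId }) {
  const { getResponseStream } = await createStreamingClient({
    region: 'us-east-1', // illustrative
    attemptCount: TelemetryReporter.getAttemptCountAndUpdateTimestamp(),
    preCheckViewEnabled: true,
  });
  // Issues StartFaceLivenessSessionCommand with ChallengeVersions set to
  // queryParameterString and resolves to the LivenessResponseStream.
  return getResponseStream({
    requestStream, // async iterable from createRequestStreamGenerator
    sessionId,
    videoWidth: '640',  // assumed string-typed per the service model
    videoHeight: '480',
  });
}
```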