@incodetech/welcome 1.85.0-20251217155726.0 → 1.85.0-20251217192812.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/incode-welcome/src/Face/api/sendAuthFaceImage.d.ts +0 -1
- package/dist/incode-welcome/src/mediaRecorder/DeepsightVideoRecorder.d.ts +0 -1
- package/dist/incode-welcome/src/mediaRecorder/uploadDeepsightVideo.d.ts +1 -2
- package/dist/incode-welcome/src/publishKeys.d.ts +1 -1
- package/dist/incode-welcome/src/wasmUtils/mlWasmJSApi.d.ts +1 -0
- package/dist/onBoarding.mjs +67 -59
- package/dist/onBoarding.umd.js +3 -3
- package/package.json +1 -1
package/dist/incode-welcome/src/Face/api/sendAuthFaceImage.d.ts
CHANGED
@@ -18,7 +18,6 @@ export type SendAuthFaceImageResponse = {
   imageBase64?: string;
   selfieEncryptedBase64?: string;
   metadata?: string;
-  videoFileURL?: string;
 };
 export declare const sendAuthFaceImage: ({ canvas, faceCoordinates, sessionToken, }: {
   canvas: IncodeCanvas;
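With videoFileURL removed from SendAuthFaceImageResponse, callers should read only the remaining optional fields. A minimal consumption sketch, assuming sendAuthFaceImage resolves to this response type and that canvas, faceCoordinates, and sessionToken already exist in the caller's scope (all illustrative, not part of the package):

// Hedged sketch; variable names are illustrative.
const result = await sendAuthFaceImage({ canvas, faceCoordinates, sessionToken });
const { imageBase64, selfieEncryptedBase64, metadata } = result;
// result.videoFileURL no longer exists after this change.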

package/dist/incode-welcome/src/mediaRecorder/DeepsightVideoRecorder.d.ts
CHANGED
@@ -7,7 +7,6 @@ export declare class DeepsightVideoRecorder extends InterruptionHandler {
   private stream;
   recordingId: string | null;
   qsvWarning: string | null;
-  videoFileURL: string | null;
   trimmedVideoBlob: Blob | null;
   setStreamAndStartRecording(stream: MediaStream): void;
   startRecording(): void;
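After this change the recorder no longer exposes a videoFileURL property; the trimmed recording is kept in memory as trimmedVideoBlob and the upload result as recordingId. A rough usage sketch, assuming a camera MediaStream is available (the stream setup below is illustrative, not taken from the package's docs):

// Hedged sketch of driving DeepsightVideoRecorder from the declaration above.
const recorder = new DeepsightVideoRecorder();
const stream = await navigator.mediaDevices.getUserMedia({ video: true });
recorder.setStreamAndStartRecording(stream);
// ...after recording stops, read the fields the .d.ts still declares:
// recorder.recordingId: string | null, recorder.trimmedVideoBlob: Blob | null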

package/dist/incode-welcome/src/mediaRecorder/uploadDeepsightVideo.d.ts
CHANGED
@@ -1,6 +1,5 @@
 import { Session } from '../../../types/src';
-export declare const uploadDeepsightVideo: (video:
+export declare const uploadDeepsightVideo: (video: string, token: Session['token']) => Promise<{
   recordingId: string;
-  url: string;
 }>;
 export default uploadDeepsightVideo;
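The helper now takes only the encrypted base64 video string and the session token, and the resolved object no longer carries a url. A minimal call sketch under those assumptions (encryptedVideoBase64 and session are hypothetical caller-side values):

// Hedged sketch of the new two-argument signature.
const { recordingId } = await uploadDeepsightVideo(encryptedVideoBase64, session.token);
if (recordingId) {
  // attach the recording id to the follow-up capture request
}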

package/dist/incode-welcome/src/publishKeys.d.ts
CHANGED
@@ -1 +1 @@
-export default function publishKeys(token
+export default function publishKeys(token?: string): Promise<any>;
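The token argument is now explicitly optional, so both call forms below are valid under the new declaration (sessionToken is a hypothetical caller-side value; the return type is Promise<any>, so the result is untyped here):

// Hedged sketch of the relaxed signature.
await publishKeys();
await publishKeys(sessionToken);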

package/dist/incode-welcome/src/wasmUtils/mlWasmJSApi.d.ts
CHANGED
@@ -89,6 +89,7 @@ declare class MlWasmJSApi {
   allocateImageBuffers(imageWidth: number | null, imageHeight: number | null): Promise<void>;
   handleDetectionCallAndUpdateState(type: WasmPipelineType): Promise<void>;
   encryptImage(image: string): string;
+  ckvcks(data: ArrayBuffer): void;
   isVirtualCamera(label: string | null): boolean;
   estimatePerformance(): string;
   analyzeFrame(image: ImageData): Promise<void>;
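The new ckvcks method accepts a raw ArrayBuffer and returns void; in the bundle below it is fed the trimmed recording's bytes after a WASM initialization check. A small sketch of passing a Blob's contents to it, assuming an already-initialized instance (mlWasm and recording are illustrative names; the parameter is typed structurally to stay self-contained):

// Hedged sketch: handing a Blob's bytes to ckvcks(data: ArrayBuffer).
async function checksumRecording(mlWasm: { ckvcks(data: ArrayBuffer): void }, recording: Blob): Promise<void> {
  const bytes = await recording.arrayBuffer(); // Blob -> ArrayBuffer
  mlWasm.ckvcks(bytes);                        // guarded by checkWasmInitialization in the bundle
}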
package/dist/onBoarding.mjs
CHANGED
@@ -7699,7 +7699,7 @@ function getProfile(Be) {
   throw new Error(`Profile not found for type: ${Be}`);
   return Le;
 }
-const wasmVersion = "v2.12.
+const wasmVersion = "v2.12.46";
 function generateBundle() {
   const Be = getFeatureFlag("id_text_barcode_readability") ? PipelineProfileName.IdCapture_3_02 : getFeatureFlag("id_model_hebrew_back_id") ? PipelineProfileName.IdCapture_2_02 : PipelineProfileName.IdCapture_2_01, Le = PipelineProfileName.SelfieWithQualityMetrics, Ye = PipelineProfileName.SelfieVideoSelfie, tt = PipelineProfileName.IdVideoSelfie_2_01;
   return [
@@ -7801,6 +7801,11 @@ class MlWasmJSApi {
       "Unable to encrypt the image, cpp API hasn't been initialized"
     ), this.utilityApi.encryptImage(Le);
   }
+  ckvcks(Le) {
+    this.checkWasmInitialization(
+      "Unable to compute video checksum, cpp API hasn't been initialized"
+    ), this.utilityApi.ckvcks(Le);
+  }
   isVirtualCamera(Le) {
     if (this.checkWasmInitialization(
       "Unable to check if the camera is virtual, cpp API hasn't been initialized"
@@ -8282,7 +8287,7 @@ class MlWasmJSApi {
     );
   }
 }
-const MlWasmJSApi$1 = MlWasmJSApi.getInstance(), version$4 = "1.85.0-
+const MlWasmJSApi$1 = MlWasmJSApi.getInstance(), version$4 = "1.85.0-20251217192812.0";
 function e$2(Be, Le) {
   return function(Ye, tt) {
     return Object.prototype.hasOwnProperty.call(Ye, tt);
@@ -161231,41 +161236,37 @@ const ERROR_RESPONSE_BODY_READER = new Error("failed to get response body reader
 }, toBlobURL = async (Be, Le, Ye = !1, tt) => {
   const nt = Ye ? await downloadWithProgress(Be, tt) : await (await fetch(Be)).arrayBuffer(), ft = new Blob([nt], { type: Le });
   return URL.createObjectURL(ft);
-}
-async function generateSessionRecordingUploadUrl({
-  token: Be,
-  type: Le
-}) {
-  return api.get(
-    "/omni/generateSessionRecordingUploadUrl",
-    {
-      params: { type: Le },
-      headers: {
-        "X-Incode-Hardware-Id": Be
-      }
-    }
-  ).then((Ye) => Ye.data);
-}
-const uploadDeepsightVideo = async (Be, Le, Ye) => {
-  const { recordingId: tt, url: nt } = await generateSessionRecordingUploadUrl({
-    token: Le,
-    type: "selfie"
-  });
+}, uploadDeepsightVideo = async (Be, Le) => {
   try {
-    return await
-
-
-
-    "
+    return (await api.post(
+      "/omni/recordings/import",
+      {
+        video: Be,
+        type: "selfie"
+      },
+      {
+        headers: {
+          "X-Incode-Hardware-Id": Le
+        }
       }
-
-  } catch (
-    return console.error("Error uploading video:",
-      recordingId: void 0
-      url: void 0
+    )).data;
+  } catch (Ye) {
+    return console.error("Error uploading video:", Ye), {
+      recordingId: void 0
     };
   }
 };
+function toBase64(Be) {
+  return new Promise((Le, Ye) => {
+    const tt = new FileReader();
+    tt.onloadend = () => {
+      const ft = tt.result.split(",")[1];
+      Le(ft);
+    }, tt.onerror = () => {
+      Ye(tt.error ?? new Error("FileReader error"));
+    }, tt.readAsDataURL(Be);
+  });
+}
 function getAdaptiveMediaRecorderOptions() {
   const Be = getSupportedMediaRecorderMimeType();
   return mobile.iOS() ? {
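This hunk replaces the presigned-URL flow (generateSessionRecordingUploadUrl followed by a separate upload) with a single POST to /omni/recordings/import, and adds a toBase64 helper that strips the data-URL prefix from a FileReader result. The sketch below restates the bundled logic with descriptive names in place of the minified Be/Le/Ye identifiers; the api client is assumed from the surrounding bundle, and this is illustrative rather than the shipped source:

// Hedged, de-minified restatement of the new helpers.
async function blobToBase64(blob: Blob): Promise<string> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onloadend = () => resolve((reader.result as string).split(",")[1]); // drop "data:...;base64,"
    reader.onerror = () => reject(reader.error ?? new Error("FileReader error"));
    reader.readAsDataURL(blob);
  });
}

async function importRecording(videoBase64: string, token: string): Promise<{ recordingId?: string }> {
  try {
    const response = await api.post(
      "/omni/recordings/import",
      { video: videoBase64, type: "selfie" },
      { headers: { "X-Incode-Hardware-Id": token } } // token is sent as the hardware-id header, as in the bundle
    );
    return response.data;
  } catch (error) {
    console.error("Error uploading video:", error);
    return { recordingId: undefined };
  }
}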
@@ -161413,7 +161414,7 @@ async function trimLastNSecondsUsingPlayback(Be, Le) {
 }
 class DeepsightVideoRecorder extends InterruptionHandler {
   constructor() {
-    super(...arguments), this.mediaRecorder = null, this.isRecording = !1, this.error = null, this.mimeType = "", this.stream = null, this.recordingId = null, this.qsvWarning = null, this.
+    super(...arguments), this.mediaRecorder = null, this.isRecording = !1, this.error = null, this.mimeType = "", this.stream = null, this.recordingId = null, this.qsvWarning = null, this.trimmedVideoBlob = null;
   }
   setStreamAndStartRecording(Le) {
     this.stream = Le, this.startRecording();
@@ -161432,23 +161433,30 @@ class DeepsightVideoRecorder extends InterruptionHandler {
     }
   }
   async stopRecording(Le, Ye = !1) {
-    return new Promise((tt) => {
+    return new Promise((tt, nt) => {
       if (this.removeEventListeners(), this.mediaRecorder && this.isRecording) {
-        const
-        this.mediaRecorder.ondataavailable = (
-
+        const ft = [];
+        this.mediaRecorder.ondataavailable = (ht) => {
+          ht.data.size > 0 && ft.push(ht.data);
         }, this.mediaRecorder.onstop = async () => {
-          var
-
-
-
-
-
-          this.mimeType
+          var ht, gt;
+          try {
+            const At = new Blob(ft, { type: this.mimeType }), xt = await trimLastNSeconds(At, 10);
+            this.trimmedVideoBlob = xt;
+            const Et = MlWasmJSApi$1.encryptImage(
+              await toBase64(xt)
            );
-
+            if (MlWasmJSApi$1.ckvcks(await xt.arrayBuffer()), !Ye) {
+              const { recordingId: yt } = await uploadDeepsightVideo(
+                Et,
+                Le
+              );
+              this.recordingId = yt ?? null;
+            }
+            this.isRecording = !1, (gt = (ht = this.mediaRecorder) == null ? void 0 : ht.stream) == null || gt.getTracks().forEach((yt) => yt.stop()), tt();
+          } catch (At) {
+            this.isRecording = !1, this.error = `Recording stop failed: ${At}`, nt(At);
          }
-        this.isRecording = !1, (At = (gt = this.mediaRecorder) == null ? void 0 : gt.stream) == null || At.getTracks().forEach((xt) => xt.stop()), tt();
        }, this.mediaRecorder.stop(), this.isRecording = !1;
      } else
        tt();
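stopRecording now receives a reject handler, buffers the dataavailable chunks, and in onstop builds the full Blob, trims the last 10 seconds, stores the trimmed Blob, encrypts a base64 copy, checksums the bytes with ckvcks, uploads unless the second argument is set, and rejects on any failure. The sketch below restates that onstop handler with readable names; chunks, mimeType, token, skipUpload, resolve, and reject stand in for the bundle's minified locals, and this is illustrative rather than the shipped code:

// Hedged restatement of the new onstop handler (runs inside the class method, hence `this`).
mediaRecorder.onstop = async () => {
  try {
    const full = new Blob(chunks, { type: mimeType });
    const trimmed = await trimLastNSeconds(full, 10);        // keep only the last 10 seconds
    this.trimmedVideoBlob = trimmed;
    const encrypted = MlWasmJSApi$1.encryptImage(await toBase64(trimmed));
    MlWasmJSApi$1.ckvcks(await trimmed.arrayBuffer());        // checksum the trimmed bytes
    if (!skipUpload) {
      const { recordingId } = await uploadDeepsightVideo(encrypted, token);
      this.recordingId = recordingId ?? null;                 // undefined if the import call failed
    }
    this.isRecording = false;
    this.mediaRecorder?.stream?.getTracks().forEach((t) => t.stop()); // release the camera
    resolve();
  } catch (err) {
    this.isRecording = false;
    this.error = `Recording stop failed: ${err}`;
    reject(err);                                              // the promise now rejects instead of silently resolving
  }
};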
@@ -172636,12 +172644,12 @@ const faceFlowManager = new FaceFlowManager(), useFaceFlowState = () => {
   faceCoordinates: Le,
   sessionToken: Ye
 }) => {
-  var tt
+  var tt;
   try {
     faceFlowManager.incrementAttempts(), handleScreenEvent(Ye, eventScreenNames.authFaceUpload, !0);
-    const
-    await onboardingAnalytics.analyzeFrame(
-    const
+    const nt = Be.getBase64Image();
+    await onboardingAnalytics.analyzeFrame(nt), onboardingAnalytics.update(), MlWasmJSApi$1.reset();
+    const ft = await MlWasmJSApi$1.encryptImage(nt);
     addEvent({
       token: Ye,
       code: "captureAttemptFinished",
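The auth-face path now grabs the base64 frame once, runs onboardingAnalytics.analyzeFrame on it, refreshes the analytics state, resets the WASM API, and encrypts the same frame for upload. A compact restatement with readable names in place of nt/ft (illustrative only; `canvas` stands in for the bundle's Be):

// Hedged sketch of the reordered preprocessing inside the try block.
const frameBase64 = canvas.getBase64Image();
await onboardingAnalytics.analyzeFrame(frameBase64);
onboardingAnalytics.update();
MlWasmJSApi$1.reset();
const encryptedFrame = await MlWasmJSApi$1.encryptImage(frameBase64);
// encryptedFrame is later sent as the `image` field of the capture() call.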
@@ -172656,17 +172664,17 @@ const faceFlowManager = new FaceFlowManager(), useFaceFlowState = () => {
       selfieStatsAnalysisStatus: onboardingAnalytics.getAnalysisStatus(),
       motionStatus: onboardingAnalytics.getMotionStatus()
     });
-    const
+    const ht = await capture({
       type: "authFace",
       token: Ye,
-      image:
+      image: ft,
       authHint: faceFlowManager.authHint,
       encrypted: !0,
       faceCoordinates: Le,
       recordingId: (tt = faceFlowManager.deepsightVideoRecorder) == null ? void 0 : tt.recordingId,
       metadata: onboardingAnalytics.getMetadata()
     });
-    if (
+    if (ht.overallStatus !== "PASS") {
      if (faceFlowManager.getAttemptsLeft() === 0) {
        handleScreenEvent(Ye, eventScreenNames.authFaceUpload, !1), handleScreenEvent(Ye, eventScreenNames.authFaceError, !0), faceFlowManager.completeWithError({
          code: "NO_MORE_ATTEMPTS",
|
@@ -172675,19 +172683,19 @@ const faceFlowManager = new FaceFlowManager(), useFaceFlowState = () => {
|
|
|
172675
172683
|
});
|
|
172676
172684
|
return;
|
|
172677
172685
|
}
|
|
172678
|
-
throw
|
|
172686
|
+
throw ht.error.name === "SPOOF_ATTEMPT_DETECTED" && handleError(Ye, FACE_ERROR_CODES.SPOOF_ATTEMPT_DETECTED), ht.error.name === "FACE_TOO_DARK" && handleError(Ye, FACE_ERROR_CODES.FACE_TOO_DARK), ht.error.name === "LENSES_DETECTED" && handleError(Ye, FACE_ERROR_CODES.LENSES_DETECTED), ht.error.name === "FACE_MASK_DETECTED" && handleError(Ye, FACE_ERROR_CODES.FACE_MASK_DETECTED), ht.error.name === "CLOSED_EYES_DETECTED" && handleError(Ye, FACE_ERROR_CODES.CLOSED_EYES_DETECTED), ht.error.name === "HEAD_COVER_DETECTED" && handleError(Ye, FACE_ERROR_CODES.HEAD_COVER_DETECTED), ht.error.name === "FACE_NOT_FOUND" && handleError(Ye, FACE_ERROR_CODES.FACE_NOT_FOUND), ht.error.name === "FACE_CROPPING_FAILED" && handleError(Ye, FACE_ERROR_CODES.FACE_CROPPING_FAILED), ht.error.name === "FACE_TOO_SMALL" && handleError(Ye, FACE_ERROR_CODES.FACE_TOO_SMALL), ht.error.name === "FACE_TOO_BLURRY" && handleError(Ye, FACE_ERROR_CODES.FACE_TOO_BLURRY), ht.error.name === "BAD_PHOTO_QUALITY" && handleError(Ye, FACE_ERROR_CODES.BAD_PHOTO_QUALITY), ht.error.name === "PROCESSING_ERROR" && handleError(Ye, FACE_ERROR_CODES.PROCESSING_ERROR), ht.error.name === "BAD_REQUEST" && handleError(Ye, FACE_ERROR_CODES.BAD_REQUEST), ht.error.name === "SERVER" && handleError(Ye, FACE_ERROR_CODES.SERVER), ht.error.name === "USER_IS_NOT_RECOGNIZED" && handleError(Ye, FACE_ERROR_CODES.USER_IS_NOT_RECOGNIZED), ht.error.name === "NONEXISTENT_CUSTOMER" && (handleScreenEvent(Ye, eventScreenNames.authFaceUpload, !1), handleScreenEvent(Ye, eventScreenNames.authFaceError, !0), faceFlowManager.completeWithError({
|
|
172679
172687
|
code: "NONEXISTENT_CUSTOMER",
|
|
172680
172688
|
message: "Non-existent customer",
|
|
172681
172689
|
error: new Error("Non-existent customer")
|
|
172682
|
-
})),
|
|
172690
|
+
})), ht.error.name === "HINT_NOT_PROVIDED" && (handleScreenEvent(Ye, eventScreenNames.authFaceUpload, !1), handleScreenEvent(Ye, eventScreenNames.authFaceError, !0), faceFlowManager.completeWithError({
|
|
172683
172691
|
code: "HINT_NOT_PROVIDED",
|
|
172684
172692
|
message: "Hint not provided",
|
|
172685
172693
|
error: new Error("Hint not provided")
|
|
172686
|
-
})), new Error(
|
|
172694
|
+
})), new Error(ht.error.name);
|
|
172687
172695
|
}
|
|
172688
|
-
return
|
|
172689
|
-
} catch (
|
|
172690
|
-
throw
|
|
172696
|
+
return ht.imageBase64 = nt, ht.selfieEncryptedBase64 = ft, ht.metadata = onboardingAnalytics.getMetadata(), ht;
|
|
172697
|
+
} catch (nt) {
|
|
172698
|
+
throw nt instanceof Error && isKnownError(nt) ? nt : new Error(FACE_ERROR_CODES.SERVER);
|
|
172691
172699
|
}
|
|
172692
172700
|
};
|
|
172693
172701
|
class DetectionManager {
|
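The failure branch maps each ht.error.name onto the matching FACE_ERROR_CODES entry through a chain of short-circuit checks before throwing. The same mapping could be expressed as a table-driven lookup; the sketch below is an illustrative alternative written against the names visible in this hunk, not the package's code:

// Hedged sketch: table-driven equivalent of the repeated `ht.error.name === "X" && handleError(...)` chain.
const MAPPED_FACE_ERRORS = [
  "SPOOF_ATTEMPT_DETECTED", "FACE_TOO_DARK", "LENSES_DETECTED", "FACE_MASK_DETECTED",
  "CLOSED_EYES_DETECTED", "HEAD_COVER_DETECTED", "FACE_NOT_FOUND", "FACE_CROPPING_FAILED",
  "FACE_TOO_SMALL", "FACE_TOO_BLURRY", "BAD_PHOTO_QUALITY", "PROCESSING_ERROR",
  "BAD_REQUEST", "SERVER", "USER_IS_NOT_RECOGNIZED",
] as const;

function reportFaceError(token: string, errorName: string): void {
  if ((MAPPED_FACE_ERRORS as readonly string[]).includes(errorName)) {
    handleError(token, FACE_ERROR_CODES[errorName as keyof typeof FACE_ERROR_CODES]);
  }
}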