@incodetech/core 2.0.0-alpha.10 → 2.0.0-alpha.12

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
Files changed (33)
  1. package/dist/{OpenViduLogger-CQyDxBvM.esm.js → OpenViduLogger-CRbRNZA7.esm.js} +1 -1
  2. package/dist/OpenViduLogger-Dy5P806a.esm.js +3 -0
  3. package/dist/{warmup-CEJTfxQr.d.ts → StateMachine-pi8byl8C.d.ts} +4 -1
  4. package/dist/{addEvent-W0ORK0jT.esm.js → addEvent-BGKc_lHF.esm.js} +1 -1
  5. package/dist/deepsightLoader-B36_XZ7r.esm.js +25 -0
  6. package/dist/deepsightService-BWxcc4OC.esm.js +225 -0
  7. package/dist/email.d.ts +1 -1
  8. package/dist/email.esm.js +3 -3
  9. package/dist/{endpoints-BSTFaHYo.esm.js → endpoints-D9TGnxRK.esm.js} +966 -22
  10. package/dist/flow.d.ts +4 -303
  11. package/dist/flow.esm.js +4 -5
  12. package/dist/id-CJKLe8HS.esm.js +1818 -0
  13. package/dist/id.d.ts +6 -0
  14. package/dist/id.esm.js +8 -0
  15. package/dist/index-CbF_uI-x.d.ts +618 -0
  16. package/dist/index.d.ts +8 -3
  17. package/dist/index.esm.js +7 -3
  18. package/dist/{lib-Bu9XGMBW.esm.js → lib-BJoLTN_W.esm.js} +2 -2
  19. package/dist/phone.d.ts +1 -1
  20. package/dist/phone.esm.js +3 -3
  21. package/dist/recordingsRepository-D5MURoVB.esm.js +40 -0
  22. package/dist/selfie.d.ts +77 -317
  23. package/dist/selfie.esm.js +165 -62
  24. package/dist/{permissionServices-I6vX6DBy.esm.js → streamingEvents-B3hNanPl.esm.js} +34 -9
  25. package/dist/types-BpCrZLU6.d.ts +302 -0
  26. package/dist/types-DZbrbPgj.d.ts +335 -0
  27. package/package.json +6 -2
  28. package/dist/OpenViduLogger-BdPfiZO6.esm.js +0 -3
  29. package/dist/StateMachine-DRE1oH2B.d.ts +0 -2
  30. package/dist/types-iZi2rawo.d.ts +0 -5
  31. /package/dist/{Manager-BGfxEmyv.d.ts → Manager-BZUZTRPx.d.ts} +0 -0
  32. /package/dist/{chunk-C_Yo44FK.esm.js → chunk-FbsBJI8u.esm.js} +0 -0
  33. /package/dist/{xstate.esm-B_rda9yU.esm.js → xstate.esm-2hDiAXvZ.esm.js} +0 -0
@@ -1,5 +1,82 @@
1
- import { a as __toDynamicImportESM } from "./chunk-C_Yo44FK.esm.js";
1
+ import { a as __toDynamicImportESM } from "./chunk-FbsBJI8u.esm.js";
2
2
 
3
+ //#region ../infra/src/capabilities/platform.ts
4
+ /**
5
+ * Platform detection utilities for camera operations.
6
+ * These are pure functions with no external dependencies and can be used by all layers.
7
+ */
8
+ let uxType = null;
9
+ function isTouchDevice() {
10
+ return (typeof navigator !== "undefined" ? navigator.maxTouchPoints > 0 : false) || "ontouchstart" in (typeof document !== "undefined" ? document.documentElement : {});
11
+ }
12
+ function isTabletWithoutAndroid(userAgent) {
13
+ const isLinux = /Linux/i.test(userAgent);
14
+ const isChrome = /Chrome/i.test(userAgent) && !/Edge/i.test(userAgent);
15
+ return !/Mobile|Android/i.test(userAgent) && isLinux && isChrome && isTouchDevice();
16
+ }
17
+ function isIOS(userAgent) {
18
+ const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
19
+ const hasTouchPoints = typeof navigator !== "undefined" ? navigator.maxTouchPoints > 0 : false;
20
+ return /iPad|iPhone|iPod/.test(ua) || /Mac OS/.test(ua) && hasTouchPoints || /iPadOS/.test(ua);
21
+ }
22
+ function isAndroid(userAgent) {
23
+ const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
24
+ return /Android/i.test(ua);
25
+ }
26
+ function isDesktop(userAgent) {
27
+ if (uxType) return uxType === "desktop";
28
+ const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
29
+ return !(/Mobile|Android/i.test(ua) || isTabletWithoutAndroid(ua) || isIOS(ua));
30
+ }
31
+ function isSafari(userAgent) {
32
+ const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
33
+ return /Safari/i.test(ua) && !/Chrome|Chromium|Edge/i.test(ua);
34
+ }
35
+ const IPHONE_14_PLUS_DIMENSIONS = [
36
+ {
37
+ outerHeight: 852,
38
+ outerWidth: 393
39
+ },
40
+ {
41
+ outerHeight: 932,
42
+ outerWidth: 430
43
+ },
44
+ {
45
+ innerHeight: 631,
46
+ innerWidth: 375
47
+ },
48
+ {
49
+ innerHeight: 920,
50
+ innerWidth: 402
51
+ },
52
+ {
53
+ outerHeight: 874,
54
+ outerWidth: 402
55
+ },
56
+ {
57
+ innerHeight: 874,
58
+ innerWidth: 402
59
+ },
60
+ {
61
+ outerHeight: 912,
62
+ outerWidth: 420
63
+ },
64
+ {
65
+ outerHeight: 873,
66
+ outerWidth: 402
67
+ },
68
+ {
69
+ outerHeight: 956,
70
+ outerWidth: 440
71
+ }
72
+ ];
73
+ function isIPhone14OrHigher() {
74
+ if (typeof window === "undefined") return false;
75
+ const { outerHeight, outerWidth, innerHeight, innerWidth } = window;
76
+ return IPHONE_14_PLUS_DIMENSIONS.some((dims) => dims.outerHeight === outerHeight && dims.outerWidth === outerWidth || dims.innerHeight === innerHeight && dims.innerWidth === innerWidth);
77
+ }
78
+
79
+ //#endregion
3
80
  //#region ../infra/src/http/createApi.ts
4
81
  var FetchHttpError = class extends Error {
5
82
  constructor(status, statusText, url, method, headers, data) {
@@ -48,6 +125,56 @@ const parseResponse = async (response, parseType) => {
48
125
  return await response.text();
49
126
  };
50
127
  const DEFAULT_TIMEOUT = 3e4;
128
+ const requestWithXHR = (fullUrl, method, mergedHeaders, requestBody, timeout, signal, onUploadProgress) => {
129
+ return new Promise((resolve, reject) => {
130
+ const xhr = new XMLHttpRequest();
131
+ xhr.open(method, fullUrl, true);
132
+ Object.entries(mergedHeaders).forEach(([key, value]) => {
133
+ xhr.setRequestHeader(key, value);
134
+ });
135
+ xhr.timeout = timeout;
136
+ xhr.upload.onprogress = (event) => {
137
+ if (event.lengthComputable) {
138
+ const percentComplete = event.loaded / event.total * 100;
139
+ onUploadProgress(Math.round(percentComplete));
140
+ }
141
+ };
142
+ xhr.onload = () => {
143
+ const responseHeaders = {};
144
+ xhr.getAllResponseHeaders().split("\r\n").forEach((line) => {
145
+ const parts = line.split(": ");
146
+ if (parts.length === 2) responseHeaders[parts[0].toLowerCase()] = parts[1];
147
+ });
148
+ let data;
149
+ try {
150
+ if ((xhr.getResponseHeader("content-type") ?? "").includes("application/json")) data = JSON.parse(xhr.responseText);
151
+ else data = xhr.responseText;
152
+ } catch {
153
+ data = xhr.responseText;
154
+ }
155
+ if (xhr.status >= 200 && xhr.status < 300) resolve({
156
+ ok: true,
157
+ status: xhr.status,
158
+ statusText: xhr.statusText,
159
+ url: fullUrl,
160
+ headers: responseHeaders,
161
+ data
162
+ });
163
+ else reject(new FetchHttpError(xhr.status, xhr.statusText, fullUrl, method, responseHeaders, data));
164
+ };
165
+ xhr.onerror = () => {
166
+ reject(new FetchHttpError(0, "Network Error", fullUrl, method, {}, null));
167
+ };
168
+ xhr.ontimeout = () => {
169
+ reject(new FetchHttpError(0, "Request timeout", fullUrl, method, {}, null));
170
+ };
171
+ if (signal) signal.addEventListener("abort", () => {
172
+ xhr.abort();
173
+ reject(new FetchHttpError(0, "Request aborted", fullUrl, method, {}, null));
174
+ });
175
+ xhr.send(requestBody);
176
+ });
177
+ };
51
178
  const createApi = (config) => {
52
179
  const headers = {
53
180
  "Content-Type": "application/json",
@@ -62,22 +189,26 @@ const createApi = (config) => {
62
189
  const client$1 = {
63
190
  defaults,
64
191
  async request(requestConfig) {
65
- const { method = "GET", url, headers: headers$1 = {}, query, params, body, signal, timeout = config.timeout ?? DEFAULT_TIMEOUT, parse } = requestConfig;
192
+ const { method = "GET", url, headers: headers$1 = {}, query, params, body, signal, timeout = config.timeout ?? DEFAULT_TIMEOUT, parse, onUploadProgress } = requestConfig;
66
193
  const fullUrl = buildUrl(defaults.baseURL, url, params ?? query);
67
194
  const mergedHeaders = {
68
195
  ...defaults.headers,
69
196
  ...headers$1
70
197
  };
198
+ const requestBody = prepareBody(body);
199
+ let finalHeaders = mergedHeaders;
200
+ if (requestBody === null && (method === "POST" || method === "PUT" || method === "PATCH")) {
201
+ const { "Content-Type": _, ...headersWithoutContentType } = mergedHeaders;
202
+ finalHeaders = headersWithoutContentType;
203
+ }
204
+ if (onUploadProgress) {
205
+ if (requestBody !== null && typeof ReadableStream !== "undefined" && requestBody instanceof ReadableStream) throw new Error("Upload progress tracking is not supported for ReadableStream bodies");
206
+ return requestWithXHR(fullUrl, method, finalHeaders, requestBody, timeout, signal, onUploadProgress);
207
+ }
71
208
  const controller = new AbortController();
72
209
  const abortSignal = signal ?? controller.signal;
73
210
  const timeoutId = setTimeout(() => controller.abort(), timeout);
74
211
  try {
75
- const requestBody = prepareBody(body);
76
- let finalHeaders = mergedHeaders;
77
- if (requestBody === null && (method === "POST" || method === "PUT" || method === "PATCH")) {
78
- const { "Content-Type": _, ...headersWithoutContentType } = mergedHeaders;
79
- finalHeaders = headersWithoutContentType;
80
- }
81
212
  const response = await fetch(fullUrl, {
82
213
  method,
83
214
  headers: finalHeaders,
@@ -229,6 +360,10 @@ function createManager(options) {
229
360
 
230
361
  //#endregion
231
362
  //#region ../infra/src/media/camera.ts
363
+ /**
364
+ * Request camera access with specific constraints.
365
+ * Throws if the request fails - no automatic fallback.
366
+ */
232
367
  async function requestCameraAccess(constraints) {
233
368
  if (typeof navigator === "undefined" || !navigator.mediaDevices) throw new Error("MediaDevices API not available");
234
369
  return navigator.mediaDevices.getUserMedia(constraints);
@@ -236,6 +371,13 @@ async function requestCameraAccess(constraints) {
236
371
  function stopCameraStream(stream) {
237
372
  for (const track of stream.getTracks()) track.stop();
238
373
  }
374
+ async function enumerateVideoDevices() {
375
+ if (typeof navigator === "undefined" || !navigator.mediaDevices) return [];
376
+ return (await navigator.mediaDevices.enumerateDevices()).filter((d) => d.kind === "videoinput");
377
+ }
378
+ async function applyTrackConstraints(track, constraints) {
379
+ await track.applyConstraints(constraints);
380
+ }
239
381
 
240
382
  //#endregion
241
383
  //#region ../infra/src/media/canvas.ts
@@ -472,14 +614,20 @@ var StreamCanvasCapture = class {
472
614
  this.video.playsInline = true;
473
615
  this.video.muted = true;
474
616
  const settings = stream.getVideoTracks()[0]?.getSettings();
475
- const width = options?.width ?? settings?.width ?? 640;
476
- const height = options?.height ?? settings?.height ?? 480;
617
+ const initialWidth = options?.width ?? settings?.width ?? 1280;
618
+ const initialHeight = options?.height ?? settings?.height ?? 720;
477
619
  this.canvas = document.createElement("canvas");
478
- this.canvas.width = width;
479
- this.canvas.height = height;
620
+ this.canvas.width = initialWidth;
621
+ this.canvas.height = initialHeight;
480
622
  this.ctx = this.canvas.getContext("2d", { willReadFrequently: true });
481
623
  const fps = options?.fps ?? 10;
482
624
  const intervalMs = fps > 0 ? Math.max(16, Math.floor(1e3 / fps)) : 0;
625
+ this.video.addEventListener("loadedmetadata", () => {
626
+ if (this.video.videoWidth > 0 && this.video.videoHeight > 0) {
627
+ this.canvas.width = this.video.videoWidth;
628
+ this.canvas.height = this.video.videoHeight;
629
+ }
630
+ });
483
631
  try {
484
632
  this.video.play();
485
633
  } catch {}
@@ -552,8 +700,15 @@ var StreamCanvasCapture = class {
552
700
  tick() {
553
701
  if (!this.ctx) return;
554
702
  if (this.video.readyState < HTMLMediaElement.HAVE_CURRENT_DATA) return;
703
+ const videoWidth = this.video.videoWidth;
704
+ const videoHeight = this.video.videoHeight;
705
+ if (videoWidth === 0 || videoHeight === 0) return;
706
+ if (this.canvas.width !== videoWidth || this.canvas.height !== videoHeight) {
707
+ this.canvas.width = videoWidth;
708
+ this.canvas.height = videoHeight;
709
+ }
555
710
  try {
556
- this.ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height);
711
+ this.ctx.drawImage(this.video, 0, 0);
557
712
  this.hasFrame = true;
558
713
  } catch {
559
714
  this.hasFrame = false;
@@ -603,17 +758,528 @@ var StreamCanvasProcessingSession = class {
603
758
  }
604
759
  };
605
760
 
761
+ //#endregion
762
+ //#region ../infra/src/media/VideoTrimmer.ts
763
+ async function getDurationBySeek(videoBlob) {
764
+ const video = document.createElement("video");
765
+ video.preload = "metadata";
766
+ video.src = URL.createObjectURL(videoBlob);
767
+ try {
768
+ if (!Number.isFinite(video.duration)) {
769
+ video.currentTime = Number.MAX_SAFE_INTEGER;
770
+ await new Promise((resolve) => {
771
+ const onDurationChange = () => {
772
+ if (Number.isFinite(video.duration)) {
773
+ video.removeEventListener("durationchange", onDurationChange);
774
+ video.removeEventListener("timeupdate", onTimeUpdate);
775
+ resolve(video.duration);
776
+ }
777
+ };
778
+ const onTimeUpdate = () => {
779
+ if (Number.isFinite(video.duration)) {
780
+ video.removeEventListener("timeupdate", onTimeUpdate);
781
+ video.removeEventListener("durationchange", onDurationChange);
782
+ resolve(video.duration);
783
+ }
784
+ };
785
+ video.addEventListener("durationchange", onDurationChange);
786
+ video.addEventListener("timeupdate", onTimeUpdate);
787
+ });
788
+ }
789
+ const duration = video.duration;
790
+ return Number.isFinite(duration) ? duration : null;
791
+ } finally {
792
+ URL.revokeObjectURL(video.src);
793
+ video.src = "";
794
+ }
795
+ }
796
+ async function trimLastNSecondsUsingPlayback(videoBlob, seconds) {
797
+ console.log("[VideoTrimmer] Starting playback-based trimming...");
798
+ const video = document.createElement("video");
799
+ video.preload = "metadata";
800
+ video.playsInline = true;
801
+ video.muted = true;
802
+ const videoURL = URL.createObjectURL(videoBlob);
803
+ video.src = videoURL;
804
+ const duration = await getDurationBySeek(videoBlob);
805
+ console.log("[VideoTrimmer] Video duration:", duration);
806
+ if (!duration || duration < seconds) {
807
+ console.log("[VideoTrimmer] Video too short, returning original");
808
+ URL.revokeObjectURL(videoURL);
809
+ return videoBlob;
810
+ }
811
+ const startTime = Math.max(0, Math.floor(duration) - seconds);
812
+ console.log("[VideoTrimmer] Trimming from", startTime, "to", duration);
813
+ await new Promise((resolve) => {
814
+ if (video.readyState >= 2) resolve();
815
+ else video.addEventListener("loadedmetadata", () => resolve(), { once: true });
816
+ });
817
+ const canvas = document.createElement("canvas");
818
+ canvas.width = 230;
819
+ canvas.height = 320;
820
+ const captureFps = isAndroid() ? 15 : 24;
821
+ const stream = canvas.captureStream(captureFps);
822
+ const ctx = canvas.getContext("2d");
823
+ video.currentTime = startTime;
824
+ await new Promise((resolve) => {
825
+ video.addEventListener("seeked", () => resolve(), { once: true });
826
+ });
827
+ ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
828
+ const mimeType = videoBlob.type || (MediaRecorder.isTypeSupported("video/webm") ? "video/webm" : "video/mp4");
829
+ const mediaRecorder = new MediaRecorder(stream.clone(), {
830
+ mimeType,
831
+ videoBitsPerSecond: 5e5,
832
+ bitsPerSecond: 5e5
833
+ });
834
+ const chunks = [];
835
+ mediaRecorder.ondataavailable = (event) => {
836
+ if (event.data.size > 0) chunks.push(event.data);
837
+ };
838
+ const recordingPromise = new Promise((resolve) => {
839
+ mediaRecorder.onstop = () => {
840
+ console.log("[VideoTrimmer] Recording stopped, chunks:", chunks.length);
841
+ const trimmedBlob = new Blob(chunks, { type: mimeType });
842
+ URL.revokeObjectURL(videoURL);
843
+ mediaRecorder.stream?.getTracks().forEach((track) => track.stop());
844
+ stream.getTracks().forEach((track) => track.stop());
845
+ video.src = "";
846
+ console.log("[VideoTrimmer] Playback-based trimming complete, size:", trimmedBlob.size);
847
+ resolve(trimmedBlob);
848
+ };
849
+ });
850
+ video.addEventListener("play", () => {
851
+ console.log("[VideoTrimmer] Video playing, starting recording...");
852
+ function drawVideo() {
853
+ if (video.currentTime >= duration) {
854
+ console.log("[VideoTrimmer] Reached end, stopping...");
855
+ mediaRecorder.stop();
856
+ video.pause();
857
+ return;
858
+ }
859
+ ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
860
+ requestAnimationFrame(drawVideo);
861
+ }
862
+ drawVideo();
863
+ setTimeout(() => {
864
+ console.log("[VideoTrimmer] Starting MediaRecorder...");
865
+ mediaRecorder.start(100);
866
+ }, 500);
867
+ });
868
+ console.log("[VideoTrimmer] Starting video playback...");
869
+ video.play().catch((err) => {
870
+ console.error("[VideoTrimmer] Play failed:", err);
871
+ URL.revokeObjectURL(videoURL);
872
+ });
873
+ return await recordingPromise;
874
+ }
875
+ async function trimLastNSeconds(videoBlob, seconds) {
876
+ console.log(`[VideoTrimmer] trimLastNSeconds called (trim to ${seconds}s from ${videoBlob.size} bytes)`);
877
+ console.log("[VideoTrimmer] Using playback-based trimming");
878
+ return await trimLastNSecondsUsingPlayback(videoBlob, seconds);
879
+ }
880
+
881
+ //#endregion
882
+ //#region ../infra/src/providers/browser/DeepsightRecordingProvider.ts
883
+ function toBase64(videoBlob) {
884
+ return new Promise((resolve, reject) => {
885
+ const reader = new FileReader();
886
+ reader.onloadend = () => {
887
+ const base64 = reader.result.split(",")[1];
888
+ resolve(base64);
889
+ };
890
+ reader.onerror = () => reject(reader.error ?? /* @__PURE__ */ new Error("FileReader error"));
891
+ reader.readAsDataURL(videoBlob);
892
+ });
893
+ }
894
+ function getSupportedMediaRecorderMimeType() {
895
+ const possibleTypes = isIOS() ? [
896
+ "video/mp4",
897
+ "video/webm",
898
+ "video/webm;codecs=vp9",
899
+ "video/webm;codecs=vp8"
900
+ ] : [
901
+ "video/webm",
902
+ "video/webm;codecs=vp9",
903
+ "video/webm;codecs=vp8",
904
+ "video/mp4"
905
+ ];
906
+ for (const type of possibleTypes) if (MediaRecorder.isTypeSupported(type)) return type;
907
+ return "";
908
+ }
909
+ function getAdaptiveMediaRecorderOptions() {
910
+ const mimeType = getSupportedMediaRecorderMimeType();
911
+ if (isIOS()) return {
912
+ mimeType,
913
+ videoBitsPerSecond: 1e6,
914
+ bitsPerSecond: 1e6
915
+ };
916
+ return {
917
+ mimeType,
918
+ videoBitsPerSecond: 5e5,
919
+ bitsPerSecond: 5e5
920
+ };
921
+ }
922
+ var DeepsightRecordingProvider = class {
923
+ constructor() {
924
+ this.mediaRecorder = null;
925
+ this._isRecording = false;
926
+ this._hasError = false;
927
+ this._error = null;
928
+ this.mimeType = "";
929
+ this.stream = null;
930
+ this.pauseRecordingBound = this.pauseRecording.bind(this);
931
+ }
932
+ get isRecording() {
933
+ return this._isRecording;
934
+ }
935
+ get hasError() {
936
+ return this._hasError;
937
+ }
938
+ get error() {
939
+ return this._error;
940
+ }
941
+ startRecording(stream) {
942
+ this.reset();
943
+ this.registerEventListeners();
944
+ this.stream = stream;
945
+ try {
946
+ const options = getAdaptiveMediaRecorderOptions();
947
+ this.mimeType = options.mimeType;
948
+ const recorder = new MediaRecorder(stream.clone(), options);
949
+ recorder.onerror = (event) => {
950
+ this._error = `Recording error: ${event}`;
951
+ this._isRecording = false;
952
+ this._hasError = true;
953
+ };
954
+ recorder.start();
955
+ this.mediaRecorder = recorder;
956
+ this._isRecording = true;
957
+ this._error = null;
958
+ this._hasError = false;
959
+ } catch (err) {
960
+ console.error(`Failed to start recording: ${err}`);
961
+ this._error = `Failed to start recording: ${err}`;
962
+ this._hasError = true;
963
+ }
964
+ }
965
+ async stopRecording(trimSeconds, encrypt, generateChecksum) {
966
+ const recorder = this.mediaRecorder;
967
+ return new Promise((resolve, reject) => {
968
+ this.removeEventListeners();
969
+ if (recorder && this._isRecording) {
970
+ const chunks = [];
971
+ recorder.ondataavailable = (event) => {
972
+ if (event.data.size > 0) chunks.push(event.data);
973
+ };
974
+ recorder.onstop = async () => {
975
+ try {
976
+ const trimmedVideo = await trimLastNSeconds(new Blob(chunks, { type: this.mimeType }), trimSeconds);
977
+ const encryptedVideoBase64 = encrypt(await toBase64(trimmedVideo));
978
+ generateChecksum(await trimmedVideo.arrayBuffer());
979
+ this._isRecording = false;
980
+ resolve({
981
+ trimmedBlob: trimmedVideo,
982
+ encryptedVideo: encryptedVideoBase64
983
+ });
984
+ } catch (error) {
985
+ this._isRecording = false;
986
+ this._error = `Recording stop failed: ${error}`;
987
+ this._hasError = true;
988
+ reject(error);
989
+ }
990
+ };
991
+ recorder.stop();
992
+ this._isRecording = false;
993
+ } else resolve({
994
+ trimmedBlob: new Blob([], { type: this.mimeType }),
995
+ encryptedVideo: ""
996
+ });
997
+ recorder?.stream?.getTracks().forEach((track) => track.stop());
998
+ });
999
+ }
1000
+ reset() {
1001
+ this._isRecording = false;
1002
+ this._error = null;
1003
+ this._hasError = false;
1004
+ }
1005
+ pauseRecording() {
1006
+ if (!this._isRecording) return;
1007
+ if (this.mediaRecorder?.state === "recording") try {
1008
+ this.mediaRecorder.pause();
1009
+ } catch {}
1010
+ }
1011
+ registerEventListeners() {
1012
+ document.addEventListener("visibilitychange", this.pauseRecordingBound);
1013
+ }
1014
+ removeEventListeners() {
1015
+ document.removeEventListener("visibilitychange", this.pauseRecordingBound);
1016
+ }
1017
+ };
1018
+
1019
+ //#endregion
1020
+ //#region ../infra/src/providers/browser/MotionSensorProvider.ts
1021
+ var MotionSensorProvider = class {
1022
+ constructor(thresholds) {
1023
+ this._isRunning = false;
1024
+ this._hasPermission = true;
1025
+ this.acl = null;
1026
+ this.gyro = null;
1027
+ this.minNumberOfFrames = 3;
1028
+ this.maxFrequency = 60;
1029
+ this.emptyParams = {
1030
+ meanX: 0,
1031
+ meanY: 0,
1032
+ meanZ: 0,
1033
+ m2X: 0,
1034
+ m2Y: 0,
1035
+ m2Z: 0,
1036
+ cumulativeAbsErrorX: 0,
1037
+ cumulativeAbsErrorY: 0,
1038
+ cumulativeAbsErrorZ: 0,
1039
+ ptsNum: 0
1040
+ };
1041
+ this.paramsAcc = { ...this.emptyParams };
1042
+ this.paramsGyro = { ...this.emptyParams };
1043
+ this.paramsAccGrOld = { ...this.emptyParams };
1044
+ this.paramsAccOld = { ...this.emptyParams };
1045
+ this.paramsGyroscopeOld = { ...this.emptyParams };
1046
+ this.paramsOrientationOld = { ...this.emptyParams };
1047
+ this.deviceMotionListener = (event) => {
1048
+ if (event.accelerationIncludingGravity) {
1049
+ const accGrVal = {
1050
+ x: event.accelerationIncludingGravity.x,
1051
+ y: event.accelerationIncludingGravity.y,
1052
+ z: event.accelerationIncludingGravity.z
1053
+ };
1054
+ if (this.checkFields(accGrVal)) {
1055
+ this.paramsAccGrOld.ptsNum += 1;
1056
+ this.updateParams(this.paramsAccGrOld, accGrVal);
1057
+ }
1058
+ }
1059
+ if (event.acceleration) {
1060
+ const accVal = {
1061
+ x: event.acceleration.x,
1062
+ y: event.acceleration.y,
1063
+ z: event.acceleration.z
1064
+ };
1065
+ if (this.checkFields(accVal)) {
1066
+ this.paramsAccOld.ptsNum += 1;
1067
+ this.updateParams(this.paramsAccOld, accVal);
1068
+ }
1069
+ }
1070
+ if (event.rotationRate) {
1071
+ const gyroVal = {
1072
+ x: event.rotationRate.alpha,
1073
+ y: event.rotationRate.beta,
1074
+ z: event.rotationRate.gamma
1075
+ };
1076
+ if (this.checkFields(gyroVal)) {
1077
+ this.paramsGyroscopeOld.ptsNum += 1;
1078
+ this.updateParams(this.paramsGyroscopeOld, gyroVal);
1079
+ }
1080
+ }
1081
+ };
1082
+ this.deviceOrientationListener = (event) => {
1083
+ const orientationVal = {
1084
+ x: event.alpha,
1085
+ y: event.beta,
1086
+ z: event.gamma
1087
+ };
1088
+ if (this.checkFields(orientationVal)) {
1089
+ this.paramsOrientationOld.ptsNum += 1;
1090
+ this.updateParams(this.paramsOrientationOld, orientationVal);
1091
+ }
1092
+ };
1093
+ this.thresholds = {
1094
+ accThreshold: thresholds?.accThreshold ?? .3,
1095
+ gyroThreshold: thresholds?.gyroThreshold ?? .3,
1096
+ accGrOldThreshold: thresholds?.accGrOldThreshold ?? .3,
1097
+ accOldThreshold: thresholds?.accOldThreshold ?? .3,
1098
+ gyroscopeOldThreshold: thresholds?.gyroscopeOldThreshold ?? 10,
1099
+ orientationOldThreshold: thresholds?.orientationOldThreshold ?? 10,
1100
+ maeAccThreshold: thresholds?.maeAccThreshold ?? .3,
1101
+ maeGyroThreshold: thresholds?.maeGyroThreshold ?? .3,
1102
+ maeAccGrOldThreshold: thresholds?.maeAccGrOldThreshold ?? .3,
1103
+ maeAccOldThreshold: thresholds?.maeAccOldThreshold ?? .3,
1104
+ maeGyroscopeOldThreshold: thresholds?.maeGyroscopeOldThreshold ?? 10,
1105
+ maeOrientationOldThreshold: thresholds?.maeOrientationOldThreshold ?? 10
1106
+ };
1107
+ }
1108
+ get isRunning() {
1109
+ return this._isRunning;
1110
+ }
1111
+ get hasPermission() {
1112
+ return this._hasPermission;
1113
+ }
1114
+ async requestPermission() {
1115
+ if (typeof DeviceMotionEvent !== "undefined" && typeof DeviceMotionEvent.requestPermission === "function") {
1116
+ sessionStorage.setItem("motionSensorsPermissionsRequested", String(true));
1117
+ try {
1118
+ if (await DeviceMotionEvent.requestPermission() !== "granted") {
1119
+ this._hasPermission = false;
1120
+ return "denied";
1121
+ }
1122
+ return "granted";
1123
+ } catch {
1124
+ this._hasPermission = false;
1125
+ return "denied";
1126
+ }
1127
+ }
1128
+ return "not-required";
1129
+ }
1130
+ initializeAccelerometer() {
1131
+ if (!("Accelerometer" in window)) return;
1132
+ try {
1133
+ const AccelerometerConstructor = window.Accelerometer;
1134
+ if (AccelerometerConstructor) {
1135
+ this.acl = new AccelerometerConstructor({ frequency: this.maxFrequency });
1136
+ this.acl.addEventListener("reading", () => this.updateAcc());
1137
+ }
1138
+ } catch {}
1139
+ }
1140
+ initializeGyroscope() {
1141
+ if (!("Gyroscope" in window)) return;
1142
+ try {
1143
+ const GyroscopeConstructor = window.Gyroscope;
1144
+ if (GyroscopeConstructor) {
1145
+ this.gyro = new GyroscopeConstructor({ frequency: this.maxFrequency });
1146
+ this.gyro.addEventListener("reading", () => this.updateGyro());
1147
+ }
1148
+ } catch {}
1149
+ }
1150
+ async start() {
1151
+ if (this._isRunning || !this._hasPermission) return;
1152
+ try {
1153
+ this.initializeAccelerometer();
1154
+ if (this.acl) this.acl.start();
1155
+ } catch {}
1156
+ try {
1157
+ this.initializeGyroscope();
1158
+ if (this.gyro) this.gyro.start();
1159
+ } catch {}
1160
+ window.addEventListener("devicemotion", this.deviceMotionListener);
1161
+ window.addEventListener("deviceorientation", this.deviceOrientationListener);
1162
+ this._isRunning = true;
1163
+ }
1164
+ stop() {
1165
+ this._isRunning = false;
1166
+ if (this.acl) this.acl.stop();
1167
+ if (this.gyro) this.gyro.stop();
1168
+ window.removeEventListener("devicemotion", this.deviceMotionListener);
1169
+ window.removeEventListener("deviceorientation", this.deviceOrientationListener);
1170
+ this.paramsAcc = { ...this.emptyParams };
1171
+ this.paramsGyro = { ...this.emptyParams };
1172
+ this.paramsAccGrOld = { ...this.emptyParams };
1173
+ this.paramsAccOld = { ...this.emptyParams };
1174
+ this.paramsGyroscopeOld = { ...this.emptyParams };
1175
+ this.paramsOrientationOld = { ...this.emptyParams };
1176
+ }
1177
+ updateAcc() {
1178
+ if (!this._isRunning || !this.acl) return;
1179
+ this.paramsAcc.ptsNum += 1;
1180
+ const val = {
1181
+ x: this.acl.x,
1182
+ y: this.acl.y,
1183
+ z: this.acl.z
1184
+ };
1185
+ if (this.checkFields(val)) this.updateParams(this.paramsAcc, val);
1186
+ }
1187
+ updateGyro() {
1188
+ if (!this._isRunning || !this.gyro) return;
1189
+ this.paramsGyro.ptsNum += 1;
1190
+ const val = {
1191
+ x: this.gyro.x,
1192
+ y: this.gyro.y,
1193
+ z: this.gyro.z
1194
+ };
1195
+ if (this.checkFields(val)) this.updateParams(this.paramsGyro, val);
1196
+ }
1197
+ check() {
1198
+ if (!this._hasPermission) return "UNCLEAR";
1199
+ const result = this.checkStability();
1200
+ return result === null ? "UNCLEAR" : result ? "FAIL" : "PASS";
1201
+ }
1202
+ checkStability() {
1203
+ if (!this._isRunning) return null;
1204
+ const filteredStdChecks = [
1205
+ this.gyro ? this.isBelowStdThreshold(this.paramsGyro, this.thresholds.gyroThreshold) : null,
1206
+ this.acl ? this.isBelowStdThreshold(this.paramsAcc, this.thresholds.accThreshold) : null,
1207
+ this.isBelowStdThreshold(this.paramsAccGrOld, this.thresholds.accGrOldThreshold),
1208
+ this.isBelowStdThreshold(this.paramsAccOld, this.thresholds.accOldThreshold),
1209
+ this.isBelowStdThreshold(this.paramsGyroscopeOld, this.thresholds.gyroscopeOldThreshold),
1210
+ this.isBelowStdThreshold(this.paramsOrientationOld, this.thresholds.orientationOldThreshold)
1211
+ ].filter((check) => check !== null);
1212
+ const finalStdCheck = filteredStdChecks.length > 0 ? filteredStdChecks.every((check) => check) : null;
1213
+ const filteredMaeChecks = [
1214
+ this.gyro ? this.isBelowMaeThreshold(this.paramsGyro, this.thresholds.maeGyroThreshold) : null,
1215
+ this.acl ? this.isBelowMaeThreshold(this.paramsAcc, this.thresholds.maeAccThreshold) : null,
1216
+ this.isBelowMaeThreshold(this.paramsAccGrOld, this.thresholds.maeAccGrOldThreshold),
1217
+ this.isBelowMaeThreshold(this.paramsAccOld, this.thresholds.maeAccOldThreshold),
1218
+ this.isBelowMaeThreshold(this.paramsGyroscopeOld, this.thresholds.maeGyroscopeOldThreshold),
1219
+ this.isBelowMaeThreshold(this.paramsOrientationOld, this.thresholds.maeOrientationOldThreshold)
1220
+ ].filter((check) => check !== null);
1221
+ const finalMaeCheck = filteredMaeChecks.length > 0 ? filteredMaeChecks.every((check) => check) : null;
1222
+ return finalStdCheck === null ? finalMaeCheck === null ? null : finalMaeCheck : finalMaeCheck !== null ? finalStdCheck && finalMaeCheck : finalStdCheck;
1223
+ }
1224
+ updateParams(params, val) {
1225
+ const deltaX = val.x - params.meanX;
1226
+ const deltaY = val.y - params.meanY;
1227
+ const deltaZ = val.z - params.meanZ;
1228
+ params.meanX += deltaX / params.ptsNum;
1229
+ params.meanY += deltaY / params.ptsNum;
1230
+ params.meanZ += deltaZ / params.ptsNum;
1231
+ params.m2X += deltaX * (val.x - params.meanX);
1232
+ params.m2Y += deltaY * (val.y - params.meanY);
1233
+ params.m2Z += deltaZ * (val.z - params.meanZ);
1234
+ params.cumulativeAbsErrorX += Math.abs(val.x - params.meanX);
1235
+ params.cumulativeAbsErrorY += Math.abs(val.y - params.meanY);
1236
+ params.cumulativeAbsErrorZ += Math.abs(val.z - params.meanZ);
1237
+ }
1238
+ calculateStd(params) {
1239
+ const variance = {
1240
+ x: params.m2X / params.ptsNum,
1241
+ y: params.m2Y / params.ptsNum,
1242
+ z: params.m2Z / params.ptsNum
1243
+ };
1244
+ return {
1245
+ x: Math.sqrt(variance.x),
1246
+ y: Math.sqrt(variance.y),
1247
+ z: Math.sqrt(variance.z)
1248
+ };
1249
+ }
1250
+ calculateMae(params) {
1251
+ return {
1252
+ x: params.cumulativeAbsErrorX / params.ptsNum,
1253
+ y: params.cumulativeAbsErrorY / params.ptsNum,
1254
+ z: params.cumulativeAbsErrorZ / params.ptsNum
1255
+ };
1256
+ }
1257
+ checkFields(point) {
1258
+ return point.x !== null && point.y !== null && point.z !== null;
1259
+ }
1260
+ isBelowStdThreshold(params, threshold) {
1261
+ if (params.ptsNum < this.minNumberOfFrames) return null;
1262
+ const std = this.calculateStd(params);
1263
+ return std.x < threshold && std.y < threshold && std.z < threshold;
1264
+ }
1265
+ isBelowMaeThreshold(params, threshold) {
1266
+ if (params.ptsNum < this.minNumberOfFrames) return null;
1267
+ const mae = this.calculateMae(params);
1268
+ return mae.x < threshold && mae.y < threshold && mae.z < threshold;
1269
+ }
1270
+ };
1271
+
606
1272
  //#endregion
607
1273
  //#region ../infra/src/providers/browser/openviduLazy.ts
608
1274
  let openViduImport;
609
1275
  async function enableProdModeBeforeLoad() {
610
- const { OpenViduLogger } = await import("./OpenViduLogger-BdPfiZO6.esm.js").then(__toDynamicImportESM(1));
1276
+ const { OpenViduLogger } = await import("./OpenViduLogger-Dy5P806a.esm.js").then(__toDynamicImportESM(1));
611
1277
  OpenViduLogger.getInstance().enableProdMode();
612
1278
  }
613
1279
  async function loadOpenVidu() {
614
1280
  if (openViduImport) return openViduImport;
615
1281
  await enableProdModeBeforeLoad();
616
- openViduImport = import("./lib-Bu9XGMBW.esm.js").then(__toDynamicImportESM(1));
1282
+ openViduImport = import("./lib-BJoLTN_W.esm.js").then(__toDynamicImportESM(1));
617
1283
  return openViduImport;
618
1284
  }
619
1285
 
@@ -701,6 +1367,28 @@ var OpenViduRecordingProvider = class {
701
1367
  }
702
1368
  };
703
1369
 
1370
+ //#endregion
1371
+ //#region ../infra/src/providers/browser/VisibilityProvider.ts
1372
+ var VisibilityProvider = class {
1373
+ constructor() {
1374
+ this._wasBackgrounded = false;
1375
+ this.visibilityChangeHandler = this.onVisibilityChange.bind(this);
1376
+ document.addEventListener("visibilitychange", this.visibilityChangeHandler);
1377
+ }
1378
+ get wasBackgrounded() {
1379
+ return this._wasBackgrounded;
1380
+ }
1381
+ reset() {
1382
+ this._wasBackgrounded = false;
1383
+ }
1384
+ cleanup() {
1385
+ document.removeEventListener("visibilitychange", this.visibilityChangeHandler);
1386
+ }
1387
+ onVisibilityChange() {
1388
+ if (document.hidden) this._wasBackgrounded = true;
1389
+ }
1390
+ };
1391
+
704
1392
  //#endregion
705
1393
  //#region ../infra/src/wasm/IdCaptureModelType.ts
706
1394
  let IdCaptureModelType = /* @__PURE__ */ function(IdCaptureModelType$1) {
@@ -993,9 +1681,9 @@ var MlWasmJSApi = class MlWasmJSApi {
993
1681
  this.checkWasmInitialization("Unable to set geometry params, cpp API hasn't been initialized");
994
1682
  this.idCaptureWasmApi.setIdCaptureGeometryParams(this.pipelineTypeToWasmEnum(type), areaDown, areaUp, areaIOSPassportUp, areaIOSPassportDown, widthIOSUp, widthIOSDown, widthDown, widthUp, windowOuterWidth, windowOuterHeight, windowInnerWidth, windowInnerHeight);
995
1683
  }
996
- async setIdCaptureConfigParams(type, isFixedMask, isIPhone14OrHigher, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled) {
1684
+ async setIdCaptureConfigParams(type, isFixedMask, isIPhone14OrHigher$1, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled) {
997
1685
  this.checkWasmInitialization("Unable to set config params, cpp API hasn't been initialized");
998
- this.idCaptureWasmApi.setIdCaptureConfigParams(this.pipelineTypeToWasmEnum(type), isFixedMask, isIPhone14OrHigher, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled);
1686
+ this.idCaptureWasmApi.setIdCaptureConfigParams(this.pipelineTypeToWasmEnum(type), isFixedMask, isIPhone14OrHigher$1, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled);
999
1687
  }
1000
1688
  setIdCaptureModelType(pipelineType, modelType) {
1001
1689
  this.checkWasmInitialization("Unable to set model type, cpp API hasn't been initialized");
@@ -1129,6 +1817,10 @@ var MlWasmJSApi = class MlWasmJSApi {
1129
1817
  this.checkWasmInitialization("Unable to pc, cpp API hasn't been initialized");
1130
1818
  await this.wasmCallWrapper(this.utilityApi.pc.bind(this.utilityApi), [deviceId]);
1131
1819
  }
1820
+ ckvcks(data) {
1821
+ this.checkWasmInitialization("Unable to ckvcks, cpp API hasn't been initialized");
1822
+ this.utilityApi.ckvcks(data);
1823
+ }
1132
1824
  pipelineTypeToWasmEnum(type) {
1133
1825
  switch (type) {
1134
1826
  case WasmPipelineType.IdBlurGlarePipeline: return this.wasmModule.PipelineType.IdBlurGlarePipeline;
@@ -1423,12 +2115,14 @@ var BaseWasmProvider = class {
1423
2115
  /**
1424
2116
  * Processes a frame through the WASM pipeline
1425
2117
  * @param image - Image data to process
2118
+ * @returns The pipeline result (type depends on pipeline - WASM returns any)
1426
2119
  */
1427
- async processFrame(image) {
2120
+ async processFrameWasm(image) {
1428
2121
  this.ensureInitialized();
1429
2122
  const pipelineType = this.getPipelineType();
1430
2123
  await mlWasmJSApi_default.allocateImageBuffers(image.width, image.height);
1431
- await mlWasmJSApi_default.process(image, pipelineType);
2124
+ await mlWasmJSApi_default.handleDetectionCallAndUpdateState(pipelineType);
2125
+ return await mlWasmJSApi_default.process(image, pipelineType);
1432
2126
  }
1433
2127
  /**
1434
2128
  * Resets the pipeline to its initial state.
@@ -1474,7 +2168,7 @@ var FaceDetectionProvider = class extends BaseWasmProvider {
1474
2168
  }
1475
2169
  async processFrame(image) {
1476
2170
  this.currentFrame = image;
1477
- await super.processFrame(image);
2171
+ await this.processFrameWasm(image);
1478
2172
  }
1479
2173
  async initialize(config) {
1480
2174
  await this.initializeBase(config, "selfie");
@@ -1605,6 +2299,151 @@ var FaceDetectionProvider = class extends BaseWasmProvider {
1605
2299
  }
1606
2300
  };
1607
2301
 
2302
+ //#endregion
2303
+ //#region ../infra/src/providers/wasm/IdCaptureProvider.ts
2304
+ var IdCaptureProvider = class extends BaseWasmProvider {
2305
+ constructor() {
2306
+ super(WasmPipelineType.IdBlurGlarePipeline);
2307
+ this.lastProcessResult = null;
2308
+ this.capturedCanvas = null;
2309
+ this.originalCapturedCanvas = null;
2310
+ }
2311
+ async initialize(config) {
2312
+ await this.initializeBase(config, "idCapture");
2313
+ }
2314
+ setCallbacks(callbacks) {
2315
+ this.ensureInitialized();
2316
+ const onCaptureWrapper = () => {
2317
+ callbacks.onCapture?.();
2318
+ };
2319
+ mlWasmJSApi_default.setIdCaptureCallbacks(this.getPipelineType(), callbacks.onFarAway ?? (() => {}), callbacks.onDetectionStarted ?? (() => {}), callbacks.onMaskChange ? (show, mask, top, orientation) => callbacks.onMaskChange?.(show, mask, top, orientation) : () => {}, callbacks.onBlur ?? (() => {}), callbacks.onGlare ?? (() => {}), callbacks.onCapturing ?? (() => {}), callbacks.onCapture ? onCaptureWrapper : () => {}, callbacks.onBestFrame ? (blur, glare, orientation) => callbacks.onBestFrame?.(blur, glare, orientation) : () => {}, callbacks.onIdNotDetected ?? (() => {}), callbacks.onSwitchToManualCapture ?? (() => {}), callbacks.onIdTypeChange ? (idType) => callbacks.onIdTypeChange?.(idType) : () => {}, callbacks.onIdSideChange ? (side) => callbacks.onIdSideChange?.(side) : () => {}, callbacks.onCapturingCounterValueChange ? (value) => callbacks.onCapturingCounterValueChange?.(value) : () => {});
2320
+ }
2321
+ setThresholds(thresholds) {
2322
+ this.ensureInitialized();
2323
+ mlWasmJSApi_default.setIdCaptureThresholds(this.getPipelineType(), thresholds.blurThreshold, thresholds.blurChangeThreshold, thresholds.glareThreshold, thresholds.clsThreshold, thresholds.sideThreshold, thresholds.iouThreshold, thresholds.idDetectedTimeout, thresholds.autocaptureTimeout, thresholds.framesAggregationInterval, thresholds.minFaceIdQualityScore);
2324
+ }
2325
+ setGeometry(geometry) {
2326
+ this.ensureInitialized();
2327
+ mlWasmJSApi_default.setIdCaptureGeometryParams(this.getPipelineType(), geometry.areaDown, geometry.areaUp, geometry.areaIOSPassportUp, geometry.areaIOSPassportDown, geometry.widthIOSUp, geometry.widthIOSDown, geometry.widthDown, geometry.widthUp, geometry.windowOuterWidth, geometry.windowOuterHeight, geometry.windowInnerWidth, geometry.windowInnerHeight);
2328
+ }
2329
+ setSettings(settings) {
2330
+ this.ensureInitialized();
2331
+ mlWasmJSApi_default.setIdCaptureConfigParams(this.getPipelineType(), settings.isFixedMask, settings.isIPhone14OrHigher, settings.idType, settings.blurCheckEnabled, settings.glareCheckEnabled, settings.faceQualityCheckEnabled, settings.iouCheckEnabled);
2332
+ }
2333
+ setModelType(modelType) {
2334
+ this.ensureInitialized();
2335
+ let wasmModelType;
2336
+ switch (modelType) {
2337
+ case "v1":
2338
+ wasmModelType = IdCaptureModelType.IdCaptureV1x;
2339
+ break;
2340
+ case "v2":
2341
+ wasmModelType = IdCaptureModelType.IdCaptureV2x;
2342
+ break;
2343
+ case "v3":
2344
+ wasmModelType = IdCaptureModelType.IdCaptureV3x;
2345
+ break;
2346
+ default: throw new Error(`Unknown model type: ${modelType}`);
2347
+ }
2348
+ mlWasmJSApi_default.setIdCaptureModelType(this.getPipelineType(), wasmModelType);
2349
+ }
2350
+ /**
2351
+ * Processes a frame through the WASM pipeline and stores the result.
2352
+ */
2353
+ async processFrame(image) {
2354
+ const result = await this.processFrameWasm(image);
2355
+ const pipelineType = this.getPipelineType();
2356
+ if (result && pipelineType === WasmPipelineType.IdBlurGlarePipeline) this.lastProcessResult = result;
2357
+ else this.lastProcessResult = null;
2358
+ }
2359
+ /**
2360
+ * Gets the last process result from the most recent frame processing.
2361
+ * @returns The last process result with quad coordinates, or null if not available
2362
+ */
2363
+ getLastProcessResult() {
2364
+ return this.lastProcessResult;
2365
+ }
2366
+ transformPerspective(canvas, frameRect) {
2367
+ this.ensureInitialized();
2368
+ const originalWidth = canvas.width();
2369
+ const originalHeight = canvas.height();
2370
+ try {
2371
+ const imageData = canvas.getImageData();
2372
+ if (!imageData) return canvas;
2373
+ const transformedCanvas = mlWasmJSApi_default.IdPerspectiveTransform(imageData, frameRect);
2374
+ if (transformedCanvas) {
2375
+ const wrappedCanvas = new IncodeCanvas(transformedCanvas);
2376
+ const wrappedWidth = wrappedCanvas.width();
2377
+ const wrappedHeight = wrappedCanvas.height();
2378
+ if (wrappedWidth === originalWidth && wrappedHeight === originalHeight) {
2379
+ const croppedCanvas = this.cropCanvasToRect(wrappedCanvas, {
2380
+ x: Math.round(frameRect.x),
2381
+ y: Math.round(frameRect.y),
2382
+ w: Math.round(frameRect.w),
2383
+ h: Math.round(frameRect.h)
2384
+ });
2385
+ if (croppedCanvas) return croppedCanvas;
2386
+ }
2387
+ return wrappedCanvas;
2388
+ }
2389
+ return canvas;
2390
+ } catch (_error) {
2391
+ return canvas;
2392
+ }
2393
+ }
2394
+ /**
2395
+ * Crops a canvas to the specified rectangle region.
2396
+ * @param canvas - The canvas to crop
2397
+ * @param rect - The rectangle to crop to (x, y, w, h)
2398
+ * @returns A new IncodeCanvas with the cropped region, or null if cropping fails
2399
+ */
2400
+ cropCanvasToRect(canvas, rect) {
2401
+ const canvasWidth = canvas.width();
2402
+ const canvasHeight = canvas.height();
2403
+ if (!canvasWidth || !canvasHeight) return null;
2404
+ const x = Math.max(0, Math.min(rect.x, canvasWidth));
2405
+ const y = Math.max(0, Math.min(rect.y, canvasHeight));
2406
+ const w = Math.max(1, Math.min(rect.w, canvasWidth - x));
2407
+ const h = Math.max(1, Math.min(rect.h, canvasHeight - y));
2408
+ const croppedCanvasElement = document.createElement("canvas");
2409
+ croppedCanvasElement.width = w;
2410
+ croppedCanvasElement.height = h;
2411
+ const ctx = croppedCanvasElement.getContext("2d");
2412
+ if (!ctx) return null;
2413
+ ctx.drawImage(canvas.canvas, x, y, w, h, 0, 0, w, h);
2414
+ return new IncodeCanvas(croppedCanvasElement);
2415
+ }
2416
+ /**
2417
+ * Gets the captured canvas (transformed for preview).
2418
+ * @returns The captured canvas, or null if not available
2419
+ */
2420
+ getCapturedCanvas() {
2421
+ return this.capturedCanvas;
2422
+ }
2423
+ /**
2424
+ * Gets the original captured canvas (full frame for upload).
2425
+ * @returns The original captured canvas, or null if not available
2426
+ */
2427
+ getOriginalCapturedCanvas() {
2428
+ return this.originalCapturedCanvas;
2429
+ }
2430
+ /**
2431
+ * Sets the captured canvases (original and transformed).
2432
+ * @param original - The original full-frame canvas (for upload)
2433
+ * @param transformed - The transformed canvas (for preview)
2434
+ */
2435
+ setCapturedCanvases(original, transformed) {
2436
+ this.originalCapturedCanvas = original;
2437
+ this.capturedCanvas = transformed;
2438
+ }
2439
+ reset() {
2440
+ super.reset();
2441
+ this.lastProcessResult = null;
2442
+ this.capturedCanvas = null;
2443
+ this.originalCapturedCanvas = null;
2444
+ }
2445
+ };
2446
+
1608
2447
  //#endregion
1609
2448
  //#region ../infra/src/providers/wasm/WasmUtilProvider.ts
1610
2449
  var WasmUtilProvider = class WasmUtilProvider extends BaseWasmProvider {
@@ -1631,11 +2470,106 @@ var WasmUtilProvider = class WasmUtilProvider extends BaseWasmProvider {
1631
2470
  const pipeline = config.pipelines?.[0] ?? "selfie";
1632
2471
  await this.initializeBase(config, pipeline);
1633
2472
  }
2473
+ async processFrame(_image) {
2474
+ throw new Error("WasmUtilProvider does not support frame processing. Use encryptImage() instead.");
2475
+ }
1634
2476
  encryptImage(image) {
2477
+ this.ensureInitialized();
1635
2478
  return mlWasmJSApi_default.ens(image);
1636
2479
  }
2480
+ setSdkVersion(version) {
2481
+ this.ensureInitialized();
2482
+ mlWasmJSApi_default.setSdkVersion(version);
2483
+ }
2484
+ setSdkPlatform(platform) {
2485
+ this.ensureInitialized();
2486
+ mlWasmJSApi_default.setSdkPlatform(platform);
2487
+ }
2488
+ setDeviceInfo(deviceInfo, overrideExisting = true) {
2489
+ this.ensureInitialized();
2490
+ mlWasmJSApi_default.setDeviceInfo(deviceInfo, overrideExisting);
2491
+ }
2492
+ setBrowserInfo(browserInfo, overrideExisting = true) {
2493
+ this.ensureInitialized();
2494
+ mlWasmJSApi_default.setBrowserInfo(browserInfo, overrideExisting);
2495
+ }
2496
+ setCameraInfo(cameraInfo, overrideExisting = true) {
2497
+ this.ensureInitialized();
2498
+ mlWasmJSApi_default.setCameraInfo(cameraInfo, overrideExisting);
2499
+ }
2500
+ setMotionStatus(status) {
2501
+ this.ensureInitialized();
2502
+ mlWasmJSApi_default.setMotionStatus(status);
2503
+ }
2504
+ setBackgroundMode(backgroundMode) {
2505
+ this.ensureInitialized();
2506
+ mlWasmJSApi_default.setBackgroundMode(backgroundMode);
2507
+ }
2508
+ setZc(zc) {
2509
+ this.ensureInitialized();
2510
+ mlWasmJSApi_default.setZc(zc);
2511
+ }
2512
+ setInspectorOpened(opened) {
2513
+ this.ensureInitialized();
2514
+ mlWasmJSApi_default.setInspectorOpened(opened);
2515
+ }
2516
+ getMetadata() {
2517
+ this.ensureInitialized();
2518
+ return mlWasmJSApi_default.getMetadata();
2519
+ }
2520
+ async analyzeFrame(image) {
2521
+ this.ensureInitialized();
2522
+ await mlWasmJSApi_default.analyzeFrame(image);
2523
+ }
2524
+ getCheck() {
2525
+ this.ensureInitialized();
2526
+ return mlWasmJSApi_default.getCheck();
2527
+ }
2528
+ estimatePerformance() {
2529
+ this.ensureInitialized();
2530
+ return mlWasmJSApi_default.estimatePerformance();
2531
+ }
2532
+ isVirtualCamera(label) {
2533
+ this.ensureInitialized();
2534
+ return mlWasmJSApi_default.isVirtualCamera(label);
2535
+ }
2536
+ async poc(output) {
2537
+ this.ensureInitialized();
2538
+ await mlWasmJSApi_default.poc(output);
2539
+ }
2540
+ ckvcks(data) {
2541
+ this.ensureInitialized();
2542
+ mlWasmJSApi_default.ckvcks(data);
2543
+ }
2544
+ async getVersions() {
2545
+ this.ensureInitialized();
2546
+ return mlWasmJSApi_default.getVersions();
2547
+ }
1637
2548
  };
1638
2549
 
2550
+ //#endregion
2551
+ //#region ../infra/src/wasm/idCaptureDefaults.ts
2552
+ /**
2553
+ * Default WASM thresholds for ID capture quality checks.
2554
+ * Based on legacy pipelinesConfig.ts values.
2555
+ */
2556
+ const DEFAULT_ID_CAPTURE_THRESHOLDS = {
2557
+ blurThreshold: .2,
2558
+ blurChangeThreshold: .2,
2559
+ glareThreshold: .3,
2560
+ clsThreshold: .98,
2561
+ sideThreshold: .8,
2562
+ iouThreshold: .8,
2563
+ idDetectedTimeout: 1e4,
2564
+ autocaptureTimeout: 5e3,
2565
+ framesAggregationInterval: 3e3,
2566
+ minFaceIdQualityScore: .62
2567
+ };
2568
+ /**
2569
+ * Default model version for ID capture.
2570
+ */
2571
+ const DEFAULT_ID_CAPTURE_MODEL_VERSION = "v2";
2572
+
1639
2573
  //#endregion
1640
2574
  //#region src/internal/http/api.ts
1641
2575
  let client = null;
@@ -1646,6 +2580,9 @@ function setClient(httpClient) {
1646
2580
  function setToken(token) {
1647
2581
  currentToken = token;
1648
2582
  }
2583
+ function getToken() {
2584
+ return currentToken;
2585
+ }
1649
2586
  function getApi() {
1650
2587
  if (!client) throw new Error("SDK not configured. Call setup({ apiURL: \"...\" }) first.");
1651
2588
  return client;
@@ -1692,6 +2629,7 @@ const endpoints = {
1692
2629
  recordingCreateSessionV2: "/omni/recordings/create-session/v2",
1693
2630
  recordingStartV2: "/omni/recordings/record-start/v2",
1694
2631
  recordingStopV2: "/omni/recordings/record-stop/v2",
2632
+ deepsightVideoImport: "/omni/recordings/import",
1695
2633
  phone: "/omni/add/phone",
1696
2634
  phoneInstant: "/omni/instant/add/phone",
1697
2635
  getPhone: "/omni/get/phone",
@@ -1699,8 +2637,14 @@ const endpoints = {
1699
2637
  sendSmsOtp: "/omni/send/sms-otp",
1700
2638
  compareOtp: "/omni/compare/otp",
1701
2639
  email: "/omni/add/email",
1702
- getEmail: "/omni/get/email"
2640
+ getEmail: "/omni/get/email",
2641
+ frontId: "/omni/add/front-id/v2",
2642
+ backId: "/omni/add/back-id/v2",
2643
+ getImages: "/omni/get/images",
2644
+ processId: "/omni/process/id",
2645
+ processSecondId: "/omni/process/second-id",
2646
+ processFace: "/omni/process/face"
1703
2647
  };
1704
2648
 
1705
2649
  //#endregion
1706
- export { createApi_default as _, setClient as a, FaceDetectionProvider as c, StreamCanvasProcessingSession as d, StreamCanvasCapture as f, createManager as g, stopCameraStream as h, resetApi as i, warmupWasm as l, requestCameraAccess as m, api as n, setToken as o, queryCameraPermission as p, getApi as r, WasmUtilProvider as s, endpoints as t, OpenViduRecordingProvider as u };
2650
+ export { isIOS as A, enumerateVideoDevices as C, createApi_default as D, createManager as E, isSafari as M, isAndroid as O, applyTrackConstraints as S, stopCameraStream as T, DeepsightRecordingProvider as _, resetApi as a, queryCameraPermission as b, DEFAULT_ID_CAPTURE_MODEL_VERSION as c, IdCaptureProvider as d, FaceDetectionProvider as f, MotionSensorProvider as g, OpenViduRecordingProvider as h, getToken as i, isIPhone14OrHigher as j, isDesktop as k, DEFAULT_ID_CAPTURE_THRESHOLDS as l, VisibilityProvider as m, api as n, setClient as o, warmupWasm as p, getApi as r, setToken as s, endpoints as t, WasmUtilProvider as u, StreamCanvasProcessingSession as v, requestCameraAccess as w, IncodeCanvas as x, StreamCanvasCapture as y };
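
Note on the new upload-progress path: this release adds an onUploadProgress option to the request config of the bundled createApi client and, when it is set, routes the request through the new requestWithXHR helper, since fetch() cannot report upload progress. The sketch below reproduces that pattern in isolation. It is illustrative only, assumes a browser environment, and is not the package's public API; the function name and signature are hypothetical.

type UploadProgressHandler = (percent: number) => void;

// Standalone sketch of the XHR upload-progress pattern used by requestWithXHR above.
function uploadWithProgress(
  url: string,
  body: Blob | FormData | string,
  headers: Record<string, string>,
  onUploadProgress: UploadProgressHandler,
  timeoutMs = 30_000,
): Promise<{ status: number; data: string }> {
  return new Promise((resolve, reject) => {
    const xhr = new XMLHttpRequest();
    xhr.open("POST", url, true);
    Object.entries(headers).forEach(([key, value]) => xhr.setRequestHeader(key, value));
    xhr.timeout = timeoutMs;

    // Upload progress is only available on XHR, which is why the bundle falls back
    // to XHR whenever a request config supplies onUploadProgress.
    xhr.upload.onprogress = (event) => {
      if (event.lengthComputable) onUploadProgress(Math.round((event.loaded / event.total) * 100));
    };

    xhr.onload = () => {
      if (xhr.status >= 200 && xhr.status < 300) resolve({ status: xhr.status, data: xhr.responseText });
      else reject(new Error(`HTTP ${xhr.status} ${xhr.statusText}`));
    };
    xhr.onerror = () => reject(new Error("Network error"));
    xhr.ontimeout = () => reject(new Error("Request timeout"));

    xhr.send(body);
  });
}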