@incodetech/core 2.0.0-alpha.11 → 2.0.0-alpha.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/{OpenViduLogger-5b7KqNTo.esm.js → OpenViduLogger-CRbRNZA7.esm.js} +1 -1
  2. package/dist/OpenViduLogger-Dy5P806a.esm.js +3 -0
  3. package/dist/{warmup-Cijuyvoc.d.ts → StateMachine-pi8byl8C.d.ts} +4 -1
  4. package/dist/{addEvent-s2Za-pK3.esm.js → addEvent-BGKc_lHF.esm.js} +1 -1
  5. package/dist/{deepsightLoader-Bn2D0REl.esm.js → deepsightLoader-B36_XZ7r.esm.js} +3 -3
  6. package/dist/{recordingsRepository-CTjaf-ER.esm.js → deepsightService-BWxcc4OC.esm.js} +2 -33
  7. package/dist/email.d.ts +1 -1
  8. package/dist/email.esm.js +3 -3
  9. package/dist/{endpoints-B0ltwtb5.esm.js → endpoints-D9TGnxRK.esm.js} +336 -21
  10. package/dist/flow.d.ts +4 -303
  11. package/dist/flow.esm.js +4 -5
  12. package/dist/id-DHVSW_wJ.esm.js +1825 -0
  13. package/dist/id.d.ts +6 -0
  14. package/dist/id.esm.js +8 -0
  15. package/dist/index-CbF_uI-x.d.ts +618 -0
  16. package/dist/index.d.ts +8 -3
  17. package/dist/index.esm.js +7 -4
  18. package/dist/{lib-CykGFCEr.esm.js → lib-BJoLTN_W.esm.js} +2 -2
  19. package/dist/phone.d.ts +1 -1
  20. package/dist/phone.esm.js +3 -3
  21. package/dist/recordingsRepository-D5MURoVB.esm.js +40 -0
  22. package/dist/selfie.d.ts +22 -324
  23. package/dist/selfie.esm.js +34 -23
  24. package/dist/{permissionServices-BhD0KxsO.esm.js → streamingEvents-B3hNanPl.esm.js} +34 -3
  25. package/dist/types-BpCrZLU6.d.ts +302 -0
  26. package/dist/types-DZbrbPgj.d.ts +335 -0
  27. package/package.json +6 -2
  28. package/dist/OpenViduLogger-20ZYS-mT.esm.js +0 -3
  29. package/dist/StateMachine-BqPpBhOz.d.ts +0 -2
  30. package/dist/getBrowser-CLEzz0Hi.esm.js +0 -8
  31. package/dist/types-Dif6MQmX.d.ts +0 -5
  32. /package/dist/{Manager-Cy9-TMC9.d.ts → Manager-BZUZTRPx.d.ts} +0 -0
  33. /package/dist/{chunk-C_Yo44FK.esm.js → chunk-FbsBJI8u.esm.js} +0 -0
  34. /package/dist/{xstate.esm-2T5fOCTq.esm.js → xstate.esm-2hDiAXvZ.esm.js} +0 -0
package/dist/{OpenViduLogger-5b7KqNTo.esm.js → OpenViduLogger-CRbRNZA7.esm.js} CHANGED
@@ -1,4 +1,4 @@
- import { t as __commonJS } from "./chunk-C_Yo44FK.esm.js";
+ import { t as __commonJS } from "./chunk-FbsBJI8u.esm.js";
 
  //#region ../../node_modules/.pnpm/jsnlog@2.30.0/node_modules/jsnlog/jsnlog.js
  var require_jsnlog = /* @__PURE__ */ __commonJS({ "../../node_modules/.pnpm/jsnlog@2.30.0/node_modules/jsnlog/jsnlog.js": ((exports) => {
package/dist/OpenViduLogger-Dy5P806a.esm.js CHANGED
@@ -0,0 +1,3 @@
+ import { t as require_OpenViduLogger } from "./OpenViduLogger-CRbRNZA7.esm.js";
+
+ export default require_OpenViduLogger();
package/dist/{warmup-Cijuyvoc.d.ts → StateMachine-pi8byl8C.d.ts} CHANGED
@@ -1,4 +1,7 @@
+ import { AnyStateMachine, StateMachine } from "xstate";
+
  //#region ../infra/src/wasm/warmup.d.ts
+
  /**
  * All available ML pipelines in the SDK.
  * - 'selfie' - Face detection for selfie capture
@@ -52,4 +55,4 @@ interface WarmupConfig {
  */
  declare function warmupWasm(config: WarmupConfig): Promise<void>;
  //#endregion
- export { warmupWasm as n, WasmPipeline as t };
+ export { warmupWasm as i, StateMachine as n, WasmPipeline as r, AnyStateMachine as t };
package/dist/{addEvent-s2Za-pK3.esm.js → addEvent-BGKc_lHF.esm.js} CHANGED
@@ -1,4 +1,4 @@
- import { n as api, t as endpoints } from "./endpoints-B0ltwtb5.esm.js";
+ import { n as api, t as endpoints } from "./endpoints-D9TGnxRK.esm.js";
 
  //#region src/internal/events/addEvent.ts
  function addEvent(event) {
package/dist/{deepsightLoader-Bn2D0REl.esm.js → deepsightLoader-B36_XZ7r.esm.js} CHANGED
@@ -1,6 +1,6 @@
- import { c as WasmUtilProvider, d as VisibilityProvider, m as DeepsightRecordingProvider, p as MotionSensorProvider } from "./endpoints-B0ltwtb5.esm.js";
- import "./getBrowser-CLEzz0Hi.esm.js";
- import { a as createDeepsightService, i as uploadDeepsightVideo } from "./recordingsRepository-CTjaf-ER.esm.js";
+ import { _ as DeepsightRecordingProvider, g as MotionSensorProvider, m as VisibilityProvider, u as WasmUtilProvider } from "./endpoints-D9TGnxRK.esm.js";
+ import { i as uploadDeepsightVideo } from "./recordingsRepository-D5MURoVB.esm.js";
+ import { t as createDeepsightService } from "./deepsightService-BWxcc4OC.esm.js";
 
  //#region src/modules/selfie/deepsightLoader.ts
  const SDK_VERSION = "2.0.0";
package/dist/{recordingsRepository-CTjaf-ER.esm.js → deepsightService-BWxcc4OC.esm.js} CHANGED
@@ -1,5 +1,4 @@
- import { n as api, t as endpoints } from "./endpoints-B0ltwtb5.esm.js";
- import { t as getUserAgent } from "./getBrowser-CLEzz0Hi.esm.js";
+ import { a as getUserAgent } from "./recordingsRepository-D5MURoVB.esm.js";
 
  //#region src/internal/deepsight/metadataService.ts
  function getWebGLFingerprint() {
@@ -223,34 +222,4 @@ function createDeepsightService(config) {
  }
 
  //#endregion
- //#region src/internal/recordings/recordingsRepository.ts
- async function createRecordingSession(type) {
- return (await api.post(endpoints.recordingCreateSessionV2, { type })).data;
- }
- async function startRecording(params) {
- return (await api.post(endpoints.recordingStartV2, {
- videoRecordingId: params.videoRecordingId,
- frameRate: 30,
- outputMode: "COMPOSED",
- resolution: params.resolution,
- type: params.type,
- hasAudio: params.hasAudio ?? false
- })).data;
- }
- async function stopRecording(videoRecordingId) {
- return (await api.post(endpoints.recordingStopV2, { videoRecordingId })).data;
- }
- async function uploadDeepsightVideo(encryptedVideo, token) {
- try {
- return (await api.post(endpoints.deepsightVideoImport, {
- video: encryptedVideo,
- type: "selfie"
- }, { headers: { "X-Incode-Hardware-Id": token } })).data.recordingId ?? "";
- } catch (error) {
- console.error("Error uploading deepsight video:", error);
- return "";
- }
- }
-
- //#endregion
- export { createDeepsightService as a, uploadDeepsightVideo as i, startRecording as n, stopRecording as r, createRecordingSession as t };
+ export { createDeepsightService as t };
package/dist/email.d.ts CHANGED
@@ -1,4 +1,4 @@
- import { t as Manager } from "./Manager-Cy9-TMC9.js";
+ import { t as Manager } from "./Manager-BZUZTRPx.js";
 
  //#region src/modules/email/types.d.ts
 
package/dist/email.esm.js CHANGED
@@ -1,6 +1,6 @@
- import { b as createManager, n as api, t as endpoints } from "./endpoints-B0ltwtb5.esm.js";
- import { a as createActor, i as fromPromise, n as assign, r as fromCallback, t as setup } from "./xstate.esm-2T5fOCTq.esm.js";
- import { t as addEvent } from "./addEvent-s2Za-pK3.esm.js";
+ import { E as createManager, n as api, t as endpoints } from "./endpoints-D9TGnxRK.esm.js";
+ import { a as createActor, i as fromPromise, n as assign, r as fromCallback, t as setup } from "./xstate.esm-2hDiAXvZ.esm.js";
+ import { t as addEvent } from "./addEvent-BGKc_lHF.esm.js";
 
  //#region src/modules/email/emailServices.ts
  async function fetchEmail(signal) {
package/dist/{endpoints-B0ltwtb5.esm.js → endpoints-D9TGnxRK.esm.js} CHANGED
@@ -1,6 +1,19 @@
- import { a as __toDynamicImportESM } from "./chunk-C_Yo44FK.esm.js";
+ import { a as __toDynamicImportESM } from "./chunk-FbsBJI8u.esm.js";
 
  //#region ../infra/src/capabilities/platform.ts
+ /**
+ * Platform detection utilities for camera operations.
+ * These are pure functions with no external dependencies and can be used by all layers.
+ */
+ let uxType = null;
+ function isTouchDevice() {
+ return (typeof navigator !== "undefined" ? navigator.maxTouchPoints > 0 : false) || "ontouchstart" in (typeof document !== "undefined" ? document.documentElement : {});
+ }
+ function isTabletWithoutAndroid(userAgent) {
+ const isLinux = /Linux/i.test(userAgent);
+ const isChrome = /Chrome/i.test(userAgent) && !/Edge/i.test(userAgent);
+ return !/Mobile|Android/i.test(userAgent) && isLinux && isChrome && isTouchDevice();
+ }
  function isIOS(userAgent) {
  const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
  const hasTouchPoints = typeof navigator !== "undefined" ? navigator.maxTouchPoints > 0 : false;
@@ -10,6 +23,58 @@ function isAndroid(userAgent) {
  const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
  return /Android/i.test(ua);
  }
+ function isDesktop(userAgent) {
+ if (uxType) return uxType === "desktop";
+ const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
+ return !(/Mobile|Android/i.test(ua) || isTabletWithoutAndroid(ua) || isIOS(ua));
+ }
+ function isSafari(userAgent) {
+ const ua = userAgent || (typeof navigator !== "undefined" ? navigator.userAgent : "");
+ return /Safari/i.test(ua) && !/Chrome|Chromium|Edge/i.test(ua);
+ }
+ const IPHONE_14_PLUS_DIMENSIONS = [
+ {
+ outerHeight: 852,
+ outerWidth: 393
+ },
+ {
+ outerHeight: 932,
+ outerWidth: 430
+ },
+ {
+ innerHeight: 631,
+ innerWidth: 375
+ },
+ {
+ innerHeight: 920,
+ innerWidth: 402
+ },
+ {
+ outerHeight: 874,
+ outerWidth: 402
+ },
+ {
+ innerHeight: 874,
+ innerWidth: 402
+ },
+ {
+ outerHeight: 912,
+ outerWidth: 420
+ },
+ {
+ outerHeight: 873,
+ outerWidth: 402
+ },
+ {
+ outerHeight: 956,
+ outerWidth: 440
+ }
+ ];
+ function isIPhone14OrHigher() {
+ if (typeof window === "undefined") return false;
+ const { outerHeight, outerWidth, innerHeight, innerWidth } = window;
+ return IPHONE_14_PLUS_DIMENSIONS.some((dims) => dims.outerHeight === outerHeight && dims.outerWidth === outerWidth || dims.innerHeight === innerHeight && dims.innerWidth === innerWidth);
+ }
 
  //#endregion
  //#region ../infra/src/http/createApi.ts
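The new platform helpers feed the ID-capture configuration further down (note that setIdCaptureConfigParams and IdCaptureProvider.setSettings take an isIPhone14OrHigher flag). A minimal TypeScript sketch of how a caller might combine them; the direct references are illustrative, since the bundle only exposes these under mangled aliases (isDesktop as k, isIPhone14OrHigher as j, etc.), and which capture settings the flags should drive is an assumption:

// Illustrative only - helpers are the ones defined in the hunk above.
const ua = typeof navigator !== "undefined" ? navigator.userAgent : "";
const profile = {
  desktop: isDesktop(ua),
  ios: isIOS(ua),
  android: isAndroid(ua),
  safari: isSafari(ua),
  iPhone14OrHigher: isIPhone14OrHigher(), // later passed to IdCaptureProvider.setSettings
};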
@@ -60,6 +125,56 @@ const parseResponse = async (response, parseType) => {
  return await response.text();
  };
  const DEFAULT_TIMEOUT = 3e4;
+ const requestWithXHR = (fullUrl, method, mergedHeaders, requestBody, timeout, signal, onUploadProgress) => {
+ return new Promise((resolve, reject) => {
+ const xhr = new XMLHttpRequest();
+ xhr.open(method, fullUrl, true);
+ Object.entries(mergedHeaders).forEach(([key, value]) => {
+ xhr.setRequestHeader(key, value);
+ });
+ xhr.timeout = timeout;
+ xhr.upload.onprogress = (event) => {
+ if (event.lengthComputable) {
+ const percentComplete = event.loaded / event.total * 100;
+ onUploadProgress(Math.round(percentComplete));
+ }
+ };
+ xhr.onload = () => {
+ const responseHeaders = {};
+ xhr.getAllResponseHeaders().split("\r\n").forEach((line) => {
+ const parts = line.split(": ");
+ if (parts.length === 2) responseHeaders[parts[0].toLowerCase()] = parts[1];
+ });
+ let data;
+ try {
+ if ((xhr.getResponseHeader("content-type") ?? "").includes("application/json")) data = JSON.parse(xhr.responseText);
+ else data = xhr.responseText;
+ } catch {
+ data = xhr.responseText;
+ }
+ if (xhr.status >= 200 && xhr.status < 300) resolve({
+ ok: true,
+ status: xhr.status,
+ statusText: xhr.statusText,
+ url: fullUrl,
+ headers: responseHeaders,
+ data
+ });
+ else reject(new FetchHttpError(xhr.status, xhr.statusText, fullUrl, method, responseHeaders, data));
+ };
+ xhr.onerror = () => {
+ reject(new FetchHttpError(0, "Network Error", fullUrl, method, {}, null));
+ };
+ xhr.ontimeout = () => {
+ reject(new FetchHttpError(0, "Request timeout", fullUrl, method, {}, null));
+ };
+ if (signal) signal.addEventListener("abort", () => {
+ xhr.abort();
+ reject(new FetchHttpError(0, "Request aborted", fullUrl, method, {}, null));
+ });
+ xhr.send(requestBody);
+ });
+ };
  const createApi = (config) => {
  const headers = {
  "Content-Type": "application/json",
@@ -74,22 +189,26 @@ const createApi = (config) => {
  const client$1 = {
  defaults,
  async request(requestConfig) {
- const { method = "GET", url, headers: headers$1 = {}, query, params, body, signal, timeout = config.timeout ?? DEFAULT_TIMEOUT, parse } = requestConfig;
+ const { method = "GET", url, headers: headers$1 = {}, query, params, body, signal, timeout = config.timeout ?? DEFAULT_TIMEOUT, parse, onUploadProgress } = requestConfig;
  const fullUrl = buildUrl(defaults.baseURL, url, params ?? query);
  const mergedHeaders = {
  ...defaults.headers,
  ...headers$1
  };
+ const requestBody = prepareBody(body);
+ let finalHeaders = mergedHeaders;
+ if (requestBody === null && (method === "POST" || method === "PUT" || method === "PATCH")) {
+ const { "Content-Type": _, ...headersWithoutContentType } = mergedHeaders;
+ finalHeaders = headersWithoutContentType;
+ }
+ if (onUploadProgress) {
+ if (requestBody !== null && typeof ReadableStream !== "undefined" && requestBody instanceof ReadableStream) throw new Error("Upload progress tracking is not supported for ReadableStream bodies");
+ return requestWithXHR(fullUrl, method, finalHeaders, requestBody, timeout, signal, onUploadProgress);
+ }
  const controller = new AbortController();
  const abortSignal = signal ?? controller.signal;
  const timeoutId = setTimeout(() => controller.abort(), timeout);
  try {
- const requestBody = prepareBody(body);
- let finalHeaders = mergedHeaders;
- if (requestBody === null && (method === "POST" || method === "PUT" || method === "PATCH")) {
- const { "Content-Type": _, ...headersWithoutContentType } = mergedHeaders;
- finalHeaders = headersWithoutContentType;
- }
  const response = await fetch(fullUrl, {
  method,
  headers: finalHeaders,
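In short: a request that passes onUploadProgress is now routed through the XHR path above (fetch has no standard upload-progress events), while everything else keeps using fetch. A minimal sketch of how a caller might use it; whether api.post forwards options in exactly this shape is an assumption, and the payload is borrowed from the removed uploadDeepsightVideo for illustration:

// Sketch only - assumes the client forwards requestConfig.onUploadProgress as added above.
declare const encryptedVideo: string;
const { data } = await api.post(
  endpoints.deepsightVideoImport,
  { video: encryptedVideo, type: "selfie" },
  { onUploadProgress: (percent: number) => console.log(`upload ${percent}%`) } // percent is a rounded 0-100 integer
);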
@@ -241,6 +360,10 @@ function createManager(options) {
 
  //#endregion
  //#region ../infra/src/media/camera.ts
+ /**
+ * Request camera access with specific constraints.
+ * Throws if the request fails - no automatic fallback.
+ */
  async function requestCameraAccess(constraints) {
  if (typeof navigator === "undefined" || !navigator.mediaDevices) throw new Error("MediaDevices API not available");
  return navigator.mediaDevices.getUserMedia(constraints);
@@ -248,6 +371,13 @@ async function requestCameraAccess(constraints) {
  function stopCameraStream(stream) {
  for (const track of stream.getTracks()) track.stop();
  }
+ async function enumerateVideoDevices() {
+ if (typeof navigator === "undefined" || !navigator.mediaDevices) return [];
+ return (await navigator.mediaDevices.enumerateDevices()).filter((d) => d.kind === "videoinput");
+ }
+ async function applyTrackConstraints(track, constraints) {
+ await track.applyConstraints(constraints);
+ }
 
  //#endregion
  //#region ../infra/src/media/canvas.ts
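Since requestCameraAccess deliberately throws with no automatic fallback, constraint fallback is the caller's job; the new enumerateVideoDevices and applyTrackConstraints helpers make that straightforward. A minimal sketch using only the standard MediaDevices API - the fallback order is an assumption, not something this diff prescribes, and the helpers are referenced as defined in the hunk above:

// Sketch: try an exact back camera first, then fall back to any video input.
async function openCamera(): Promise<MediaStream> {
  try {
    return await requestCameraAccess({ video: { facingMode: { exact: "environment" } } });
  } catch {
    // The helper does not retry for us, so loosen the constraints here.
    const devices = await enumerateVideoDevices();
    const deviceId = devices[0]?.deviceId;
    const stream = await requestCameraAccess({ video: deviceId ? { deviceId } : true });
    // Optionally tighten resolution on the live track afterwards.
    const [track] = stream.getVideoTracks();
    if (track) await applyTrackConstraints(track, { width: { ideal: 1280 }, height: { ideal: 720 } });
    return stream;
  }
}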
@@ -484,14 +614,20 @@ var StreamCanvasCapture = class {
  this.video.playsInline = true;
  this.video.muted = true;
  const settings = stream.getVideoTracks()[0]?.getSettings();
- const width = options?.width ?? settings?.width ?? 640;
- const height = options?.height ?? settings?.height ?? 480;
+ const initialWidth = options?.width ?? settings?.width ?? 1280;
+ const initialHeight = options?.height ?? settings?.height ?? 720;
  this.canvas = document.createElement("canvas");
- this.canvas.width = width;
- this.canvas.height = height;
+ this.canvas.width = initialWidth;
+ this.canvas.height = initialHeight;
  this.ctx = this.canvas.getContext("2d", { willReadFrequently: true });
  const fps = options?.fps ?? 10;
  const intervalMs = fps > 0 ? Math.max(16, Math.floor(1e3 / fps)) : 0;
+ this.video.addEventListener("loadedmetadata", () => {
+ if (this.video.videoWidth > 0 && this.video.videoHeight > 0) {
+ this.canvas.width = this.video.videoWidth;
+ this.canvas.height = this.video.videoHeight;
+ }
+ });
  try {
  this.video.play();
  } catch {}
@@ -1137,13 +1273,13 @@ var MotionSensorProvider = class {
  //#region ../infra/src/providers/browser/openviduLazy.ts
  let openViduImport;
  async function enableProdModeBeforeLoad() {
- const { OpenViduLogger } = await import("./OpenViduLogger-20ZYS-mT.esm.js").then(__toDynamicImportESM(1));
+ const { OpenViduLogger } = await import("./OpenViduLogger-Dy5P806a.esm.js").then(__toDynamicImportESM(1));
  OpenViduLogger.getInstance().enableProdMode();
  }
  async function loadOpenVidu() {
  if (openViduImport) return openViduImport;
  await enableProdModeBeforeLoad();
- openViduImport = import("./lib-CykGFCEr.esm.js").then(__toDynamicImportESM(1));
+ openViduImport = import("./lib-BJoLTN_W.esm.js").then(__toDynamicImportESM(1));
  return openViduImport;
  }
 
@@ -1545,9 +1681,9 @@ var MlWasmJSApi = class MlWasmJSApi {
  this.checkWasmInitialization("Unable to set geometry params, cpp API hasn't been initialized");
  this.idCaptureWasmApi.setIdCaptureGeometryParams(this.pipelineTypeToWasmEnum(type), areaDown, areaUp, areaIOSPassportUp, areaIOSPassportDown, widthIOSUp, widthIOSDown, widthDown, widthUp, windowOuterWidth, windowOuterHeight, windowInnerWidth, windowInnerHeight);
  }
- async setIdCaptureConfigParams(type, isFixedMask, isIPhone14OrHigher, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled) {
+ async setIdCaptureConfigParams(type, isFixedMask, isIPhone14OrHigher$1, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled) {
  this.checkWasmInitialization("Unable to set config params, cpp API hasn't been initialized");
- this.idCaptureWasmApi.setIdCaptureConfigParams(this.pipelineTypeToWasmEnum(type), isFixedMask, isIPhone14OrHigher, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled);
+ this.idCaptureWasmApi.setIdCaptureConfigParams(this.pipelineTypeToWasmEnum(type), isFixedMask, isIPhone14OrHigher$1, idType, isBlurCheckEnabled, isGlareCheckEnabled, isIdFaceQualityCheckEnabled, isIouCheckEnabled);
  }
  setIdCaptureModelType(pipelineType, modelType) {
  this.checkWasmInitialization("Unable to set model type, cpp API hasn't been initialized");
@@ -1979,12 +2115,14 @@ var BaseWasmProvider = class {
  /**
  * Processes a frame through the WASM pipeline
  * @param image - Image data to process
+ * @returns The pipeline result (type depends on pipeline - WASM returns any)
  */
- async processFrame(image) {
+ async processFrameWasm(image) {
  this.ensureInitialized();
  const pipelineType = this.getPipelineType();
  await mlWasmJSApi_default.allocateImageBuffers(image.width, image.height);
- await mlWasmJSApi_default.process(image, pipelineType);
+ await mlWasmJSApi_default.handleDetectionCallAndUpdateState(pipelineType);
+ return await mlWasmJSApi_default.process(image, pipelineType);
  }
  /**
  * Resets the pipeline to its initial state.
@@ -2030,7 +2168,7 @@ var FaceDetectionProvider = class extends BaseWasmProvider {
  }
  async processFrame(image) {
  this.currentFrame = image;
- await super.processFrame(image);
+ await this.processFrameWasm(image);
  }
  async initialize(config) {
  await this.initializeBase(config, "selfie");
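BaseWasmProvider no longer exposes processFrame itself; the shared WASM steps moved into processFrameWasm (which now also calls handleDetectionCallAndUpdateState and returns the raw pipeline result), and each concrete provider implements its own processFrame on top of it. A compressed TypeScript sketch of that contract; the types are invented for illustration, since the bundled source is untyped:

// Sketch of the refactored contract, not the bundled source.
abstract class BaseWasmProviderSketch {
  // Shared WASM steps; subclasses get the raw pipeline result back.
  protected async processFrameWasm(image: ImageData): Promise<unknown> {
    // allocateImageBuffers -> handleDetectionCallAndUpdateState -> process (see diff above)
    return null;
  }
}
class FaceDetectionProviderSketch extends BaseWasmProviderSketch {
  private currentFrame: ImageData | null = null;
  async processFrame(image: ImageData): Promise<void> {
    this.currentFrame = image;           // the face pipeline ignores the raw result
    await this.processFrameWasm(image);
  }
}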
@@ -2161,6 +2299,151 @@ var FaceDetectionProvider = class extends BaseWasmProvider {
  }
  };
 
+ //#endregion
+ //#region ../infra/src/providers/wasm/IdCaptureProvider.ts
+ var IdCaptureProvider = class extends BaseWasmProvider {
+ constructor() {
+ super(WasmPipelineType.IdBlurGlarePipeline);
+ this.lastProcessResult = null;
+ this.capturedCanvas = null;
+ this.originalCapturedCanvas = null;
+ }
+ async initialize(config) {
+ await this.initializeBase(config, "idCapture");
+ }
+ setCallbacks(callbacks) {
+ this.ensureInitialized();
+ const onCaptureWrapper = () => {
+ callbacks.onCapture?.();
+ };
+ mlWasmJSApi_default.setIdCaptureCallbacks(this.getPipelineType(), callbacks.onFarAway ?? (() => {}), callbacks.onDetectionStarted ?? (() => {}), callbacks.onMaskChange ? (show, mask, top, orientation) => callbacks.onMaskChange?.(show, mask, top, orientation) : () => {}, callbacks.onBlur ?? (() => {}), callbacks.onGlare ?? (() => {}), callbacks.onCapturing ?? (() => {}), callbacks.onCapture ? onCaptureWrapper : () => {}, callbacks.onBestFrame ? (blur, glare, orientation) => callbacks.onBestFrame?.(blur, glare, orientation) : () => {}, callbacks.onIdNotDetected ?? (() => {}), callbacks.onSwitchToManualCapture ?? (() => {}), callbacks.onIdTypeChange ? (idType) => callbacks.onIdTypeChange?.(idType) : () => {}, callbacks.onIdSideChange ? (side) => callbacks.onIdSideChange?.(side) : () => {}, callbacks.onCapturingCounterValueChange ? (value) => callbacks.onCapturingCounterValueChange?.(value) : () => {});
+ }
+ setThresholds(thresholds) {
+ this.ensureInitialized();
+ mlWasmJSApi_default.setIdCaptureThresholds(this.getPipelineType(), thresholds.blurThreshold, thresholds.blurChangeThreshold, thresholds.glareThreshold, thresholds.clsThreshold, thresholds.sideThreshold, thresholds.iouThreshold, thresholds.idDetectedTimeout, thresholds.autocaptureTimeout, thresholds.framesAggregationInterval, thresholds.minFaceIdQualityScore);
+ }
+ setGeometry(geometry) {
+ this.ensureInitialized();
+ mlWasmJSApi_default.setIdCaptureGeometryParams(this.getPipelineType(), geometry.areaDown, geometry.areaUp, geometry.areaIOSPassportUp, geometry.areaIOSPassportDown, geometry.widthIOSUp, geometry.widthIOSDown, geometry.widthDown, geometry.widthUp, geometry.windowOuterWidth, geometry.windowOuterHeight, geometry.windowInnerWidth, geometry.windowInnerHeight);
+ }
+ setSettings(settings) {
+ this.ensureInitialized();
+ mlWasmJSApi_default.setIdCaptureConfigParams(this.getPipelineType(), settings.isFixedMask, settings.isIPhone14OrHigher, settings.idType, settings.blurCheckEnabled, settings.glareCheckEnabled, settings.faceQualityCheckEnabled, settings.iouCheckEnabled);
+ }
+ setModelType(modelType) {
+ this.ensureInitialized();
+ let wasmModelType;
+ switch (modelType) {
+ case "v1":
+ wasmModelType = IdCaptureModelType.IdCaptureV1x;
+ break;
+ case "v2":
+ wasmModelType = IdCaptureModelType.IdCaptureV2x;
+ break;
+ case "v3":
+ wasmModelType = IdCaptureModelType.IdCaptureV3x;
+ break;
+ default: throw new Error(`Unknown model type: ${modelType}`);
+ }
+ mlWasmJSApi_default.setIdCaptureModelType(this.getPipelineType(), wasmModelType);
+ }
+ /**
+ * Processes a frame through the WASM pipeline and stores the result.
+ */
+ async processFrame(image) {
+ const result = await this.processFrameWasm(image);
+ const pipelineType = this.getPipelineType();
+ if (result && pipelineType === WasmPipelineType.IdBlurGlarePipeline) this.lastProcessResult = result;
+ else this.lastProcessResult = null;
+ }
+ /**
+ * Gets the last process result from the most recent frame processing.
+ * @returns The last process result with quad coordinates, or null if not available
+ */
+ getLastProcessResult() {
+ return this.lastProcessResult;
+ }
+ transformPerspective(canvas, frameRect) {
+ this.ensureInitialized();
+ const originalWidth = canvas.width();
+ const originalHeight = canvas.height();
+ try {
+ const imageData = canvas.getImageData();
+ if (!imageData) return canvas;
+ const transformedCanvas = mlWasmJSApi_default.IdPerspectiveTransform(imageData, frameRect);
+ if (transformedCanvas) {
+ const wrappedCanvas = new IncodeCanvas(transformedCanvas);
+ const wrappedWidth = wrappedCanvas.width();
+ const wrappedHeight = wrappedCanvas.height();
+ if (wrappedWidth === originalWidth && wrappedHeight === originalHeight) {
+ const croppedCanvas = this.cropCanvasToRect(wrappedCanvas, {
+ x: Math.round(frameRect.x),
+ y: Math.round(frameRect.y),
+ w: Math.round(frameRect.w),
+ h: Math.round(frameRect.h)
+ });
+ if (croppedCanvas) return croppedCanvas;
+ }
+ return wrappedCanvas;
+ }
+ return canvas;
+ } catch (_error) {
+ return canvas;
+ }
+ }
+ /**
+ * Crops a canvas to the specified rectangle region.
+ * @param canvas - The canvas to crop
+ * @param rect - The rectangle to crop to (x, y, w, h)
+ * @returns A new IncodeCanvas with the cropped region, or null if cropping fails
+ */
+ cropCanvasToRect(canvas, rect) {
+ const canvasWidth = canvas.width();
+ const canvasHeight = canvas.height();
+ if (!canvasWidth || !canvasHeight) return null;
+ const x = Math.max(0, Math.min(rect.x, canvasWidth));
+ const y = Math.max(0, Math.min(rect.y, canvasHeight));
+ const w = Math.max(1, Math.min(rect.w, canvasWidth - x));
+ const h = Math.max(1, Math.min(rect.h, canvasHeight - y));
+ const croppedCanvasElement = document.createElement("canvas");
+ croppedCanvasElement.width = w;
+ croppedCanvasElement.height = h;
+ const ctx = croppedCanvasElement.getContext("2d");
+ if (!ctx) return null;
+ ctx.drawImage(canvas.canvas, x, y, w, h, 0, 0, w, h);
+ return new IncodeCanvas(croppedCanvasElement);
+ }
+ /**
+ * Gets the captured canvas (transformed for preview).
+ * @returns The captured canvas, or null if not available
+ */
+ getCapturedCanvas() {
+ return this.capturedCanvas;
+ }
+ /**
+ * Gets the original captured canvas (full frame for upload).
+ * @returns The original captured canvas, or null if not available
+ */
+ getOriginalCapturedCanvas() {
+ return this.originalCapturedCanvas;
+ }
+ /**
+ * Sets the captured canvases (original and transformed).
+ * @param original - The original full-frame canvas (for upload)
+ * @param transformed - The transformed canvas (for preview)
+ */
+ setCapturedCanvases(original, transformed) {
+ this.originalCapturedCanvas = original;
+ this.capturedCanvas = transformed;
+ }
+ reset() {
+ super.reset();
+ this.lastProcessResult = null;
+ this.capturedCanvas = null;
+ this.originalCapturedCanvas = null;
+ }
+ };
+
  //#endregion
  //#region ../infra/src/providers/wasm/WasmUtilProvider.ts
  var WasmUtilProvider = class WasmUtilProvider extends BaseWasmProvider {
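A rough usage sketch of the new provider, assembled from the methods shown above. The config object passed to initialize, the callback payloads, and the frame source are assumptions (the bundle carries no types), and in practice these internals sit behind the package's id module rather than being wired up directly:

// Hypothetical wiring; method names come from the diff, everything else is illustrative.
declare const frame: ImageData; // e.g. captured via StreamCanvasCapture
const provider = new IdCaptureProvider();
await provider.initialize({ pipelines: ["idCapture"] }); // config shape assumed
provider.setThresholds(DEFAULT_ID_CAPTURE_THRESHOLDS);    // defaults defined in the idCaptureDefaults region further down
provider.setModelType(DEFAULT_ID_CAPTURE_MODEL_VERSION);  // "v2" -> IdCaptureModelType.IdCaptureV2x
provider.setCallbacks({
  onBlur: () => console.log("blurry frame"),
  onGlare: () => console.log("glare detected"),
  onCapture: () => console.log("ID captured"),
});
// Per-frame loop: process, then read back the last result (quad coordinates) for perspective correction.
await provider.processFrame(frame);
const quad = provider.getLastProcessResult();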
@@ -2187,6 +2470,9 @@ var WasmUtilProvider = class WasmUtilProvider extends BaseWasmProvider {
  const pipeline = config.pipelines?.[0] ?? "selfie";
  await this.initializeBase(config, pipeline);
  }
+ async processFrame(_image) {
+ throw new Error("WasmUtilProvider does not support frame processing. Use encryptImage() instead.");
+ }
  encryptImage(image) {
  this.ensureInitialized();
  return mlWasmJSApi_default.ens(image);
@@ -2261,6 +2547,29 @@ var WasmUtilProvider = class WasmUtilProvider extends BaseWasmProvider {
  }
  };
 
+ //#endregion
+ //#region ../infra/src/wasm/idCaptureDefaults.ts
+ /**
+ * Default WASM thresholds for ID capture quality checks.
+ * Based on legacy pipelinesConfig.ts values.
+ */
+ const DEFAULT_ID_CAPTURE_THRESHOLDS = {
+ blurThreshold: .2,
+ blurChangeThreshold: .2,
+ glareThreshold: .3,
+ clsThreshold: .98,
+ sideThreshold: .8,
+ iouThreshold: .8,
+ idDetectedTimeout: 1e4,
+ autocaptureTimeout: 5e3,
+ framesAggregationInterval: 3e3,
+ minFaceIdQualityScore: .62
+ };
+ /**
+ * Default model version for ID capture.
+ */
+ const DEFAULT_ID_CAPTURE_MODEL_VERSION = "v2";
+
  //#endregion
  //#region src/internal/http/api.ts
  let client = null;
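These defaults are exported from the bundle (DEFAULT_ID_CAPTURE_THRESHOLDS as l, DEFAULT_ID_CAPTURE_MODEL_VERSION as c), so ID-capture setup can start from them and adjust individual values. A small hedged sketch, reusing the provider instance from the earlier sketch; whether the public SDK surface exposes such an override is an assumption:

// Sketch: start from the bundled defaults and relax one quality check.
const thresholds = {
  ...DEFAULT_ID_CAPTURE_THRESHOLDS,
  glareThreshold: 0.4, // tolerate slightly more glare than the default 0.3
};
provider.setThresholds(thresholds);
provider.setModelType(DEFAULT_ID_CAPTURE_MODEL_VERSION); // "v2"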
@@ -2328,8 +2637,14 @@ const endpoints = {
  sendSmsOtp: "/omni/send/sms-otp",
  compareOtp: "/omni/compare/otp",
  email: "/omni/add/email",
- getEmail: "/omni/get/email"
+ getEmail: "/omni/get/email",
+ frontId: "/omni/add/front-id/v2",
+ backId: "/omni/add/back-id/v2",
+ getImages: "/omni/get/images",
+ processId: "/omni/process/id",
+ processSecondId: "/omni/process/second-id",
+ processFace: "/omni/process/face"
  };
 
  //#endregion
- export { queryCameraPermission as _, resetApi as a, createManager as b, WasmUtilProvider as c, VisibilityProvider as d, OpenViduRecordingProvider as f, StreamCanvasCapture as g, StreamCanvasProcessingSession as h, getToken as i, FaceDetectionProvider as l, DeepsightRecordingProvider as m, api as n, setClient as o, MotionSensorProvider as p, getApi as r, setToken as s, endpoints as t, warmupWasm as u, requestCameraAccess as v, createApi_default as x, stopCameraStream as y };
+ export { isIOS as A, enumerateVideoDevices as C, createApi_default as D, createManager as E, isSafari as M, isAndroid as O, applyTrackConstraints as S, stopCameraStream as T, DeepsightRecordingProvider as _, resetApi as a, queryCameraPermission as b, DEFAULT_ID_CAPTURE_MODEL_VERSION as c, IdCaptureProvider as d, FaceDetectionProvider as f, MotionSensorProvider as g, OpenViduRecordingProvider as h, getToken as i, isIPhone14OrHigher as j, isDesktop as k, DEFAULT_ID_CAPTURE_THRESHOLDS as l, VisibilityProvider as m, api as n, setClient as o, warmupWasm as p, getApi as r, setToken as s, endpoints as t, WasmUtilProvider as u, StreamCanvasProcessingSession as v, requestCameraAccess as w, IncodeCanvas as x, StreamCanvasCapture as y };
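The new /omni endpoints back the ID-capture flow added in this release (front/back upload, image retrieval, and processing). A hedged sketch of how the internal api client might call them; the payload fields are assumptions modeled on the existing deepsightVideoImport call, not documented request bodies:

// Illustrative only - payload fields below are assumed, not taken from this diff.
declare const frontIdImage: string;   // e.g. base64 or encrypted image data
declare const hardwareToken: string;
await api.post(endpoints.frontId, { image: frontIdImage }, {
  headers: { "X-Incode-Hardware-Id": hardwareToken }, // header pattern borrowed from uploadDeepsightVideo
});
await api.post(endpoints.processId, {}); // assumed to trigger server-side ID processing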