bry-biometric-collector 3.3.0-RC11 → 3.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (21)
  1. package/dist/{CaptureBar_e46c45ec_3.3.0-RC11.js → CaptureBar_c06537b7_3.3.0.js} +1 -1
  2. package/dist/{DocumentCollector_de7baafa_3.3.0-RC11.js → DocumentCollector_8466b150_3.3.0.js} +171 -169
  3. package/dist/{FaceCollector_536c7465_3.3.0-RC11.js → FaceCollector_ef8c1d72_3.3.0.js} +679 -679
  4. package/dist/{FaceOval_73747bd6_3.3.0-RC11.js → FaceOval_8e4052f2_3.3.0.js} +2 -2
  5. package/dist/StreamCollector_786a300a_3.3.0.js +339 -0
  6. package/dist/{ValidationAlert_bba56eb3_3.3.0-RC11.js → ValidationAlert_f888dfba_3.3.0.js} +1 -1
  7. package/dist/{___vite-browser-external_commonjs-proxy_07f4ac3b_3.3.0-RC11.js → ___vite-browser-external_commonjs-proxy_c43aa35e_3.3.0.js} +41 -41
  8. package/dist/bry-biometric-collector-main.js +2 -2
  9. package/dist/{fingersApi_da2bebe6_3.3.0-RC11.js → fingersApi_affad03e_3.3.0.js} +2 -2
  10. package/dist/index.js +2 -2
  11. package/dist/{index_fcfc4554_3.3.0-RC11.js → index_4b99238a_3.3.0.js} +3 -3
  12. package/dist/{index_6fff4b84_3.3.0-RC11.js → index_d197fdcb_3.3.0.js} +3 -3
  13. package/dist/{main_f17fc861_3.3.0-RC11.js → main_bafbae03_3.3.0.js} +36 -37
  14. package/package.json +1 -1
  15. package/dist/StreamCollector_1adea6c1_3.3.0-RC11.js +0 -324
  16. package/dist/{en-us_c02e99cd_3.3.0-RC11.js → en-us_c02e99cd_3.3.0.js} +0 -0
  17. package/dist/{es-cl_6f3eda53_3.3.0-RC11.js → es-cl_6f3eda53_3.3.0.js} +0 -0
  18. package/dist/{face_api_0e3ce8c8_3.3.0-RC11.js → face_api_0e3ce8c8_3.3.0.js} +0 -0
  19. package/dist/{index-idlf-detector_39cfdffa_3.3.0-RC11.js → index-idlf-detector_39cfdffa_3.3.0.js} +0 -0
  20. package/dist/{index-idlf-detector_a819b3c8_3.3.0-RC11.js → index-idlf-detector_a819b3c8_3.3.0.js} +0 -0
  21. package/dist/{pt-br_e7d46668_3.3.0-RC11.js → pt-br_e7d46668_3.3.0.js} +0 -0

package/dist/{main_f17fc861_3.3.0-RC11.js → main_bafbae03_3.3.0.js} CHANGED
@@ -1,4 +1,4 @@
- import { detectSingleFace as SB, Box as RB } from "./face_api_0e3ce8c8_3.3.0-RC11.js";
+ import { detectSingleFace as SB, Box as RB } from "./face_api_0e3ce8c8_3.3.0.js";
  function $() {
  }
  function un(A, e) {
@@ -7217,18 +7217,18 @@ async function cf(A) {
  A && (e = A), wi("requesting weights from", e);
  try {
  await Promise.all([
- (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).nets.tinyFaceDetector.load(e),
- (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).loadFaceLandmarkModel(e)
+ (await import("./face_api_0e3ce8c8_3.3.0.js")).nets.tinyFaceDetector.load(e),
+ (await import("./face_api_0e3ce8c8_3.3.0.js")).loadFaceLandmarkModel(e)
  ]);
  } catch {
  bi("Your browser doesn't support WebGL! Trying to use WASM backend now...");
  try {
  await Promise.all([
- (await import("./index_fcfc4554_3.3.0-RC11.js")).setWasmPaths("https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/"),
- (await import("./index_6fff4b84_3.3.0-RC11.js")).setBackend("wasm"),
- (await import("./index_6fff4b84_3.3.0-RC11.js")).ready(),
- (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).nets.tinyFaceDetector.load(e),
- (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).loadFaceLandmarkModel(e)
+ (await import("./index_4b99238a_3.3.0.js")).setWasmPaths("https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/"),
+ (await import("./index_d197fdcb_3.3.0.js")).setBackend("wasm"),
+ (await import("./index_d197fdcb_3.3.0.js")).ready(),
+ (await import("./face_api_0e3ce8c8_3.3.0.js")).nets.tinyFaceDetector.load(e),
+ (await import("./face_api_0e3ce8c8_3.3.0.js")).loadFaceLandmarkModel(e)
  ]);
  } catch {
  throw bi("WASM backend failed to load, face detection will be disabled"), new Error("Face detection failed to load");
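
Only the chunk names change in this hunk; the loading strategy in cf() is the same in both versions: load the TinyFaceDetector and face-landmark weights on the default tf.js backend (normally WebGL), and if that fails, point tf.js at the WASM binaries on jsDelivr and retry. A de-minified sketch of that fallback, assuming the bundled chunks expose the public face-api.js and @tensorflow/tfjs APIs (the import names below are illustrative, not the bundle's own):

    import * as faceapi from "face-api.js";
    import * as tf from "@tensorflow/tfjs";
    import { setWasmPaths } from "@tensorflow/tfjs-backend-wasm";

    async function loadFaceModels(weightsUrl) {
      try {
        // Default path: whatever backend tf.js selected (normally WebGL).
        await Promise.all([
          faceapi.nets.tinyFaceDetector.load(weightsUrl),
          faceapi.loadFaceLandmarkModel(weightsUrl),
        ]);
      } catch {
        // WebGL unavailable: switch tf.js to the WASM backend served from jsDelivr, then retry.
        setWasmPaths("https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/");
        await tf.setBackend("wasm");
        await tf.ready();
        await Promise.all([
          faceapi.nets.tinyFaceDetector.load(weightsUrl),
          faceapi.loadFaceLandmarkModel(weightsUrl),
        ]);
      }
    }

If the retry also fails, the bundle logs that face detection will be disabled and throws "Face detection failed to load", as the last context line above shows.
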
@@ -7873,7 +7873,7 @@ async function wE(A, e, t, r) {
  const i = {
  audio: !1,
  video: {
- deviceId: t ?? "",
+ deviceId: t ? { exact: t } : void 0,
  facingMode: {},
  height: {
  min: 480,
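
Apart from the chunk renames, this is the clearest behavioural change in this file's hunks: a configured camera id is now passed to getUserMedia as an exact constraint instead of a bare string, and when no id is set the constraint is omitted rather than sent as an empty string. A minimal sketch of the resulting constraints object (function and variable names are illustrative):

    // wE() builds the getUserMedia constraints; only the deviceId handling changed.
    function buildVideoConstraints(deviceId) {
      return {
        audio: false,
        video: {
          // 3.3.0-RC11: deviceId: deviceId ?? ""   (treated as a soft hint)
          // 3.3.0:      { exact: deviceId } when an id is given, otherwise undefined
          deviceId: deviceId ? { exact: deviceId } : undefined,
          facingMode: {},
          height: { min: 480 /* further bounds not shown in this hunk */ },
        },
      };
    }

With an exact constraint, navigator.mediaDevices.getUserMedia() rejects with OverconstrainedError when the requested camera is unavailable, instead of silently falling back to a different device.
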
@@ -10316,13 +10316,13 @@ async function iQ(A) {
  e = 0.2;
  break;
  }
- return new (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).TinyFaceDetectorOptions({
+ return new (await import("./face_api_0e3ce8c8_3.3.0.js")).TinyFaceDetectorOptions({
  scoreThreshold: e
  });
  }
  async function Kf(A, e, t, r = !1) {
  if (t) {
- const l = (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).draw;
+ const l = (await import("./face_api_0e3ce8c8_3.3.0.js")).draw;
  l.drawDetections(e, A), new l.DrawFaceLandmarks(A.landmarks, { pointColor: "gold" }).draw(e);
  }
  const i = A.landmarks.positions, o = oQ(i), n = await nQ(i), c = A.detection.box, s = await aQ(c, n, e, t);
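
These call sites likewise only pick up the renamed face_api chunk. For orientation: iQ() appears to map a sensitivity setting (the collector exposes a face_detection_sensitivity attribute) to a TinyFaceDetector score threshold (0.2 in the branch visible above), and Kf() optionally draws the detection box and gold landmark points before taking measurements from the landmarks. Against the public face-api.js API this corresponds roughly to the sketch below; the threshold values for other sensitivity levels are not shown in this diff.

    import * as faceapi from "face-api.js";

    // Detect a single face at a given score threshold and optionally draw the
    // detection box plus gold landmark points onto an overlay canvas.
    async function detectAndMaybeDraw(video, overlayCanvas, scoreThreshold, drawOverlay) {
      const options = new faceapi.TinyFaceDetectorOptions({ scoreThreshold });
      const result = await faceapi.detectSingleFace(video, options).withFaceLandmarks();
      if (result && drawOverlay) {
        faceapi.draw.drawDetections(overlayCanvas, result);
        new faceapi.draw.DrawFaceLandmarks(result.landmarks, { pointColor: "gold" }).draw(overlayCanvas);
      }
      return result;
    }
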
@@ -10333,17 +10333,17 @@ function oQ(A) {
  return Ro(e, t);
  }
  async function nQ(A) {
- const e = (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).Point;
+ const e = (await import("./face_api_0e3ce8c8_3.3.0.js")).Point;
  let t = A[37].add(A[38]).add(A[40]).add(A[41]), r = A[43].add(A[44]).add(A[46]).add(A[47]);
  t = t.div(new e(4, 4)), r = r.div(new e(4, 4));
  const i = Ro(r, t);
  return ec.set(i), i;
  }
  async function aQ(A, e, t, r) {
- const i = (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).Point;
+ const i = (await import("./face_api_0e3ce8c8_3.3.0.js")).Point;
  let o = 2.5, n = 0.8;
  const c = A.topLeft.x - e, s = A.topLeft.y - o * e, l = A.bottomLeft.x - e, C = A.bottomLeft.y + n * e, h = A.bottomRight.x + e, b = A.bottomRight.y + e, v = Ro(new i(l, C), new i(c, s)), D = Ro(new i(l, C), new i(h, b)), U = t.getContext("2d");
- return U.beginPath(), U.strokeStyle = "gold", Ac.set(new (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).Box({ x: c, y: s, width: D, height: v })), U.rect(c, s, D, v), r && U.stroke(), {
+ return U.beginPath(), U.strokeStyle = "gold", Ac.set(new (await import("./face_api_0e3ce8c8_3.3.0.js")).Box({ x: c, y: s, width: D, height: v })), U.rect(c, s, D, v), r && U.stroke(), {
  topLeftX: A.topLeft.x,
  topLeftY: A.topLeft.y,
  bottomRightX: A.bottomRight.x,
@@ -10416,7 +10416,7 @@ async function BQ(A, e, t) {
  height: e.height * C
  }), b = document.createElement("canvas");
  b.width = h.width, b.height = h.height, b.getContext("2d")?.clearRect(0, 0, b.width, b.height), s && lQ(s, A, c);
- const [D] = await (await import("./face_api_0e3ce8c8_3.3.0-RC11.js")).extractFaces(c, [h]);
+ const [D] = await (await import("./face_api_0e3ce8c8_3.3.0.js")).extractFaces(c, [h]);
  return D;
  }
  function lQ(A, e, t) {
@@ -23324,7 +23324,7 @@ function hc(A) {
  return this;
  }
  async function pc(A, e) {
- return await import("./fingersApi_da2bebe6_3.3.0-RC11.js").then((t) => t.captureSingleFinger(A, e));
+ return await import("./fingersApi_affad03e_3.3.0.js").then((t) => t.captureSingleFinger(A, e));
  }
  function Hd(A, e, t) {
  let r, i, o, n, c, s, l, C, h, b, v, D, U, f, P, W, Z, wA, IA, eA, cA, V, QA, GA, ge, Le, He, iA, Ge, ae, mA, le, Et, S, k;
@@ -23332,7 +23332,7 @@ function Hd(A, e, t) {
  let { lottie_color: oA = "71 89 124" } = e, { extension_name: tA = "BryWebExtension" } = e, { extension_url_chrome: uA } = e, { extension_url_mozilla: BA } = e, { min_nfiq_acceptable: fA = 3 } = e;
  kE(fA);
  let { show_theme_picker: aA = "true" } = e, { video_dimensions: DA } = e, { enable_minutiae: LA } = e, { collector_select: RA = "BOTH" } = e, { import_lottie: MA = "true" } = e, { hide_tabs: JA = "false" } = e, { import_extension_script: Ae = "false" } = e, { extension_url_script: FA } = e, { liveness_enable: ee = "false" } = e, { weights_path: se = "https://cdn.jsdelivr.net/npm/bry-biometric-collector@latest/dist/weights/" } = e, { evaluate_photo: ZA = "true" } = e, { show_capture_button: de = "true" } = e, { show_unconformities: YA = "true" } = e, { video_overlay: HA = "white" } = e, { video_overlay_transparency: Ke = 0 } = e, { show_config_button: Ce = "true" } = e, { show_auto_capture_button: At = "false" } = e, { show_camera_button: De = "true" } = e, { language: $A = yI() } = e, { show_justification_field: vA = "true" } = e, { min_eye_to_eye_dist: ve = 0 } = e, { show_watermark: Oe = "true" } = e, { face_detection_sensitivity: dA = "permissive" } = e, { hidden: kA = "false" } = e, { liveness_blob_size: jA = "normal" } = e, { camera_device_id: ct = "" } = e;
- Wr("en", () => import("./en-us_c02e99cd_3.3.0-RC11.js")), Wr("pt", () => import("./pt-br_e7d46668_3.3.0-RC11.js")), Wr("es", () => import("./es-cl_6f3eda53_3.3.0-RC11.js")), Nn({
+ Wr("en", () => import("./en-us_c02e99cd_3.3.0.js")), Wr("pt", () => import("./pt-br_e7d46668_3.3.0.js")), Wr("es", () => import("./es-cl_6f3eda53_3.3.0.js")), Nn({
  fallbackLocale: "pt",
  initialLocale: $A
  });
@@ -23552,13 +23552,13 @@ function Hd(A, e, t) {
  return bA(Pn, C = w, C), this;
  }
  async function Ut() {
- return t(29, sr = (await import("./FaceCollector_536c7465_3.3.0-RC11.js")).default), this;
+ return t(29, sr = (await import("./FaceCollector_ef8c1d72_3.3.0.js")).default), this;
  }
  async function Cr() {
- return t(31, St = (await import("./DocumentCollector_de7baafa_3.3.0-RC11.js")).default), this;
+ return t(31, St = (await import("./DocumentCollector_8466b150_3.3.0.js")).default), this;
  }
  async function Bt() {
- return t(32, fe = (await import("./StreamCollector_1adea6c1_3.3.0-RC11.js")).default), this;
+ return t(32, fe = (await import("./StreamCollector_786a300a_3.3.0.js")).default), this;
  }
  function Lt(w) {
  Tt(w);
@@ -24265,9 +24265,9 @@ class wc extends ye {
  }
  }
  customElements.define("biometric-collector", wc);
- Wr("en", () => import("./en-us_c02e99cd_3.3.0-RC11.js"));
- Wr("pt", () => import("./pt-br_e7d46668_3.3.0-RC11.js"));
- Wr("es", () => import("./es-cl_6f3eda53_3.3.0-RC11.js"));
+ Wr("en", () => import("./en-us_c02e99cd_3.3.0.js"));
+ Wr("pt", () => import("./pt-br_e7d46668_3.3.0.js"));
+ Wr("es", () => import("./es-cl_6f3eda53_3.3.0.js"));
  Nn({
  fallbackLocale: "pt",
  initialLocale: yI()
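
The locale chunks themselves are renamed but content-identical (files 16, 17 and 21 above change by +0 -0), so the i18n wiring is unchanged: each language bundle is registered with a lazy loader and Portuguese remains the fallback. Assuming Wr and Nn are svelte-i18n's register and init (which the call shapes suggest, though the minified bundle does not name them), the de-minified form would be:

    import { register, init, getLocaleFromNavigator } from "svelte-i18n";

    register("en", () => import("./en-us_c02e99cd_3.3.0.js"));
    register("pt", () => import("./pt-br_e7d46668_3.3.0.js"));
    register("es", () => import("./es-cl_6f3eda53_3.3.0.js"));

    init({
      fallbackLocale: "pt",
      // Stands in for yI() above, whose exact behaviour is not visible in this diff.
      initialLocale: getLocaleFromNavigator(),
    });
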
@@ -24404,8 +24404,7 @@ export {
  oe as b7,
  Jt as b8,
  Pf as b9,
- sf as bA,
- Of as bB,
+ Of as bA,
  Bf as ba,
  lf as bb,
  me as bc,
@@ -24420,18 +24419,18 @@ export {
  Cf as bl,
  rc as bm,
  pf as bn,
- wE as bo,
- Jf as bp,
- sQ as bq,
- EE as br,
- un as bs,
- _a as bt,
- $a as bu,
- MB as bv,
- ic as bw,
- tc as bx,
- _e as by,
- Ws as bz,
+ Jf as bo,
+ sQ as bp,
+ EE as bq,
+ un as br,
+ _a as bs,
+ $a as bt,
+ MB as bu,
+ ic as bv,
+ tc as bw,
+ _e as bx,
+ Ws as by,
+ sf as bz,
  yA as c,
  bi as d,
  IE as e,

package/package.json CHANGED
@@ -2,7 +2,7 @@
  "name": "bry-biometric-collector",
  "private": false,
  "description": "BRy web-collector component for biometric capture.",
- "version": "3.3.0-RC11",
+ "version": "3.3.0",
  "main": "dist/bry-biometric-collector-main.js",
  "type": "module",
  "license": "UNLICENSED",

package/dist/StreamCollector_1adea6c1_3.3.0-RC11.js DELETED
@@ -1,324 +0,0 @@
- import { S as me, x as _e, y as be, z as pe, B as N, C as I, D as V, N as A, E as te, bd as f, G as ae, X as D, a0 as p, a1 as H, a2 as v, a3 as Q, H as re, I as b, aa as F, am as W, au as X, av as Z, ah as z, T as E, ar as S, a9 as J, aA as ge, f as he, aB as m, aF as ve, l as Y, aK as we, aM as x, b4 as Ee, Z as M, _ as O, a4 as T, w as ye, aQ as Ce, bc as Ie } from "./main_f17fc861_3.3.0-RC11.js";
- import { V as ze, C as Se } from "./ValidationAlert_bba56eb3_3.3.0-RC11.js";
- import { F as De } from "./FaceOval_73747bd6_3.3.0-RC11.js";
- import "./face_api_0e3ce8c8_3.3.0-RC11.js";
- function $(s) {
- let t, a, r, l, _;
- return a = new ze({}), l = new De({
- props: { borderColor: (
- /*ovalColor*/
- s[5]
- ) }
- }), {
- c() {
- t = V("div"), M(a.$$.fragment), r = A(), M(l.$$.fragment), ae(t, "class", "absolute inset-0 flex flex-col items-center mt-[4%] mb-[8%] z-50");
- },
- m(c, o) {
- N(c, t, o), O(a, t, null), D(t, r), O(l, t, null), _ = !0;
- },
- p(c, o) {
- const n = {};
- o & /*ovalColor*/
- 32 && (n.borderColor = /*ovalColor*/
- c[5]), l.$set(n);
- },
- i(c) {
- _ || (p(a.$$.fragment, c), p(l.$$.fragment, c), _ = !0);
- },
- o(c) {
- v(a.$$.fragment, c), v(l.$$.fragment, c), _ = !1;
- },
- d(c) {
- c && re(t), T(a), T(l);
- }
- };
- }
- function ee(s) {
- let t, a;
- return t = new Se({ props: { handler: (
- /*func*/
- s[12]
- ) } }), {
- c() {
- M(t.$$.fragment);
- },
- m(r, l) {
- O(t, r, l), a = !0;
- },
- p: te,
- i(r) {
- a || (p(t.$$.fragment, r), a = !0);
- },
- o(r) {
- v(t.$$.fragment, r), a = !1;
- },
- d(r) {
- T(t, r);
- }
- };
- }
- function ke(s) {
- let t, a, r, l, _, c, o = (
- /*$ready*/
- s[7] && !/*idrndClosed*/
- s[3] && !/*idrndLoading*/
- s[4] && $(s)
- ), n = (
- /*idrndClosed*/
- s[3] && !/*idrndLoading*/
- s[4] && ee(s)
- );
- return {
- c() {
- t = V("div"), o && o.c(), a = A(), r = V("idlive-face-capture"), _ = A(), n && n.c(), this.c = te, f(r, "class", "w-full h-full"), f(r, "mask_hidden", ""), f(r, "auto_close_disabled", ""), f(
- r,
- "device_id",
- /*$selectedVideoDeviceId*/
- s[6]
- ), f(r, "face_not_centered_check_enabled", ""), f(r, "sunglasses_detected_check_enabled", ""), f(r, "mouth_open_check_enabled", ""), f(r, "face_blurry_check_enabled", ""), f(r, "dark_image_check_enabled", ""), f(r, "eyes_closed_check_enabled", ""), f(
- r,
- "payload_size",
- /*livenessBlobSize*/
- s[1]
- ), f(r, "style", l = /*idrndClosed*/
- s[3] ? "opacity: 0; pointer-events: none;" : "opacity: 1;"), ae(t, "class", "flex justify-center w-full h-full relative bg-black");
- },
- m(i, d) {
- N(i, t, d), o && o.m(t, null), D(t, a), D(t, r), s[11](r), D(t, _), n && n.m(t, null), c = !0;
- },
- p(i, [d]) {
- /*$ready*/
- i[7] && !/*idrndClosed*/
- i[3] && !/*idrndLoading*/
- i[4] ? o ? (o.p(i, d), d & /*$ready, idrndClosed, idrndLoading*/
- 152 && p(o, 1)) : (o = $(i), o.c(), p(o, 1), o.m(t, a)) : o && (H(), v(o, 1, 1, () => {
- o = null;
- }), Q()), (!c || d & /*$selectedVideoDeviceId*/
- 64) && f(
- r,
- "device_id",
- /*$selectedVideoDeviceId*/
- i[6]
- ), (!c || d & /*livenessBlobSize*/
- 2) && f(
- r,
- "payload_size",
- /*livenessBlobSize*/
- i[1]
- ), (!c || d & /*idrndClosed*/
- 8 && l !== (l = /*idrndClosed*/
- i[3] ? "opacity: 0; pointer-events: none;" : "opacity: 1;")) && f(r, "style", l), /*idrndClosed*/
- i[3] && !/*idrndLoading*/
- i[4] ? n ? (n.p(i, d), d & /*idrndClosed, idrndLoading*/
- 24 && p(n, 1)) : (n = ee(i), n.c(), p(n, 1), n.m(t, null)) : n && (H(), v(n, 1, 1, () => {
- n = null;
- }), Q());
- },
- i(i) {
- c || (p(o), p(n), c = !0);
- },
- o(i) {
- v(o), v(n), c = !1;
- },
- d(i) {
- i && re(t), o && o.d(), s[11](null), n && n.d();
- }
- };
- }
- function Le() {
- ({}).VITE_USE_DEV === "true" ? import("./index-idlf-detector_39cfdffa_3.3.0-RC11.js") : import("./index-idlf-detector_a819b3c8_3.3.0-RC11.js");
- }
- function Be(s, t, a) {
- let r, l, _, c, o, n, i, d, k;
- b(s, F, (e) => a(6, r = e)), b(s, W, (e) => a(15, l = e)), b(s, X, (e) => a(17, c = e)), b(s, Z, (e) => a(18, o = e)), b(s, z, (e) => a(19, n = e)), b(s, E, (e) => a(7, i = e)), b(s, S, (e) => a(20, d = e)), b(s, J, (e) => a(21, k = e));
- let { livenessBlobSize: R } = t, { cameraDeviceId: y } = t;
- const L = ye("data:image/png;base64");
- b(s, L, (e) => a(16, _ = e));
- let { widgetInstance: u } = t, K = !1, g = !0, h = !0, w = !0, B = "text-blue-400";
- ge(async () => {
- he("Mounting Stream Collector!"), m(S, d = !0, d), m(E, i = !1, i), se(), Le(), ne();
- });
- function se() {
- w = !1, m(J, k = ve.FRONT_CAMERA, k), m(z, n = "faceCollector.stayInFrame", n);
- }
- function ne() {
- u.addEventListener("open", async (e) => {
- a(3, g = !1), a(4, h = !1), m(E, i = !0, i), m(S, d = !1, d), window.dispatchEvent(new CustomEvent(
- "stream-ready",
- {
- detail: {
- message: "Stream de vídeo pronto para captura"
- }
- }
- ));
- }), u.addEventListener("error", (e) => {
- Y("Stream Collector error:", e), a(3, g = !0), a(4, h = !1), m(E, i = !1, i), window.dispatchEvent(new CustomEvent(
- "stream-error",
- {
- detail: {
- message: e.detail[0]?.message || "Erro na câmera",
- error: e
- }
- }
- ));
- }), u.addEventListener("faceDetection", (e) => {
- m(
- z,
- n = e.detail[0].errors[0] ? "faceCollector.stayInFrame" : "faceCollector.capturing",
- n
- ), a(5, B = e.detail[0].errors[0] ? "text-blue-400" : "text-green-400");
- }), u.addEventListener("detection", (e) => {
- m(
- z,
- n = e.detail[0].errors[0] ? "faceCollector.stayInFrame" : "faceCollector.capturing",
- n
- ), a(5, B = e.detail[0].errors[0] ? "text-blue-400" : "text-green-400");
- }), u.addEventListener("close", async (e) => {
- a(3, g = !0), a(4, h = !1), console.log("Stream closed:", w), w || window.dispatchEvent(new CustomEvent(
- "stream-lost-focus",
- {
- detail: { message: "Stream de vídeo foi fechado" }
- }
- ));
- }), u.addEventListener("initialize", async (e) => {
- K = !0, m(Z, o = !0, o);
- try {
- u.setLicense(we, "faceDetector");
- } catch (C) {
- Y("Error setting license:", C);
- }
- !w && K && await P();
- }), u.addEventListener("capture", async (e) => {
- const { encryptedFile: C, photo: de } = e.detail[0];
- m(X, c = C, c);
- const U = (await x(de)).split(",");
- w = !0, m(L, _ = U[0], _), m(W, l = U[1], l);
- try {
- window.dispatchEvent(new CustomEvent(
- "stream-capture",
- {
- detail: {
- image: l,
- blob: (await x(C)).split(",")[1],
- status: "success"
- }
- }
- ));
- } catch (q) {
- let G = q instanceof Error ? q.message : "UNKNOWN", fe = Ce[G] || "Erro desconhecido";
- window.dispatchEvent(new CustomEvent(
- "stream-capture",
- {
- detail: {
- errorCode: G,
- translatedError: fe,
- status: "error"
- }
- }
- ));
- }
- });
- }
- async function P() {
- y && m(F, r = y, r), a(4, h = !0), g && u && "openCamera" in u && u.openCamera(), a(3, g = !1);
- }
- async function ie() {
- u.takePhoto();
- }
- async function j() {
- !g && !h || (a(4, h = !0), m(F, r = "", r), await P());
- }
- const oe = E.subscribe((e) => {
- e && window.dispatchEvent(new CustomEvent("brycc-camera-ready"));
- }), ce = S.subscribe((e) => {
- e && window.dispatchEvent(new CustomEvent("brycc-face-initialized"));
- });
- Ee(() => {
- oe(), ce();
- });
- function le(e) {
- Ie[e ? "unshift" : "push"](() => {
- u = e, a(0, u);
- });
- }
- const ue = async () => {
- await j();
- };
- return s.$$set = (e) => {
- "livenessBlobSize" in e && a(1, R = e.livenessBlobSize), "cameraDeviceId" in e && a(9, y = e.cameraDeviceId), "widgetInstance" in e && a(0, u = e.widgetInstance);
- }, [
- u,
- R,
- j,
- g,
- h,
- B,
- r,
- i,
- L,
- y,
- ie,
- le,
- ue
- ];
- }
- class Fe extends me {
- constructor(t) {
- super(), _e(
- this,
- {
- target: this.shadowRoot,
- props: pe(this.attributes),
- customElement: !0
- },
- Be,
- ke,
- be,
- {
- livenessBlobSize: 1,
- cameraDeviceId: 9,
- widgetInstance: 0,
- capture: 10,
- reOpenCamera: 2
- },
- null
- ), t && (t.target && N(t.target, this, t.anchor), t.props && (this.$set(t.props), I()));
- }
- static get observedAttributes() {
- return [
- "livenessBlobSize",
- "cameraDeviceId",
- "widgetInstance",
- "capture",
- "reOpenCamera"
- ];
- }
- get livenessBlobSize() {
- return this.$$.ctx[1];
- }
- set livenessBlobSize(t) {
- this.$$set({ livenessBlobSize: t }), I();
- }
- get cameraDeviceId() {
- return this.$$.ctx[9];
- }
- set cameraDeviceId(t) {
- this.$$set({ cameraDeviceId: t }), I();
- }
- get widgetInstance() {
- return this.$$.ctx[0];
- }
- set widgetInstance(t) {
- this.$$set({ widgetInstance: t }), I();
- }
- get capture() {
- return this.$$.ctx[10];
- }
- get reOpenCamera() {
- return this.$$.ctx[2];
- }
- }
- customElements.define("stream-collector", Fe);
- export {
- Fe as default
- };
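
The RC11 StreamCollector chunk above is removed in favour of StreamCollector_786a300a_3.3.0.js (+339 lines, file 5 in the list), whose contents this diff does not show; the main chunk's Bt() loader now imports the new file (see the @@ -23552 hunk). Judging from the removed source, the stream-collector element talks to the host page through window-level CustomEvents ("stream-ready", "stream-error", "stream-lost-focus", "stream-capture", plus "brycc-camera-ready" and "brycc-face-initialized"). A host page consuming those events would look roughly like the sketch below; it is based only on the RC11 source shown above, so the 3.3.0 payloads may differ, and sendToBackend is a hypothetical placeholder:

    window.addEventListener("stream-ready", () => {
      console.log("video stream ready for capture");
    });

    window.addEventListener("stream-capture", async (event) => {
      const { status, image, blob, errorCode, translatedError } = event.detail;
      if (status === "success") {
        // image: base64 photo; blob: base64 encrypted liveness payload.
        await sendToBackend({ image, blob });
      } else {
        console.error("capture failed:", errorCode, translatedError);
      }
    });

    window.addEventListener("stream-error", (event) => {
      console.error("camera error:", event.detail.message);
    });

    window.addEventListener("stream-lost-focus", (event) => {
      // The stream was closed before a capture completed.
      console.warn(event.detail.message);
    });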