@genpact/genome.mfe.mgtwin-app 1.1.295-alpha → 1.1.296-alpha
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry, and is provided for informational purposes only.
- package/dist/{ArrowLeftCircleIcon-de9fd811.mjs → ArrowLeftCircleIcon-aff6c45f.mjs} +1 -1
- package/dist/{AssessmentAMA-389ff21d.mjs → AssessmentAMA-26f1d9f6.mjs} +4 -4
- package/dist/{AssessmentSimulation-689a3730.mjs → AssessmentSimulation-3d0db758.mjs} +401 -392
- package/dist/{EditSkillAdminData-f9eb4a8a.mjs → EditSkillAdminData-3d2b4cc5.mjs} +3 -3
- package/dist/{EditSkillData-3f273102.mjs → EditSkillData-63f9ca4c.mjs} +3 -3
- package/dist/{HomeView-a1d3ccba.mjs → HomeView-0b96da78.mjs} +5 -5
- package/dist/{Loading-55808073.mjs → Loading-4b5bead3.mjs} +1 -1
- package/dist/{LoadingSimulation-0bb7742a.mjs → LoadingSimulation-10f4935d.mjs} +1 -1
- package/dist/{Modal-a494cc45.mjs → Modal-3cef5b9d.mjs} +1 -1
- package/dist/{PromptTesting-3fbbb758.mjs → PromptTesting-dcbfff81.mjs} +2 -2
- package/dist/{SkillAutomation-13590a50.mjs → SkillAutomation-2a42e15a.mjs} +3 -3
- package/dist/{SkillData-d2136f5d.mjs → SkillData-835ec5aa.mjs} +3 -3
- package/dist/{UnderMaintenance-ed05379e.mjs → UnderMaintenance-d3c18a6c.mjs} +1 -1
- package/dist/{_commonjsHelpers-56974270.mjs → _commonjsHelpers-d76b39c8.mjs} +1 -1
- package/dist/{api-77b5b181.mjs → api-80456485.mjs} +1 -1
- package/dist/{main-be295e1d.mjs → main-b5660b8a.mjs} +1149 -1149
- package/dist/mgtwin.es.min.js +1 -1
- package/dist/mgtwin.umd.min.js +10 -10
- package/package.json +1 -1
@@ -1,8 +1,8 @@
-import { _ as Cl, a as cp, L as yl } from "./LoadingSimulation-
-import { _ as Pl, a as up, Q as zt, M as ba } from "./Modal-
-import { d as we, r as T, H as
-import { g as Bn, p as
-import { c as q, a as Il, r as _l } from "./_commonjsHelpers-
+import { _ as Cl, a as cp, L as yl } from "./LoadingSimulation-10f4935d.mjs";
+import { _ as Pl, a as up, Q as zt, M as ba } from "./Modal-3cef5b9d.mjs";
+import { d as we, r as T, H as yt, o as D, c as k, a as P, k as Et, u as Oe, C as Ht, q as Re, y as qo, h as Se, s as pp, t as Je, b as O, f as Ce, i as jn, I as Ji, F as Fo, e as Uo, w as Zo, J as Rl, p as Wn, g as $n, G as Zi, z as El, K as wl, n as jt, m as dp, L as lp, l as Al } from "./main-b5660b8a.mjs";
+import { g as Bn, p as Pt } from "./api-80456485.mjs";
+import { c as q, a as Il, r as _l } from "./_commonjsHelpers-d76b39c8.mjs";
 import { _ as Vt } from "./_plugin-vue_export-helper-dad06003.mjs";
 import "./loadernew-ac153a51.mjs";
 const Tl = { class: "flex h-14 w-full flex-1 items-center justify-between bg-[#021c42] md:pr-6 lg:pr-6" }, bl = ["href"], Ml = /* @__PURE__ */ P("img", {
@@ -21,7 +21,7 @@ const Tl = { class: "flex h-14 w-full flex-1 items-center justify-between bg-[#0
 __name: "headerSimulation",
 setup(t) {
 const e = T(""), r = T("");
-return
+return yt === "prod" ? e.value = "https://genome.genpact.com" : e.value = "https://genome-dev.genpact.com", yt === "prod" ? r.value = "https://genome.genpact.com/#aiguru" : r.value = "https://genome-dev.genpact.com/#aiguru", (i, u) => (D(), k("nav", Tl, [
 P("a", {
 class: "px-2 md:px-0 lg:px-0",
 href: e.value,
@@ -47,17 +47,17 @@ function Ll() {
 throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");
 return Fi(jl);
 }
-var
+var Xe = {}, wt = {}, rs = {};
 Object.defineProperty(rs, "__esModule", {
 value: !0
 });
 rs.default = void 0;
 var xl = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;
 rs.default = xl;
-Object.defineProperty(
+Object.defineProperty(wt, "__esModule", {
 value: !0
 });
-
+wt.default = void 0;
 var Bl = ql(rs);
 function ql(t) {
 return t && t.__esModule ? t : { default: t };
@@ -66,13 +66,13 @@ function Fl(t) {
 return typeof t == "string" && Bl.default.test(t);
 }
 var Ul = Fl;
-
-Object.defineProperty(
+wt.default = Ul;
+Object.defineProperty(Xe, "__esModule", {
 value: !0
 });
-
-
-var Hl = Vl(
+Xe.default = void 0;
+Xe.unsafeStringify = vp;
+var Hl = Vl(wt);
 function Vl(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -89,12 +89,12 @@ function Wl(t, e = 0) {
 return r;
 }
 var $l = Wl;
-
+Xe.default = $l;
 Object.defineProperty(es, "__esModule", {
 value: !0
 });
 es.default = void 0;
-var Kl = Gl(ts), Jl =
+var Kl = Gl(ts), Jl = Xe;
 function Gl(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -123,12 +123,12 @@ function Ql(t, e, r) {
 }
 var Xl = Ql;
 es.default = Xl;
-var ns = {},
+var ns = {}, Ge = {}, Kn = {};
 Object.defineProperty(Kn, "__esModule", {
 value: !0
 });
 Kn.default = void 0;
-var Zl = Yl(
+var Zl = Yl(wt);
 function Yl(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -141,12 +141,12 @@ function eh(t) {
 }
 var th = eh;
 Kn.default = th;
-Object.defineProperty(
+Object.defineProperty(Ge, "__esModule", {
 value: !0
 });
-
-
-var rh =
+Ge.URL = Ge.DNS = void 0;
+Ge.default = oh;
+var rh = Xe, nh = ih(Kn);
 function ih(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -158,9 +158,9 @@ function sh(t) {
 return e;
 }
 const fp = "6ba7b810-9dad-11d1-80b4-00c04fd430c8";
-
+Ge.DNS = fp;
 const gp = "6ba7b811-9dad-11d1-80b4-00c04fd430c8";
-
+Ge.URL = gp;
 function oh(t, e, r) {
 function i(u, a, o, n) {
 var s;
@@ -211,7 +211,7 @@ function uh(t, e) {
|
|
|
211
211
|
let r = 1732584193, i = -271733879, u = -1732584194, a = 271733878;
|
|
212
212
|
for (let o = 0; o < t.length; o += 16) {
|
|
213
213
|
const n = r, s = i, c = u, p = a;
|
|
214
|
-
r = oe(r, i, u, a, t[o], 7, -680876936), a = oe(a, r, i, u, t[o + 1], 12, -389564586), u = oe(u, a, r, i, t[o + 2], 17, 606105819), i = oe(i, u, a, r, t[o + 3], 22, -1044525330), r = oe(r, i, u, a, t[o + 4], 7, -176418897), a = oe(a, r, i, u, t[o + 5], 12, 1200080426), u = oe(u, a, r, i, t[o + 6], 17, -1473231341), i = oe(i, u, a, r, t[o + 7], 22, -45705983), r = oe(r, i, u, a, t[o + 8], 7, 1770035416), a = oe(a, r, i, u, t[o + 9], 12, -1958414417), u = oe(u, a, r, i, t[o + 10], 17, -42063), i = oe(i, u, a, r, t[o + 11], 22, -1990404162), r = oe(r, i, u, a, t[o + 12], 7, 1804603682), a = oe(a, r, i, u, t[o + 13], 12, -40341101), u = oe(u, a, r, i, t[o + 14], 17, -1502002290), i = oe(i, u, a, r, t[o + 15], 22, 1236535329), r = ae(r, i, u, a, t[o + 1], 5, -165796510), a = ae(a, r, i, u, t[o + 6], 9, -1069501632), u = ae(u, a, r, i, t[o + 11], 14, 643717713), i = ae(i, u, a, r, t[o], 20, -373897302), r = ae(r, i, u, a, t[o + 5], 5, -701558691), a = ae(a, r, i, u, t[o + 10], 9, 38016083), u = ae(u, a, r, i, t[o + 15], 14, -660478335), i = ae(i, u, a, r, t[o + 4], 20, -405537848), r = ae(r, i, u, a, t[o + 9], 5, 568446438), a = ae(a, r, i, u, t[o + 14], 9, -1019803690), u = ae(u, a, r, i, t[o + 3], 14, -187363961), i = ae(i, u, a, r, t[o + 8], 20, 1163531501), r = ae(r, i, u, a, t[o + 13], 5, -1444681467), a = ae(a, r, i, u, t[o + 2], 9, -51403784), u = ae(u, a, r, i, t[o + 7], 14, 1735328473), i = ae(i, u, a, r, t[o + 12], 20, -1926607734), r = ce(r, i, u, a, t[o + 5], 4, -378558), a = ce(a, r, i, u, t[o + 8], 11, -2022574463), u = ce(u, a, r, i, t[o + 11], 16, 1839030562), i = ce(i, u, a, r, t[o + 14], 23, -35309556), r = ce(r, i, u, a, t[o + 1], 4, -1530992060), a = ce(a, r, i, u, t[o + 4], 11, 1272893353), u = ce(u, a, r, i, t[o + 7], 16, -155497632), i = ce(i, u, a, r, t[o + 10], 23, -1094730640), r = ce(r, i, u, a, t[o + 13], 4, 681279174), a = ce(a, r, i, u, t[o], 11, -358537222), u = ce(u, a, r, i, t[o + 3], 16, -722521979), i = ce(i, u, a, r, t[o + 6], 23, 76029189), r = ce(r, i, u, a, t[o + 9], 4, -640364487), a = ce(a, r, i, u, t[o + 12], 11, -421815835), u = ce(u, a, r, i, t[o + 15], 16, 530742520), i = ce(i, u, a, r, t[o + 2], 23, -995338651), r = ue(r, i, u, a, t[o], 6, -198630844), a = ue(a, r, i, u, t[o + 7], 10, 1126891415), u = ue(u, a, r, i, t[o + 14], 15, -1416354905), i = ue(i, u, a, r, t[o + 5], 21, -57434055), r = ue(r, i, u, a, t[o + 12], 6, 1700485571), a = ue(a, r, i, u, t[o + 3], 10, -1894986606), u = ue(u, a, r, i, t[o + 10], 15, -1051523), i = ue(i, u, a, r, t[o + 1], 21, -2054922799), r = ue(r, i, u, a, t[o + 8], 6, 1873313359), a = ue(a, r, i, u, t[o + 15], 10, -30611744), u = ue(u, a, r, i, t[o + 6], 15, -1560198380), i = ue(i, u, a, r, t[o + 13], 21, 1309151649), r = ue(r, i, u, a, t[o + 4], 6, -145523070), a = ue(a, r, i, u, t[o + 11], 10, -1120210379), u = ue(u, a, r, i, t[o + 2], 15, 718787259), i = ue(i, u, a, r, t[o + 9], 21, -343485551), r =
|
|
214
|
+
r = oe(r, i, u, a, t[o], 7, -680876936), a = oe(a, r, i, u, t[o + 1], 12, -389564586), u = oe(u, a, r, i, t[o + 2], 17, 606105819), i = oe(i, u, a, r, t[o + 3], 22, -1044525330), r = oe(r, i, u, a, t[o + 4], 7, -176418897), a = oe(a, r, i, u, t[o + 5], 12, 1200080426), u = oe(u, a, r, i, t[o + 6], 17, -1473231341), i = oe(i, u, a, r, t[o + 7], 22, -45705983), r = oe(r, i, u, a, t[o + 8], 7, 1770035416), a = oe(a, r, i, u, t[o + 9], 12, -1958414417), u = oe(u, a, r, i, t[o + 10], 17, -42063), i = oe(i, u, a, r, t[o + 11], 22, -1990404162), r = oe(r, i, u, a, t[o + 12], 7, 1804603682), a = oe(a, r, i, u, t[o + 13], 12, -40341101), u = oe(u, a, r, i, t[o + 14], 17, -1502002290), i = oe(i, u, a, r, t[o + 15], 22, 1236535329), r = ae(r, i, u, a, t[o + 1], 5, -165796510), a = ae(a, r, i, u, t[o + 6], 9, -1069501632), u = ae(u, a, r, i, t[o + 11], 14, 643717713), i = ae(i, u, a, r, t[o], 20, -373897302), r = ae(r, i, u, a, t[o + 5], 5, -701558691), a = ae(a, r, i, u, t[o + 10], 9, 38016083), u = ae(u, a, r, i, t[o + 15], 14, -660478335), i = ae(i, u, a, r, t[o + 4], 20, -405537848), r = ae(r, i, u, a, t[o + 9], 5, 568446438), a = ae(a, r, i, u, t[o + 14], 9, -1019803690), u = ae(u, a, r, i, t[o + 3], 14, -187363961), i = ae(i, u, a, r, t[o + 8], 20, 1163531501), r = ae(r, i, u, a, t[o + 13], 5, -1444681467), a = ae(a, r, i, u, t[o + 2], 9, -51403784), u = ae(u, a, r, i, t[o + 7], 14, 1735328473), i = ae(i, u, a, r, t[o + 12], 20, -1926607734), r = ce(r, i, u, a, t[o + 5], 4, -378558), a = ce(a, r, i, u, t[o + 8], 11, -2022574463), u = ce(u, a, r, i, t[o + 11], 16, 1839030562), i = ce(i, u, a, r, t[o + 14], 23, -35309556), r = ce(r, i, u, a, t[o + 1], 4, -1530992060), a = ce(a, r, i, u, t[o + 4], 11, 1272893353), u = ce(u, a, r, i, t[o + 7], 16, -155497632), i = ce(i, u, a, r, t[o + 10], 23, -1094730640), r = ce(r, i, u, a, t[o + 13], 4, 681279174), a = ce(a, r, i, u, t[o], 11, -358537222), u = ce(u, a, r, i, t[o + 3], 16, -722521979), i = ce(i, u, a, r, t[o + 6], 23, 76029189), r = ce(r, i, u, a, t[o + 9], 4, -640364487), a = ce(a, r, i, u, t[o + 12], 11, -421815835), u = ce(u, a, r, i, t[o + 15], 16, 530742520), i = ce(i, u, a, r, t[o + 2], 23, -995338651), r = ue(r, i, u, a, t[o], 6, -198630844), a = ue(a, r, i, u, t[o + 7], 10, 1126891415), u = ue(u, a, r, i, t[o + 14], 15, -1416354905), i = ue(i, u, a, r, t[o + 5], 21, -57434055), r = ue(r, i, u, a, t[o + 12], 6, 1700485571), a = ue(a, r, i, u, t[o + 3], 10, -1894986606), u = ue(u, a, r, i, t[o + 10], 15, -1051523), i = ue(i, u, a, r, t[o + 1], 21, -2054922799), r = ue(r, i, u, a, t[o + 8], 6, 1873313359), a = ue(a, r, i, u, t[o + 15], 10, -30611744), u = ue(u, a, r, i, t[o + 6], 15, -1560198380), i = ue(i, u, a, r, t[o + 13], 21, 1309151649), r = ue(r, i, u, a, t[o + 4], 6, -145523070), a = ue(a, r, i, u, t[o + 11], 10, -1120210379), u = ue(u, a, r, i, t[o + 2], 15, 718787259), i = ue(i, u, a, r, t[o + 9], 21, -343485551), r = $e(r, n), i = $e(i, s), u = $e(u, c), a = $e(a, p);
|
|
215
215
|
}
|
|
216
216
|
return [r, i, u, a];
|
|
217
217
|
}
|
|
@@ -223,7 +223,7 @@ function ph(t) {
 r[i >> 5] |= (t[i / 8] & 255) << i % 32;
 return r;
 }
-function
+function $e(t, e) {
 const r = (t & 65535) + (e & 65535);
 return (t >> 16) + (e >> 16) + (r >> 16) << 16 | r & 65535;
 }
@@ -231,7 +231,7 @@ function dh(t, e) {
 return t << e | t >>> 32 - e;
 }
 function ss(t, e, r, i, u, a) {
-return
+return $e(dh($e($e(e, t), $e(i, a)), u), r);
 }
 function oe(t, e, r, i, u, a, o) {
 return ss(e & r | ~e & i, t, e, u, a, o);
@@ -251,7 +251,7 @@ Object.defineProperty(ns, "__esModule", {
 value: !0
 });
 ns.default = void 0;
-var hh = Sp(
+var hh = Sp(Ge), vh = Sp(is);
 function Sp(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -272,7 +272,7 @@ Object.defineProperty(os, "__esModule", {
 value: !0
 });
 os.default = void 0;
-var Oa = Cp(as), Ch = Cp(ts), yh =
+var Oa = Cp(as), Ch = Cp(ts), yh = Xe;
 function Cp(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -350,7 +350,7 @@ Object.defineProperty(cs, "__esModule", {
 value: !0
 });
 cs.default = void 0;
-var Ih = yp(
+var Ih = yp(Ge), _h = yp(us);
 function yp(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -369,7 +369,7 @@ Object.defineProperty(ds, "__esModule", {
 value: !0
 });
 ds.default = void 0;
-var Oh = Dh(
+var Oh = Dh(wt);
 function Dh(t) {
 return t && t.__esModule ? t : { default: t };
 }
@@ -429,7 +429,7 @@ ds.default = Nh;
 return o.default;
 }
 });
-var e = p(es), r = p(ns), i = p(os), u = p(cs), a = p(ps), o = p(ds), n = p(
+var e = p(es), r = p(ns), i = p(os), u = p(cs), a = p(ps), o = p(ds), n = p(wt), s = p(Xe), c = p(Kn);
 function p(d) {
 return d && d.__esModule ? d : { default: d };
 }
@@ -546,9 +546,9 @@ K.AudioStreamNodeErrorEvent = Vh;
 var J = {};
 Object.defineProperty(J, "__esModule", { value: !0 });
 J.ConnectionRedirectEvent = J.ConnectionMessageSentEvent = J.ConnectionMessageReceivedEvent = J.ConnectionEstablishErrorEvent = J.ConnectionErrorEvent = J.ConnectionClosedEvent = J.ConnectionEstablishedEvent = J.ConnectionStartEvent = J.ConnectionEvent = J.ServiceEvent = void 0;
-const
-class Wh extends
-constructor(e, r, i =
+const Qe = Wt;
+class Wh extends Qe.PlatformEvent {
+constructor(e, r, i = Qe.EventType.Info) {
 super(e, i), this.privJsonResult = r;
 }
 get jsonString() {
@@ -556,8 +556,8 @@ class Wh extends Ge.PlatformEvent {
 }
 }
 J.ServiceEvent = Wh;
-class Le extends
-constructor(e, r, i =
+class Le extends Qe.PlatformEvent {
+constructor(e, r, i = Qe.EventType.Info) {
 super(e, i), this.privConnectionId = r;
 }
 get connectionId() {
@@ -585,7 +585,7 @@ class Kh extends Le {
 J.ConnectionEstablishedEvent = Kh;
 class Jh extends Le {
 constructor(e, r, i) {
-super("ConnectionClosedEvent", e,
+super("ConnectionClosedEvent", e, Qe.EventType.Debug), this.privReason = i, this.privStatusCode = r;
 }
 get reason() {
 return this.privReason;
@@ -597,7 +597,7 @@ class Jh extends Le {
 J.ConnectionClosedEvent = Jh;
 class Gh extends Le {
 constructor(e, r, i) {
-super("ConnectionErrorEvent", e,
+super("ConnectionErrorEvent", e, Qe.EventType.Debug), this.privMessage = r, this.privType = i;
 }
 get message() {
 return this.privMessage;
@@ -609,7 +609,7 @@ class Gh extends Le {
 J.ConnectionErrorEvent = Gh;
 class Qh extends Le {
 constructor(e, r, i) {
-super("ConnectionEstablishErrorEvent", e,
+super("ConnectionEstablishErrorEvent", e, Qe.EventType.Error), this.privStatusCode = r, this.privReason = i;
 }
 get reason() {
 return this.privReason;
@@ -645,7 +645,7 @@ class Zh extends Le {
 J.ConnectionMessageSentEvent = Zh;
 class Yh extends Le {
 constructor(e, r, i, u) {
-super("ConnectionRedirectEvent", e,
+super("ConnectionRedirectEvent", e, Qe.EventType.Info), this.privRedirectUrl = r, this.privOriginalUrl = i, this.privContext = u;
 }
 get redirectUrl() {
 return this.privRedirectUrl;
@@ -786,16 +786,16 @@ class iv {
 }
 }
 hs.DeferralMap = iv;
-var
-Object.defineProperty(
-
+var Rt = {};
+Object.defineProperty(Rt, "__esModule", { value: !0 });
+Rt.SendingAgentContextMessageEvent = Rt.DialogEvent = void 0;
 const Da = Wt;
 class Rp extends Da.PlatformEvent {
 constructor(e, r = Da.EventType.Info) {
 super(e, r);
 }
 }
-
+Rt.DialogEvent = Rp;
 class sv extends Rp {
 constructor(e) {
 super("SendingAgentContextMessageEvent"), this.privAgentConfig = e;
@@ -804,7 +804,7 @@ class sv extends Rp {
 return this.privAgentConfig;
 }
 }
-
+Rt.SendingAgentContextMessageEvent = sv;
 var vs = {}, Gn = {};
 Object.defineProperty(Gn, "__esModule", { value: !0 });
 Gn.EventSource = void 0;
@@ -1611,7 +1611,7 @@ function M() {
|
|
|
1611
1611
|
for (var o in u)
|
|
1612
1612
|
o !== "default" && !Object.prototype.hasOwnProperty.call(a, o) && e(a, u, o);
|
|
1613
1613
|
};
|
|
1614
|
-
Object.defineProperty(t, "__esModule", { value: !0 }), t.TranslationStatus = void 0, r(K, t), r(J, t), r(Yo, t), r(ls, t), r(hs, t), r(
|
|
1614
|
+
Object.defineProperty(t, "__esModule", { value: !0 }), t.TranslationStatus = void 0, r(K, t), r(J, t), r(Yo, t), r(ls, t), r(hs, t), r(Rt, t), r(ne, t), r(vs, t), r(Gn, t), r(Ee, t), r(Ep, t), r(wp, t), r(Ap, t), r(Ip, t), r(_p, t), r(Tp, t), r(bp, t), r(Mp, t), r(Op, t), r(Dp, t), r(Qn, t), r(Wt, t), r(ea, t), r(Xn, t), r(fs, t), r(gs, t), r(ms, t);
|
|
1615
1615
|
var i = Ss;
|
|
1616
1616
|
Object.defineProperty(t, "TranslationStatus", { enumerable: !0, get: function() {
|
|
1617
1617
|
return i.TranslationStatus;
|
|
@@ -1716,7 +1716,7 @@ var er = {}, Ao = {}, Ps = {};
|
|
|
1716
1716
|
const Fv = {}, Uv = /* @__PURE__ */ Object.freeze(/* @__PURE__ */ Object.defineProperty({
|
|
1717
1717
|
__proto__: null,
|
|
1718
1718
|
default: Fv
|
|
1719
|
-
}, Symbol.toStringTag, { value: "Module" })),
|
|
1719
|
+
}, Symbol.toStringTag, { value: "Module" })), At = /* @__PURE__ */ Il(Uv);
|
|
1720
1720
|
var ta = {};
|
|
1721
1721
|
(function(t) {
|
|
1722
1722
|
Object.defineProperty(t, "__esModule", { value: !0 }), t.LogLevel = void 0;
|
|
@@ -1789,7 +1789,7 @@ var Hv = q && q.__createBinding || (Object.create ? function(t, e, r, i) {
|
|
|
1789
1789
|
};
|
|
1790
1790
|
Object.defineProperty(Ps, "__esModule", { value: !0 });
|
|
1791
1791
|
Ps.ConsoleLoggingListener = void 0;
|
|
1792
|
-
const Ua = Wv(
|
|
1792
|
+
const Ua = Wv(At), tr = ta, $v = z;
|
|
1793
1793
|
class Kv {
|
|
1794
1794
|
constructor(e = tr.LogLevel.None) {
|
|
1795
1795
|
this.privLogPath = void 0, this.privEnableConsoleOutput = !0, this.privLogLevelFilter = e;
|
|
@@ -1842,7 +1842,7 @@ File System access not available`), this.privLogPath = e;
|
|
|
1842
1842
|
Ps.ConsoleLoggingListener = Kv;
|
|
1843
1843
|
var jp = {};
|
|
1844
1844
|
Object.defineProperty(jp, "__esModule", { value: !0 });
|
|
1845
|
-
var Io = {},
|
|
1845
|
+
var Io = {}, Ze = {};
|
|
1846
1846
|
(function(t) {
|
|
1847
1847
|
Object.defineProperty(t, "__esModule", { value: !0 }), t.AudioStreamFormatImpl = t.AudioStreamFormat = t.AudioFormatTag = void 0;
|
|
1848
1848
|
var e;
|
|
@@ -1965,12 +1965,12 @@ var Io = {}, Xe = {};
|
|
|
1965
1965
|
}
|
|
1966
1966
|
}
|
|
1967
1967
|
t.AudioStreamFormatImpl = i;
|
|
1968
|
-
})(
|
|
1968
|
+
})(Ze);
|
|
1969
1969
|
var Ha;
|
|
1970
1970
|
function Jv() {
|
|
1971
1971
|
return Ha || (Ha = 1, function(t) {
|
|
1972
1972
|
Object.defineProperty(t, "__esModule", { value: !0 }), t.MicAudioSource = t.AudioWorkletSourceURLPropertyName = void 0;
|
|
1973
|
-
const e = _(), r = M(), i =
|
|
1973
|
+
const e = _(), r = M(), i = Ze;
|
|
1974
1974
|
t.AudioWorkletSourceURLPropertyName = "MICROPHONE-WorkletSourceUrl";
|
|
1975
1975
|
class u {
|
|
1976
1976
|
constructor(o, n, s, c) {
|
|
@@ -2112,7 +2112,7 @@ function Gv() {
|
|
|
2112
2112
|
if (Va)
|
|
2113
2113
|
return rr;
|
|
2114
2114
|
Va = 1, Object.defineProperty(rr, "__esModule", { value: !0 }), rr.FileAudioSource = void 0;
|
|
2115
|
-
const t = _(), e = M(), r =
|
|
2115
|
+
const t = _(), e = M(), r = Ze;
|
|
2116
2116
|
let i = class {
|
|
2117
2117
|
constructor(a, o, n) {
|
|
2118
2118
|
this.privStreams = {}, this.privHeaderEnd = 44, this.privId = n || (0, e.createNoDashGuid)(), this.privEvents = new e.EventSource(), this.privSource = a, typeof window < "u" && typeof Blob < "u" && this.privSource instanceof Blob ? this.privFilename = a.name : this.privFilename = o || "unknown.wav", this.privAudioFormatPromise = this.readHeader();
|
|
@@ -2350,7 +2350,7 @@ var Es = {}, Zn = {}, Zv = q && q.__createBinding || (Object.create ? function(t
|
|
|
2350
2350
|
};
|
|
2351
2351
|
Object.defineProperty(Zn, "__esModule", { value: !0 });
|
|
2352
2352
|
Zn.WebsocketMessageAdapter = void 0;
|
|
2353
|
-
const ef = Lp(
|
|
2353
|
+
const ef = Lp(At), tf = Lp(At), rf = ra(At), nf = ra(At), sf = ra(At), of = X, j = M();
|
|
2354
2354
|
class Vn {
|
|
2355
2355
|
constructor(e, r, i, u, a, o) {
|
|
2356
2356
|
if (!e)
|
|
@@ -2670,7 +2670,7 @@ var nr = {}, To = {}, De = {}, Yn = {}, df = q && q.__createBinding || (Object.c
|
|
|
2670
2670
|
};
|
|
2671
2671
|
Object.defineProperty(Yn, "__esModule", { value: !0 });
|
|
2672
2672
|
Yn.AudioFileWriter = void 0;
|
|
2673
|
-
const ir = hf(
|
|
2673
|
+
const ir = hf(At), bo = z;
|
|
2674
2674
|
class vf {
|
|
2675
2675
|
constructor(e) {
|
|
2676
2676
|
bo.Contracts.throwIfNullOrUndefined(ir.openSync, `
|
|
@@ -2699,7 +2699,7 @@ function xp() {
|
|
|
2699
2699
|
if (Wa)
|
|
2700
2700
|
return me;
|
|
2701
2701
|
Wa = 1, Object.defineProperty(me, "__esModule", { value: !0 }), me.PullAudioInputStreamImpl = me.PullAudioInputStream = me.PushAudioInputStreamImpl = me.PushAudioInputStream = me.AudioInputStream = void 0;
|
|
2702
|
-
const t = _(), e = M(), r = Ee, i = R(), u =
|
|
2702
|
+
const t = _(), e = M(), r = Ee, i = R(), u = Ze;
|
|
2703
2703
|
let a = class {
|
|
2704
2704
|
/**
|
|
2705
2705
|
* Creates and initializes an instance.
|
|
@@ -2938,16 +2938,16 @@ function xp() {
|
|
|
2938
2938
|
}
|
|
2939
2939
|
return me.PullAudioInputStreamImpl = c, me;
|
|
2940
2940
|
}
|
|
2941
|
-
var re = {},
|
|
2941
|
+
var re = {}, Ye = {}, na = {};
|
|
2942
2942
|
(function(t) {
|
|
2943
2943
|
Object.defineProperty(t, "__esModule", { value: !0 }), t.SpeechSynthesisOutputFormat = void 0, function(e) {
|
|
2944
2944
|
e[e.Raw8Khz8BitMonoMULaw = 0] = "Raw8Khz8BitMonoMULaw", e[e.Riff16Khz16KbpsMonoSiren = 1] = "Riff16Khz16KbpsMonoSiren", e[e.Audio16Khz16KbpsMonoSiren = 2] = "Audio16Khz16KbpsMonoSiren", e[e.Audio16Khz32KBitRateMonoMp3 = 3] = "Audio16Khz32KBitRateMonoMp3", e[e.Audio16Khz128KBitRateMonoMp3 = 4] = "Audio16Khz128KBitRateMonoMp3", e[e.Audio16Khz64KBitRateMonoMp3 = 5] = "Audio16Khz64KBitRateMonoMp3", e[e.Audio24Khz48KBitRateMonoMp3 = 6] = "Audio24Khz48KBitRateMonoMp3", e[e.Audio24Khz96KBitRateMonoMp3 = 7] = "Audio24Khz96KBitRateMonoMp3", e[e.Audio24Khz160KBitRateMonoMp3 = 8] = "Audio24Khz160KBitRateMonoMp3", e[e.Raw16Khz16BitMonoTrueSilk = 9] = "Raw16Khz16BitMonoTrueSilk", e[e.Riff16Khz16BitMonoPcm = 10] = "Riff16Khz16BitMonoPcm", e[e.Riff8Khz16BitMonoPcm = 11] = "Riff8Khz16BitMonoPcm", e[e.Riff24Khz16BitMonoPcm = 12] = "Riff24Khz16BitMonoPcm", e[e.Riff8Khz8BitMonoMULaw = 13] = "Riff8Khz8BitMonoMULaw", e[e.Raw16Khz16BitMonoPcm = 14] = "Raw16Khz16BitMonoPcm", e[e.Raw24Khz16BitMonoPcm = 15] = "Raw24Khz16BitMonoPcm", e[e.Raw8Khz16BitMonoPcm = 16] = "Raw8Khz16BitMonoPcm", e[e.Ogg16Khz16BitMonoOpus = 17] = "Ogg16Khz16BitMonoOpus", e[e.Ogg24Khz16BitMonoOpus = 18] = "Ogg24Khz16BitMonoOpus", e[e.Raw48Khz16BitMonoPcm = 19] = "Raw48Khz16BitMonoPcm", e[e.Riff48Khz16BitMonoPcm = 20] = "Riff48Khz16BitMonoPcm", e[e.Audio48Khz96KBitRateMonoMp3 = 21] = "Audio48Khz96KBitRateMonoMp3", e[e.Audio48Khz192KBitRateMonoMp3 = 22] = "Audio48Khz192KBitRateMonoMp3", e[e.Ogg48Khz16BitMonoOpus = 23] = "Ogg48Khz16BitMonoOpus", e[e.Webm16Khz16BitMonoOpus = 24] = "Webm16Khz16BitMonoOpus", e[e.Webm24Khz16BitMonoOpus = 25] = "Webm24Khz16BitMonoOpus", e[e.Raw24Khz16BitMonoTrueSilk = 26] = "Raw24Khz16BitMonoTrueSilk", e[e.Raw8Khz8BitMonoALaw = 27] = "Raw8Khz8BitMonoALaw", e[e.Riff8Khz8BitMonoALaw = 28] = "Riff8Khz8BitMonoALaw", e[e.Webm24Khz16Bit24KbpsMonoOpus = 29] = "Webm24Khz16Bit24KbpsMonoOpus", e[e.Audio16Khz16Bit32KbpsMonoOpus = 30] = "Audio16Khz16Bit32KbpsMonoOpus", e[e.Audio24Khz16Bit48KbpsMonoOpus = 31] = "Audio24Khz16Bit48KbpsMonoOpus", e[e.Audio24Khz16Bit24KbpsMonoOpus = 32] = "Audio24Khz16Bit24KbpsMonoOpus", e[e.Raw22050Hz16BitMonoPcm = 33] = "Raw22050Hz16BitMonoPcm", e[e.Riff22050Hz16BitMonoPcm = 34] = "Riff22050Hz16BitMonoPcm", e[e.Raw44100Hz16BitMonoPcm = 35] = "Raw44100Hz16BitMonoPcm", e[e.Riff44100Hz16BitMonoPcm = 36] = "Riff44100Hz16BitMonoPcm", e[e.AmrWb16000Hz = 37] = "AmrWb16000Hz", e[e.G72216Khz64Kbps = 38] = "G72216Khz64Kbps";
|
|
2945
2945
|
}(t.SpeechSynthesisOutputFormat || (t.SpeechSynthesisOutputFormat = {}));
|
|
2946
2946
|
})(na);
|
|
2947
|
-
Object.defineProperty(
|
|
2948
|
-
|
|
2949
|
-
const B = na, L =
|
|
2950
|
-
class
|
|
2947
|
+
Object.defineProperty(Ye, "__esModule", { value: !0 });
|
|
2948
|
+
Ye.AudioOutputFormatImpl = void 0;
|
|
2949
|
+
const B = na, L = Ze;
|
|
2950
|
+
class N extends L.AudioStreamFormatImpl {
|
|
2951
2951
|
/**
|
|
2952
2952
|
* Creates an instance with the given values.
|
|
2953
2953
|
* @constructor
|
|
@@ -2965,97 +2965,97 @@ class D extends L.AudioStreamFormatImpl {
|
|
|
2965
2965
|
super(i, o, r, e), this.formatTag = e, this.avgBytesPerSec = u, this.blockAlign = a, this.priAudioFormatString = n, this.priRequestAudioFormatString = s, this.priHasHeader = c;
|
|
2966
2966
|
}
|
|
2967
2967
|
static fromSpeechSynthesisOutputFormat(e) {
|
|
2968
|
-
return e === void 0 ?
|
|
2968
|
+
return e === void 0 ? N.getDefaultOutputFormat() : N.fromSpeechSynthesisOutputFormatString(N.SpeechSynthesisOutputFormatToString[e]);
|
|
2969
2969
|
}
|
|
2970
2970
|
static fromSpeechSynthesisOutputFormatString(e) {
|
|
2971
2971
|
switch (e) {
|
|
2972
2972
|
case "raw-8khz-8bit-mono-mulaw":
|
|
2973
|
-
return new
|
|
2973
|
+
return new N(L.AudioFormatTag.MuLaw, 1, 8e3, 8e3, 1, 8, e, e, !1);
|
|
2974
2974
|
case "riff-16khz-16kbps-mono-siren":
|
|
2975
|
-
return new
|
|
2975
|
+
return new N(L.AudioFormatTag.Siren, 1, 16e3, 2e3, 40, 0, e, "audio-16khz-16kbps-mono-siren", !0);
|
|
2976
2976
|
case "audio-16khz-16kbps-mono-siren":
|
|
2977
|
-
return new
|
|
2977
|
+
return new N(L.AudioFormatTag.Siren, 1, 16e3, 2e3, 40, 0, e, e, !1);
|
|
2978
2978
|
case "audio-16khz-32kbitrate-mono-mp3":
|
|
2979
|
-
return new
|
|
2979
|
+
return new N(L.AudioFormatTag.MP3, 1, 16e3, 32 << 7, 2, 16, e, e, !1);
|
|
2980
2980
|
case "audio-16khz-128kbitrate-mono-mp3":
|
|
2981
|
-
return new
|
|
2981
|
+
return new N(L.AudioFormatTag.MP3, 1, 16e3, 128 << 7, 2, 16, e, e, !1);
|
|
2982
2982
|
case "audio-16khz-64kbitrate-mono-mp3":
|
|
2983
|
-
return new
|
|
2983
|
+
return new N(L.AudioFormatTag.MP3, 1, 16e3, 64 << 7, 2, 16, e, e, !1);
|
|
2984
2984
|
case "audio-24khz-48kbitrate-mono-mp3":
|
|
2985
|
-
return new
|
|
2985
|
+
return new N(L.AudioFormatTag.MP3, 1, 24e3, 48 << 7, 2, 16, e, e, !1);
|
|
2986
2986
|
case "audio-24khz-96kbitrate-mono-mp3":
|
|
2987
|
-
return new
|
|
2987
|
+
return new N(L.AudioFormatTag.MP3, 1, 24e3, 96 << 7, 2, 16, e, e, !1);
|
|
2988
2988
|
case "audio-24khz-160kbitrate-mono-mp3":
|
|
2989
|
-
return new
|
|
2989
|
+
return new N(L.AudioFormatTag.MP3, 1, 24e3, 160 << 7, 2, 16, e, e, !1);
|
|
2990
2990
|
case "raw-16khz-16bit-mono-truesilk":
|
|
2991
|
-
return new
|
|
2991
|
+
return new N(L.AudioFormatTag.SILKSkype, 1, 16e3, 32e3, 2, 16, e, e, !1);
|
|
2992
2992
|
case "riff-8khz-16bit-mono-pcm":
|
|
2993
|
-
return new
|
|
2993
|
+
return new N(L.AudioFormatTag.PCM, 1, 8e3, 16e3, 2, 16, e, "raw-8khz-16bit-mono-pcm", !0);
|
|
2994
2994
|
case "riff-24khz-16bit-mono-pcm":
|
|
2995
|
-
return new
|
|
2995
|
+
return new N(L.AudioFormatTag.PCM, 1, 24e3, 48e3, 2, 16, e, "raw-24khz-16bit-mono-pcm", !0);
|
|
2996
2996
|
case "riff-8khz-8bit-mono-mulaw":
|
|
2997
|
-
return new
|
|
2997
|
+
return new N(L.AudioFormatTag.MuLaw, 1, 8e3, 8e3, 1, 8, e, "raw-8khz-8bit-mono-mulaw", !0);
|
|
2998
2998
|
case "raw-16khz-16bit-mono-pcm":
|
|
2999
|
-
return new
|
|
2999
|
+
return new N(L.AudioFormatTag.PCM, 1, 16e3, 32e3, 2, 16, e, "raw-16khz-16bit-mono-pcm", !1);
|
|
3000
3000
|
case "raw-24khz-16bit-mono-pcm":
|
|
3001
|
-
return new
|
|
3001
|
+
return new N(L.AudioFormatTag.PCM, 1, 24e3, 48e3, 2, 16, e, "raw-24khz-16bit-mono-pcm", !1);
|
|
3002
3002
|
case "raw-8khz-16bit-mono-pcm":
|
|
3003
|
-
return new
|
|
3003
|
+
return new N(L.AudioFormatTag.PCM, 1, 8e3, 16e3, 2, 16, e, "raw-8khz-16bit-mono-pcm", !1);
|
|
3004
3004
|
case "ogg-16khz-16bit-mono-opus":
|
|
3005
|
-
return new
|
|
3005
|
+
return new N(L.AudioFormatTag.OGG_OPUS, 1, 16e3, 8192, 2, 16, e, e, !1);
|
|
3006
3006
|
case "ogg-24khz-16bit-mono-opus":
|
|
3007
|
-
return new
|
|
3007
|
+
return new N(L.AudioFormatTag.OGG_OPUS, 1, 24e3, 8192, 2, 16, e, e, !1);
|
|
3008
3008
|
case "raw-48khz-16bit-mono-pcm":
|
|
3009
|
-
return new
|
|
3009
|
+
return new N(L.AudioFormatTag.PCM, 1, 48e3, 96e3, 2, 16, e, "raw-48khz-16bit-mono-pcm", !1);
|
|
3010
3010
|
case "riff-48khz-16bit-mono-pcm":
|
|
3011
|
-
return new
|
|
3011
|
+
return new N(L.AudioFormatTag.PCM, 1, 48e3, 96e3, 2, 16, e, "raw-48khz-16bit-mono-pcm", !0);
|
|
3012
3012
|
case "audio-48khz-96kbitrate-mono-mp3":
|
|
3013
|
-
return new
|
|
3013
|
+
return new N(L.AudioFormatTag.MP3, 1, 48e3, 96 << 7, 2, 16, e, e, !1);
|
|
3014
3014
|
case "audio-48khz-192kbitrate-mono-mp3":
|
|
3015
|
-
return new
|
|
3015
|
+
return new N(L.AudioFormatTag.MP3, 1, 48e3, 192 << 7, 2, 16, e, e, !1);
|
|
3016
3016
|
case "ogg-48khz-16bit-mono-opus":
|
|
3017
|
-
return new
|
|
3017
|
+
return new N(L.AudioFormatTag.OGG_OPUS, 1, 48e3, 12e3, 2, 16, e, e, !1);
|
|
3018
3018
|
case "webm-16khz-16bit-mono-opus":
|
|
3019
|
-
return new
|
|
3019
|
+
return new N(L.AudioFormatTag.WEBM_OPUS, 1, 16e3, 4e3, 2, 16, e, e, !1);
|
|
3020
3020
|
case "webm-24khz-16bit-mono-opus":
|
|
3021
|
-
return new
|
|
3021
|
+
return new N(L.AudioFormatTag.WEBM_OPUS, 1, 24e3, 6e3, 2, 16, e, e, !1);
|
|
3022
3022
|
case "webm-24khz-16bit-24kbps-mono-opus":
|
|
3023
|
-
return new
|
|
3023
|
+
return new N(L.AudioFormatTag.WEBM_OPUS, 1, 24e3, 3e3, 2, 16, e, e, !1);
|
|
3024
3024
|
case "audio-16khz-16bit-32kbps-mono-opus":
|
|
3025
|
-
return new
|
|
3025
|
+
return new N(L.AudioFormatTag.OPUS, 1, 16e3, 4e3, 2, 16, e, e, !1);
|
|
3026
3026
|
case "audio-24khz-16bit-48kbps-mono-opus":
|
|
3027
|
-
return new
|
|
3027
|
+
return new N(L.AudioFormatTag.OPUS, 1, 24e3, 6e3, 2, 16, e, e, !1);
|
|
3028
3028
|
case "audio-24khz-16bit-24kbps-mono-opus":
|
|
3029
|
-
return new
|
|
3029
|
+
return new N(L.AudioFormatTag.OPUS, 1, 24e3, 3e3, 2, 16, e, e, !1);
|
|
3030
3030
|
case "audio-24khz-16bit-mono-flac":
|
|
3031
|
-
return new
|
|
3031
|
+
return new N(L.AudioFormatTag.FLAC, 1, 24e3, 24e3, 2, 16, e, e, !1);
|
|
3032
3032
|
case "audio-48khz-16bit-mono-flac":
|
|
3033
|
-
return new
|
|
3033
|
+
return new N(L.AudioFormatTag.FLAC, 1, 48e3, 3e4, 2, 16, e, e, !1);
|
|
3034
3034
|
case "raw-24khz-16bit-mono-truesilk":
|
|
3035
|
-
return new
|
|
3035
|
+
return new N(L.AudioFormatTag.SILKSkype, 1, 24e3, 48e3, 2, 16, e, e, !1);
|
|
3036
3036
|
case "raw-8khz-8bit-mono-alaw":
|
|
3037
|
-
return new
|
|
3037
|
+
return new N(L.AudioFormatTag.ALaw, 1, 8e3, 8e3, 1, 8, e, e, !1);
|
|
3038
3038
|
case "riff-8khz-8bit-mono-alaw":
|
|
3039
|
-
return new
|
|
3039
|
+
return new N(L.AudioFormatTag.ALaw, 1, 8e3, 8e3, 1, 8, e, "raw-8khz-8bit-mono-alaw", !0);
|
|
3040
3040
|
case "raw-22050hz-16bit-mono-pcm":
|
|
3041
|
-
return new
|
|
3041
|
+
return new N(L.AudioFormatTag.PCM, 1, 22050, 44100, 2, 16, e, e, !1);
|
|
3042
3042
|
case "riff-22050hz-16bit-mono-pcm":
|
|
3043
|
-
return new
|
|
3043
|
+
return new N(L.AudioFormatTag.PCM, 1, 22050, 44100, 2, 16, e, "raw-22050hz-16bit-mono-pcm", !0);
|
|
3044
3044
|
case "raw-44100hz-16bit-mono-pcm":
|
|
3045
|
-
return new
|
|
3045
|
+
return new N(L.AudioFormatTag.PCM, 1, 44100, 88200, 2, 16, e, e, !1);
|
|
3046
3046
|
case "riff-44100hz-16bit-mono-pcm":
|
|
3047
|
-
return new
|
|
3047
|
+
return new N(L.AudioFormatTag.PCM, 1, 44100, 88200, 2, 16, e, "raw-44100hz-16bit-mono-pcm", !0);
|
|
3048
3048
|
case "amr-wb-16000h":
|
|
3049
|
-
return new
|
|
3049
|
+
return new N(L.AudioFormatTag.AMR_WB, 1, 16e3, 3052, 2, 16, e, e, !1);
|
|
3050
3050
|
case "g722-16khz-64kbps":
|
|
3051
|
-
return new
|
|
3051
|
+
return new N(L.AudioFormatTag.G722, 1, 16e3, 8e3, 2, 16, e, e, !1);
|
|
3052
3052
|
case "riff-16khz-16bit-mono-pcm":
|
|
3053
3053
|
default:
|
|
3054
|
-
return new
|
|
3054
|
+
return new N(L.AudioFormatTag.PCM, 1, 16e3, 32e3, 2, 16, "riff-16khz-16bit-mono-pcm", "raw-16khz-16bit-mono-pcm", !0);
|
|
3055
3055
|
}
|
|
3056
3056
|
}
|
|
3057
3057
|
static getDefaultOutputFormat() {
|
|
3058
|
-
return
|
|
3058
|
+
return N.fromSpeechSynthesisOutputFormatString(typeof window < "u" ? "audio-24khz-48kbitrate-mono-mp3" : "riff-16khz-16bit-mono-pcm");
|
|
3059
3059
|
}
|
|
3060
3060
|
/**
|
|
3061
3061
|
* Specifies if this audio output format has a header
|
|
@@ -3111,8 +3111,8 @@ class D extends L.AudioStreamFormatImpl {
|
|
|
3111
3111
|
return r.set(new Uint8Array(this.header), 0), r.set(new Uint8Array(e), this.header.byteLength), r.buffer;
|
|
3112
3112
|
}
|
|
3113
3113
|
}
|
|
3114
|
-
|
|
3115
|
-
|
|
3114
|
+
Ye.AudioOutputFormatImpl = N;
|
|
3115
|
+
N.SpeechSynthesisOutputFormatToString = {
|
|
3116
3116
|
[B.SpeechSynthesisOutputFormat.Raw8Khz8BitMonoMULaw]: "raw-8khz-8bit-mono-mulaw",
|
|
3117
3117
|
[B.SpeechSynthesisOutputFormat.Riff16Khz16KbpsMonoSiren]: "riff-16khz-16kbps-mono-siren",
|
|
3118
3118
|
[B.SpeechSynthesisOutputFormat.Audio16Khz16KbpsMonoSiren]: "audio-16khz-16kbps-mono-siren",
|
|
@@ -3155,7 +3155,7 @@ D.SpeechSynthesisOutputFormatToString = {
|
|
|
3155
3155
|
};
|
|
3156
3156
|
Object.defineProperty(re, "__esModule", { value: !0 });
|
|
3157
3157
|
re.PushAudioOutputStreamImpl = re.PushAudioOutputStream = re.PullAudioOutputStreamImpl = re.PullAudioOutputStream = re.AudioOutputStream = void 0;
|
|
3158
|
-
const Vo = M(), ff = z, gf =
|
|
3158
|
+
const Vo = M(), ff = z, gf = Ye;
|
|
3159
3159
|
class ia {
|
|
3160
3160
|
/**
|
|
3161
3161
|
* Creates and initializes an instance.
|
|
@@ -3933,7 +3933,7 @@ function Af() {
|
|
|
3933
3933
|
var Lt = {};
|
|
3934
3934
|
Object.defineProperty(Lt, "__esModule", { value: !0 });
|
|
3935
3935
|
Lt.LanguageUnderstandingModelImpl = Lt.LanguageUnderstandingModel = void 0;
|
|
3936
|
-
const
|
|
3936
|
+
const _t = z;
|
|
3937
3937
|
class $p {
|
|
3938
3938
|
/**
|
|
3939
3939
|
* Creates and initializes a new instance
|
|
@@ -3950,7 +3950,7 @@ class $p {
|
|
|
3950
3950
|
* @returns {LanguageUnderstandingModel} The language understanding model being created.
|
|
3951
3951
|
*/
|
|
3952
3952
|
static fromEndpoint(e) {
|
|
3953
|
-
|
|
3953
|
+
_t.Contracts.throwIfNull(e, "uri"), _t.Contracts.throwIfNullOrWhitespace(e.hostname, "uri");
|
|
3954
3954
|
const r = new Wi(), i = e.host.indexOf(".");
|
|
3955
3955
|
if (i === -1)
|
|
3956
3956
|
throw new Error("Could not determine region from endpoint");
|
|
@@ -3971,7 +3971,7 @@ class $p {
|
|
|
3971
3971
|
* @returns {LanguageUnderstandingModel} The language understanding model being created.
|
|
3972
3972
|
*/
|
|
3973
3973
|
static fromAppId(e) {
|
|
3974
|
-
|
|
3974
|
+
_t.Contracts.throwIfNullOrWhitespace(e, "appId");
|
|
3975
3975
|
const r = new Wi();
|
|
3976
3976
|
return r.appId = e, r;
|
|
3977
3977
|
}
|
|
@@ -3990,7 +3990,7 @@ class $p {
|
|
|
3990
3990
|
* @returns {LanguageUnderstandingModel} The language understanding model being created.
|
|
3991
3991
|
*/
|
|
3992
3992
|
static fromSubscription(e, r, i) {
|
|
3993
|
-
|
|
3993
|
+
_t.Contracts.throwIfNullOrWhitespace(e, "subscriptionKey"), _t.Contracts.throwIfNullOrWhitespace(r, "appId"), _t.Contracts.throwIfNullOrWhitespace(i, "region");
|
|
3994
3994
|
const u = new Wi();
|
|
3995
3995
|
return u.appId = r, u.region = i, u.subscriptionKey = e, u;
|
|
3996
3996
|
}
|
|
@@ -4257,11 +4257,11 @@ var Gp = {};
|
|
|
4257
4257
|
e[e.NoMatch = 0] = "NoMatch", e[e.Canceled = 1] = "Canceled", e[e.RecognizingSpeech = 2] = "RecognizingSpeech", e[e.RecognizedSpeech = 3] = "RecognizedSpeech", e[e.RecognizedKeyword = 4] = "RecognizedKeyword", e[e.RecognizingIntent = 5] = "RecognizingIntent", e[e.RecognizedIntent = 6] = "RecognizedIntent", e[e.TranslatingSpeech = 7] = "TranslatingSpeech", e[e.TranslatedSpeech = 8] = "TranslatedSpeech", e[e.SynthesizingAudio = 9] = "SynthesizingAudio", e[e.SynthesizingAudioCompleted = 10] = "SynthesizingAudioCompleted", e[e.SynthesizingAudioStarted = 11] = "SynthesizingAudioStarted", e[e.EnrollingVoiceProfile = 12] = "EnrollingVoiceProfile", e[e.EnrolledVoiceProfile = 13] = "EnrolledVoiceProfile", e[e.RecognizedSpeakers = 14] = "RecognizedSpeakers", e[e.RecognizedSpeaker = 15] = "RecognizedSpeaker", e[e.ResetVoiceProfile = 16] = "ResetVoiceProfile", e[e.DeletedVoiceProfile = 17] = "DeletedVoiceProfile", e[e.VoicesListRetrieved = 18] = "VoicesListRetrieved", e[e.TranslatingParticipantSpeech = 19] = "TranslatingParticipantSpeech", e[e.TranslatedParticipantSpeech = 20] = "TranslatedParticipantSpeech", e[e.TranslatedInstantMessage = 21] = "TranslatedInstantMessage", e[e.TranslatedParticipantInstantMessage = 22] = "TranslatedParticipantInstantMessage";
|
|
4258
4258
|
}(t.ResultReason || (t.ResultReason = {}));
|
|
4259
4259
|
})(Gp);
|
|
4260
|
-
var
|
|
4260
|
+
var rt = {}, ic;
|
|
4261
4261
|
function Of() {
|
|
4262
4262
|
if (ic)
|
|
4263
|
-
return
|
|
4264
|
-
ic = 1, Object.defineProperty(
|
|
4263
|
+
return rt;
|
|
4264
|
+
ic = 1, Object.defineProperty(rt, "__esModule", { value: !0 }), rt.SpeechConfigImpl = rt.SpeechConfig = void 0;
|
|
4265
4265
|
const t = _(), e = z, r = R();
|
|
4266
4266
|
let i = class {
|
|
4267
4267
|
/**
|
|
@@ -4348,7 +4348,7 @@ function Of() {
|
|
|
4348
4348
|
close() {
|
|
4349
4349
|
}
|
|
4350
4350
|
};
|
|
4351
|
-
|
|
4351
|
+
rt.SpeechConfig = i;
|
|
4352
4352
|
class u extends i {
|
|
4353
4353
|
constructor(o) {
|
|
4354
4354
|
super(), this.privProperties = new r.PropertyCollection(), this.speechRecognitionLanguage = "en-US", this.outputFormat = r.OutputFormat.Simple, this.privTokenCredential = o;
|
|
@@ -4446,13 +4446,13 @@ function Of() {
|
|
|
4446
4446
|
this.privProperties.setProperty(r.PropertyId.SpeechServiceConnection_SynthOutputFormat, r.SpeechSynthesisOutputFormat[o]);
|
|
4447
4447
|
}
|
|
4448
4448
|
}
|
|
4449
|
-
return
|
|
4449
|
+
return rt.SpeechConfigImpl = u, rt;
|
|
4450
4450
|
}
|
|
4451
|
-
var
|
|
4451
|
+
var nt = {}, sc;
|
|
4452
4452
|
function Df() {
|
|
4453
4453
|
if (sc)
|
|
4454
|
-
return
|
|
4455
|
-
sc = 1, Object.defineProperty(
|
|
4454
|
+
return nt;
|
|
4455
|
+
sc = 1, Object.defineProperty(nt, "__esModule", { value: !0 }), nt.SpeechTranslationConfigImpl = nt.SpeechTranslationConfig = void 0;
|
|
4456
4456
|
const t = _(), e = z, r = R();
|
|
4457
4457
|
let i = class extends r.SpeechConfig {
|
|
4458
4458
|
/**
|
|
@@ -4530,7 +4530,7 @@ function Df() {
|
|
|
4530
4530
|
return p.setProperty(r.PropertyId.SpeechServiceConnection_Endpoint, o.href), typeof n == "string" && n.trim().length > 0 && p.setProperty(r.PropertyId.SpeechServiceConnection_Key, n), p;
|
|
4531
4531
|
}
|
|
4532
4532
|
};
|
|
4533
|
-
|
|
4533
|
+
nt.SpeechTranslationConfig = i;
|
|
4534
4534
|
class u extends i {
|
|
4535
4535
|
constructor(o) {
|
|
4536
4536
|
super(), this.privSpeechProperties = new r.PropertyCollection(), this.outputFormat = r.OutputFormat.Simple, this.privTokenCredential = o;
|
|
@@ -4744,7 +4744,7 @@ function Df() {
|
|
|
4744
4744
|
this.privSpeechProperties.setProperty(r.PropertyId.SpeechServiceConnection_SynthOutputFormat, r.SpeechSynthesisOutputFormat[o]);
|
|
4745
4745
|
}
|
|
4746
4746
|
}
|
|
4747
|
-
return
|
|
4747
|
+
return nt.SpeechTranslationConfigImpl = u, nt;
|
|
4748
4748
|
}
|
|
4749
4749
|
var vr = {}, oc;
|
|
4750
4750
|
function Xp() {
|
|
@@ -4962,18 +4962,18 @@ function kf() {
|
|
|
4962
4962
|
};
|
|
4963
4963
|
return fr.Recognizer = u, fr;
|
|
4964
4964
|
}
|
|
4965
|
-
var gr = {},
|
|
4965
|
+
var gr = {}, ye = {};
|
|
4966
4966
|
(function(t) {
|
|
4967
4967
|
Object.defineProperty(t, "__esModule", { value: !0 }), t.RecognitionMode = void 0, function(e) {
|
|
4968
4968
|
e.Interactive = "Interactive", e.Dictation = "Dictation", e.Conversation = "Conversation", e.None = "None";
|
|
4969
4969
|
}(t.RecognitionMode || (t.RecognitionMode = {}));
|
|
4970
|
-
})(
|
|
4970
|
+
})(ye);
|
|
4971
4971
|
var cc;
|
|
4972
4972
|
function Nf() {
|
|
4973
4973
|
if (cc)
|
|
4974
4974
|
return gr;
|
|
4975
4975
|
cc = 1, Object.defineProperty(gr, "__esModule", { value: !0 }), gr.SpeechRecognizer = void 0;
|
|
4976
|
-
const t = _(), e =
|
|
4976
|
+
const t = _(), e = ye, r = M(), i = z, u = R();
|
|
4977
4977
|
let a = class Yp extends u.Recognizer {
|
|
4978
4978
|
/**
|
|
4979
4979
|
* SpeechRecognizer constructor.
|
|
@@ -5158,7 +5158,7 @@ function zf() {
|
|
|
5158
5158
|
if (uc)
|
|
5159
5159
|
return mr;
|
|
5160
5160
|
uc = 1, Object.defineProperty(mr, "__esModule", { value: !0 }), mr.IntentRecognizer = void 0;
|
|
5161
|
-
const t = _(), e =
|
|
5161
|
+
const t = _(), e = ye, r = M(), i = z, u = R();
|
|
5162
5162
|
let a = class extends u.Recognizer {
|
|
5163
5163
|
/**
|
|
5164
5164
|
* Initializes an instance of the IntentRecognizer.
|
|
@@ -5392,15 +5392,15 @@ var ed = {};
|
|
|
5392
5392
|
e[e.TextIndependentIdentification = 0] = "TextIndependentIdentification", e[e.TextDependentVerification = 1] = "TextDependentVerification", e[e.TextIndependentVerification = 2] = "TextIndependentVerification";
|
|
5393
5393
|
}(t.VoiceProfileType || (t.VoiceProfileType = {}));
|
|
5394
5394
|
})(ed);
|
|
5395
|
-
var Sr = {}, Cr = {},
|
|
5395
|
+
var Sr = {}, Cr = {}, it = {}, pc;
|
|
5396
5396
|
function td() {
|
|
5397
5397
|
if (pc)
|
|
5398
|
-
return
|
|
5399
|
-
pc = 1, Object.defineProperty(
|
|
5398
|
+
return it;
|
|
5399
|
+
pc = 1, Object.defineProperty(it, "__esModule", { value: !0 }), it.ConnectionMessageImpl = it.ConnectionMessage = void 0;
|
|
5400
5400
|
const t = X, e = M(), r = Xp(), i = Os;
|
|
5401
5401
|
let u = class {
|
|
5402
5402
|
};
|
|
5403
|
-
|
|
5403
|
+
it.ConnectionMessage = u;
|
|
5404
5404
|
class a {
|
|
5405
5405
|
constructor(n) {
|
|
5406
5406
|
this.privConnectionMessage = n, this.privProperties = new r.PropertyCollection(), this.privConnectionMessage.headers[t.HeaderNames.ConnectionId] && this.privProperties.setProperty(i.PropertyId.Speech_SessionId, this.privConnectionMessage.headers[t.HeaderNames.ConnectionId]), Object.keys(this.privConnectionMessage.headers).forEach((s) => {
|
|
@@ -5455,7 +5455,7 @@ function td() {
|
|
|
5455
5455
|
return "";
|
|
5456
5456
|
}
|
|
5457
5457
|
}
|
|
5458
|
-
return
|
|
5458
|
+
return it.ConnectionMessageImpl = a, it;
|
|
5459
5459
|
}
|
|
5460
5460
|
var dc;
|
|
5461
5461
|
function rd() {
|
|
@@ -5556,7 +5556,7 @@ function jf() {
|
|
|
5556
5556
|
if (lc)
|
|
5557
5557
|
return Sr;
|
|
5558
5558
|
lc = 1, Object.defineProperty(Sr, "__esModule", { value: !0 }), Sr.TranslationRecognizer = void 0;
|
|
5559
|
-
const t = _(), e =
|
|
5559
|
+
const t = _(), e = ye, r = M(), i = rd(), u = z, a = R();
|
|
5560
5560
|
let o = class nd extends a.Recognizer {
|
|
5561
5561
|
/**
|
|
5562
5562
|
* Initializes an instance of the TranslationRecognizer.
|
|
@@ -6114,11 +6114,11 @@ class oa {
|
|
|
6114
6114
|
}
|
|
6115
6115
|
}
|
|
6116
6116
|
ks.PhraseListGrammar = oa;
|
|
6117
|
-
var
|
|
6117
|
+
var st = {}, Cc;
|
|
6118
6118
|
function aa() {
|
|
6119
6119
|
if (Cc)
|
|
6120
|
-
return
|
|
6121
|
-
Cc = 1, Object.defineProperty(
|
|
6120
|
+
return st;
|
|
6121
|
+
Cc = 1, Object.defineProperty(st, "__esModule", { value: !0 }), st.DialogServiceConfigImpl = st.DialogServiceConfig = void 0;
|
|
6122
6122
|
const t = z, e = R();
|
|
6123
6123
|
let r = class {
|
|
6124
6124
|
/**
|
|
@@ -6144,7 +6144,7 @@ function aa() {
|
|
|
6144
6144
|
};
|
|
6145
6145
|
}
|
|
6146
6146
|
};
|
|
6147
|
-
|
|
6147
|
+
st.DialogServiceConfig = r;
|
|
6148
6148
|
class i extends r {
|
|
6149
6149
|
/**
|
|
6150
6150
|
* Creates an instance of dialogService config.
|
|
@@ -6234,7 +6234,7 @@ function aa() {
|
|
|
6234
6234
|
close() {
|
|
6235
6235
|
}
|
|
6236
6236
|
}
|
|
6237
|
-
return
|
|
6237
|
+
return st.DialogServiceConfigImpl = i, st;
|
|
6238
6238
|
}
|
|
6239
6239
|
var Ir = {}, yc;
|
|
6240
6240
|
function Wf() {
|
|
@@ -6529,7 +6529,7 @@ function Jf() {
|
|
|
6529
6529
|
if (wc)
|
|
6530
6530
|
return Tr;
|
|
6531
6531
|
wc = 1, Object.defineProperty(Tr, "__esModule", { value: !0 }), Tr.DialogServiceConnector = void 0;
|
|
6532
|
-
const t = Kf(), e = _(), r =
|
|
6532
|
+
const t = Kf(), e = _(), r = ye, i = M(), u = z, a = R(), o = Os;
|
|
6533
6533
|
let n = class extends a.Recognizer {
|
|
6534
6534
|
/**
|
|
6535
6535
|
* Initializes an instance of the DialogServiceConnector.
|
|
@@ -6792,7 +6792,7 @@ function Zf() {
|
|
|
6792
6792
|
if (Ac)
|
|
6793
6793
|
return Or;
|
|
6794
6794
|
Ac = 1, Object.defineProperty(Or, "__esModule", { value: !0 }), Or.BaseAudioPlayer = void 0;
|
|
6795
|
-
const t = ne, e = R(), r =
|
|
6795
|
+
const t = ne, e = R(), r = Ze;
|
|
6796
6796
|
let i = class {
|
|
6797
6797
|
/**
|
|
6798
6798
|
* Creates and initializes an instance of this class.
|
|
@@ -6936,11 +6936,11 @@ class eg {
|
|
|
6936
6936
|
}
|
|
6937
6937
|
}
|
|
6938
6938
|
xs.VoiceProfile = eg;
|
|
6939
|
-
var
|
|
6939
|
+
var ot = {}, Ic;
|
|
6940
6940
|
function tg() {
|
|
6941
6941
|
if (Ic)
|
|
6942
|
-
return
|
|
6943
|
-
Ic = 1, Object.defineProperty(
|
|
6942
|
+
return ot;
|
|
6943
|
+
Ic = 1, Object.defineProperty(ot, "__esModule", { value: !0 }), ot.VoiceProfileEnrollmentCancellationDetails = ot.VoiceProfileEnrollmentResult = void 0;
|
|
6944
6944
|
const t = _(), e = R();
|
|
6945
6945
|
let r = class $o {
|
|
6946
6946
|
constructor(a, o, n) {
|
|
@@ -7006,7 +7006,7 @@ function tg() {
|
|
|
7006
7006
|
};
|
|
7007
7007
|
}
|
|
7008
7008
|
};
|
|
7009
|
-
|
|
7009
|
+
ot.VoiceProfileEnrollmentResult = r;
|
|
7010
7010
|
class i extends e.CancellationDetailsBase {
|
|
7011
7011
|
constructor(a, o, n) {
|
|
7012
7012
|
super(a, o, n);
|
|
@@ -7025,13 +7025,13 @@ function tg() {
|
|
|
7025
7025
|
return a.properties && (n = e.CancellationErrorCode[a.properties.getProperty(t.CancellationErrorCodePropertyName, e.CancellationErrorCode[e.CancellationErrorCode.NoError])]), new i(o, a.errorDetails, n);
|
|
7026
7026
|
}
|
|
7027
7027
|
}
|
|
7028
|
-
return
|
|
7028
|
+
return ot.VoiceProfileEnrollmentCancellationDetails = i, ot;
|
|
7029
7029
|
}
|
|
7030
|
-
var
|
|
7030
|
+
var at = {}, _c;
|
|
7031
7031
|
function rg() {
|
|
7032
7032
|
if (_c)
|
|
7033
|
-
return
|
|
7034
|
-
_c = 1, Object.defineProperty(
|
|
7033
|
+
return at;
|
|
7034
|
+
_c = 1, Object.defineProperty(at, "__esModule", { value: !0 }), at.VoiceProfileCancellationDetails = at.VoiceProfileResult = void 0;
|
|
7035
7035
|
const t = _(), e = z, r = R();
|
|
7036
7036
|
let i = class {
|
|
7037
7037
|
constructor(o, n) {
|
|
@@ -7047,7 +7047,7 @@ function rg() {
|
|
|
7047
7047
|
return this.privErrorDetails;
|
|
7048
7048
|
}
|
|
7049
7049
|
};
|
|
7050
|
-
|
|
7050
|
+
at.VoiceProfileResult = i;
|
|
7051
7051
|
class u extends r.CancellationDetailsBase {
|
|
7052
7052
|
constructor(o, n, s) {
|
|
7053
7053
|
super(o, n, s);
|
|
@@ -7066,7 +7066,7 @@ function rg() {
|
|
|
7066
7066
|
return o.properties && (s = r.CancellationErrorCode[o.properties.getProperty(t.CancellationErrorCodePropertyName, r.CancellationErrorCode[r.CancellationErrorCode.NoError])]), new u(n, o.errorDetails, s);
|
|
7067
7067
|
}
|
|
7068
7068
|
}
|
|
7069
|
-
return
|
|
7069
|
+
return at.VoiceProfileCancellationDetails = u, at;
|
|
7070
7070
|
}
|
|
7071
7071
|
var Dr = {}, Tc;
|
|
7072
7072
|
function ng() {
|
|
@@ -7602,11 +7602,11 @@ function pg() {
|
|
|
7602
7602
|
t.SpeakerRecognitionCancellationDetails = a;
|
|
7603
7603
|
}(Mo)), Mo;
|
|
7604
7604
|
}
|
|
7605
|
-
var Oo = {},
|
|
7605
|
+
var Oo = {}, ct = {}, jc;
|
|
7606
7606
|
function ld() {
|
|
7607
7607
|
if (jc)
|
|
7608
|
-
return
|
|
7609
|
-
jc = 1, Object.defineProperty(
|
|
7608
|
+
return ct;
|
|
7609
|
+
jc = 1, Object.defineProperty(ct, "__esModule", { value: !0 }), ct.ConversationImpl = ct.Conversation = void 0;
|
|
7610
7610
|
const t = _(), e = M(), r = z, i = R();
|
|
7611
7611
|
class u {
|
|
7612
7612
|
constructor() {
|
|
@@ -7628,7 +7628,7 @@ function ld() {
|
|
|
7628
7628
|
})), d;
|
|
7629
7629
|
}
|
|
7630
7630
|
}
|
|
7631
|
-
|
|
7631
|
+
ct.Conversation = u;
|
|
7632
7632
|
class a extends u {
|
|
7633
7633
|
/**
|
|
7634
7634
|
* Create a conversation impl
|
|
@@ -8203,7 +8203,7 @@ function ld() {
|
|
|
8203
8203
|
});
|
|
8204
8204
|
}
|
|
8205
8205
|
}
|
|
8206
|
-
return
|
|
8206
|
+
return ct.ConversationImpl = a, ct;
|
|
8207
8207
|
}
|
|
8208
8208
|
var Fs = {};
|
|
8209
8209
|
Object.defineProperty(Fs, "__esModule", { value: !0 });
|
|
@@ -8647,7 +8647,7 @@ function yg() {
|
|
|
8647
8647
|
if (Vc)
|
|
8648
8648
|
return Vr;
|
|
8649
8649
|
Vc = 1, Object.defineProperty(Vr, "__esModule", { value: !0 }), Vr.ConversationTranscriber = void 0;
|
|
8650
|
-
const t = _(), e =
|
|
8650
|
+
const t = _(), e = ye, r = M(), i = z, u = R();
|
|
8651
8651
|
let a = class vd extends u.Recognizer {
|
|
8652
8652
|
/**
|
|
8653
8653
|
* ConversationTranscriber constructor.
|
|
@@ -8782,11 +8782,11 @@ function yg() {
|
|
|
8782
8782
|
};
|
|
8783
8783
|
return Vr.ConversationTranscriber = a, Vr;
|
|
8784
8784
|
}
|
|
8785
|
-
var
|
|
8785
|
+
var ut = {}, Wc;
|
|
8786
8786
|
function Pg() {
|
|
8787
8787
|
if (Wc)
|
|
8788
|
-
return
|
|
8789
|
-
Wc = 1, Object.defineProperty(
|
|
8788
|
+
return ut;
|
|
8789
|
+
Wc = 1, Object.defineProperty(ut, "__esModule", { value: !0 }), ut.Participant = ut.User = void 0;
|
|
8790
8790
|
const t = R();
|
|
8791
8791
|
class e {
|
|
8792
8792
|
constructor(u) {
|
|
@@ -8796,7 +8796,7 @@ function Pg() {
|
|
|
8796
8796
|
return this.privUserId;
|
|
8797
8797
|
}
|
|
8798
8798
|
}
|
|
8799
|
-
|
|
8799
|
+
ut.User = e;
|
|
8800
8800
|
class r {
|
|
8801
8801
|
constructor(u, a, o, n, s, c, p, d) {
|
|
8802
8802
|
this.privId = u, this.privAvatar = a, this.privDisplayName = o, this.privIsHost = n, this.privIsMuted = s, this.privIsUsingTts = c, this.privPreferredLanguage = p, this.privVoice = d, this.privProperties = new t.PropertyCollection();
|
|
@@ -8832,7 +8832,7 @@ function Pg() {
|
|
|
8832
8832
|
return new r(u, "", u, !1, !1, !1, a, o);
|
|
8833
8833
|
}
|
|
8834
8834
|
}
|
|
8835
|
-
return
|
|
8835
|
+
return ut.Participant = r, ut;
|
|
8836
8836
|
}
|
|
8837
8837
|
var fd = {};
|
|
8838
8838
|
(function(t) {
|
|
@@ -8840,11 +8840,11 @@ var fd = {};
|
|
|
8840
8840
|
e[e.JoinedConversation = 0] = "JoinedConversation", e[e.LeftConversation = 1] = "LeftConversation", e[e.Updated = 2] = "Updated";
|
|
8841
8841
|
}(t.ParticipantChangedReason || (t.ParticipantChangedReason = {}));
|
|
8842
8842
|
})(fd);
|
|
8843
|
-
var
|
|
8843
|
+
var pt = {}, $c;
|
|
8844
8844
|
function Rg() {
|
|
8845
8845
|
if ($c)
|
|
8846
|
-
return
|
|
8847
|
-
$c = 1, Object.defineProperty(
|
|
8846
|
+
return pt;
|
|
8847
|
+
$c = 1, Object.defineProperty(pt, "__esModule", { value: !0 }), pt.MeetingImpl = pt.Meeting = void 0;
|
|
8848
8848
|
const t = _(), e = M(), r = z, i = R();
|
|
8849
8849
|
let u = class {
|
|
8850
8850
|
constructor() {
|
|
@@ -8865,7 +8865,7 @@ function Rg() {
|
|
|
8865
8865
|
})(), c, p), d;
|
|
8866
8866
|
}
|
|
8867
8867
|
};
|
|
8868
|
-
|
|
8868
|
+
pt.Meeting = u;
|
|
8869
8869
|
class a extends u {
|
|
8870
8870
|
/**
|
|
8871
8871
|
* Create a Meeting impl
|
|
@@ -9427,7 +9427,7 @@ function Rg() {
|
|
|
9427
9427
|
});
|
|
9428
9428
|
}
|
|
9429
9429
|
}
|
|
9430
|
-
return
|
|
9430
|
+
return pt.MeetingImpl = a, pt;
|
|
9431
9431
|
}
|
|
9432
9432
|
var Wr = {}, Kc;
|
|
9433
9433
|
function Eg() {
|
|
@@ -9681,11 +9681,11 @@ function da() {
|
|
|
9681
9681
|
} });
|
|
9682
9682
|
}(Oo)), Oo;
|
|
9683
9683
|
}
|
|
9684
|
-
var
|
|
9684
|
+
var dt = {}, Xc;
|
|
9685
9685
|
function la() {
|
|
9686
9686
|
if (Xc)
|
|
9687
|
-
return
|
|
9688
|
-
Xc = 1, Object.defineProperty(
|
|
9687
|
+
return dt;
|
|
9688
|
+
Xc = 1, Object.defineProperty(dt, "__esModule", { value: !0 }), dt.SynthesisRequest = dt.Synthesizer = void 0;
|
|
9689
9689
|
const t = _(), e = M(), r = z, i = R();
|
|
9690
9690
|
let u = class gd {
|
|
9691
9691
|
/**
|
|
@@ -9926,20 +9926,20 @@ function la() {
|
|
|
9926
9926
|
return n.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">").replace(/"/g, """).replace(/'/g, "'");
|
|
9927
9927
|
}
|
|
9928
9928
|
};
|
|
9929
|
-
|
|
9929
|
+
dt.Synthesizer = u;
|
|
9930
9930
|
class a {
|
|
9931
9931
|
constructor(n, s, c, p, d, l) {
|
|
9932
9932
|
this.requestId = n, this.text = s, this.isSSML = c, this.cb = p, this.err = d, this.dataStream = l;
|
|
9933
9933
|
}
|
|
9934
9934
|
}
|
|
9935
|
-
return
|
|
9935
|
+
return dt.SynthesisRequest = a, dt;
|
|
9936
9936
|
}
|
|
9937
9937
|
var Jr = {}, Zc;
|
|
9938
9938
|
function Ig() {
|
|
9939
9939
|
if (Zc)
|
|
9940
9940
|
return Jr;
|
|
9941
9941
|
Zc = 1, Object.defineProperty(Jr, "__esModule", { value: !0 }), Jr.SpeechSynthesizer = void 0;
|
|
9942
|
-
const t = _(), e = M(), r = Yn, i =
|
|
9942
|
+
const t = _(), e = M(), r = Yn, i = Ye, u = re, a = z, o = R(), n = la();
|
|
9943
9943
|
let s = class md extends o.Synthesizer {
|
|
9944
9944
|
/**
|
|
9945
9945
|
* SpeechSynthesizer constructor.
|
|
@@ -10479,7 +10479,7 @@ var Cd = {};
|
|
|
10479
10479
|
var Ks = {};
|
|
10480
10480
|
Object.defineProperty(Ks, "__esModule", { value: !0 });
|
|
10481
10481
|
Ks.SpeakerAudioDestination = void 0;
|
|
10482
|
-
const
|
|
10482
|
+
const lt = M(), Ng = re, ge = Ze, zg = 60 * 30, tu = {
|
|
10483
10483
|
[ge.AudioFormatTag.PCM]: "audio/wav",
|
|
10484
10484
|
[ge.AudioFormatTag.MuLaw]: "audio/x-wav",
|
|
10485
10485
|
[ge.AudioFormatTag.MP3]: "audio/mpeg",
|
|
@@ -10492,7 +10492,7 @@ const dt = M(), Ng = re, ge = Xe, zg = 60 * 30, tu = {
|
|
|
10492
10492
|
};
|
|
10493
10493
|
class jg {
|
|
10494
10494
|
constructor(e) {
|
|
10495
|
-
this.privPlaybackStarted = !1, this.privAppendingToBuffer = !1, this.privMediaSourceOpened = !1, this.privBytesReceived = 0, this.privId = e || (0,
|
|
10495
|
+
this.privPlaybackStarted = !1, this.privAppendingToBuffer = !1, this.privMediaSourceOpened = !1, this.privBytesReceived = 0, this.privId = e || (0, lt.createNoDashGuid)(), this.privIsPaused = !1, this.privIsClosed = !1;
|
|
10496
10496
|
}
|
|
10497
10497
|
id() {
|
|
10498
10498
|
return this.privId;
|
|
@@ -10538,17 +10538,17 @@ class jg {
|
|
|
10538
10538
|
r === void 0 ? console.warn(`Unknown mimeType for format ${ge.AudioFormatTag[this.privFormat.formatTag]}; playback is not supported.`) : typeof MediaSource < "u" && MediaSource.isTypeSupported(r) ? (this.privAudio = new Audio(), this.privAudioBuffer = [], this.privMediaSource = new MediaSource(), this.privAudio.src = URL.createObjectURL(this.privMediaSource), this.privAudio.load(), this.privMediaSource.onsourceopen = () => {
|
|
10539
10539
|
this.privMediaSourceOpened = !0, this.privMediaSource.duration = zg, this.privSourceBuffer = this.privMediaSource.addSourceBuffer(r), this.privSourceBuffer.onupdate = () => {
|
|
10540
10540
|
this.updateSourceBuffer().catch((i) => {
|
|
10541
|
-
|
|
10541
|
+
lt.Events.instance.onEvent(new lt.BackgroundEvent(i));
|
|
10542
10542
|
});
|
|
10543
10543
|
}, this.privSourceBuffer.onupdateend = () => {
|
|
10544
10544
|
this.handleSourceBufferUpdateEnd().catch((i) => {
|
|
10545
|
-
|
|
10545
|
+
lt.Events.instance.onEvent(new lt.BackgroundEvent(i));
|
|
10546
10546
|
});
|
|
10547
10547
|
}, this.privSourceBuffer.onupdatestart = () => {
|
|
10548
10548
|
this.privAppendingToBuffer = !1;
|
|
10549
10549
|
};
|
|
10550
10550
|
}, this.updateSourceBuffer().catch((i) => {
|
|
10551
|
-
|
|
10551
|
+
lt.Events.instance.onEvent(new lt.BackgroundEvent(i));
|
|
10552
10552
|
})) : (console.warn(`Format ${ge.AudioFormatTag[this.privFormat.formatTag]} could not be played by MSE, streaming playback is not enabled.`), this.privAudioOutputStream = new Ng.PullAudioOutputStreamImpl(), this.privAudioOutputStream.format = this.privFormat, this.privAudio = new Audio());
|
|
10553
10553
|
}
|
|
10554
10554
|
}
|
|
@@ -10796,11 +10796,11 @@ function Bg() {
|
|
|
10796
10796
|
};
|
|
10797
10797
|
return Yr.PronunciationAssessmentConfig = r, Yr;
|
|
10798
10798
|
}
|
|
10799
|
-
var
|
|
10799
|
+
var ht = {}, su;
|
|
10800
10800
|
function qg() {
|
|
10801
10801
|
if (su)
|
|
10802
|
-
return
|
|
10803
|
-
su = 1, Object.defineProperty(
|
|
10802
|
+
return ht;
|
|
10803
|
+
su = 1, Object.defineProperty(ht, "__esModule", { value: !0 }), ht.PronunciationAssessmentResult = ht.ContentAssessmentResult = void 0;
|
|
10804
10804
|
const t = z, e = R();
|
|
10805
10805
|
class r {
|
|
10806
10806
|
/**
|
|
@@ -10847,7 +10847,7 @@ function qg() {
|
|
|
10847
10847
|
return this.privPronJson.ContentAssessment.TopicScore;
|
|
10848
10848
|
}
|
|
10849
10849
|
}
|
|
10850
|
-
|
|
10850
|
+
ht.ContentAssessmentResult = r;
|
|
10851
10851
|
let i = class Ed {
|
|
10852
10852
|
constructor(a) {
|
|
10853
10853
|
const o = JSON.parse(a);
|
|
@@ -10946,7 +10946,7 @@ function qg() {
|
|
|
10946
10946
|
return new r(this.detailResult);
|
|
10947
10947
|
}
|
|
10948
10948
|
};
|
|
10949
|
-
return
|
|
10949
|
+
return ht.PronunciationAssessmentResult = i, ht;
|
|
10950
10950
|
}
|
|
10951
10951
|
var en = {}, ou;
|
|
10952
10952
|
function Fg() {
|
|
@@ -11106,7 +11106,7 @@ function Ug() {
|
|
|
11106
11106
|
if (cu)
|
|
11107
11107
|
return tn;
|
|
11108
11108
|
cu = 1, Object.defineProperty(tn, "__esModule", { value: !0 }), tn.AvatarSynthesizer = void 0;
|
|
11109
|
-
const t = Ad(), e = _(), r = M(), i =
|
|
11109
|
+
const t = Ad(), e = _(), r = M(), i = Ye, u = R(), a = z, o = la();
|
|
11110
11110
|
let n = class extends u.Synthesizer {
|
|
11111
11111
|
/**
|
|
11112
11112
|
* Creates and initializes an instance of this class.
|
|
@@ -11331,7 +11331,7 @@ function R() {
|
|
|
11331
11331
|
Object.defineProperty(t, "AudioConfig", { enumerable: !0, get: function() {
|
|
11332
11332
|
return e.AudioConfig;
|
|
11333
11333
|
} });
|
|
11334
|
-
var r =
|
|
11334
|
+
var r = Ze;
|
|
11335
11335
|
Object.defineProperty(t, "AudioStreamFormat", { enumerable: !0, get: function() {
|
|
11336
11336
|
return r.AudioStreamFormat;
|
|
11337
11337
|
} }), Object.defineProperty(t, "AudioFormatTag", { enumerable: !0, get: function() {
|
|
@@ -11465,9 +11465,9 @@ function R() {
|
|
|
11465
11465
|
Object.defineProperty(t, "IntentRecognizer", { enumerable: !0, get: function() {
|
|
11466
11466
|
return $.IntentRecognizer;
|
|
11467
11467
|
} });
|
|
11468
|
-
var
|
|
11468
|
+
var tt = ed;
|
|
11469
11469
|
Object.defineProperty(t, "VoiceProfileType", { enumerable: !0, get: function() {
|
|
11470
|
-
return
|
|
11470
|
+
return tt.VoiceProfileType;
|
|
11471
11471
|
} });
|
|
11472
11472
|
var oi = jf();
|
|
11473
11473
|
Object.defineProperty(t, "TranslationRecognizer", { enumerable: !0, get: function() {
|
|
@@ -11613,13 +11613,13 @@ function R() {
|
|
|
11613
11613
|
Object.defineProperty(t, "SourceLanguageConfig", { enumerable: !0, get: function() {
|
|
11614
11614
|
return xi.SourceLanguageConfig;
|
|
11615
11615
|
} });
|
|
11616
|
-
var
|
|
11616
|
+
var It = pg();
|
|
11617
11617
|
Object.defineProperty(t, "SpeakerRecognitionResult", { enumerable: !0, get: function() {
|
|
11618
|
-
return
|
|
11618
|
+
return It.SpeakerRecognitionResult;
|
|
11619
11619
|
} }), Object.defineProperty(t, "SpeakerRecognitionResultType", { enumerable: !0, get: function() {
|
|
11620
|
-
return
|
|
11620
|
+
return It.SpeakerRecognitionResultType;
|
|
11621
11621
|
} }), Object.defineProperty(t, "SpeakerRecognitionCancellationDetails", { enumerable: !0, get: function() {
|
|
11622
|
-
return
|
|
11622
|
+
return It.SpeakerRecognitionCancellationDetails;
|
|
11623
11623
|
} });
|
|
11624
11624
|
var Y = da();
|
|
11625
11625
|
Object.defineProperty(t, "Conversation", { enumerable: !0, get: function() {
|
|
@@ -11930,26 +11930,26 @@ var Zg = Qg(Xg);
|
|
|
11930
11930
|
var ni = {};
|
|
11931
11931
|
Object.defineProperty(ni, "__esModule", { value: !0 });
|
|
11932
11932
|
ni.RestConfigBase = void 0;
|
|
11933
|
-
class
|
|
11933
|
+
class Ke {
|
|
11934
11934
|
static get requestOptions() {
|
|
11935
|
-
return
|
|
11935
|
+
return Ke.privDefaultRequestOptions;
|
|
11936
11936
|
}
|
|
11937
11937
|
static get configParams() {
|
|
11938
|
-
return
|
|
11938
|
+
return Ke.privDefaultParams;
|
|
11939
11939
|
}
|
|
11940
11940
|
static get restErrors() {
|
|
11941
|
-
return
|
|
11941
|
+
return Ke.privRestErrors;
|
|
11942
11942
|
}
|
|
11943
11943
|
}
|
|
11944
|
-
ni.RestConfigBase =
|
|
11945
|
-
|
|
11944
|
+
ni.RestConfigBase = Ke;
|
|
11945
|
+
Ke.privDefaultRequestOptions = {
|
|
11946
11946
|
headers: {
|
|
11947
11947
|
Accept: "application/json"
|
|
11948
11948
|
},
|
|
11949
11949
|
ignoreCache: !1,
|
|
11950
11950
|
timeout: 1e4
|
|
11951
11951
|
};
|
|
11952
|
-
|
|
11952
|
+
Ke.privRestErrors = {
|
|
11953
11953
|
authInvalidSubscriptionKey: "You must specify either an authentication token to use, or a Cognitive Speech subscription key.",
|
|
11954
11954
|
authInvalidSubscriptionRegion: "You must specify the Cognitive Speech region to use.",
|
|
11955
11955
|
invalidArgs: "Required input not found: {arg}.",
|
|
@@ -11961,7 +11961,7 @@ $e.privRestErrors = {
|
|
|
11961
11961
|
permissionDeniedSend: "Invalid operation: the conversation is not in a connected state.",
|
|
11962
11962
|
permissionDeniedStart: "Invalid operation: there is already an active conversation."
|
|
11963
11963
|
};
|
|
11964
|
-
|
|
11964
|
+
Ke.privDefaultParams = {
|
|
11965
11965
|
apiVersion: "api-version",
|
|
11966
11966
|
authorization: "Authorization",
|
|
11967
11967
|
clientAppId: "X-ClientAppId",
|
|
@@ -12073,11 +12073,11 @@ function Yg() {
|
|
|
12073
12073
|
};
|
|
12074
12074
|
return er.IntentConnectionFactory = a, er;
|
|
12075
12075
|
}
|
|
12076
|
-
var
|
|
12076
|
+
var vt = {}, fu;
|
|
12077
12077
|
function em() {
|
|
12078
12078
|
if (fu)
|
|
12079
|
-
return
|
|
12080
|
-
fu = 1, Object.defineProperty(
|
|
12079
|
+
return vt;
|
|
12080
|
+
fu = 1, Object.defineProperty(vt, "__esModule", { value: !0 }), vt.VoiceProfileConnectionFactory = vt.SpeakerRecognitionConnectionFactory = void 0;
|
|
12081
12081
|
const t = Z(), e = R(), r = _e(), i = _(), u = X;
|
|
12082
12082
|
class a extends r.ConnectionFactoryBase {
|
|
12083
12083
|
create(c, p, d, l) {
|
|
@@ -12114,13 +12114,13 @@ function em() {
|
|
|
12114
12114
|
return super.create(c, p, "recognition", d);
|
|
12115
12115
|
}
|
|
12116
12116
|
};
|
|
12117
|
-
|
|
12117
|
+
vt.SpeakerRecognitionConnectionFactory = o;
|
|
12118
12118
|
class n extends a {
|
|
12119
12119
|
create(c, p, d) {
|
|
12120
12120
|
return super.create(c, p, "profile", d);
|
|
12121
12121
|
}
|
|
12122
12122
|
}
|
|
12123
|
-
return
|
|
12123
|
+
return vt.VoiceProfileConnectionFactory = n, vt;
|
|
12124
12124
|
}
|
|
12125
12125
|
var Js = {};
|
|
12126
12126
|
(function(t) {
|
|
@@ -12317,7 +12317,7 @@ function tm() {
|
|
|
12317
12317
|
if (gu)
|
|
12318
12318
|
return on;
|
|
12319
12319
|
gu = 1, Object.defineProperty(on, "__esModule", { value: !0 }), on.ServiceRecognizerBase = void 0;
|
|
12320
|
-
const t = Z(), e = M(), r = R(), i = _(), u = xe, a = Td, o =
|
|
12320
|
+
const t = Z(), e = M(), r = R(), i = _(), u = xe, a = Td, o = ye, n = bd, s = Md, c = Od, p = Dd, d = kd, l = Nd, h = zd, v = jd;
|
|
12321
12321
|
let g = class Ln {
|
|
12322
12322
|
constructor(f, S, C, y, w) {
|
|
12323
12323
|
if (this.privConnectionConfigurationPromise = void 0, this.privConnectionPromise = void 0, this.privSetTimeout = setTimeout, this.privIsLiveAudio = !1, this.privAverageBytesPerMs = 0, this.privEnableSpeakerId = !1, this.privExpectContentAssessmentResponse = !1, this.recognizeOverride = void 0, this.recognizeSpeaker = void 0, this.disconnectOverride = void 0, this.receiveMessageOverride = void 0, this.sendPrePayloadJSONOverride = void 0, this.postConnectImplOverride = void 0, this.configConnectionOverride = void 0, this.handleSpeechPhraseMessage = void 0, this.handleSpeechHypothesisMessage = void 0, !f)
|
|
@@ -12815,7 +12815,7 @@ var No = {}, Su;
|
|
|
12815
12815
|
function nm() {
|
|
12816
12816
|
return Su || (Su = 1, function(t) {
|
|
12817
12817
|
Object.defineProperty(t, "__esModule", { value: !0 }), t.RecognizerConfig = t.SpeechResultFormat = void 0;
|
|
12818
|
-
const e = R(), r = _(), i =
|
|
12818
|
+
const e = R(), r = _(), i = ye;
|
|
12819
12819
|
(function(a) {
|
|
12820
12820
|
a[a.Simple = 0] = "Simple", a[a.Detailed = 1] = "Detailed";
|
|
12821
12821
|
})(t.SpeechResultFormat || (t.SpeechResultFormat = {}));
|
|
@@ -12881,13 +12881,13 @@ Object.defineProperty(Ld, "__esModule", { value: !0 });
|
|
|
12881
12881
|
var Gs = {};
|
|
12882
12882
|
Object.defineProperty(Gs, "__esModule", { value: !0 });
|
|
12883
12883
|
Gs.WebsocketMessageFormatter = void 0;
|
|
12884
|
-
const
|
|
12884
|
+
const Pe = M(), Cu = `\r
|
|
12885
12885
|
`;
|
|
12886
12886
|
class im {
|
|
12887
12887
|
toConnectionMessage(e) {
|
|
12888
|
-
const r = new
|
|
12888
|
+
const r = new Pe.Deferred();
|
|
12889
12889
|
try {
|
|
12890
|
-
if (e.messageType ===
|
|
12890
|
+
if (e.messageType === Pe.MessageType.Text) {
|
|
12891
12891
|
const i = e.textContent;
|
|
12892
12892
|
let u = {}, a = null;
|
|
12893
12893
|
if (i) {
|
|
@@ -12896,8 +12896,8 @@ class im {
|
|
|
12896
12896
|
`);
|
|
12897
12897
|
o && o.length > 0 && (u = this.parseHeaders(o[0]), o.length > 1 && (a = o[1]));
|
|
12898
12898
|
}
|
|
12899
|
-
r.resolve(new
|
|
12900
|
-
} else if (e.messageType ===
|
|
12899
|
+
r.resolve(new Pe.ConnectionMessage(e.messageType, a, u, e.id));
|
|
12900
|
+
} else if (e.messageType === Pe.MessageType.Binary) {
|
|
12901
12901
|
const i = e.binaryContent;
|
|
12902
12902
|
let u = {}, a = null;
|
|
12903
12903
|
if (!i || i.byteLength < 2)
|
|
@@ -12908,7 +12908,7 @@ class im {
|
|
|
12908
12908
|
let s = "";
|
|
12909
12909
|
for (let c = 0; c < n; c++)
|
|
12910
12910
|
s += String.fromCharCode(o.getInt8(c + 2));
|
|
12911
|
-
u = this.parseHeaders(s), i.byteLength > n + 2 && (a = i.slice(2 + n)), r.resolve(new
|
|
12911
|
+
u = this.parseHeaders(s), i.byteLength > n + 2 && (a = i.slice(2 + n)), r.resolve(new Pe.ConnectionMessage(e.messageType, a, u, e.id));
|
|
12912
12912
|
}
|
|
12913
12913
|
} catch (i) {
|
|
12914
12914
|
r.reject(`Error formatting the message. Error: ${i}`);
|
|
@@ -12916,19 +12916,19 @@ class im {
|
|
|
12916
12916
|
return r.promise;
|
|
12917
12917
|
}
|
|
12918
12918
|
fromConnectionMessage(e) {
|
|
12919
|
-
const r = new
|
|
12919
|
+
const r = new Pe.Deferred();
|
|
12920
12920
|
try {
|
|
12921
|
-
if (e.messageType ===
|
|
12921
|
+
if (e.messageType === Pe.MessageType.Text) {
|
|
12922
12922
|
const i = `${this.makeHeaders(e)}${Cu}${e.textBody ? e.textBody : ""}`;
|
|
12923
|
-
r.resolve(new
|
|
12924
|
-
} else if (e.messageType ===
|
|
12923
|
+
r.resolve(new Pe.RawWebsocketMessage(Pe.MessageType.Text, i, e.id));
|
|
12924
|
+
} else if (e.messageType === Pe.MessageType.Binary) {
|
|
12925
12925
|
const i = this.makeHeaders(e), u = e.binaryBody, a = this.stringToArrayBuffer(i), o = new Int8Array(a), n = o.byteLength, s = new Int8Array(2 + n + (u ? u.byteLength : 0));
|
|
12926
12926
|
if (s[0] = n >> 8 & 255, s[1] = n & 255, s.set(o, 2), u) {
|
|
12927
12927
|
const p = new Int8Array(u);
|
|
12928
12928
|
s.set(p, 2 + n);
|
|
12929
12929
|
}
|
|
12930
12930
|
const c = s.buffer;
|
|
12931
|
-
r.resolve(new
|
|
12931
|
+
r.resolve(new Pe.RawWebsocketMessage(Pe.MessageType.Binary, c, e.id));
|
|
12932
12932
|
}
|
|
12933
12933
|
} catch (i) {
|
|
12934
12934
|
r.reject(`Error formatting the message. ${i}`);
|
|
@@ -12969,7 +12969,7 @@ function sm() {
|
|
|
12969
12969
|
if (yu)
|
|
12970
12970
|
return cn;
|
|
12971
12971
|
yu = 1, Object.defineProperty(cn, "__esModule", { value: !0 }), cn.SpeechConnectionFactory = void 0;
|
|
12972
|
-
const t = Z(), e = _(), r = R(), i = _e(), u = _(), a = X, o = Ie, n =
|
|
12972
|
+
const t = Z(), e = _(), r = R(), i = _e(), u = _(), a = X, o = Ie, n = ye;
|
|
12973
12973
|
let s = class extends i.ConnectionFactoryBase {
|
|
12974
12974
|
constructor() {
|
|
12975
12975
|
super(...arguments), this.interactiveRelativeUri = "/speech/recognition/interactive/cognitiveservices/v1", this.conversationRelativeUri = "/speech/recognition/conversation/cognitiveservices/v1", this.dictationRelativeUri = "/speech/recognition/dictation/cognitiveservices/v1", this.universalUri = "/stt/speech/universal/v";
|
|
@@ -13077,7 +13077,7 @@ function cm() {
|
|
|
13077
13077
|
if (Eu)
|
|
13078
13078
|
return dn;
|
|
13079
13079
|
Eu = 1, Object.defineProperty(dn, "__esModule", { value: !0 }), dn.TranslationConnectionFactory = void 0;
|
|
13080
|
-
const t = Z(), e = ri, r = R(), i = _e(), u = _(), a = X, o = Ie, n =
|
|
13080
|
+
const t = Z(), e = ri, r = R(), i = _e(), u = _(), a = X, o = Ie, n = ye;
|
|
13081
13081
|
let s = class extends i.ConnectionFactoryBase {
|
|
13082
13082
|
constructor() {
|
|
13083
13083
|
super(...arguments), this.universalUri = "/stt/speech/universal/v2", this.translationV1Uri = "/speech/translation/cognitiveservices/v1";
|
|
@@ -13760,7 +13760,7 @@ function fm() {
|
|
|
13760
13760
|
if (Mu)
|
|
13761
13761
|
return mn;
|
|
13762
13762
|
Mu = 1, Object.defineProperty(mn, "__esModule", { value: !0 }), mn.ConversationTranscriptionServiceRecognizer = void 0;
|
|
13763
|
-
const t = R(), e = _(), r = qd, i =
|
|
13763
|
+
const t = R(), e = _(), r = qd, i = ye;
|
|
13764
13764
|
let u = class extends e.ServiceRecognizerBase {
|
|
13765
13765
|
constructor(o, n, s, c, p) {
|
|
13766
13766
|
super(o, n, s, c, p), this.privConversationTranscriber = p, this.setSpeakerDiarizationJson();
|
|
@@ -14324,10 +14324,10 @@ class Pm {
|
|
|
14324
14324
|
no.ServiceTelemetryListener = Pm;
|
|
14325
14325
|
Object.defineProperty(ro, "__esModule", { value: !0 });
|
|
14326
14326
|
ro.RequestSession = void 0;
|
|
14327
|
-
const
|
|
14327
|
+
const Tt = M(), Hi = Js, Rm = no;
|
|
14328
14328
|
class Em {
|
|
14329
14329
|
constructor(e) {
|
|
14330
|
-
this.privIsDisposed = !1, this.privDetachables = new Array(), this.privIsAudioNodeDetached = !1, this.privIsRecognizing = !1, this.privIsSpeechEnded = !1, this.privTurnStartAudioOffset = 0, this.privLastRecoOffset = 0, this.privHypothesisReceived = !1, this.privBytesSent = 0, this.privRecognitionBytesSent = 0, this.privRecogNumber = 0, this.privInTurn = !1, this.privConnectionAttempts = 0, this.privAudioSourceId = e, this.privRequestId = (0,
|
|
14330
|
+
this.privIsDisposed = !1, this.privDetachables = new Array(), this.privIsAudioNodeDetached = !1, this.privIsRecognizing = !1, this.privIsSpeechEnded = !1, this.privTurnStartAudioOffset = 0, this.privLastRecoOffset = 0, this.privHypothesisReceived = !1, this.privBytesSent = 0, this.privRecognitionBytesSent = 0, this.privRecogNumber = 0, this.privInTurn = !1, this.privConnectionAttempts = 0, this.privAudioSourceId = e, this.privRequestId = (0, Tt.createNoDashGuid)(), this.privAudioNodeId = (0, Tt.createNoDashGuid)(), this.privTurnDeferral = new Tt.Deferred(), this.privTurnDeferral.resolve();
|
|
14331
14331
|
}
|
|
14332
14332
|
get sessionId() {
|
|
14333
14333
|
return this.privSessionId;
|
|
@@ -14393,11 +14393,11 @@ class Em {
|
|
|
14393
14393
|
this.privTurnDeferral.resolve(), !e || this.isSpeechEnded ? (await this.onComplete(), this.privInTurn = !1) : (this.privTurnStartAudioOffset = this.privLastRecoOffset, this.privAudioNode.replay());
|
|
14394
14394
|
}
|
|
14395
14395
|
onSpeechContext() {
|
|
14396
|
-
this.privRequestId = (0,
|
|
14396
|
+
this.privRequestId = (0, Tt.createNoDashGuid)();
|
|
14397
14397
|
}
|
|
14398
14398
|
onServiceTurnStartResponse() {
|
|
14399
14399
|
this.privTurnDeferral && this.privInTurn && (this.privTurnDeferral.reject("Another turn started before current completed."), this.privTurnDeferral.promise.then().catch(() => {
|
|
14400
|
-
})), this.privInTurn = !0, this.privTurnDeferral = new
|
|
14400
|
+
})), this.privInTurn = !0, this.privTurnDeferral = new Tt.Deferred();
|
|
14401
14401
|
}
|
|
14402
14402
|
onHypothesis(e) {
|
|
14403
14403
|
this.privHypothesisReceived || (this.privHypothesisReceived = !0, this.privServiceTelemetryListener.hypothesisReceived(this.privAudioNode.findTimeAtOffset(e)));
|
|
@@ -14433,7 +14433,7 @@ class Em {
|
|
|
14433
14433
|
this.privIsSpeechEnded = !0;
|
|
14434
14434
|
}
|
|
14435
14435
|
onEvent(e) {
|
|
14436
|
-
this.privServiceTelemetryListener && this.privServiceTelemetryListener.onEvent(e),
|
|
14436
|
+
this.privServiceTelemetryListener && this.privServiceTelemetryListener.onEvent(e), Tt.Events.instance.onEvent(e);
|
|
14437
14437
|
}
|
|
14438
14438
|
async onComplete() {
|
|
14439
14439
|
this.privIsRecognizing && (this.privIsRecognizing = !1, await this.detachAudioNode());
|
|
@@ -14457,7 +14457,7 @@ var io = {}, Vd = {};
|
|
|
14457
14457
|
})(Vd);
|
|
14458
14458
|
Object.defineProperty(io, "__esModule", { value: !0 });
|
|
14459
14459
|
io.SpeechContext = void 0;
|
|
14460
|
-
const wm =
|
|
14460
|
+
const wm = ye, qe = Vd;
|
|
14461
14461
|
class Am {
|
|
14462
14462
|
constructor(e) {
|
|
14463
14463
|
this.privContext = {}, this.privDynamicGrammar = e;
|
|
@@ -14585,7 +14585,7 @@ var Rn = {}, oo = {}, ao = {}, Ca = {};
|
|
|
14585
14585
|
})(Ca);
|
|
14586
14586
|
Object.defineProperty(ao, "__esModule", { value: !0 });
|
|
14587
14587
|
ao.DialogServiceTurnState = void 0;
|
|
14588
|
-
const Tm =
|
|
14588
|
+
const Tm = Ye, bm = re, Mm = Ca;
|
|
14589
14589
|
class Om {
|
|
14590
14590
|
constructor(e, r) {
|
|
14591
14591
|
this.privRequestId = r, this.privIsCompleted = !1, this.privAudioStream = null, this.privTurnManager = e, this.resetTurnEndTimeout();
|
|
@@ -14652,7 +14652,7 @@ function Nm() {
|
|
|
14652
14652
|
if (Lu)
|
|
14653
14653
|
return Rn;
|
|
14654
14654
|
Lu = 1, Object.defineProperty(Rn, "__esModule", { value: !0 }), Rn.DialogServiceAdapter = void 0;
|
|
14655
|
-
const t = Z(), e =
|
|
14655
|
+
const t = Z(), e = Rt, r = M(), i = Ye, u = R(), a = oo, o = _(), n = Ca, s = $d, c = Kd, p = xe;
|
|
14656
14656
|
let d = class extends o.ServiceRecognizerBase {
|
|
14657
14657
|
constructor(h, v, g, m, f) {
|
|
14658
14658
|
super(h, v, g, m, f), this.privEvents = new r.EventSource(), this.privDialogServiceConnector = f, this.receiveMessageOverride = () => this.receiveDialogMessageOverride(), this.privTurnStateManager = new a.DialogServiceTurnStateManager(), this.recognizeOverride = (S, C, y) => this.listenOnce(S, C, y), this.postConnectImplOverride = (S) => this.dialogConnectImpl(S), this.configConnectionOverride = (S) => this.configConnection(S), this.disconnectOverride = () => this.privDisconnect(), this.privDialogAudioSource = g, this.agentConfigSent = !1, this.privLastResult = null, this.connectionEvents.attach((S) => {
|
|
@@ -15095,7 +15095,7 @@ function Lm() {
|
|
|
15095
15095
|
};
|
|
15096
15096
|
return En.ConversationManager = u, En;
|
|
15097
15097
|
}
|
|
15098
|
-
var
|
|
15098
|
+
var ft = {}, wn = {}, uo = {}, ii = {};
|
|
15099
15099
|
Object.defineProperty(ii, "__esModule", { value: !0 });
|
|
15100
15100
|
ii.ConversationConnectionMessage = void 0;
|
|
15101
15101
|
const xm = M();
|
|
@@ -15112,19 +15112,19 @@ class Bm extends xm.ConnectionMessage {
|
|
|
15112
15112
|
ii.ConversationConnectionMessage = Bm;
|
|
15113
15113
|
Object.defineProperty(uo, "__esModule", { value: !0 });
|
|
15114
15114
|
uo.ConversationWebsocketMessageFormatter = void 0;
|
|
15115
|
-
const
|
|
15115
|
+
const gt = M(), Bu = ii;
|
|
15116
15116
|
class qm {
|
|
15117
15117
|
/**
|
|
15118
15118
|
* Format incoming messages: text (speech partial/final, IM) or binary (tts)
|
|
15119
15119
|
*/
|
|
15120
15120
|
toConnectionMessage(e) {
|
|
15121
|
-
const r = new
|
|
15121
|
+
const r = new gt.Deferred();
|
|
15122
15122
|
try {
|
|
15123
|
-
if (e.messageType ===
|
|
15123
|
+
if (e.messageType === gt.MessageType.Text) {
|
|
15124
15124
|
const i = new Bu.ConversationConnectionMessage(e.messageType, e.textContent, {}, e.id);
|
|
15125
15125
|
r.resolve(i);
|
|
15126
15126
|
} else
|
|
15127
|
-
e.messageType ===
|
|
15127
|
+
e.messageType === gt.MessageType.Binary && r.resolve(new Bu.ConversationConnectionMessage(e.messageType, e.binaryContent, void 0, e.id));
|
|
15128
15128
|
} catch (i) {
|
|
15129
15129
|
r.reject(`Error formatting the message. Error: ${i}`);
|
|
15130
15130
|
}
|
|
@@ -15134,11 +15134,11 @@ class qm {
|
|
|
15134
15134
|
* Format outgoing messages: text (commands or IM)
|
|
15135
15135
|
*/
|
|
15136
15136
|
fromConnectionMessage(e) {
|
|
15137
|
-
const r = new
|
|
15137
|
+
const r = new gt.Deferred();
|
|
15138
15138
|
try {
|
|
15139
|
-
if (e.messageType ===
|
|
15139
|
+
if (e.messageType === gt.MessageType.Text) {
|
|
15140
15140
|
const i = `${e.textBody ? e.textBody : ""}`;
|
|
15141
|
-
r.resolve(new
|
|
15141
|
+
r.resolve(new gt.RawWebsocketMessage(gt.MessageType.Text, i, e.id));
|
|
15142
15142
|
}
|
|
15143
15143
|
} catch (i) {
|
|
15144
15144
|
r.reject(`Error formatting the message. ${i}`);
|
|
@@ -15777,15 +15777,15 @@ function Gm() {
|
|
|
15777
15777
|
var Hu;
|
|
15778
15778
|
function Qm() {
|
|
15779
15779
|
if (Hu)
|
|
15780
|
-
return
|
|
15781
|
-
Hu = 1, Object.defineProperty(
|
|
15780
|
+
return ft;
|
|
15781
|
+
Hu = 1, Object.defineProperty(ft, "__esModule", { value: !0 }), ft.ConversationTranslatorRecognizer = ft.ConversationRecognizerFactory = void 0;
|
|
15782
15782
|
const t = _(), e = M(), r = z, i = R(), u = Fm(), a = Gm();
|
|
15783
15783
|
class o {
|
|
15784
15784
|
static fromConfig(c, p, d) {
|
|
15785
15785
|
return new n(c, p, d);
|
|
15786
15786
|
}
|
|
15787
15787
|
}
|
|
15788
|
-
|
|
15788
|
+
ft.ConversationRecognizerFactory = o;
|
|
15789
15789
|
let n = class extends i.Recognizer {
|
|
15790
15790
|
constructor(c, p, d) {
|
|
15791
15791
|
const l = p;
|
|
@@ -15934,14 +15934,14 @@ function Qm() {
|
|
|
15934
15934
|
}, 6e4);
|
|
15935
15935
|
}
|
|
15936
15936
|
};
|
|
15937
|
-
return
|
|
15937
|
+
return ft.ConversationTranslatorRecognizer = n, ft;
|
|
15938
15938
|
}
|
|
15939
15939
|
var In = {}, Vu;
|
|
15940
15940
|
function Xm() {
|
|
15941
15941
|
if (Vu)
|
|
15942
15942
|
return In;
|
|
15943
15943
|
Vu = 1, Object.defineProperty(In, "__esModule", { value: !0 }), In.TranscriberRecognizer = void 0;
|
|
15944
|
-
const t = M(), e = z, r = R(), i = _(), u =
|
|
15944
|
+
const t = M(), e = z, r = R(), i = _(), u = ye;
|
|
15945
15945
|
let a = class extends r.Recognizer {
|
|
15946
15946
|
/**
|
|
15947
15947
|
* TranscriberRecognizer constructor.
|
|
@@ -17230,29 +17230,29 @@ function gS(t, e, r) {
|
|
|
17230
17230
|
}
|
|
17231
17231
|
return lS(i);
|
|
17232
17232
|
}
|
|
17233
|
-
const Ft =
|
|
17234
|
-
let
|
|
17233
|
+
const Ft = Et(), sp = Oe(), bt = T(""), mt = T(""), Ve = T(""), ve = T(""), Xd = T("");
|
|
17234
|
+
let We = null;
|
|
17235
17235
|
const op = 4e3;
|
|
17236
17236
|
Ht(async () => {
|
|
17237
17237
|
await Ia();
|
|
17238
17238
|
});
|
|
17239
|
-
let Q = null,
|
|
17239
|
+
let Q = null, Ot = null, Te = null, Ki = null;
|
|
17240
17240
|
const Ut = () => {
|
|
17241
|
-
Te && Te.mute(), Q && (Q.close(), Q = null),
|
|
17241
|
+
Te && Te.mute(), Q && (Q.close(), Q = null), Ot = null;
|
|
17242
17242
|
}, Ia = async () => {
|
|
17243
17243
|
var t, e, r;
|
|
17244
17244
|
try {
|
|
17245
17245
|
const i = await Bn("/voiceTesting");
|
|
17246
17246
|
return (t = i == null ? void 0 : i.data) == null ? void 0 : t.token;
|
|
17247
17247
|
} catch (i) {
|
|
17248
|
-
if (console.error(i),
|
|
17248
|
+
if (console.error(i), Re.isAxiosError(i)) {
|
|
17249
17249
|
const u = i;
|
|
17250
17250
|
console.log("Error found!"), console.error((e = u.response) == null ? void 0 : e.data), Ft.setError(((r = u.response) == null ? void 0 : r.data).message);
|
|
17251
17251
|
} else
|
|
17252
17252
|
Ft.setError("There was an issue while fetching data");
|
|
17253
17253
|
}
|
|
17254
17254
|
}, Bo = () => {
|
|
17255
|
-
|
|
17255
|
+
We && clearTimeout(We), Ki && (Ki.stopContinuousRecognitionAsync(
|
|
17256
17256
|
() => {
|
|
17257
17257
|
},
|
|
17258
17258
|
(t) => {
|
|
@@ -17267,17 +17267,17 @@ const Ut = () => {
|
|
|
17267
17267
|
const a = be.AudioConfig.fromSpeakerOutput(Te);
|
|
17268
17268
|
Q = new be.SpeechSynthesizer(u, a);
|
|
17269
17269
|
const o = gS();
|
|
17270
|
-
|
|
17270
|
+
Ot = o, Q.speakTextAsync(
|
|
17271
17271
|
t,
|
|
17272
17272
|
(n) => {
|
|
17273
|
-
if (o !==
|
|
17273
|
+
if (o !== Ot)
|
|
17274
17274
|
return Te == null || Te.mute(), Q == null || Q.close(), Q = null, r();
|
|
17275
|
-
n.reason === be.ResultReason.SynthesizingAudioCompleted ? r() : i(new Error("Speech synthesis failed")), Q == null || Q.close(), Q = null,
|
|
17275
|
+
n.reason === be.ResultReason.SynthesizingAudioCompleted ? r() : i(new Error("Speech synthesis failed")), Q == null || Q.close(), Q = null, Ot = null;
|
|
17276
17276
|
},
|
|
17277
17277
|
(n) => {
|
|
17278
|
-
if (o !==
|
|
17278
|
+
if (o !== Ot)
|
|
17279
17279
|
return Te == null || Te.mute(), Q == null || Q.close(), Q = null, r();
|
|
17280
|
-
Q == null || Q.close(), Q = null,
|
|
17280
|
+
Q == null || Q.close(), Q = null, Ot = null, i(n);
|
|
17281
17281
|
}
|
|
17282
17282
|
);
|
|
17283
17283
|
});
|
|
@@ -17285,26 +17285,26 @@ const Ut = () => {
|
|
|
17285
17285
|
const e = await Ia(), r = be.SpeechConfig.fromAuthorizationToken(e, "eastus2");
|
|
17286
17286
|
r.speechRecognitionLanguage = "en-US";
|
|
17287
17287
|
const i = be.AudioConfig.fromDefaultMicrophoneInput(), u = new be.SpeechRecognizer(r, i);
|
|
17288
|
-
return Ki = u,
|
|
17289
|
-
o.result.text && o.result.text.trim() && (
|
|
17290
|
-
|
|
17288
|
+
return Ki = u, bt.value = "", mt.value = "", u.recognizing = (a, o) => {
|
|
17289
|
+
o.result.text && o.result.text.trim() && (bt.value = o.result.text.trim(), Ut(), sp.userspeaking = !0, sp.userSpeechText = (mt.value ? mt.value + " " : "") + bt.value, ve.value = "user", We && clearTimeout(We), We = setTimeout(() => {
|
|
17290
|
+
bt.value && (t(bt.value.trim()), bt.value = "");
|
|
17291
17291
|
}, op));
|
|
17292
17292
|
}, u.recognized = (a, o) => {
|
|
17293
|
-
o.result.reason === be.ResultReason.RecognizedSpeech && o.result.text && o.result.text.trim() && (
|
|
17294
|
-
|
|
17293
|
+
o.result.reason === be.ResultReason.RecognizedSpeech && o.result.text && o.result.text.trim() && (mt.value += " " + o.result.text.trim(), We && clearTimeout(We), We = setTimeout(() => {
|
|
17294
|
+
mt.value && (t(mt.value), mt.value = "");
|
|
17295
17295
|
}, op));
|
|
17296
17296
|
}, u.canceled = (a, o) => {
|
|
17297
17297
|
console.error("Recognition canceled:", o);
|
|
17298
17298
|
}, u.startContinuousRecognitionAsync(), u;
|
|
17299
17299
|
}, CS = async () => {
|
|
17300
17300
|
var t, e;
|
|
17301
|
-
|
|
17301
|
+
Ve.value = "", Xd.value = "";
|
|
17302
17302
|
try {
|
|
17303
17303
|
Ut(), await SS((r) => {
|
|
17304
|
-
r && (
|
|
17304
|
+
r && (Ve.value = r);
|
|
17305
17305
|
});
|
|
17306
17306
|
} catch (r) {
|
|
17307
|
-
if (console.error(r),
|
|
17307
|
+
if (console.error(r), Re.isAxiosError(r)) {
|
|
17308
17308
|
const i = r;
|
|
17309
17309
|
console.log("Error found!"), console.error((t = i.response) == null ? void 0 : t.data), Ft.setError(((e = i.response) == null ? void 0 : e.data).message);
|
|
17310
17310
|
} else
|
|
@@ -17316,7 +17316,7 @@ const Ut = () => {
|
|
|
17316
17316
|
try {
|
|
17317
17317
|
(ve.value == "user" || ve.value == "listening") && Ut(), await mS(t);
|
|
17318
17318
|
} catch (i) {
|
|
17319
|
-
if (console.error(i),
|
|
17319
|
+
if (console.error(i), Re.isAxiosError(i)) {
|
|
17320
17320
|
const u = i;
|
|
17321
17321
|
console.log("Error found!"), console.error((e = u.response) == null ? void 0 : e.data), Ft.setError(((r = u.response) == null ? void 0 : r.data).message);
|
|
17322
17322
|
} else
|
|
@@ -17324,7 +17324,7 @@ const Ut = () => {
|
|
|
17324
17324
|
}
|
|
17325
17325
|
};
|
|
17326
17326
|
Oe();
|
|
17327
|
-
const ap =
|
|
17327
|
+
const ap = Et(), Dt = T(!1), xn = T("");
|
|
17328
17328
|
T("");
|
|
17329
17329
|
T("");
|
|
17330
17330
|
Ht(() => {
|
|
@@ -17336,7 +17336,7 @@ const yS = async () => {
|
|
|
17336
17336
|
const u = await Bn("/voiceTesting?ellevenLabs=true");
|
|
17337
17337
|
return console.log("response", (t = u == null ? void 0 : u.data) == null ? void 0 : t.token), (e = u == null ? void 0 : u.data) == null ? void 0 : e.token;
|
|
17338
17338
|
} catch (u) {
|
|
17339
|
-
if (console.error(u),
|
|
17339
|
+
if (console.error(u), Re.isAxiosError(u)) {
|
|
17340
17340
|
const a = u;
|
|
17341
17341
|
console.log("Error found!"), console.error((r = a.response) == null ? void 0 : r.data), ap.setError(((i = a.response) == null ? void 0 : i.data).message);
|
|
17342
17342
|
} else
|
|
@@ -17349,22 +17349,22 @@ T(!1);
|
|
|
17349
17349
|
const RS = T("");
|
|
17350
17350
|
let Vi = [];
|
|
17351
17351
|
const Zd = T(!1);
|
|
17352
|
-
let
|
|
17352
|
+
let Mt = !1, fe = null, Fn = null, Fe = null;
|
|
17353
17353
|
T("");
|
|
17354
|
-
let
|
|
17354
|
+
let St, Ct, kt, Nt;
|
|
17355
17355
|
function Qo() {
|
|
17356
|
-
Nt && Nt.disconnect(),
|
|
17356
|
+
Nt && Nt.disconnect(), Ct && Ct.disconnect(), console.log("microphone outside->", kt), kt && (console.log("microphone->", kt), console.log("Stopping microphone connection"), kt.disconnect()), St && St.close();
|
|
17357
17357
|
}
|
|
17358
17358
|
function ES(t, e = 0.09) {
|
|
17359
|
-
console.log("monitorMicDuringTTS called with threshold:", e),
|
|
17360
|
-
const r = new Uint8Array(
|
|
17361
|
-
|
|
17359
|
+
console.log("monitorMicDuringTTS called with threshold:", e), St = new (window.AudioContext || window.webkitAudioContext)(), Ct = St.createAnalyser(), kt = St.createMediaStreamSource(Fn), Nt = St.createScriptProcessor(2048, 1, 1), kt.connect(Ct), Ct.connect(Nt), Nt.connect(St.destination), Nt.onaudioprocess = () => {
|
|
17360
|
+
const r = new Uint8Array(Ct.frequencyBinCount);
|
|
17361
|
+
Ct.getByteFrequencyData(r);
|
|
17362
17362
|
const i = r.reduce((u, a) => u + a) / r.length;
|
|
17363
17363
|
console.log("Average volume:", i), console.log("threshold * 100:", e * 100), i > e * 100 && (t(), console.log("onInterrupt", t), Qo());
|
|
17364
17364
|
};
|
|
17365
17365
|
}
|
|
17366
17366
|
async function Xo() {
|
|
17367
|
-
Zd.value = !0,
|
|
17367
|
+
Zd.value = !0, Dt.value = !0, console.log("Starting voice recognition with ElevenLabs"), fe && fe.pause(), Fn = await navigator.mediaDevices.getUserMedia({ audio: !0 }), await wS();
|
|
17368
17368
|
}
|
|
17369
17369
|
async function wS() {
|
|
17370
17370
|
if (!Fn)
|
|
@@ -17401,7 +17401,7 @@ async function IS(t) {
|
|
|
17401
17401
|
return r.ok ? (i == null ? void 0 : i.text) || "[No transcription result]" : (console.error("Transcription error:", i), "[Error in transcription]");
|
|
17402
17402
|
}
|
|
17403
17403
|
async function Yd(t) {
|
|
17404
|
-
console.log("speakText11Labs called with text:", t), Xi = await yS(), fe && fe.pause(),
|
|
17404
|
+
console.log("speakText11Labs called with text:", t), Xi = await yS(), fe && fe.pause(), Mt = !0;
|
|
17405
17405
|
const r = await (await fetch(`https://api.elevenlabs.io/v1/text-to-speech/${PS}`, {
|
|
17406
17406
|
method: "POST",
|
|
17407
17407
|
headers: {
|
|
@@ -17420,9 +17420,9 @@ async function Yd(t) {
|
|
|
17420
17420
|
})).blob(), i = URL.createObjectURL(r);
|
|
17421
17421
|
return fe = new Audio(i), new Promise((u) => {
|
|
17422
17422
|
fe && (fe.onended = async () => {
|
|
17423
|
-
|
|
17424
|
-
}, fe.play(), console.log("Audio is playing:",
|
|
17425
|
-
fe && (fe.pause(), fe.currentTime = 0),
|
|
17423
|
+
Mt = !1, console.log("Audio is playing:", Mt), Dt.value ? (console.log("micEnabled before calling startrecognition:", Dt.value), u(await Xo())) : u();
|
|
17424
|
+
}, fe.play(), console.log("Audio is playing:", Mt), console.log("micEnabled:", Dt.value), Dt.value && ES(() => {
|
|
17425
|
+
fe && (fe.pause(), fe.currentTime = 0), Mt = !1, console.log("monitorMicDuringTTS:", Mt), console.log("micEnabled before calling startrecognition:", Dt.value), Xo();
|
|
17426
17426
|
}));
|
|
17427
17427
|
});
|
|
17428
17428
|
}
|
|
@@ -17447,7 +17447,7 @@ const _S = { class: "max-h-screen overflow-auto md:p-6 p-3 flex flex-col items-c
|
|
|
17447
17447
|
This is a role-play simulation designed to mirror real-world scenarios. Just follow the prompts, speak naturally, and let the AI guide you. Based on your responses, you’ll receive personalized feedback to help sharpen your client engagement skills.`, LS = /* @__PURE__ */ we({
|
|
17448
17448
|
__name: "greetingsSimulation",
|
|
17449
17449
|
setup(t) {
|
|
17450
|
-
const e = Oe(), r =
|
|
17450
|
+
const e = Oe(), r = Et(), i = T(!1), u = T(!1), a = T(!0), o = () => {
|
|
17451
17451
|
e.simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" ? (e.beginFlag = !1, i.value = !0) : (e.beginFlag = !1, v());
|
|
17452
17452
|
}, n = `Hi, get ready to test your ${e.skillNameForSimulation || "Unknown"} knowledge! Here’s what to expect:`, s = ` Hi ${e.learnerName || "Expert"}, and welcome!`, c = ` Hi, ${e.learnerName || "Expert"}. Get ready to test your ${e.skillNameForSimulation || "Unknown"} knowledge! Here’s what to expect:`, p = qo(() => ["32b7d6d8-f7a8-40a0-ab84-8784f25897ef"].includes(e.simulationSkill) ? n : ["7bd03cb6-79ad-4e2a-8a8b-88dc16147369"].includes(e.simulationSkill) ? s : c), d = `Hi, get ready to test your ${e.skillNameForSimulation || "Unknown"} knowledge! Here’s what to expect:`, l = ` AI Guru will ask you a few questions on '${e.skillNameForSimulation.trim() || "Unknown"} '. Please provide detailed and thoughtful answers to the best of your knowledge. At the end, you'll receive a feedback, outlining your strengths and improvement areas.`, h = qo(() => ["32b7d6d8-f7a8-40a0-ab84-8784f25897ef"].includes(e.simulationSkill) ? d : ["7bd03cb6-79ad-4e2a-8a8b-88dc16147369"].includes(e.simulationSkill) ? jS : l), v = async () => {
|
|
17453
17453
|
var g, m, f, S, C, y;
|
|
@@ -17466,7 +17466,7 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17466
17466
|
skillId: e.simulationSkill || "937e84ef-e95d-4327-9afe-e7be2bf420eb"
|
|
17467
17467
|
}, I = T("");
|
|
17468
17468
|
e.dotLoader = !0, ve.value = "listening";
|
|
17469
|
-
const E = await
|
|
17469
|
+
const E = await Pt("/sqlTest", A);
|
|
17470
17470
|
e.addConversation((g = E.data) == null ? void 0 : g.body, "new"), I.value = (f = (m = E == null ? void 0 : E.data) == null ? void 0 : m.body) == null ? void 0 : f.answer, e.dotLoader = !1, ve.value = "bot", e.showVoiceComponent && (e.elevenLabs ? await Yd(I.value.replace(/[*#]/g, "")) : await Go(I.value.replace(/[*#]/g, "")));
|
|
17471
17471
|
} else {
|
|
17472
17472
|
u.value = !0, e.dotLoader = !0, console.log("firstQuestion fired==> else part");
|
|
@@ -17484,19 +17484,19 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17484
17484
|
storeFinalScore: e.storeFinalScore
|
|
17485
17485
|
};
|
|
17486
17486
|
let I;
|
|
17487
|
-
|
|
17488
|
-
const E = await
|
|
17487
|
+
yt === "prod" ? I = "/ManualExportActivity" : I = "/fetchCompletionDemo";
|
|
17488
|
+
const E = await Pt(I, A);
|
|
17489
17489
|
e.addConversation((S = E.data) == null ? void 0 : S.body, "assessment"), e.dotLoader = !1;
|
|
17490
17490
|
}
|
|
17491
17491
|
} catch (w) {
|
|
17492
|
-
if (console.error(w),
|
|
17492
|
+
if (console.error(w), Re.isAxiosError(w)) {
|
|
17493
17493
|
const A = w;
|
|
17494
17494
|
console.log("Error found!"), console.error((C = A.response) == null ? void 0 : C.data), r.setError(((y = A.response) == null ? void 0 : y.data).message);
|
|
17495
17495
|
} else
|
|
17496
17496
|
r.setError("There was an issue, please contact helpmate");
|
|
17497
17497
|
}
|
|
17498
17498
|
};
|
|
17499
|
-
return (g, m) => (
|
|
17499
|
+
return (g, m) => (D(), k("section", null, [
|
|
17500
17500
|
Se(up, { "is-visible": i.value }, {
|
|
17501
17501
|
default: pp(() => [
|
|
17502
17502
|
P("div", _S, [
|
|
@@ -17514,18 +17514,18 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17514
17514
|
}, 8, ["is-visible"]),
|
|
17515
17515
|
P("div", MS, [
|
|
17516
17516
|
P("div", OS, [
|
|
17517
|
-
P("div", DS,
|
|
17518
|
-
P("div", kS,
|
|
17517
|
+
P("div", DS, Je(p.value), 1),
|
|
17518
|
+
P("div", kS, Je(h.value), 1)
|
|
17519
17519
|
])
|
|
17520
17520
|
]),
|
|
17521
|
-
|
|
17521
|
+
O(e).beginFlag ? (D(), k("span", NS, [
|
|
17522
17522
|
P("button", {
|
|
17523
17523
|
type: "button",
|
|
17524
17524
|
id: "button",
|
|
17525
17525
|
onClick: o,
|
|
17526
17526
|
class: "mt-4 bg-[#021c42] rounded-md px-4 py-1.5 text-center text-[16px] text-white cursor-pointer italic hover:bg-blue-700"
|
|
17527
17527
|
}, " Let's begin ")
|
|
17528
|
-
])) :
|
|
17528
|
+
])) : Ce("", !0),
|
|
17529
17529
|
zS
|
|
17530
17530
|
]));
|
|
17531
17531
|
}
|
|
@@ -17677,20 +17677,20 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17677
17677
|
{ immediate: !0 }
|
|
17678
17678
|
), Ji(i, (h) => {
|
|
17679
17679
|
a.question = (h ?? []).join(", ");
|
|
17680
|
-
}), (h, v) => (
|
|
17680
|
+
}), (h, v) => (D(), k("div", BS, [
|
|
17681
17681
|
P("div", qS, [
|
|
17682
|
-
|
|
17682
|
+
O(a).simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" && O(a).showVoiceComponent === !0 && O(a).conversations.length - 1 === r.ansId && !r.answer.includes("BEGIN ROLE PLAY") ? (D(), k("span", {
|
|
17683
17683
|
key: 0,
|
|
17684
17684
|
class: "fontFormattingAMA",
|
|
17685
17685
|
innerHTML: o.value
|
|
17686
|
-
}, null, 8, FS)) : (
|
|
17686
|
+
}, null, 8, FS)) : (D(), k("span", {
|
|
17687
17687
|
key: 1,
|
|
17688
17688
|
class: "fontFormattingAMA",
|
|
17689
17689
|
innerHTML: l.value
|
|
17690
17690
|
}, null, 8, US)),
|
|
17691
17691
|
P("div", null, [
|
|
17692
|
-
t.optionsArray.length &&
|
|
17693
|
-
(
|
|
17692
|
+
t.optionsArray.length && O(a).mcqType === "multi_choice" ? (D(), k("div", HS, [
|
|
17693
|
+
(D(!0), k(Fo, null, Uo(t.optionsArray, (g, m) => (D(), k("div", {
|
|
17694
17694
|
key: m,
|
|
17695
17695
|
class: "italic"
|
|
17696
17696
|
}, [
|
|
@@ -17700,16 +17700,16 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17700
17700
|
type: "checkbox",
|
|
17701
17701
|
value: g,
|
|
17702
17702
|
"onUpdate:modelValue": v[0] || (v[0] = (f) => i.value = f),
|
|
17703
|
-
disabled:
|
|
17703
|
+
disabled: O(a).conversations.length - 1 !== r.ansId,
|
|
17704
17704
|
name: l.value
|
|
17705
17705
|
}, null, 8, WS), [
|
|
17706
17706
|
[Rl, i.value]
|
|
17707
17707
|
]),
|
|
17708
|
-
P("label", $S,
|
|
17708
|
+
P("label", $S, Je(g), 1)
|
|
17709
17709
|
])
|
|
17710
17710
|
]))), 128))
|
|
17711
|
-
])) : t.optionsArray.length ? (
|
|
17712
|
-
(
|
|
17711
|
+
])) : t.optionsArray.length ? (D(), k("div", KS, [
|
|
17712
|
+
(D(!0), k(Fo, null, Uo(t.optionsArray, (g, m) => (D(), k("div", {
|
|
17713
17713
|
key: m,
|
|
17714
17714
|
class: "italic"
|
|
17715
17715
|
}, [
|
|
@@ -17717,17 +17717,17 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17717
17717
|
P("input", {
|
|
17718
17718
|
class: "mr-4 cursor-pointer",
|
|
17719
17719
|
type: "radio",
|
|
17720
|
-
onClick: (f) =>
|
|
17721
|
-
disabled:
|
|
17720
|
+
onClick: (f) => O(a).question = String(t.optionsArray[m]),
|
|
17721
|
+
disabled: O(a).conversations.length - 1 !== r.ansId,
|
|
17722
17722
|
name: l.value
|
|
17723
17723
|
}, null, 8, GS),
|
|
17724
|
-
P("label", QS,
|
|
17724
|
+
P("label", QS, Je(g), 1)
|
|
17725
17725
|
])
|
|
17726
17726
|
]))), 128))
|
|
17727
|
-
])) :
|
|
17727
|
+
])) : Ce("", !0)
|
|
17728
17728
|
]),
|
|
17729
|
-
u.value &&
|
|
17730
|
-
|
|
17729
|
+
u.value && O(a).simulationSkill === "32b7d6d8-f7a8-40a0-ab84-8784f25897ef" ? (D(), k("div", XS, eC)) : Ce("", !0),
|
|
17730
|
+
O(a).isSpeechToggle && O(a).showSelectTypeConvesation && O(a).simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" && O(a).conversations.length - 1 === r.ansId ? (D(), k("div", tC, [
|
|
17731
17731
|
rC,
|
|
17732
17732
|
P("div", nC, [
|
|
17733
17733
|
P("button", {
|
|
@@ -17743,7 +17743,7 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
|
|
|
17743
17743
|
class: "mt-4 w-32 cursor-pointer bg-[#021c42] hover:bg-blue-700 italic rounded-md px-4 py-1.5 text-center text-white text-[16px]"
|
|
17744
17744
|
}, " Voice Mode ")
|
|
17745
17745
|
])
|
|
17746
|
-
])) :
|
|
17746
|
+
])) : Ce("", !0)
|
|
17747
17747
|
])
|
|
17748
17748
|
]));
|
|
17749
17749
|
}
|
|
@@ -17761,9 +17761,9 @@ const sC = /* @__PURE__ */ Vt(iC, [["__scopeId", "data-v-e7974fa1"]]), oC = { cl
|
|
|
17761
17761
|
}
|
|
17762
17762
|
},
|
|
17763
17763
|
setup(t) {
|
|
17764
|
-
return (e, r) => (
|
|
17764
|
+
return (e, r) => (D(), k("div", oC, [
|
|
17765
17765
|
P("div", aC, [
|
|
17766
|
-
P("div", cC,
|
|
17766
|
+
P("div", cC, Je(t.question), 1)
|
|
17767
17767
|
])
|
|
17768
17768
|
]));
|
|
17769
17769
|
}
|
|
@@ -17786,22 +17786,25 @@ const sC = /* @__PURE__ */ Vt(iC, [["__scopeId", "data-v-e7974fa1"]]), oC = { cl
|
|
|
17786
17786
|
return setTimeout(() => {
|
|
17787
17787
|
const i = Math.floor(Math.random() * r.value.length);
|
|
17788
17788
|
e.value = r.value[i];
|
|
17789
|
-
}, 15e3), (i, u) => (
|
|
17789
|
+
}, 15e3), (i, u) => (D(), k("div", dC, [
|
|
17790
17790
|
P("div", lC, [
|
|
17791
|
-
P("div", hC,
|
|
17791
|
+
P("div", hC, Je(e.value), 1),
|
|
17792
17792
|
vC
|
|
17793
17793
|
]),
|
|
17794
17794
|
fC
|
|
17795
17795
|
]));
|
|
17796
17796
|
}
|
|
17797
17797
|
});
|
|
17798
|
-
const mC = /* @__PURE__ */ Vt(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { class: "flex mx-auto justify-end items-end py-3" }, CC = { class: "max-w-[80%]" }, yC = {
|
|
17798
|
+
const mC = /* @__PURE__ */ Vt(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { class: "flex mx-auto justify-end items-end py-3" }, CC = { class: "max-w-[80%]" }, yC = {
|
|
17799
|
+
key: 0,
|
|
17800
|
+
class: "w-fit md:text-[15px] fontFormattingAMA text-sm text-white bg-genpact-blue-box leading-6 p-3 rounded-lg"
|
|
17801
|
+
}, PC = /* @__PURE__ */ we({
|
|
17799
17802
|
__name: "QuestionStreaming",
|
|
17800
17803
|
setup(t) {
|
|
17801
17804
|
const e = Oe();
|
|
17802
|
-
return (r, i) => (
|
|
17805
|
+
return (r, i) => (D(), k("div", SC, [
|
|
17803
17806
|
P("div", CC, [
|
|
17804
|
-
|
|
17807
|
+
O(e).userSpeechText.toLowerCase().includes("play") && O(e).userSpeechText.length < 10 ? Ce("", !0) : (D(), k("div", yC, Je(O(e).userSpeechText), 1))
|
|
17805
17808
|
])
|
|
17806
17809
|
]));
|
|
17807
17810
|
}
|
|
@@ -17829,14 +17832,14 @@ const mC = /* @__PURE__ */ Vt(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { cl
|
|
|
17829
17832
|
o.length && ((s = n == null ? void 0 : n.pop()) == null || s.scrollIntoView({ behavior: "smooth", block: "center" }));
|
|
17830
17833
|
}), wl(() => {
|
|
17831
17834
|
window.removeEventListener("custom-event", a);
|
|
17832
|
-
}), (o, n) => (
|
|
17835
|
+
}), (o, n) => (D(), k("div", {
|
|
17833
17836
|
ref_key: "nestedElement",
|
|
17834
17837
|
ref: u,
|
|
17835
17838
|
class: "smooth-scroll overflow-auto md:px-6 px-2"
|
|
17836
17839
|
}, [
|
|
17837
17840
|
Se(LS),
|
|
17838
|
-
(
|
|
17839
|
-
s.question ? (
|
|
17841
|
+
(D(!0), k(Fo, null, Uo(O(i).conversations, (s, c) => (D(), k("div", { key: c }, [
|
|
17842
|
+
s.question ? (D(), k("div", RC, [
|
|
17840
17843
|
Se(uC, {
|
|
17841
17844
|
question: s.question,
|
|
17842
17845
|
c2question: s.c2question,
|
|
@@ -17845,28 +17848,28 @@ const mC = /* @__PURE__ */ Vt(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { cl
|
|
|
17845
17848
|
name: "askQuestion",
|
|
17846
17849
|
"que-id": c
|
|
17847
17850
|
}, null, 8, ["question", "c2question", "show-feedback", "id", "que-id"])
|
|
17848
|
-
])) :
|
|
17849
|
-
s.answer ? (
|
|
17851
|
+
])) : Ce("", !0),
|
|
17852
|
+
s.answer ? (D(), k("div", EC, [
|
|
17850
17853
|
Se(sC, {
|
|
17851
17854
|
question: s == null ? void 0 : s.question,
|
|
17852
17855
|
c2question: s == null ? void 0 : s.c2question,
|
|
17853
17856
|
"show-sources": s == null ? void 0 : s.showSources,
|
|
17854
17857
|
questionId: s.id,
|
|
17855
|
-
answer: s.answer.replaceAll("<a",
|
|
17858
|
+
answer: s.answer.replaceAll("<a", O(e)).replaceAll("<ul", O(r)),
|
|
17856
17859
|
"show-feedback": s.showFeedback,
|
|
17857
17860
|
"related-ques": s.relatedQues,
|
|
17858
17861
|
"options-array": s.optionsArray,
|
|
17859
17862
|
"ans-id": c,
|
|
17860
17863
|
onCharTyped: a
|
|
17861
17864
|
}, null, 8, ["question", "c2question", "show-sources", "questionId", "answer", "show-feedback", "related-ques", "options-array", "ans-id"])
|
|
17862
|
-
])) :
|
|
17865
|
+
])) : Ce("", !0)
|
|
17863
17866
|
]))), 128)),
|
|
17864
|
-
|
|
17867
|
+
O(i).dotLoader ? (D(), k("div", wC, [
|
|
17865
17868
|
Se(mC)
|
|
17866
|
-
])) :
|
|
17867
|
-
|
|
17869
|
+
])) : Ce("", !0),
|
|
17870
|
+
O(i).userspeaking ? (D(), k("div", AC, [
|
|
17868
17871
|
Se(PC)
|
|
17869
|
-
])) :
|
|
17872
|
+
])) : Ce("", !0)
|
|
17870
17873
|
], 512));
|
|
17871
17874
|
}
|
|
17872
17875
|
});
|
|
@@ -17926,7 +17929,7 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
|
|
|
17926
17929
|
], WC = /* @__PURE__ */ we({
|
|
17927
17930
|
__name: "NewVoiceConversation",
|
|
17928
17931
|
setup(t) {
|
|
17929
|
-
const e = Oe(), r =
|
|
17932
|
+
const e = Oe(), r = Et(), i = T(""), u = T(!1), a = T("Turn on mic"), o = () => {
|
|
17930
17933
|
u.value === !1 ? e.elevenLabs ? (console.log("mic enabled true 11labs"), u.value = !0, Xo(), ve.value = "user", a.value = "Turn off mic") : (console.log("mic enabled true"), u.value = !0, Ut(), CS(), ve.value = "user", a.value = "Turn off mic") : e.elevenLabs ? (console.log("mic enabled false"), Qo(), u.value = !1, a.value = "Turn on mic") : (console.log("mic enabled false"), Bo(), u.value = !1, a.value = "Turn on mic");
|
|
17931
17934
|
}, n = () => {
|
|
17932
17935
|
e.elevenLabs ? (console.log("closeConversation 11 labs"), Qo(), ve.value = "none", u.value = !1) : (console.log("closeConversation"), Bo(), Ut(), ve.value = "none", u.value = !1);
|
|
@@ -17934,20 +17937,20 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
|
|
|
17934
17937
|
var c, p, d, l, h, v;
|
|
17935
17938
|
try {
|
|
17936
17939
|
const g = {
|
|
17937
|
-
question:
|
|
17940
|
+
question: Ve.value || xn.value
|
|
17938
17941
|
};
|
|
17939
17942
|
e.conversations.push(g), e.dotLoader = !0, e.question = "";
|
|
17940
17943
|
const m = {
|
|
17941
17944
|
past_messages: e.past_messages,
|
|
17942
|
-
question:
|
|
17945
|
+
question: Ve.value || xn.value,
|
|
17943
17946
|
bandCode: "4A",
|
|
17944
17947
|
userGenomeFunction: "HR",
|
|
17945
17948
|
userGenomeRole: "Consultant",
|
|
17946
17949
|
skillId: "7bd03cb6-79ad-4e2a-8a8b-88dc16147369"
|
|
17947
|
-
}, f = await
|
|
17950
|
+
}, f = await Pt("/sqlTest", m);
|
|
17948
17951
|
if (ve.value = "bot", i.value = (p = (c = f == null ? void 0 : f.data) == null ? void 0 : c.body) == null ? void 0 : p.answer, i.value === "") {
|
|
17949
17952
|
console.log("in the if case calling api again");
|
|
17950
|
-
const S = await
|
|
17953
|
+
const S = await Pt("/sqlTest", m);
|
|
17951
17954
|
e.addConversation((d = S.data) == null ? void 0 : d.body, "new");
|
|
17952
17955
|
} else
|
|
17953
17956
|
console.log("in the else case"), e.addConversation((l = f.data) == null ? void 0 : l.body, "new");
|
|
@@ -17966,36 +17969,42 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
|
|
|
17966
17969
|
else
|
|
17967
17970
|
await Go(i.value.replace(/[*#]/g, ""));
|
|
17968
17971
|
} catch (g) {
|
|
17969
|
-
if (console.error(g),
|
|
17972
|
+
if (console.error(g), Re.isAxiosError(g)) {
|
|
17970
17973
|
const m = g;
|
|
17971
17974
|
console.log("Error found!"), console.error((h = m.response) == null ? void 0 : h.data), r.setError(((v = m.response) == null ? void 0 : v.data).message);
|
|
17972
17975
|
} else
|
|
17973
17976
|
r.setError("There was an issue, please contact helpmate");
|
|
17974
17977
|
}
|
|
17975
17978
|
};
|
|
17976
|
-
return Ji(
|
|
17977
|
-
|
|
17979
|
+
return Ji(Ve, async (c) => {
|
|
17980
|
+
if (c) {
|
|
17981
|
+
if (console.log("Recognized text:", c), Ve.value.toLowerCase().includes("play") && Ve.value.length < 10) {
|
|
17982
|
+
console.log("play command detected, ignoring input");
|
|
17983
|
+
return;
|
|
17984
|
+
}
|
|
17985
|
+
ve.value = "listening", e.userspeaking = !1, await s(), Ve.value = "";
|
|
17986
|
+
}
|
|
17978
17987
|
}), Ji(xn, async (c) => {
|
|
17979
17988
|
c && (console.log("Recognized text:", c), ve.value = "listening", await s(), xn.value = "");
|
|
17980
|
-
}), (c, p) => (
|
|
17981
|
-
|
|
17989
|
+
}), (c, p) => (D(), k("div", TC, [
|
|
17990
|
+
O(ve) === "bot" ? (D(), k("div", bC, [
|
|
17982
17991
|
P("span", null, [
|
|
17983
|
-
(
|
|
17992
|
+
(D(), k("svg", MC, DC))
|
|
17984
17993
|
])
|
|
17985
|
-
])) :
|
|
17994
|
+
])) : O(ve) === "user" ? (D(), k("div", kC, [
|
|
17986
17995
|
P("span", null, [
|
|
17987
|
-
(
|
|
17996
|
+
(D(), k("svg", NC, jC))
|
|
17988
17997
|
])
|
|
17989
|
-
])) :
|
|
17998
|
+
])) : O(ve) === "listening" ? (D(), k("div", LC, BC)) : Ce("", !0),
|
|
17990
17999
|
P("div", {
|
|
17991
18000
|
class: jt([u.value ? "" : "animate-pulse", "tooltip"])
|
|
17992
18001
|
}, [
|
|
17993
|
-
P("span", qC,
|
|
17994
|
-
u.value ? (
|
|
18002
|
+
P("span", qC, Je(a.value), 1),
|
|
18003
|
+
u.value ? (D(), k("button", {
|
|
17995
18004
|
key: 0,
|
|
17996
18005
|
class: "rounded-full bg-white p-2 transition hover:bg-gray-300",
|
|
17997
18006
|
onClick: p[0] || (p[0] = (d) => o())
|
|
17998
|
-
}, UC)) : (
|
|
18007
|
+
}, UC)) : (D(), k("button", {
|
|
17999
18008
|
key: 1,
|
|
18000
18009
|
class: "rounded-full bg-white p-2 transition hover:bg-gray-300",
|
|
18001
18010
|
onClick: p[1] || (p[1] = (d) => o())
|
|
@@ -18005,18 +18014,18 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
|
|
|
18005
18014
|
class: "rounded-full bg-white p-2 hover:bg-gray-300",
|
|
18006
18015
|
onClick: p[2] || (p[2] = (d) => n())
|
|
18007
18016
|
}, [
|
|
18008
|
-
Se(
|
|
18017
|
+
Se(O(_l), { class: "h-[10px] w-[10px] text-gray-700" })
|
|
18009
18018
|
])
|
|
18010
18019
|
]));
|
|
18011
18020
|
}
|
|
18012
|
-
}),
|
|
18021
|
+
}), et = (t) => (Wn("data-v-115227f8"), t = t(), $n(), t), $C = { class: "z-3 absolute bottom-0 mt-4 w-full bg-white px-2 md:px-6" }, KC = { class: "py-2" }, JC = { class: "flex" }, GC = { class: "z-1 relative w-full" }, QC = { class: "flex justify-between my-2 w-full" }, XC = /* @__PURE__ */ et(() => /* @__PURE__ */ P("div", null, null, -1)), ZC = {
|
|
18013
18022
|
key: 0,
|
|
18014
18023
|
class: "flex justify-center bg-genpact-darkest-blue px-4 rounded-full"
|
|
18015
|
-
}, YC = { class: "relative w-full" }, ey = { class: "absolute inset-y-0 left-0 flex items-center pl-3" }, ty = { class: "tooltip" }, ry = /* @__PURE__ */
|
|
18024
|
+
}, YC = { class: "relative w-full" }, ey = { class: "absolute inset-y-0 left-0 flex items-center pl-3" }, ty = { class: "tooltip" }, ry = /* @__PURE__ */ et(() => /* @__PURE__ */ P("span", { class: "tooltiptext text-genpact-darkest-teal" }, "Restart", -1)), ny = /* @__PURE__ */ et(() => /* @__PURE__ */ P("path", {
|
|
18016
18025
|
d: "M12 4C8.229 4 6.343 4 5.172 5.172C4.001 6.344 4 8.229 4 12V18C4 18.943 4 19.414 4.293 19.707C4.586 20 5.057 20 6 20H12C15.771 20 17.657 20 18.828 18.828C19.999 17.656 20 15.771 20 12",
|
|
18017
18026
|
stroke: "currentColor",
|
|
18018
18027
|
"stroke-width": "2"
|
|
18019
|
-
}, null, -1)), iy = /* @__PURE__ */
|
|
18028
|
+
}, null, -1)), iy = /* @__PURE__ */ et(() => /* @__PURE__ */ P("path", {
|
|
18020
18029
|
d: "M9 10H15M9 14H12M19 8V2M16 5H22",
|
|
18021
18030
|
stroke: "currentColor",
|
|
18022
18031
|
"stroke-width": "2",
|
|
@@ -18025,24 +18034,24 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
|
|
|
18025
18034
|
}, null, -1)), sy = [
|
|
18026
18035
|
ny,
|
|
18027
18036
|
iy
|
|
18028
|
-
], oy = { class: "absolute inset-y-0 right-0 flex items-center md:pr-8 pr-3" }, ay = { key: 0 }, cy = ["disabled"], uy = /* @__PURE__ */
|
|
18037
|
+
], oy = { class: "absolute inset-y-0 right-0 flex items-center md:pr-8 pr-3" }, ay = { key: 0 }, cy = ["disabled"], uy = /* @__PURE__ */ et(() => /* @__PURE__ */ P("path", { d: "M15.854.146a.5.5 0 0 1 .11.54l-5.819 14.547a.75.75 0 0 1-1.329.124l-3.178-4.995L.643 7.184a.75.75 0 0 1 .124-1.33L15.314.037a.5.5 0 0 1 .54.11ZM6.636 10.07l2.761 4.338L14.13 2.576 6.636 10.07Zm6.787-8.201L1.591 6.602l4.339 2.76 7.494-7.493Z" }, null, -1)), py = [
|
|
18029
18038
|
uy
|
|
18030
18039
|
], dy = {
|
|
18031
18040
|
key: 1,
|
|
18032
18041
|
class: "tooltip"
|
|
18033
|
-
}, ly = /* @__PURE__ */
|
|
18042
|
+
}, ly = /* @__PURE__ */ et(() => /* @__PURE__ */ P("span", { class: "tooltiptext fontFormattingAMA" }, "Waiting for response", -1)), hy = /* @__PURE__ */ et(() => /* @__PURE__ */ P("img", {
|
|
18034
18043
|
class: "w-7 h-7",
|
|
18035
18044
|
src: cp,
|
|
18036
18045
|
alt: "watingIcon"
|
|
18037
18046
|
}, null, -1)), vy = [
|
|
18038
18047
|
ly,
|
|
18039
18048
|
hy
|
|
18040
|
-
], fy = { class: "flex justify-start gap-2 md:gap-1" }, gy = ["disabled"], my = /* @__PURE__ */
|
|
18049
|
+
], fy = { class: "flex justify-start gap-2 md:gap-1" }, gy = ["disabled"], my = /* @__PURE__ */ et(() => /* @__PURE__ */ P("div", { class: "pt-1 text-start font-sans text-xs font-bold text-gray-500 md:text-sm" }, [
|
|
18041
18050
|
/* @__PURE__ */ P("div", null, " Note: AI-generated response may not be fully accurate; check with experts for critical information. ")
|
|
18042
18051
|
], -1)), Sy = /* @__PURE__ */ we({
|
|
18043
18052
|
__name: "NewQuestionSimulation",
|
|
18044
18053
|
setup(t) {
|
|
18045
|
-
const e = Oe(), r =
|
|
18054
|
+
const e = Oe(), r = Et(), i = T(""), u = T("");
|
|
18046
18055
|
e.question = "";
|
|
18047
18056
|
const a = T(null), o = T(!1), n = T(zt), s = T(!1);
|
|
18048
18057
|
u.value = e.recognizedText, Ht(() => {
|
|
@@ -18075,37 +18084,37 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
 userGenomeFunction: e.genomeFunctionValue,
 userGenomeRole: e.genomeRoleValue,
 skillId: e.simulationSkill || "937e84ef-e95d-4327-9afe-e7be2bf420eb"
-}, I = await
+}, I = await Pt("/sqlTest", A);
 if (i.value = (h = (l = I.data) == null ? void 0 : l.body) == null ? void 0 : h.ohr, ((g = (v = I == null ? void 0 : I.data) == null ? void 0 : v.body) == null ? void 0 : g.answer) === "") {
 console.log("in the if case calling api again");
-const x = await
+const x = await Pt("/sqlTest", A);
 e.addConversation((m = x.data) == null ? void 0 : m.body, "new");
 } else
 console.log("in the else case"), e.addConversation((f = I.data) == null ? void 0 : f.body, "new");
 s.value = !1, e.dotLoader = !1, e.recognizedText = "";
 } catch (y) {
-if (console.error(y), e.dotLoader = !1,
+if (console.error(y), e.dotLoader = !1, Re.isAxiosError(y)) {
 const w = y;
 console.log("Error found!"), console.error((S = w.response) == null ? void 0 : S.data), r.setError(((C = w.response) == null ? void 0 : C.data).message);
 } else
 r.setError("There was an issue, please contact helpmate");
 }
 };
-return (l, h) => (
+return (l, h) => (D(), k("div", $C, [
 P("div", KC, [
 P("div", JC, [
 P("div", GC, [
 P("div", QC, [
 XC,
-
+O(e).showVoiceComponent ? (D(), k("div", ZC, [
 Se(WC)
-])) :
+])) : Ce("", !0)
 ]),
 P("div", YC, [
 P("div", ey, [
 P("div", ty, [
 ry,
-(
+(D(), k("svg", {
 onClick: c,
 class: "ml-2 h-7 w-7 rounded",
 id: "newTopicDisable",
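De-minified, the submit flow changed in the hunk above amounts to the sketch below. The readable names are assumptions inferred from the minified output: postApi stands for the helper Pt, and the store/errorStore parameters stand for whatever Oe() and Et() return; only the "/sqlTest" endpoint, the retry-on-empty-answer branch, and the error messages are taken from the diff itself.

  import axios from "axios";

  // Hypothetical reconstruction of the NewQuestionSimulation submit handler shown above.
  async function submitQuestion(postApi, payload, store, errorStore) {
    try {
      const first = await postApi("/sqlTest", payload);
      if (first.data?.body?.answer === "") {
        // Empty answer from the model: the component calls the same endpoint once more.
        const retry = await postApi("/sqlTest", payload);
        store.addConversation(retry.data?.body, "new");
      } else {
        store.addConversation(first.data?.body, "new");
      }
      store.dotLoader = false;
      store.recognizedText = "";
    } catch (err) {
      console.error(err);
      store.dotLoader = false;
      if (axios.isAxiosError(err)) {
        errorStore.setError(err.response?.data?.message);
      } else {
        errorStore.setError("There was an issue, please contact helpmate");
      }
    }
  }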
@@ -18119,14 +18128,14 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
 ])
 ]),
 P("div", oy, [
-s.value ? (
-(
-disabled:
+s.value ? (D(), k("div", dy, vy)) : (D(), k("div", ay, [
+(D(), k("svg", {
+disabled: O(e).beginFlag === !0,
 class: jt([
-
+O(e).beginFlag === !0 ? "cursor-not-allowed text-gray-600" : "cursor-pointer",
 "bi bi-send ml-2 h-7 w-7 rotate-45 rounded"
 ]),
-onClick: h[0] || (h[0] = (v) =>
+onClick: h[0] || (h[0] = (v) => O(e).searchBoxdisable === !1 || O(e).beginFlag === !0 ? "" : d()),
 xmlns: "http://www.w3.org/2000/svg",
 fill: "currentColor",
 viewBox: "0 0 16 16"
@@ -18137,20 +18146,20 @@ const _C = /* @__PURE__ */ Vt(IC, [["__scopeId", "data-v-cd418105"]]), TC = { cl
 Zo(P("input", {
 onKeyup: [
 h[1] || (h[1] = lp((v) => s.value ? "" : d(), ["enter"])),
-h[3] || (h[3] = (v) => p(
+h[3] || (h[3] = (v) => p(O(e).question))
 ],
-disabled:
+disabled: O(e).beginFlag === !0 || O(e).searchBoxdisable === !1,
 class: jt([
-
+O(e).beginFlag === !0 || O(e).searchBoxdisable === !1 ? "cursor-not-allowed bg-[#e5e7eb]" : "bg-white",
 "fontFormattingAMA rounded-lg flex w-full border border-genpact-darkest-teal py-[18px] px-[70px]"
 ]),
-"onUpdate:modelValue": h[2] || (h[2] = (v) =>
+"onUpdate:modelValue": h[2] || (h[2] = (v) => O(e).question = v),
 placeholder: "Write your answer here...",
 type: "text",
 ref_key: "AiSearch",
 ref: a
 }, null, 42, gy), [
-[dp,
+[dp, O(e).question]
 ])
 ])
 ]),
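The compiled render code in the hunk above wires the question input to the store; the bindings it compiles down to are roughly the following. Only the flag names (beginFlag, searchBoxdisable, question) and class strings come from the diff, while the store parameter and the helper itself are illustrative.

  // Rough equivalent of the input bindings above; `store` stands for the object
  // returned by the minified `Oe()` (an assumption based on how it is used).
  function inputState(store) {
    const disabled = store.beginFlag === true || store.searchBoxdisable === false;
    return {
      disabled,
      class: [
        disabled ? "cursor-not-allowed bg-[#e5e7eb]" : "bg-white",
        "fontFormattingAMA rounded-lg flex w-full border border-genpact-darkest-teal py-[18px] px-[70px]",
      ],
      "onUpdate:modelValue": (value) => { store.question = value; },
    };
  }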
@@ -18178,7 +18187,7 @@ const Cy = /* @__PURE__ */ Vt(Sy, [["__scopeId", "data-v-115227f8"]]), vo = (t)
 ], -1)), Ly = /* @__PURE__ */ we({
 __name: "NewMDMQuestionSimulation",
 setup(t) {
-const e = Oe(), r =
+const e = Oe(), r = Et(), i = T("");
 e.question = "";
 const u = T(null), a = T(!1), o = T(zt), n = T(!1);
 i.value = e.recognizedText, Ht(() => {
@@ -18217,31 +18226,31 @@ const Cy = /* @__PURE__ */ Vt(Sy, [["__scopeId", "data-v-115227f8"]]), vo = (t)
 };
 console.log("Payload for MDM1==> ", g);
 let m;
-
-const f = await
+yt === "prod" ? m = "/ManualExportActivity" : m = "/fetchCompletionDemo";
+const f = await Pt(m, g);
 e.addConversation((p = f.data) == null ? void 0 : p.body, "new"), n.value = !1, e.dotLoader = !1, e.recognizedText = "";
 } catch (h) {
-if (console.error(h),
+if (console.error(h), Re.isAxiosError(h)) {
 const v = h;
 console.log("Error found!"), console.error((d = v.response) == null ? void 0 : d.data), r.setError(((l = v.response) == null ? void 0 : l.data).message);
 } else
 r.setError("There was an issue, please contact helpmate");
 }
 };
-return (p, d) => (
+return (p, d) => (D(), k("div", yy, [
 P("div", Py, [
 P("div", Ry, [
 P("div", Ey, [
 P("div", wy, [
 P("div", Ay, [
-n.value ? (
-(
-disabled:
+n.value ? (D(), k("div", My, ky)) : (D(), k("div", Iy, [
+(D(), k("svg", {
+disabled: O(e).beginFlag === !0,
 class: jt([
-
+O(e).beginFlag === !0 ? "cursor-not-allowed text-gray-600" : "cursor-pointer",
 "bi bi-send ml-2 h-7 w-7 rotate-45 rounded"
 ]),
-onClick: d[0] || (d[0] = (l) =>
+onClick: d[0] || (d[0] = (l) => O(e).searchBoxdisable === !1 || O(e).beginFlag === !0 ? "" : c()),
 xmlns: "http://www.w3.org/2000/svg",
 fill: "currentColor",
 viewBox: "0 0 16 16"
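The only functional change in the MDM hunk above is which endpoint the payload is posted to, switched on the bundle's environment flag (the minified yt). A sketch of that switch, with env standing in for the flag and postApi for the minified helper Pt:

  // Sketch of the endpoint selection added above; names other than the two paths are assumptions.
  function mdmEndpoint(env) {
    return env === "prod" ? "/ManualExportActivity" : "/fetchCompletionDemo";
  }

  // Usage, assuming the same post helper used elsewhere in the bundle:
  // const response = await postApi(mdmEndpoint(env), payload);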
@@ -18252,20 +18261,20 @@ const Cy = /* @__PURE__ */ Vt(Sy, [["__scopeId", "data-v-115227f8"]]), vo = (t)
 Zo(P("input", {
 onKeyup: [
 d[1] || (d[1] = lp((l) => n.value ? "" : c(), ["enter"])),
-d[3] || (d[3] = (l) => s(
+d[3] || (d[3] = (l) => s(O(e).question))
 ],
-disabled:
+disabled: O(e).beginFlag === !0 || O(e).searchBoxdisable === !1,
 class: jt([
-
+O(e).beginFlag === !0 || O(e).searchBoxdisable === !1 ? "cursor-not-allowed bg-[#e5e7eb]" : "bg-white",
 "fontFormattingAMA rounded-lg flex w-full border border-genpact-darkest-teal py-[18px] pl-[30px] pr-[70px]"
 ]),
-"onUpdate:modelValue": d[2] || (d[2] = (l) =>
+"onUpdate:modelValue": d[2] || (d[2] = (l) => O(e).question = l),
 placeholder: "Write your answer here...",
 type: "text",
 ref_key: "AiSearch",
 ref: u
 }, null, 42, zy), [
-[dp,
+[dp, O(e).question]
 ])
 ])
 ]),
@@ -18302,7 +18311,7 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 setup(t) {
 const e = T(!0), r = T(!1), i = T(!1);
 T(!1), T(!1);
-const u = Oe(), a =
+const u = Oe(), a = Et(), o = T(""), n = T(""), s = T(""), c = T(""), p = localStorage.getItem("accessToken");
 let d = "";
 const l = T("");
 let h = "", v = "";
@@ -18320,7 +18329,7 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 );
 u.initialize(U.data), u.assessmentQuestion || (console.log("getAssessmentDetails========================>"), await y()), e.value = !1, u.simulationSkill === "937e84ef-e95d-4327-9afe-e7be2bf420eb" ? u.skillNameForSimulation = "GenpactNext" : u.simulationSkill === "32b7d6d8-f7a8-40a0-ab84-8784f25897ea" || u.simulationSkill === "32b7d6d8-f7a8-40a0-ab84-8784f25897ef" ? u.skillNameForSimulation = "Agentic AI" : u.simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" && (u.skillNameForSimulation = "Data");
 } catch (U) {
-if (console.error(U), e.value = !1,
+if (console.error(U), e.value = !1, Re.isAxiosError(U) && ((A = U.response) != null && A.status) || U && typeof U == "object" && "response" in U && U.response && typeof U.response == "object" && "status" in U.response && U.response.status) {
 const $ = U;
 if (console.log("Error found!"), console.error((I = $.response) == null ? void 0 : I.data), ((E = $.response) == null ? void 0 : E.status) === 424 || ((x = $.response) == null ? void 0 : x.status) === 425 || ((H = $.response) == null ? void 0 : H.status) === 400) {
 c.value = (G = $.response) == null ? void 0 : G.status, console.log("errorCode value", c.value), i.value = !0;
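The widened catch above accepts both axios errors and plain objects that merely carry a response.status, then routes 424, 425, and 400 into the error modal. A hedged reconstruction of that classification (the function name and return shape are illustrative; the status codes and fallback message are from the diff):

  import axios from "axios";

  // Hypothetical reconstruction of the status handling above: known codes open the
  // error modal with that code, anything else becomes the generic helpmate message.
  function classifyInitError(err) {
    const status = axios.isAxiosError(err)
      ? err.response?.status
      : err && typeof err === "object" && err.response && typeof err.response === "object"
        ? err.response.status
        : undefined;
    if (status === 424 || status === 425 || status === 400) {
      return { showModal: true, errorCode: status };
    }
    return { showModal: false, message: "There was an issue, please contact helpmate" };
  }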
@@ -18331,19 +18340,19 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 a.setError("There was an issue, please contact helpmate");
 }
 }, y = async () => {
-var A, I, E, x, H, G, le, U, $,
+var A, I, E, x, H, G, le, U, $, tt, oi, ai, ci, ui, pi, di, li, hi, vi, fi, gi, mi, Si, Ci, yi, Pi, Ri, Ei, wi, Ai, Ii, _i, Ti, bi, Mi, Gt, Qt, Oi, Di, ki, Ni, zi, ji, Li, xi, It, Y, Bi;
 console.log("Start getAssessmentDetails");
 try {
-
+yt === "prod" ? d = `https://api-v2.genpact.com/api/my-profile/assessment-master/getAssessmentDetailsById?id=${u.simulationSkill}` : d = `https://api-v2-dev.genpact.com/api/my-profile/assessment-master/getAssessmentDetailsById?id=${u.simulationSkill}`;
 const b = await Bn(d);
-o.value = (I = (A = b == null ? void 0 : b.data) == null ? void 0 : A.data) == null ? void 0 : I.name, u.skillNameForSimulation = o.value, u.generatedPrompt = (x = (E = b == null ? void 0 : b.data) == null ? void 0 : E.data) == null ? void 0 : x.generatedPrompt, u.assessmentType = (G = (H = b == null ? void 0 : b.data) == null ? void 0 : H.data) == null ? void 0 : G.assessmentType, s.value = (U = (le = b == null ? void 0 : b.data) == null ? void 0 : le.data) == null ? void 0 : U.status, g.value = (
+o.value = (I = (A = b == null ? void 0 : b.data) == null ? void 0 : A.data) == null ? void 0 : I.name, u.skillNameForSimulation = o.value, u.generatedPrompt = (x = (E = b == null ? void 0 : b.data) == null ? void 0 : E.data) == null ? void 0 : x.generatedPrompt, u.assessmentType = (G = (H = b == null ? void 0 : b.data) == null ? void 0 : H.data) == null ? void 0 : G.assessmentType, s.value = (U = (le = b == null ? void 0 : b.data) == null ? void 0 : le.data) == null ? void 0 : U.status, g.value = (tt = ($ = b == null ? void 0 : b.data) == null ? void 0 : $.data) == null ? void 0 : tt.editMode, u.empSeniority = (ai = (oi = b == null ? void 0 : b.data) == null ? void 0 : oi.data) == null ? void 0 : ai.empSeniority, u.functionRole = (ui = (ci = b == null ? void 0 : b.data) == null ? void 0 : ci.data) == null ? void 0 : ui.functionRole, h = (di = (pi = b == null ? void 0 : b.data) == null ? void 0 : pi.data) == null ? void 0 : di.accessBand, v = (hi = (li = b == null ? void 0 : b.data) == null ? void 0 : li.data) == null ? void 0 : hi.accessOhrList;
 const Be = (fi = (vi = b == null ? void 0 : b.data) == null ? void 0 : vi.data) == null ? void 0 : fi.createdBy;
 if (u.mcqType = (mi = (gi = b == null ? void 0 : b.data) == null ? void 0 : gi.data) == null ? void 0 : mi.mcqTypes, u.storeFinalFeedback = (Ci = (Si = b == null ? void 0 : b.data) == null ? void 0 : Si.data) == null ? void 0 : Ci.storeFinalFeedback, u.storeFinalScore = (Pi = (yi = b == null ? void 0 : b.data) == null ? void 0 : yi.data) == null ? void 0 : Pi.storeFinalScore, s.value === "inactive")
 r.value = !0;
 else if (s.value === "draft")
 n.value !== ba.DRAFT && (r.value = !0);
 else if (s.value === "active" && g.value && n.value === ba.DRAFT) {
-
+yt === "prod" ? d = `https://api-v2.genpact.com/api/my-profile/assessment-master/getDraftAssessmentDetailsById?assessmentId=${u.simulationSkill}` : d = `https://api-v2-dev.genpact.com/api/my-profile/assessment-master/getDraftAssessmentDetailsById?assessmentId=${u.simulationSkill}`;
 const V = await Bn(d);
 u.generatedPrompt = (Ei = (Ri = V == null ? void 0 : V.data) == null ? void 0 : Ri.data) == null ? void 0 : Ei.generatedPrompt, u.assessmentType = (Ai = (wi = V == null ? void 0 : V.data) == null ? void 0 : wi.data) == null ? void 0 : Ai.assessmentType, u.empSeniority = (_i = (Ii = V == null ? void 0 : V.data) == null ? void 0 : Ii.data) == null ? void 0 : _i.empSeniority, u.functionRole = (bi = (Ti = V == null ? void 0 : V.data) == null ? void 0 : Ti.data) == null ? void 0 : bi.functionRole, u.storeFinalFeedback = (Gt = (Mi = V == null ? void 0 : V.data) == null ? void 0 : Mi.data) == null ? void 0 : Gt.storeFinalFeedback, u.storeFinalScore = (Oi = (Qt = V == null ? void 0 : V.data) == null ? void 0 : Qt.data) == null ? void 0 : Oi.storeFinalScore, h = (ki = (Di = V == null ? void 0 : V.data) == null ? void 0 : Di.data) == null ? void 0 : ki.accessBand, v = (zi = (Ni = V == null ? void 0 : V.data) == null ? void 0 : Ni.data) == null ? void 0 : zi.accessOhrList;
 }
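Both URL assignments added in the hunk above pick a host from the environment flag and differ only in the path (regular versus draft assessment details). A compact sketch of that selection; the hosts and paths are copied from the diff, while the helper name, the env parameter, and the options object are illustrative:

  // Illustrative helper mirroring the two URL assignments added above.
  function assessmentDetailsUrl(env, assessmentId, { draft = false } = {}) {
    const host = env === "prod"
      ? "https://api-v2.genpact.com"
      : "https://api-v2-dev.genpact.com";
    return draft
      ? `${host}/api/my-profile/assessment-master/getDraftAssessmentDetailsById?assessmentId=${assessmentId}`
      : `${host}/api/my-profile/assessment-master/getAssessmentDetailsById?id=${assessmentId}`;
  }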
@@ -18374,9 +18383,9 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 }
 console.log("end getAssessmentDetails");
 } catch (b) {
-if (console.error(b), e.value = !1,
+if (console.error(b), e.value = !1, Re.isAxiosError(b) && ((Li = b.response) != null && Li.status) || b && typeof b == "object" && "response" in b && b.response && typeof b.response == "object" && "status" in b.response && b.response.status) {
 const Be = b;
-if (console.log("Error found!"), console.error((xi = Be.response) == null ? void 0 : xi.data), ((
+if (console.log("Error found!"), console.error((xi = Be.response) == null ? void 0 : xi.data), ((It = Be.response) == null ? void 0 : It.status) === 424 || ((Y = Be.response) == null ? void 0 : Y.status) === 425) {
 i.value = !0;
 return;
 }
@@ -18387,8 +18396,8 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 }, w = async () => {
 var A, I, E, x, H, G, le, U;
 try {
-
-const $ = await
+yt === "prod" ? l.value = "https://api-v2.genpact.com/api/delphi/v2/myprofile" : l.value = "https://api-v2-dev.genpact.com/api/delphi/v2/myprofile";
+const $ = await Re({
 url: l.value,
 headers: {
 authorization: `Bearer ${p}`,
@@ -18398,25 +18407,25 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 });
 u.genomeFunctionValue = (E = (I = (A = $ == null ? void 0 : $.data) == null ? void 0 : A.role_function) == null ? void 0 : I.function) == null ? void 0 : E.name, u.genomeRoleValue = (G = (H = (x = $ == null ? void 0 : $.data) == null ? void 0 : x.role_function) == null ? void 0 : H.role) == null ? void 0 : G.name;
 } catch ($) {
-if (console.error($), e.value = !1,
-const
-console.log("Error found!"), console.error((le =
+if (console.error($), e.value = !1, Re.isAxiosError($)) {
+const tt = $;
+console.log("Error found!"), console.error((le = tt.response) == null ? void 0 : le.data), a.setError(((U = tt.response) == null ? void 0 : U.data).message);
 } else
 a.setError("There was an issue, please contact helpmate");
 }
 };
-return (A, I) => (
+return (A, I) => (D(), k("section", null, [
 Se(up, { "is-visible": i.value }, {
 default: pp(() => [
 P("div", By, [
-c.value === 424 || c.value === "425" ? (
-c.value === 400 ? (
+c.value === 424 || c.value === "425" ? (D(), k("div", qy, Hy)) : Ce("", !0),
+c.value === 400 ? (D(), k("div", Vy, Ky)) : Ce("", !0)
 ])
 ]),
 _: 1
 }, 8, ["is-visible"]),
 P("div", null, [
-e.value ? (
+e.value ? (D(), Al(yl, { key: 0 })) : (D(), k("main", Jy, [
 P("div", Gy, [
 Se(zl)
 ]),
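The profile lookup changed across the two hunks above resolves the same environment switch for its URL, sends the stored access token, and reads the role/function names out of the response. A hedged sketch, with axios standing in for the minified Re and the function name chosen for illustration:

  import axios from "axios";

  // Sketch of the my-profile call shown above; URLs, header, storage key and the
  // role_function extraction are from the diff, everything else is assumed.
  async function fetchRoleFunction(env) {
    const base = env === "prod" ? "https://api-v2.genpact.com" : "https://api-v2-dev.genpact.com";
    const token = localStorage.getItem("accessToken");
    const { data } = await axios({
      url: `${base}/api/delphi/v2/myprofile`,
      headers: { authorization: `Bearer ${token}` },
    });
    return {
      genomeFunction: data?.role_function?.function?.name,
      genomeRole: data?.role_function?.role?.name,
    };
  }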
@@ -18426,12 +18435,12 @@ const xy = /* @__PURE__ */ Vt(Ly, [["__scopeId", "data-v-bbd70acc"]]), si = (t)
 P("div", Yy, [
 Se(_C),
 P("div", {
-class: jt(
+class: jt(O(u).showVoiceComponent === !0 ? "mt-[160px]" : "mt-[120px]")
 }, null, 2)
 ]),
-
+O(u).assessmentQuestion ? (D(), k("div", eP, [
 Se(Cy)
-])) : (
+])) : (D(), k("div", tP, [
 Se(xy)
 ]))
 ])