@genpact/genome.mfe.mgtwin-app 1.1.268-alpha → 1.1.269-alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{ArrowLeftCircleIcon-7da4942f.mjs → ArrowLeftCircleIcon-f6041bde.mjs} +1 -1
- package/dist/{AssessmentAMA-9ebd7c2f.mjs → AssessmentAMA-9b2b0033.mjs} +4 -4
- package/dist/{AssessmentSimulation-379b1bc6.mjs → AssessmentSimulation-6466fc51.mjs} +226 -226
- package/dist/{EditSkillAdminData-4d5a95a7.mjs → EditSkillAdminData-a9ccdeba.mjs} +3 -3
- package/dist/{EditSkillData-cedb6423.mjs → EditSkillData-a2915cd0.mjs} +3 -3
- package/dist/{HomeView-b83045cc.mjs → HomeView-8adcee4f.mjs} +5 -5
- package/dist/{Loading-faa9873d.mjs → Loading-5ea7160e.mjs} +1 -1
- package/dist/{LoadingSimulation-3c6cd16a.mjs → LoadingSimulation-27c136fb.mjs} +1 -1
- package/dist/{Modal-834a7fc9.mjs → Modal-cdd2991e.mjs} +1 -1
- package/dist/{PromptTesting-55a3de8a.mjs → PromptTesting-1a40feb5.mjs} +2 -2
- package/dist/{SkillAutomation-eecfa0eb.mjs → SkillAutomation-dc35824a.mjs} +3 -3
- package/dist/{SkillData-15e06c61.mjs → SkillData-0e630c17.mjs} +3 -3
- package/dist/{UnderMaintenance-b8331982.mjs → UnderMaintenance-bd135520.mjs} +1 -1
- package/dist/{_commonjsHelpers-41258e4b.mjs → _commonjsHelpers-e1f9766e.mjs} +1 -1
- package/dist/{api-78f19c9a.mjs → api-0bcaa8e3.mjs} +1 -1
- package/dist/{main-aead3d79.mjs → main-fb55944a.mjs} +1257 -1257
- package/dist/mgtwin.es.min.js +1 -1
- package/dist/mgtwin.umd.min.js +8 -8
- package/package.json +1 -1
@@ -1,8 +1,8 @@
-import { _ as Cl, a as ip, L as yl } from "./LoadingSimulation-3c6cd16a.mjs";
-import { _ as Pl, a as sp, Q as Ot, M as Aa } from "./Modal-834a7fc9.mjs";
-import { d as Pe, r as M, H as Dt, o as N, c as j, a as R, k as St, u as be, C as Bt, q as Ce, y as Lo, h as ge, s as op, t as Ve, b as
-import { g as xn, p as gt } from "./api-78f19c9a.mjs";
-import { c as B, a as Il, r as _l } from "./_commonjsHelpers-41258e4b.mjs";
+import { _ as Cl, a as ip, L as yl } from "./LoadingSimulation-27c136fb.mjs";
+import { _ as Pl, a as sp, Q as Ot, M as Aa } from "./Modal-cdd2991e.mjs";
+import { d as Pe, r as M, H as Dt, o as N, c as j, a as R, k as St, u as be, C as Bt, q as Ce, y as Lo, h as ge, s as op, t as Ve, b as k, f as _e, i as zn, I as Vi, F as xo, e as qo, w as Jo, J as Rl, p as Wn, g as Vn, G as Gi, z as El, K as wl, n as kt, m as ap, L as cp, l as Al } from "./main-fb55944a.mjs";
+import { g as xn, p as gt } from "./api-0bcaa8e3.mjs";
+import { c as B, a as Il, r as _l } from "./_commonjsHelpers-e1f9766e.mjs";
 import { _ as Ft } from "./_plugin-vue_export-helper-dad06003.mjs";
 import "./loadernew-ac153a51.mjs";
 const Tl = { class: "flex h-14 w-full flex-1 items-center justify-between bg-[#021c42] md:pr-6 lg:pr-6" }, bl = ["href"], Ml = /* @__PURE__ */ R("img", {
@@ -2947,7 +2947,7 @@ var te = {}, Qe = {}, Yo = {};
 Object.defineProperty(Qe, "__esModule", { value: !0 });
 Qe.AudioOutputFormatImpl = void 0;
 const q = Yo, x = Ge;
-class k extends x.AudioStreamFormatImpl {
+class D extends x.AudioStreamFormatImpl {
 /**
  * Creates an instance with the given values.
  * @constructor
@@ -2965,97 +2965,97 @@ class k extends x.AudioStreamFormatImpl {
 super(i, o, r, e), this.formatTag = e, this.avgBytesPerSec = c, this.blockAlign = a, this.priAudioFormatString = n, this.priRequestAudioFormatString = s, this.priHasHeader = u;
 }
 static fromSpeechSynthesisOutputFormat(e) {
-return e === void 0 ? k.getDefaultOutputFormat() : k.fromSpeechSynthesisOutputFormatString(k.SpeechSynthesisOutputFormatToString[e]);
+return e === void 0 ? D.getDefaultOutputFormat() : D.fromSpeechSynthesisOutputFormatString(D.SpeechSynthesisOutputFormatToString[e]);
 }
 static fromSpeechSynthesisOutputFormatString(e) {
 switch (e) {
 case "raw-8khz-8bit-mono-mulaw":
-return new k(x.AudioFormatTag.MuLaw, 1, 8e3, 8e3, 1, 8, e, e, !1);
+return new D(x.AudioFormatTag.MuLaw, 1, 8e3, 8e3, 1, 8, e, e, !1);
 case "riff-16khz-16kbps-mono-siren":
-return new k(x.AudioFormatTag.Siren, 1, 16e3, 2e3, 40, 0, e, "audio-16khz-16kbps-mono-siren", !0);
+return new D(x.AudioFormatTag.Siren, 1, 16e3, 2e3, 40, 0, e, "audio-16khz-16kbps-mono-siren", !0);
 case "audio-16khz-16kbps-mono-siren":
-return new k(x.AudioFormatTag.Siren, 1, 16e3, 2e3, 40, 0, e, e, !1);
+return new D(x.AudioFormatTag.Siren, 1, 16e3, 2e3, 40, 0, e, e, !1);
 case "audio-16khz-32kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 16e3, 32 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 16e3, 32 << 7, 2, 16, e, e, !1);
 case "audio-16khz-128kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 16e3, 128 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 16e3, 128 << 7, 2, 16, e, e, !1);
 case "audio-16khz-64kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 16e3, 64 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 16e3, 64 << 7, 2, 16, e, e, !1);
 case "audio-24khz-48kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 24e3, 48 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 24e3, 48 << 7, 2, 16, e, e, !1);
 case "audio-24khz-96kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 24e3, 96 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 24e3, 96 << 7, 2, 16, e, e, !1);
 case "audio-24khz-160kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 24e3, 160 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 24e3, 160 << 7, 2, 16, e, e, !1);
 case "raw-16khz-16bit-mono-truesilk":
-return new k(x.AudioFormatTag.SILKSkype, 1, 16e3, 32e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.SILKSkype, 1, 16e3, 32e3, 2, 16, e, e, !1);
 case "riff-8khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 8e3, 16e3, 2, 16, e, "raw-8khz-16bit-mono-pcm", !0);
+return new D(x.AudioFormatTag.PCM, 1, 8e3, 16e3, 2, 16, e, "raw-8khz-16bit-mono-pcm", !0);
 case "riff-24khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 24e3, 48e3, 2, 16, e, "raw-24khz-16bit-mono-pcm", !0);
+return new D(x.AudioFormatTag.PCM, 1, 24e3, 48e3, 2, 16, e, "raw-24khz-16bit-mono-pcm", !0);
 case "riff-8khz-8bit-mono-mulaw":
-return new k(x.AudioFormatTag.MuLaw, 1, 8e3, 8e3, 1, 8, e, "raw-8khz-8bit-mono-mulaw", !0);
+return new D(x.AudioFormatTag.MuLaw, 1, 8e3, 8e3, 1, 8, e, "raw-8khz-8bit-mono-mulaw", !0);
 case "raw-16khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 16e3, 32e3, 2, 16, e, "raw-16khz-16bit-mono-pcm", !1);
+return new D(x.AudioFormatTag.PCM, 1, 16e3, 32e3, 2, 16, e, "raw-16khz-16bit-mono-pcm", !1);
 case "raw-24khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 24e3, 48e3, 2, 16, e, "raw-24khz-16bit-mono-pcm", !1);
+return new D(x.AudioFormatTag.PCM, 1, 24e3, 48e3, 2, 16, e, "raw-24khz-16bit-mono-pcm", !1);
 case "raw-8khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 8e3, 16e3, 2, 16, e, "raw-8khz-16bit-mono-pcm", !1);
+return new D(x.AudioFormatTag.PCM, 1, 8e3, 16e3, 2, 16, e, "raw-8khz-16bit-mono-pcm", !1);
 case "ogg-16khz-16bit-mono-opus":
-return new k(x.AudioFormatTag.OGG_OPUS, 1, 16e3, 8192, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.OGG_OPUS, 1, 16e3, 8192, 2, 16, e, e, !1);
 case "ogg-24khz-16bit-mono-opus":
-return new k(x.AudioFormatTag.OGG_OPUS, 1, 24e3, 8192, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.OGG_OPUS, 1, 24e3, 8192, 2, 16, e, e, !1);
 case "raw-48khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 48e3, 96e3, 2, 16, e, "raw-48khz-16bit-mono-pcm", !1);
+return new D(x.AudioFormatTag.PCM, 1, 48e3, 96e3, 2, 16, e, "raw-48khz-16bit-mono-pcm", !1);
 case "riff-48khz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 48e3, 96e3, 2, 16, e, "raw-48khz-16bit-mono-pcm", !0);
+return new D(x.AudioFormatTag.PCM, 1, 48e3, 96e3, 2, 16, e, "raw-48khz-16bit-mono-pcm", !0);
 case "audio-48khz-96kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 48e3, 96 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 48e3, 96 << 7, 2, 16, e, e, !1);
 case "audio-48khz-192kbitrate-mono-mp3":
-return new k(x.AudioFormatTag.MP3, 1, 48e3, 192 << 7, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.MP3, 1, 48e3, 192 << 7, 2, 16, e, e, !1);
 case "ogg-48khz-16bit-mono-opus":
-return new k(x.AudioFormatTag.OGG_OPUS, 1, 48e3, 12e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.OGG_OPUS, 1, 48e3, 12e3, 2, 16, e, e, !1);
 case "webm-16khz-16bit-mono-opus":
-return new k(x.AudioFormatTag.WEBM_OPUS, 1, 16e3, 4e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.WEBM_OPUS, 1, 16e3, 4e3, 2, 16, e, e, !1);
 case "webm-24khz-16bit-mono-opus":
-return new k(x.AudioFormatTag.WEBM_OPUS, 1, 24e3, 6e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.WEBM_OPUS, 1, 24e3, 6e3, 2, 16, e, e, !1);
 case "webm-24khz-16bit-24kbps-mono-opus":
-return new k(x.AudioFormatTag.WEBM_OPUS, 1, 24e3, 3e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.WEBM_OPUS, 1, 24e3, 3e3, 2, 16, e, e, !1);
 case "audio-16khz-16bit-32kbps-mono-opus":
-return new k(x.AudioFormatTag.OPUS, 1, 16e3, 4e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.OPUS, 1, 16e3, 4e3, 2, 16, e, e, !1);
 case "audio-24khz-16bit-48kbps-mono-opus":
-return new k(x.AudioFormatTag.OPUS, 1, 24e3, 6e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.OPUS, 1, 24e3, 6e3, 2, 16, e, e, !1);
 case "audio-24khz-16bit-24kbps-mono-opus":
-return new k(x.AudioFormatTag.OPUS, 1, 24e3, 3e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.OPUS, 1, 24e3, 3e3, 2, 16, e, e, !1);
 case "audio-24khz-16bit-mono-flac":
-return new k(x.AudioFormatTag.FLAC, 1, 24e3, 24e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.FLAC, 1, 24e3, 24e3, 2, 16, e, e, !1);
 case "audio-48khz-16bit-mono-flac":
-return new k(x.AudioFormatTag.FLAC, 1, 48e3, 3e4, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.FLAC, 1, 48e3, 3e4, 2, 16, e, e, !1);
 case "raw-24khz-16bit-mono-truesilk":
-return new k(x.AudioFormatTag.SILKSkype, 1, 24e3, 48e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.SILKSkype, 1, 24e3, 48e3, 2, 16, e, e, !1);
 case "raw-8khz-8bit-mono-alaw":
-return new k(x.AudioFormatTag.ALaw, 1, 8e3, 8e3, 1, 8, e, e, !1);
+return new D(x.AudioFormatTag.ALaw, 1, 8e3, 8e3, 1, 8, e, e, !1);
 case "riff-8khz-8bit-mono-alaw":
-return new k(x.AudioFormatTag.ALaw, 1, 8e3, 8e3, 1, 8, e, "raw-8khz-8bit-mono-alaw", !0);
+return new D(x.AudioFormatTag.ALaw, 1, 8e3, 8e3, 1, 8, e, "raw-8khz-8bit-mono-alaw", !0);
 case "raw-22050hz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 22050, 44100, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.PCM, 1, 22050, 44100, 2, 16, e, e, !1);
 case "riff-22050hz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 22050, 44100, 2, 16, e, "raw-22050hz-16bit-mono-pcm", !0);
+return new D(x.AudioFormatTag.PCM, 1, 22050, 44100, 2, 16, e, "raw-22050hz-16bit-mono-pcm", !0);
 case "raw-44100hz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 44100, 88200, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.PCM, 1, 44100, 88200, 2, 16, e, e, !1);
 case "riff-44100hz-16bit-mono-pcm":
-return new k(x.AudioFormatTag.PCM, 1, 44100, 88200, 2, 16, e, "raw-44100hz-16bit-mono-pcm", !0);
+return new D(x.AudioFormatTag.PCM, 1, 44100, 88200, 2, 16, e, "raw-44100hz-16bit-mono-pcm", !0);
 case "amr-wb-16000h":
-return new k(x.AudioFormatTag.AMR_WB, 1, 16e3, 3052, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.AMR_WB, 1, 16e3, 3052, 2, 16, e, e, !1);
 case "g722-16khz-64kbps":
-return new k(x.AudioFormatTag.G722, 1, 16e3, 8e3, 2, 16, e, e, !1);
+return new D(x.AudioFormatTag.G722, 1, 16e3, 8e3, 2, 16, e, e, !1);
 case "riff-16khz-16bit-mono-pcm":
 default:
-return new k(x.AudioFormatTag.PCM, 1, 16e3, 32e3, 2, 16, "riff-16khz-16bit-mono-pcm", "raw-16khz-16bit-mono-pcm", !0);
+return new D(x.AudioFormatTag.PCM, 1, 16e3, 32e3, 2, 16, "riff-16khz-16bit-mono-pcm", "raw-16khz-16bit-mono-pcm", !0);
 }
 }
 static getDefaultOutputFormat() {
-return k.fromSpeechSynthesisOutputFormatString(typeof window < "u" ? "audio-24khz-48kbitrate-mono-mp3" : "riff-16khz-16bit-mono-pcm");
+return D.fromSpeechSynthesisOutputFormatString(typeof window < "u" ? "audio-24khz-48kbitrate-mono-mp3" : "riff-16khz-16bit-mono-pcm");
 }
 /**
  * Specifies if this audio output format has a header
@@ -3111,8 +3111,8 @@ class k extends x.AudioStreamFormatImpl {
 return r.set(new Uint8Array(this.header), 0), r.set(new Uint8Array(e), this.header.byteLength), r.buffer;
 }
 }
-Qe.AudioOutputFormatImpl = k;
-k.SpeechSynthesisOutputFormatToString = {
+Qe.AudioOutputFormatImpl = D;
+D.SpeechSynthesisOutputFormatToString = {
 [q.SpeechSynthesisOutputFormat.Raw8Khz8BitMonoMULaw]: "raw-8khz-8bit-mono-mulaw",
 [q.SpeechSynthesisOutputFormat.Riff16Khz16KbpsMonoSiren]: "riff-16khz-16kbps-mono-siren",
 [q.SpeechSynthesisOutputFormat.Audio16Khz16KbpsMonoSiren]: "audio-16khz-16kbps-mono-siren",
@@ -6514,12 +6514,12 @@ function Kf() {
 p.token != null && p.token !== "" && (y[p.headerName] = p.token);
 let P = u.parameters.getProperty(r.PropertyId.SpeechServiceConnection_Endpoint, "");
 if (!P) {
-const I = i.ConnectionFactoryBase.getHostSuffix(v),
-P = `${
+const I = i.ConnectionFactoryBase.getHostSuffix(v), w = u.parameters.getProperty(r.PropertyId.SpeechServiceConnection_Host, `wss://${v}.${n.BaseUrl}${I}`);
+P = `${w.endsWith("/") ? w : w + "/"}${S}${n.ApiKey}/${C}`;
 }
 this.setCommonUrlParams(u, f, P);
-const
-return Promise.resolve(new t.WebsocketConnection(P, f, y, new c.WebsocketMessageFormatter(), t.ProxyInfo.fromRecognizerConfig(u),
+const A = u.parameters.getProperty("SPEECH-EnableWebsocketCompression", "false") === "true";
+return Promise.resolve(new t.WebsocketConnection(P, f, y, new c.WebsocketMessageFormatter(), t.ProxyInfo.fromRecognizerConfig(u), A, d));
 }
 }
 return Tr.DialogConnectionFactory = n, n.ApiKey = "api", n.BaseUrl = "convai.speech", Tr;
@@ -8393,10 +8393,10 @@ function Sg() {
 if (h) {
 f = p.parameters.getProperty(i.PropertyId.SpeechServiceConnection_Endpoint), f || (f = "wss://" + p.parameters.getProperty(i.PropertyId.SpeechServiceConnection_Host, "transcribe.{region}.cts.speech{hostSuffix}") + "{path}"), f = e.StringUtils.formatString(f, m);
 const P = new URL(f);
-P.searchParams.forEach((I,
-S[
-}), new n.TranscriberConnectionFactory().setQueryParams(S, p, f), S[a.QueryParameterNames.CtsMeetingId] = m[a.QueryParameterNames.CtsMeetingId], S[a.QueryParameterNames.CtsDeviceId] = m[a.QueryParameterNames.CtsDeviceId], v.isHost || (S[a.QueryParameterNames.CtsIsParticipant] = ""), a.QueryParameterNames.Format in S || (S[a.QueryParameterNames.Format] = "simple"), P.searchParams.forEach((I,
-P.searchParams.set(
+P.searchParams.forEach((I, w) => {
+S[w] = I;
+}), new n.TranscriberConnectionFactory().setQueryParams(S, p, f), S[a.QueryParameterNames.CtsMeetingId] = m[a.QueryParameterNames.CtsMeetingId], S[a.QueryParameterNames.CtsDeviceId] = m[a.QueryParameterNames.CtsDeviceId], v.isHost || (S[a.QueryParameterNames.CtsIsParticipant] = ""), a.QueryParameterNames.Format in S || (S[a.QueryParameterNames.Format] = "simple"), P.searchParams.forEach((I, w) => {
+P.searchParams.set(w, S[w]), delete S[w];
 }), f = P.toString();
 } else {
 const P = new n.TranslationConnectionFactory();
@@ -8518,8 +8518,8 @@ function Cg() {
 for (const y of S) {
 const P = this.privProperties.getProperty(y);
 if (P) {
-const
-this.privSpeechTranslationConfig.setProperty(
+const A = typeof y == "string" ? y : a.PropertyId[y];
+this.privSpeechTranslationConfig.setProperty(A, P);
 }
 }
 const C = JSON.parse(this.privProperties.getProperty(e.ServicePropertiesPropertyName, "{}"));
@@ -11425,19 +11425,19 @@ function E() {
 Object.defineProperty(t, "TranslationRecognitionResult", { enumerable: !0, get: function() {
 return P.TranslationRecognitionResult;
 } });
-var
+var A = _s;
 Object.defineProperty(t, "TranslationSynthesisResult", { enumerable: !0, get: function() {
-return
+return A.TranslationSynthesisResult;
 } });
 var I = Vp;
 Object.defineProperty(t, "ResultReason", { enumerable: !0, get: function() {
 return I.ResultReason;
 } });
-var
+var w = Of();
 Object.defineProperty(t, "SpeechConfig", { enumerable: !0, get: function() {
-return
+return w.SpeechConfig;
 } }), Object.defineProperty(t, "SpeechConfigImpl", { enumerable: !0, get: function() {
-return
+return w.SpeechConfigImpl;
 } });
 var O = Df();
 Object.defineProperty(t, "SpeechTranslationConfig", { enumerable: !0, get: function() {
@@ -12330,8 +12330,8 @@ function tm() {
 throw new e.ArgumentNullError("recognizerConfig");
 this.privEnableSpeakerId = y.isSpeakerDiarizationEnabled, this.privMustReportEndOfStream = !1, this.privAuthentication = f, this.privConnectionFactory = S, this.privAudioSource = C, this.privRecognizerConfig = y, this.privIsDisposed = !1, this.privRecognizer = P, this.privRequestSession = new i.RequestSession(this.privAudioSource.id()), this.privConnectionEvents = new e.EventSource(), this.privServiceEvents = new e.EventSource(), this.privDynamicGrammar = new i.DynamicGrammarBuilder(), this.privSpeechContext = new i.SpeechContext(this.privDynamicGrammar), this.privAgentConfig = new i.AgentConfig(), this.privRecognizerConfig.parameters.getProperty(r.PropertyId.WebWorkerLoadType, "on").toLowerCase() === "on" && typeof Blob < "u" && typeof Worker < "u" ? this.privSetTimeout = e.Timeout.setTimeout : (typeof window < "u" && (this.privSetTimeout = window.setTimeout.bind(window)), typeof globalThis < "u" && (this.privSetTimeout = globalThis.setTimeout.bind(globalThis))), this.connectionEvents.attach((I) => {
 if (I.name === "ConnectionClosedEvent") {
-const
-(
+const w = I;
+(w.statusCode === 1003 || w.statusCode === 1007 || w.statusCode === 1002 || w.statusCode === 4e3 || this.privRequestSession.numConnectionAttempts > this.privRecognizerConfig.maxRetryCount) && this.cancelRecognitionLocal(r.CancellationReason.Error, w.statusCode === 1007 ? r.CancellationErrorCode.BadRequestParameters : r.CancellationErrorCode.ConnectionFailure, `${w.reason} websocket error code: ${w.statusCode}`);
 }
 }), this.privEnableSpeakerId && (this.privDiarizationSessionId = (0, e.createNoDashGuid)());
 }
@@ -12349,14 +12349,14 @@ function tm() {
 targetLanguages: S
 }, y !== void 0 && (this.privSpeechContext.getContext().translation.category = y), C !== void 0) {
 const I = {};
-for (const
-I[
+for (const w of S)
+I[w] = C;
 this.privSpeechContext.getContext().synthesis = {
 defaultVoices: I
 };
 }
-const
-
+const A = this.privSpeechContext.getContext().phraseDetection || {};
+A.onSuccess = { action: v.NextAction.Translate }, A.onInterim = { action: v.NextAction.Translate }, this.privSpeechContext.getContext().phraseDetection = A;
 }
 }
 setSpeechSegmentationTimeoutJson() {
@@ -12368,43 +12368,43 @@ function tm() {
 let P = !1;
 if (C !== void 0) {
 P = !0;
-let
+let A = a.SegmentationMode.Normal;
 switch (C.toLowerCase()) {
 case "default":
 break;
 case "time":
-
+A = a.SegmentationMode.Custom;
 break;
 case "semantic":
-
+A = a.SegmentationMode.Semantic;
 break;
 }
-y.mode =
+y.mode = A;
 }
 if (f !== void 0) {
 P = !0;
-const
-y.mode = a.SegmentationMode.Custom, y.segmentationSilenceTimeoutMs =
+const A = parseInt(f, 10);
+y.mode = a.SegmentationMode.Custom, y.segmentationSilenceTimeoutMs = A;
 }
 if (S !== void 0) {
 P = !0;
-const
-y.mode = a.SegmentationMode.Custom, y.segmentationForcedTimeoutMs =
+const A = parseInt(S, 10);
+y.mode = a.SegmentationMode.Custom, y.segmentationForcedTimeoutMs = A;
 }
 if (P) {
-const
-switch (
+const A = this.privSpeechContext.getContext().phraseDetection || {};
+switch (A.mode = this.recognitionMode, this.recognitionMode) {
 case o.RecognitionMode.Conversation:
-
+A.conversation = A.conversation ?? { segmentation: {} }, A.conversation.segmentation = y;
 break;
 case o.RecognitionMode.Interactive:
-
+A.interactive = A.interactive ?? { segmentation: {} }, A.interactive.segmentation = y;
 break;
 case o.RecognitionMode.Dictation:
-
+A.dictation = A.dictation ?? {}, A.dictation.segmentation = y;
 break;
 }
-this.privSpeechContext.getContext().phraseDetection =
+this.privSpeechContext.getContext().phraseDetection = A;
 }
 }
 setLanguageIdJson() {
@@ -12485,27 +12485,27 @@ function tm() {
 return;
 }
 if (this.privConnectionConfigurationPromise = void 0, this.privRecognizerConfig.recognitionMode = f, this.privRecognizerConfig.recognitionEndpointVersion === "2") {
-const
-
+const w = this.privSpeechContext.getContext().phraseDetection || {};
+w.mode = f, this.privSpeechContext.getContext().phraseDetection = w;
 }
 this.setLanguageIdJson(), this.setTranslationJson(), this.privRecognizerConfig.autoDetectSourceLanguages !== void 0 && this.privRecognizerConfig.parameters.getProperty(r.PropertyId.SpeechServiceConnection_TranslationToLanguages, void 0) !== void 0 && this.setupTranslationWithLanguageId(), this.setSpeechSegmentationTimeoutJson(), this.setOutputDetailLevelJson(), this.privSuccessCallback = S, this.privErrorCallback = C, this.privRequestSession.startNewRecognition(), this.privRequestSession.listenForServiceTelemetry(this.privAudioSource.events);
 const y = this.connectImpl();
 let P;
 try {
-const
-this.privIsLiveAudio = F.type && F.type === i.type.Microphones, P = new t.ReplayableAudioNode(
-} catch (
-throw await this.privRequestSession.onStopRecognizing(),
+const w = await this.audioSource.attach(this.privRequestSession.audioNodeId), O = await this.audioSource.format, F = await this.audioSource.deviceInfo;
+this.privIsLiveAudio = F.type && F.type === i.type.Microphones, P = new t.ReplayableAudioNode(w, O.avgBytesPerSec), await this.privRequestSession.onAudioSourceAttachCompleted(P, !1), this.privRecognizerConfig.SpeechServiceConfig.Context.audio = { source: F };
+} catch (w) {
+throw await this.privRequestSession.onStopRecognizing(), w;
 }
 try {
 await y;
-} catch (
-await this.cancelRecognitionLocal(r.CancellationReason.Error, r.CancellationErrorCode.ConnectionFailure,
+} catch (w) {
+await this.cancelRecognitionLocal(r.CancellationReason.Error, r.CancellationErrorCode.ConnectionFailure, w);
 return;
 }
-const
-this.privRecognizer.sessionStarted && this.privRecognizer.sessionStarted(this.privRecognizer,
-await this.cancelRecognitionLocal(r.CancellationReason.Error, r.CancellationErrorCode.RuntimeError,
+const A = new r.SessionEventArgs(this.privRequestSession.sessionId);
+this.privRecognizer.sessionStarted && this.privRecognizer.sessionStarted(this.privRecognizer, A), this.receiveMessage(), this.sendAudio(P).catch(async (w) => {
+await this.cancelRecognitionLocal(r.CancellationReason.Error, r.CancellationErrorCode.RuntimeError, w);
 });
 }
 async stopRecognizing() {
@@ -12594,10 +12594,10 @@ function tm() {
 this.privRecognizer.speechStartDetected && this.privRecognizer.speechStartDetected(this.privRecognizer, P);
 break;
 case "speech.enddetected":
-let
-C.textBody.length > 0 ?
-const I = i.SpeechDetected.fromJSON(
-this.privRecognizer.speechEndDetected && this.privRecognizer.speechEndDetected(this.privRecognizer,
+let A;
+C.textBody.length > 0 ? A = C.textBody : A = "{ Offset: 0 }";
+const I = i.SpeechDetected.fromJSON(A, this.privRequestSession.currentTurnAudioOffset), w = new r.RecognitionEventArgs(I.Offset + this.privRequestSession.currentTurnAudioOffset, this.privRequestSession.sessionId);
+this.privRecognizer.speechEndDetected && this.privRecognizer.speechEndDetected(this.privRecognizer, w);
 break;
 case "turn.end":
 await this.sendTelemetryData(), this.privRequestSession.isSpeechEnded && this.privMustReportEndOfStream && (this.privMustReportEndOfStream = !1, await this.cancelRecognitionLocal(r.CancellationReason.EndOfStream, r.CancellationErrorCode.NoError, void 0));
@@ -12641,8 +12641,8 @@ function tm() {
 if (C) {
 const y = this.privRecognizerConfig.sourceLanguageModels;
 if (y !== void 0 && y.length > 0) {
-const
-
+const A = this.privSpeechContext.getContext().phraseDetection || {};
+A.customModels = y, this.privSpeechContext.getContext().phraseDetection = A;
 }
 this.privRecognizerConfig.parameters.getProperty(r.PropertyId.SpeechServiceConnection_TranslationVoice, void 0) !== void 0 && (C.onSuccess = { action: n.NextAction.Synthesize }, C.onPassthrough = { action: n.NextAction.Synthesize });
 }
@@ -12688,14 +12688,14 @@ function tm() {
 const S = await this.audioSource.format;
 this.privAverageBytesPerMs = S.avgBytesPerSec / 1e3;
 let C = Date.now();
-const y = this.privRecognizerConfig.parameters.getProperty("SPEECH-TransmitLengthBeforThrottleMs", "5000"), P = S.avgBytesPerSec / 1e3 * parseInt(y, 10),
-if (!this.privIsDisposed && !this.privRequestSession.isSpeechEnded && this.privRequestSession.isRecognizing && this.privRequestSession.recogNumber ===
-const
+const y = this.privRecognizerConfig.parameters.getProperty("SPEECH-TransmitLengthBeforThrottleMs", "5000"), P = S.avgBytesPerSec / 1e3 * parseInt(y, 10), A = this.privRequestSession.recogNumber, I = async () => {
+if (!this.privIsDisposed && !this.privRequestSession.isSpeechEnded && this.privRequestSession.isRecognizing && this.privRequestSession.recogNumber === A) {
+const w = await this.fetchConnection(), O = await f.read();
 if (this.privRequestSession.isSpeechEnded)
 return;
 let F, Q;
-if (!O || O.isEnd ? (F = null, Q = 0) : (F = O.buffer, this.privRequestSession.onAudioSent(F.byteLength), P >= this.privRequestSession.bytesSent ? Q = 0 : Q = Math.max(0, C - Date.now())), Q !== 0 && await this.delay(Q), F !== null && (C = Date.now() + F.byteLength * 1e3 / (S.avgBytesPerSec * 2)), !this.privIsDisposed && !this.privRequestSession.isSpeechEnded && this.privRequestSession.isRecognizing && this.privRequestSession.recogNumber ===
-if (
+if (!O || O.isEnd ? (F = null, Q = 0) : (F = O.buffer, this.privRequestSession.onAudioSent(F.byteLength), P >= this.privRequestSession.bytesSent ? Q = 0 : Q = Math.max(0, C - Date.now())), Q !== 0 && await this.delay(Q), F !== null && (C = Date.now() + F.byteLength * 1e3 / (S.avgBytesPerSec * 2)), !this.privIsDisposed && !this.privRequestSession.isSpeechEnded && this.privRequestSession.isRecognizing && this.privRequestSession.recogNumber === A)
+if (w.send(new c.SpeechConnectionMessage(e.MessageType.Binary, "audio", this.privRequestSession.requestId, null, F)).catch(() => {
 this.privRequestSession.onServiceTurnEndResponse(this.privRecognizerConfig.isContinuousRecognition).catch(() => {
 });
 }), O != null && O.isEnd)
@@ -12714,16 +12714,16 @@ function tm() {
 let C = 0, y = "";
 for (; this.privRequestSession.numConnectionAttempts <= this.privRecognizerConfig.maxRetryCount; ) {
 this.privRequestSession.onRetryConnection();
-const
+const A = await (f ? this.privAuthentication.fetchOnExpiry(this.privAuthFetchEventId) : this.privAuthentication.fetch(this.privAuthFetchEventId));
 await this.privRequestSession.onAuthCompleted(!1);
-const I = await this.privConnectionFactory.create(this.privRecognizerConfig,
+const I = await this.privConnectionFactory.create(this.privRecognizerConfig, A, this.privConnectionId);
 this.privRequestSession.listenForServiceTelemetry(I.events), I.events.attach((O) => {
 this.connectionEvents.onEvent(O);
 });
-const
-if (
-return await this.privRequestSession.onConnectionEstablishCompleted(
-
+const w = await I.open();
+if (w.statusCode === 200)
+return await this.privRequestSession.onConnectionEstablishCompleted(w.statusCode), Promise.resolve(I);
+w.statusCode === 1006 && (f = !0), C = w.statusCode, y = w.reason;
 }
 return await this.privRequestSession.onConnectionEstablishCompleted(C, y), Promise.reject(`Unable to contact server. StatusCode: ${C}, ${this.privRecognizerConfig.parameters.getProperty(r.PropertyId.SpeechServiceConnection_Endpoint)} Reason: ${y}`);
 }
@@ -12978,8 +12978,8 @@ function sm() {
 let h = p.parameters.getProperty(r.PropertyId.SpeechServiceConnection_Endpoint, void 0);
 const v = p.parameters.getProperty(r.PropertyId.SpeechServiceConnection_Region, void 0), g = i.ConnectionFactoryBase.getHostSuffix(v), m = p.parameters.getProperty(r.PropertyId.SpeechServiceConnection_Host, "wss://" + v + ".stt.speech" + g), f = {}, S = p.parameters.getProperty(r.PropertyId.SpeechServiceConnection_EndpointId, void 0), C = p.parameters.getProperty(r.PropertyId.SpeechServiceConnection_RecoLanguage, void 0);
 if (S ? (!h || h.search(o.QueryParameterNames.CustomSpeechDeploymentId) === -1) && (f[o.QueryParameterNames.CustomSpeechDeploymentId] = S) : C && (!h || h.search(o.QueryParameterNames.Language) === -1) && (f[o.QueryParameterNames.Language] = C), (!h || h.search(o.QueryParameterNames.Format) === -1) && (f[o.QueryParameterNames.Format] = p.parameters.getProperty(e.OutputFormatPropertyName, r.OutputFormat[r.OutputFormat.Simple]).toLowerCase()), p.autoDetectSourceLanguages !== void 0 && (f[o.QueryParameterNames.EnableLanguageId] = "true"), this.setCommonUrlParams(p, f, h), h) {
-const
-(O === "" || O === "/") && (
+const w = new URL(h), O = w.pathname;
+(O === "" || O === "/") && (w.pathname = this.universalUri + p.recognitionEndpointVersion, h = await i.ConnectionFactoryBase.getRedirectUrlFromEndpoint(w.toString()));
 }
 if (!h)
 switch (p.recognitionMode) {
@@ -12995,8 +12995,8 @@ function sm() {
 }
 const y = {};
 d.token !== void 0 && d.token !== "" && (y[d.headerName] = d.token), y[a.HeaderNames.ConnectionId] = l, y.connectionId = l;
-const P = p.parameters.getProperty("SPEECH-EnableWebsocketCompression", "false") === "true",
-return p.parameters.setProperty(r.PropertyId.SpeechServiceConnection_Url, I),
+const P = p.parameters.getProperty("SPEECH-EnableWebsocketCompression", "false") === "true", A = new t.WebsocketConnection(h, f, y, new c.WebsocketMessageFormatter(), t.ProxyInfo.fromRecognizerConfig(p), P, l), I = A.uri;
+return p.parameters.setProperty(r.PropertyId.SpeechServiceConnection_Url, I), A;
 }
 };
 return an.SpeechConnectionFactory = s, an;
@@ -13015,14 +13015,14 @@ function om() {
 let l = u.parameters.getProperty(e.PropertyId.SpeechServiceConnection_Endpoint, void 0);
 const h = u.parameters.getProperty(e.PropertyId.SpeechServiceConnection_Region, void 0), v = i.ConnectionFactoryBase.getHostSuffix(h), g = u.parameters.getProperty(e.PropertyId.SpeechServiceConnection_Host, "wss://" + h + ".stt.speech" + v), m = {}, f = u.parameters.getProperty(e.PropertyId.SpeechServiceConnection_EndpointId, void 0), S = u.parameters.getProperty(e.PropertyId.SpeechServiceConnection_RecoLanguage, void 0);
 if (f ? (!l || l.search(o.QueryParameterNames.CustomSpeechDeploymentId) === -1) && (m[o.QueryParameterNames.CustomSpeechDeploymentId] = f) : S && (!l || l.search(o.QueryParameterNames.Language) === -1) && (m[o.QueryParameterNames.Language] = S), u.autoDetectSourceLanguages !== void 0 && (m[o.QueryParameterNames.EnableLanguageId] = "true"), this.setV2UrlParams(u, m, l), l) {
-const I = new URL(l),
-(
+const I = new URL(l), w = I.pathname;
+(w === "" || w === "/") && (I.pathname = this.universalUri, l = await i.ConnectionFactoryBase.getRedirectUrlFromEndpoint(I.toString()));
 }
 l || (l = `${g}${this.universalUri}`);
 const C = {};
 p.token !== void 0 && p.token !== "" && (C[p.headerName] = p.token), C[a.HeaderNames.ConnectionId] = d;
-const y = u.parameters.getProperty("SPEECH-EnableWebsocketCompression", "false") === "true", P = new t.WebsocketConnection(l, m, C, new c.WebsocketMessageFormatter(), t.ProxyInfo.fromRecognizerConfig(u), y, d),
-return u.parameters.setProperty(e.PropertyId.SpeechServiceConnection_Url,
+const y = u.parameters.getProperty("SPEECH-EnableWebsocketCompression", "false") === "true", P = new t.WebsocketConnection(l, m, C, new c.WebsocketMessageFormatter(), t.ProxyInfo.fromRecognizerConfig(u), y, d), A = P.uri;
+return u.parameters.setProperty(e.PropertyId.SpeechServiceConnection_Url, A), P;
 }
 setV2UrlParams(u, p, d) {
 (/* @__PURE__ */ new Map([
@@ -14704,19 +14704,19 @@ function Nm() {
 case "speech.keyword":
 const P = o.SpeechKeyword.fromJSON(h.textBody, this.privRequestSession.currentTurnAudioOffset);
 g = new c.SpeechRecognitionResult(this.privRequestSession.requestId, P.Status === "Accepted" ? c.ResultReason.RecognizedKeyword : c.ResultReason.NoMatch, P.Text, P.Duration, P.Offset, void 0, void 0, void 0, void 0, P.asJson(), v), P.Status !== "Accepted" && (this.privLastResult = g);
-const
+const A = new c.SpeechRecognitionEventArgs(g, g.duration, g.resultId);
 if (this.privDialogServiceConnector.recognized)
 try {
-this.privDialogServiceConnector.recognized(this.privDialogServiceConnector,
+this.privDialogServiceConnector.recognized(this.privDialogServiceConnector, A);
 } catch {
 }
 m = !0;
 break;
 case "audio":
 {
-const I = h.requestId.toUpperCase(),
+const I = h.requestId.toUpperCase(), w = this.privTurnStateManager.GetTurn(I);
 try {
-h.binaryBody ?
+h.binaryBody ? w.audioStream.write(h.binaryBody) : w.endAudioStream();
 } catch {
 }
 }
@@ -14774,13 +14774,13 @@ function Nm() {
 await this.privRequestSession.onAudioSourceAttachCompleted(P, !1), this.privRecognizerConfig.SpeechServiceConfig.Context.audio = { source: y };
 try {
 await m, await f;
-} catch (
-return await this.cancelRecognition(this.privRequestSession.sessionId, this.privRequestSession.requestId, c.CancellationReason.Error, c.CancellationErrorCode.ConnectionFailure,
+} catch (w) {
+return await this.cancelRecognition(this.privRequestSession.sessionId, this.privRequestSession.requestId, c.CancellationReason.Error, c.CancellationErrorCode.ConnectionFailure, w), Promise.resolve();
 }
-const
-this.privRecognizer.sessionStarted && this.privRecognizer.sessionStarted(this.privRecognizer,
-}, async (
-await this.cancelRecognition(this.privRequestSession.sessionId, this.privRequestSession.requestId, c.CancellationReason.Error, c.CancellationErrorCode.RuntimeError,
+const A = new c.SessionEventArgs(this.privRequestSession.sessionId);
+this.privRecognizer.sessionStarted && this.privRecognizer.sessionStarted(this.privRecognizer, A), this.sendAudio(P).then(() => {
+}, async (w) => {
+await this.cancelRecognition(this.privRequestSession.sessionId, this.privRequestSession.requestId, c.CancellationReason.Error, c.CancellationErrorCode.RuntimeError, w);
 });
 }
 // Establishes a websocket connection to the end point.
@@ -14807,15 +14807,15 @@ function Nm() {
 }
 break;
 case "speech.startdetected":
-const P = o.SpeechDetected.fromJSON(C.textBody, this.privRequestSession.currentTurnAudioOffset),
-this.privRecognizer.speechStartDetected && this.privRecognizer.speechStartDetected(this.privRecognizer,
+const P = o.SpeechDetected.fromJSON(C.textBody, this.privRequestSession.currentTurnAudioOffset), A = new c.RecognitionEventArgs(P.Offset, this.privRequestSession.sessionId);
+this.privRecognizer.speechStartDetected && this.privRecognizer.speechStartDetected(this.privRecognizer, A);
 break;
 case "speech.enddetected":
 let I;
 C.textBody.length > 0 ? I = C.textBody : I = "{ Offset: 0 }";
-const
-this.privRequestSession.onServiceRecognized(
-const O = new c.RecognitionEventArgs(
+const w = o.SpeechDetected.fromJSON(I, this.privRequestSession.currentTurnAudioOffset);
+this.privRequestSession.onServiceRecognized(w.Offset);
+const O = new c.RecognitionEventArgs(w.Offset, this.privRequestSession.sessionId);
 this.privRecognizer.speechEndDetected && this.privRecognizer.speechEndDetected(this.privRecognizer, O);
 break;
 case "turn.end":
@@ -15029,25 +15029,25 @@ function Lm() {
 C.headers = S, this.privRestAdapter.options = C;
 const y = `https://${l}${this.privRestPath}`;
 this.privRestAdapter.request(t.RestRequestType.Post, y, f, null).then((P) => {
-const
+const A = t.RestMessageAdapter.extractHeaderValue(this.privRequestParams.requestId, P.headers);
 if (!P.ok) {
 if (u) {
-let
+let w = this.privErrors.invalidCreateJoinConversationResponse.replace("{status}", P.status.toString()), O;
 try {
-O = JSON.parse(P.data),
+O = JSON.parse(P.data), w += ` [${O.error.code}: ${O.error.message}]`;
 } catch {
-
+w += ` [${P.data}]`;
 }
-
+A && (w += ` ${A}`), u(w);
 }
 return;
 }
 const I = JSON.parse(P.data);
-if (I && (I.requestId =
+if (I && (I.requestId = A), s) {
 try {
 s(I);
-} catch (
-u && u(
+} catch (w) {
+u && u(w);
 }
 s = void 0;
 }
@@ -15655,7 +15655,7 @@ function Gm() {
 const m = n.CommandResponsePayload.fromJSON(l.textBody);
 switch (m.command.toLowerCase()) {
 case "participantlist":
-const
+const A = n.ParticipantsListPayloadResponse.fromJSON(l.textBody), I = A.participants.map((K) => ({
 avatar: K.avatar,
 displayName: K.nickname,
 id: K.participantId,
@@ -15664,7 +15664,7 @@ function Gm() {
 isUsingTts: K.usetts,
 preferredLanguage: K.locale
 }));
-this.privConversationServiceConnector.participantsListReceived && this.privConversationServiceConnector.participantsListReceived(this.privConversationServiceConnector, new a.ParticipantsListEventArgs(
+this.privConversationServiceConnector.participantsListReceived && this.privConversationServiceConnector.participantsListReceived(this.privConversationServiceConnector, new a.ParticipantsListEventArgs(A.roomid, A.token, A.translateTo, A.profanityFilter, A.roomProfanityFilter, A.roomLocked, A.muteAll, I, h));
 break;
 case "settranslatetolanguages":
 this.privConversationServiceConnector.participantUpdateCommandReceived && this.privConversationServiceConnector.participantUpdateCommandReceived(this.privConversationServiceConnector, new a.ParticipantAttributeEventArgs(m.participantId, o.ConversationTranslatorCommandTypes.setTranslateToLanguages, m.value, h));
@@ -15691,14 +15691,14 @@ function Gm() {
 this.privConversationServiceConnector.participantUpdateCommandReceived && this.privConversationServiceConnector.participantUpdateCommandReceived(this.privConversationServiceConnector, new a.ParticipantAttributeEventArgs(m.participantId, o.ConversationTranslatorCommandTypes.changeNickname, m.value, h));
 break;
 case "joinsession":
-const
-avatar:
-displayName:
-id:
-isHost:
-isMuted:
-isUsingTts:
-preferredLanguage:
+const w = n.ParticipantPayloadResponse.fromJSON(l.textBody), O = {
+avatar: w.avatar,
+displayName: w.nickname,
+id: w.participantId,
+isHost: w.ishost,
+isMuted: w.ismuted,
+isUsingTts: w.usetts,
+preferredLanguage: w.locale
 };
 this.privConversationServiceConnector.participantJoinCommandReceived && this.privConversationServiceConnector.participantJoinCommandReceived(this.privConversationServiceConnector, new a.ParticipantEventArgs(O, h));
 break;
@@ -17447,28 +17447,30 @@ const _S = { class: "max-h-screen overflow-auto md:p-6 p-3 flex flex-col items-c
 This is a role-play simulation designed to mirror real-world scenarios. Just follow the prompts, speak naturally, and let the AI guide you. Based on your responses, you’ll receive personalized feedback to help sharpen your client engagement skills.`, LS = /* @__PURE__ */ Pe({
 __name: "greetingsSimulation",
 setup(t) {
-const e = be(), r = St(), i = M(!1), c = M(!1), a = M(!0), o =
-
+const e = be(), r = St(), i = M(!1), c = M(!1), a = M(!0), o = () => {
+e.simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" ? (e.beginFlag = !1, i.value = !0) : (e.beginFlag = !1, v());
+}, n = `Hi, get ready to test your ${e.skillNameForSimulation || "GenpactNext"} knowledge! Here’s what to expect:`, s = ` Hi ${e.learnerName || "Expert"}, and welcome!`, u = ` Hi, ${e.learnerName || "Expert"}. Get ready to test your ${e.skillNameForSimulation || "GenpactNext"} knowledge! Here’s what to expect:`, p = Lo(() => ["32b7d6d8-f7a8-40a0-ab84-8784f25897ef"].includes(e.simulationSkill) ? n : ["7bd03cb6-79ad-4e2a-8a8b-88dc16147369"].includes(e.simulationSkill) ? s : u), d = `Hi, get ready to test your ${e.skillNameForSimulation || "GenpactNext"} knowledge! Here’s what to expect:`, l = ` AI Guru will ask you a few questions on '${e.skillNameForSimulation.trim() || "GenpactNext"} '. Please provide detailed and thoughtful answers to the best of your knowledge. At the end, you'll receive a feedback, outlining your strengths and improvement areas.`, h = Lo(() => ["32b7d6d8-f7a8-40a0-ab84-8784f25897ef"].includes(e.simulationSkill) ? d : ["7bd03cb6-79ad-4e2a-8a8b-88dc16147369"].includes(e.simulationSkill) ? jS : l), v = async () => {
+var g, m, f, S, C, y;
 console.log("firstQuestion fired==>"), a.value = !1;
 try {
-const
+const P = {
 question: "Hello, let's start"
 };
-if (e.conversations.push(
-const
+if (e.conversations.push(P), e.assessmentQuestion) {
+const A = {
 past_messages: e.past_messages,
 question: "Let's begin",
 bandCode: e.band_code,
 userGenomeFunction: e.genomeFunctionValue,
 userGenomeRole: e.genomeRoleValue,
 skillId: e.simulationSkill || "937e84ef-e95d-4327-9afe-e7be2bf420eb"
-},
+}, I = M("");
 e.dotLoader = !0, le.value = "listening";
-const
-e.addConversation((
+const w = await gt("/sqlTest", A);
+e.addConversation((g = w.data) == null ? void 0 : g.body, "new"), I.value = (f = (m = w == null ? void 0 : w.data) == null ? void 0 : m.body) == null ? void 0 : f.answer, e.dotLoader = !1, le.value = "bot", e.showVoiceComponent && (e.elevenLabs ? await Qd(I.value.replace(/[*#]/g, "")) : await Jd(I.value.replace(/[*#]/g, "")));
 } else {
 c.value = !0, e.dotLoader = !0, console.log("firstQuestion fired==> else part");
-const
+const A = {
 assessmentId: e.simulationSkill,
 question: "Hello, Let's begin",
 past_messages: e.past_messages,
@@ -17481,28 +17483,28 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 storeFinalFeedback: e.storeFinalFeedback,
 storeFinalScore: e.storeFinalScore
 };
-let
-Dt === "prod" ?
-const
-e.addConversation((
+let I;
+Dt === "prod" ? I = "/ManualExportActivity" : I = "/fetchCompletionDemo";
+const w = await gt(I, A);
+e.addConversation((S = w.data) == null ? void 0 : S.body, "assessment"), e.dotLoader = !1;
 }
-} catch (
-if (console.error(
-const
-console.log("Error found!"), console.error((
+} catch (P) {
+if (console.error(P), Ce.isAxiosError(P)) {
+const A = P;
+console.log("Error found!"), console.error((C = A.response) == null ? void 0 : C.data), r.setError(((y = A.response) == null ? void 0 : y.data).message);
 } else
 r.setError("There was an issue, please contact helpmate");
 }
 };
-return (
+return (g, m) => (N(), j("section", null, [
 ge(sp, { "is-visible": i.value }, {
 default: op(() => [
 R("div", _S, [
 TS,
 bS,
 R("button", {
-onClick:
-i.value = !1,
+onClick: m[0] || (m[0] = (f) => {
+i.value = !1, v();
 }),
 class: "bg-[#021c42] rounded-md px-4 py-1.5 text-center text-[16px] text-white cursor-pointer italic hover:bg-blue-700"
 }, " Proceed ")
@@ -17512,17 +17514,15 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 }, 8, ["is-visible"]),
 R("div", MS, [
 R("div", OS, [
-R("div", DS, Ve(
-R("div", kS, Ve(
+R("div", DS, Ve(p.value), 1),
+R("div", kS, Ve(h.value), 1)
 ])
 ]),
-
+k(e).beginFlag ? (N(), j("span", NS, [
 R("button", {
 type: "button",
 id: "button",
-onClick:
-D(e).beginFlag = !1, i.value = !0;
-}),
+onClick: o,
 class: "mt-4 bg-[#021c42] rounded-md px-4 py-1.5 text-center text-[16px] text-white cursor-pointer italic hover:bg-blue-700"
 }, " Let's begin ")
 ])) : _e("", !0),
@@ -17651,7 +17651,7 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 a.question = (h ?? []).join(", "), console.log("selectedOptions", h);
 }), (h, v) => (N(), j("div", qS, [
 R("div", BS, [
-
+k(a).simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" && k(a).showVoiceComponent === !0 && k(a).conversations.length - 1 === r.ansId && !r.answer.includes("BEGIN ROLE PLAY") ? (N(), j("span", {
 key: 0,
 class: "fontFormattingAMA",
 innerHTML: o.value
@@ -17661,7 +17661,7 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 innerHTML: l.value
 }, null, 8, US)),
 R("div", null, [
-t.optionsArray.length &&
+t.optionsArray.length && k(a).mcqType === "multi_choice" ? (N(), j("div", HS, [
 (N(!0), j(xo, null, qo(t.optionsArray, (g, m) => (N(), j("div", {
 key: m,
 class: "italic"
@@ -17672,7 +17672,7 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 type: "checkbox",
 value: g,
 "onUpdate:modelValue": v[0] || (v[0] = (f) => i.value = f),
-disabled:
+disabled: k(a).conversations.length - 1 !== r.ansId,
 name: l.value
 }, null, 8, VS), [
 [Rl, i.value]
@@ -17689,8 +17689,8 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 R("input", {
 class: "mr-4 cursor-pointer",
 type: "radio",
-onClick: (f) =>
-disabled:
+onClick: (f) => k(a).question = String(t.optionsArray[m]),
+disabled: k(a).conversations.length - 1 !== r.ansId,
 name: l.value
 }, null, 8, GS),
 R("label", QS, Ve(g), 1)
@@ -17698,8 +17698,8 @@ This is a role-play simulation designed to mirror real-world scenarios. Just fol
 ]))), 128))
 ])) : _e("", !0)
 ]),
-c.value &&
-
+c.value && k(a).simulationSkill === "32b7d6d8-f7a8-40a0-ab84-8784f25897ef" ? (N(), j("div", XS, eC)) : _e("", !0),
+k(a).isSpeechToggle && k(a).showSelectTypeConvesation && k(a).simulationSkill === "7bd03cb6-79ad-4e2a-8a8b-88dc16147369" && k(a).conversations.length - 1 === r.ansId ? (N(), j("div", tC, [
 rC,
 R("div", nC, [
 R("button", {
@@ -17773,7 +17773,7 @@ const mC = /* @__PURE__ */ Ft(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { cl
 const e = be();
 return (r, i) => (N(), j("div", SC, [
 R("div", CC, [
-R("div", yC, Ve(
+R("div", yC, Ve(k(e).userSpeechText), 1)
 ])
 ]));
 }
@@ -17807,7 +17807,7 @@ const mC = /* @__PURE__ */ Ft(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { cl
 class: "smooth-scroll overflow-auto md:px-6 px-2"
 }, [
 ge(LS),
-(N(!0), j(xo, null, qo(
+(N(!0), j(xo, null, qo(k(i).conversations, (s, u) => (N(), j("div", { key: u }, [
 s.question ? (N(), j("div", RC, [
 ge(uC, {
 question: s.question,
@@ -17824,7 +17824,7 @@ const mC = /* @__PURE__ */ Ft(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { cl
 c2question: s == null ? void 0 : s.c2question,
 "show-sources": s == null ? void 0 : s.showSources,
 questionId: s.id,
-answer: s.answer.replaceAll("<a",
+answer: s.answer.replaceAll("<a", k(e)).replaceAll("<ul", k(r)),
 "show-feedback": s.showFeedback,
 "related-ques": s.relatedQues,
 "options-array": s.optionsArray,
@@ -17833,10 +17833,10 @@ const mC = /* @__PURE__ */ Ft(gC, [["__scopeId", "data-v-347ad6de"]]), SC = { cl
 }, null, 8, ["question", "c2question", "show-sources", "questionId", "answer", "show-feedback", "related-ques", "options-array", "ans-id"])
 ])) : _e("", !0)
 ]))), 128)),
-
+k(i).dotLoader ? (N(), j("div", wC, [
 ge(mC)
 ])) : _e("", !0),
-
+k(i).userspeaking ? (N(), j("div", AC, [
 ge(PC)
 ])) : _e("", !0)
 ], 512));
@@ -17937,15 +17937,15 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 }), Vi(Ln, async (u) => {
 u && (console.log("Recognized text:", u), le.value = "listening", await s(), Ln.value = "");
 }), (u, p) => (N(), j("div", TC, [
-
+k(le) === "bot" ? (N(), j("div", bC, [
 R("span", null, [
 (N(), j("svg", MC, DC))
 ])
-])) :
+])) : k(le) === "user" ? (N(), j("div", kC, [
 R("span", null, [
 (N(), j("svg", NC, jC))
 ])
-])) :
+])) : k(le) === "listening" ? (N(), j("div", LC, qC)) : _e("", !0),
 R("div", {
 class: kt([c.value ? "" : "animate-pulse", "tooltip"])
 }, [
@@ -17964,7 +17964,7 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 class: "rounded-full bg-white p-2 hover:bg-gray-300",
 onClick: p[2] || (p[2] = (d) => n())
 }, [
-ge(
+ge(k(_l), { class: "h-[10px] w-[10px] text-gray-700" })
 ])
 ]));
 }
@@ -18027,7 +18027,7 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 console.log("ques", y), e.conversations.push(y);
 const P = M("");
 P.value = e.question || e.recognizedText, e.question = "";
-const
+const A = {
 ohr_id: i.value,
 past_messages: e.past_messages,
 question: P.value || e.recognizedText,
@@ -18035,10 +18035,10 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 userGenomeFunction: e.genomeFunctionValue,
 userGenomeRole: e.genomeRoleValue,
 skillId: e.simulationSkill || "937e84ef-e95d-4327-9afe-e7be2bf420eb"
-}, I = await gt("/sqlTest",
+}, I = await gt("/sqlTest", A);
 if (i.value = (h = (l = I.data) == null ? void 0 : l.body) == null ? void 0 : h.ohr, ((g = (v = I == null ? void 0 : I.data) == null ? void 0 : v.body) == null ? void 0 : g.answer) === "") {
 console.log("in the if case calling api again");
-const O = await gt("/sqlTest",
+const O = await gt("/sqlTest", A);
 e.addConversation((m = O.data) == null ? void 0 : m.body, "new");
 } else
 console.log("in the else case"), e.addConversation((f = I.data) == null ? void 0 : f.body, "new");
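These two hunks cover the /sqlTest request: a payload (ohr_id, past_messages, question, genome function and role, skillId) is posted, and when the returned answer is an empty string the identical payload is posted once more before the reply is appended. A sketch with assumed readable names, where postJson stands in for the minified helper gt:

// Editor's sketch of the retry visible above, not the package source.
async function askSqlTest(postJson, payload, addConversation) {
  const first = await postJson("/sqlTest", payload);
  if (first.data?.body?.answer === "") {
    // Single retry with the identical payload, mirroring the bundle.
    const second = await postJson("/sqlTest", payload);
    addConversation(second.data?.body, "new");
    return second;
  }
  addConversation(first.data?.body, "new");
  return first;
}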
@@ -18057,7 +18057,7 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 R("div", GC, [
 R("div", QC, [
 XC,
-
+k(e).showVoiceComponent ? (N(), j("div", ZC, [
 ge(VC)
 ])) : _e("", !0)
 ]),
@@ -18081,12 +18081,12 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 R("div", oy, [
 s.value ? (N(), j("div", dy, vy)) : (N(), j("div", ay, [
 (N(), j("svg", {
-disabled:
+disabled: k(e).beginFlag === !0,
 class: kt([
-
+k(e).beginFlag === !0 ? "cursor-not-allowed text-gray-600" : "cursor-pointer",
 "bi bi-send ml-2 h-7 w-7 rotate-45 rounded"
 ]),
-onClick: h[0] || (h[0] = (v) =>
+onClick: h[0] || (h[0] = (v) => k(e).searchBoxdisable === !1 || k(e).beginFlag === !0 ? "" : d()),
 xmlns: "http://www.w3.org/2000/svg",
 fill: "currentColor",
 viewBox: "0 0 16 16"
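Note that a disabled attribute on an <svg> element has no native effect, so the guard that actually blocks sending is the ternary compiled into onClick. A sketch of that guard; makeSendHandler and send are hypothetical names, while the flag names come from the bundle:

// Editor's sketch of the click guard above.
function makeSendHandler(state, send) {
  return () => {
    // Mirrors: searchBoxdisable === !1 || beginFlag === !0 ? "" : d()
    if (state.searchBoxdisable === false || state.beginFlag === true) return;
    send();
  };
}

const handler = makeSendHandler({ searchBoxdisable: true, beginFlag: false },
  () => console.log("sent"));
handler(); // logs "sent"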
@@ -18097,20 +18097,20 @@ const _C = /* @__PURE__ */ Ft(IC, [["__scopeId", "data-v-e55a322f"]]), TC = { cl
 Jo(R("input", {
 onKeyup: [
 h[1] || (h[1] = cp((v) => s.value ? "" : d(), ["enter"])),
-h[3] || (h[3] = (v) => p(
+h[3] || (h[3] = (v) => p(k(e).question))
 ],
-disabled:
+disabled: k(e).beginFlag === !0 || k(e).searchBoxdisable === !1,
 class: kt([
-
+k(e).beginFlag === !0 || k(e).searchBoxdisable === !1 ? "cursor-not-allowed bg-[#e5e7eb]" : "bg-white",
 "fontFormattingAMA rounded-lg flex w-full border border-genpact-darkest-teal py-[18px] px-[70px]"
 ]),
-"onUpdate:modelValue": h[2] || (h[2] = (v) =>
+"onUpdate:modelValue": h[2] || (h[2] = (v) => k(e).question = v),
 placeholder: "Write your answer here...",
 type: "text",
 ref_key: "AiSearch",
 ref: a
 }, null, 42, gy), [
-[ap,
+[ap, k(e).question]
 ])
 ])
 ]),
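The onUpdate:modelValue handler paired with the [ap, k(e).question] directive tuple above is the compiled form of v-model on a text input, assuming ap is Vue's vModelText directive. A hand-wired sketch of the same two-way binding:

// Editor's sketch of what the compiled pair corresponds to in source form:
// <input v-model="question" placeholder="Write your answer here..." />
import { ref } from "vue";

const question = ref("");
const props = {
  // Compiled writer half of v-model.
  "onUpdate:modelValue": (value) => { question.value = value; },
};

props["onUpdate:modelValue"]("hello");
console.log(question.value); // "hello"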
@@ -18196,12 +18196,12 @@ const Cy = /* @__PURE__ */ Ft(Sy, [["__scopeId", "data-v-e951c0ed"]]), po = (t)
 R("div", Ay, [
 n.value ? (N(), j("div", My, ky)) : (N(), j("div", Iy, [
 (N(), j("svg", {
-disabled:
+disabled: k(e).beginFlag === !0,
 class: kt([
-
+k(e).beginFlag === !0 ? "cursor-not-allowed text-gray-600" : "cursor-pointer",
 "bi bi-send ml-2 h-7 w-7 rotate-45 rounded"
 ]),
-onClick: d[0] || (d[0] = (l) =>
+onClick: d[0] || (d[0] = (l) => k(e).searchBoxdisable === !1 || k(e).beginFlag === !0 ? "" : u()),
 xmlns: "http://www.w3.org/2000/svg",
 fill: "currentColor",
 viewBox: "0 0 16 16"
@@ -18212,20 +18212,20 @@ const Cy = /* @__PURE__ */ Ft(Sy, [["__scopeId", "data-v-e951c0ed"]]), po = (t)
 Jo(R("input", {
 onKeyup: [
 d[1] || (d[1] = cp((l) => n.value ? "" : u(), ["enter"])),
-d[3] || (d[3] = (l) => s(
+d[3] || (d[3] = (l) => s(k(e).question))
 ],
-disabled:
+disabled: k(e).beginFlag === !0 || k(e).searchBoxdisable === !1,
 class: kt([
-
+k(e).beginFlag === !0 || k(e).searchBoxdisable === !1 ? "cursor-not-allowed bg-[#e5e7eb]" : "bg-white",
 "fontFormattingAMA rounded-lg flex w-full border border-genpact-darkest-teal py-[18px] pl-[30px] pr-[70px]"
 ]),
-"onUpdate:modelValue": d[2] || (d[2] = (l) =>
+"onUpdate:modelValue": d[2] || (d[2] = (l) => k(e).question = l),
 placeholder: "Write your answer here...",
 type: "text",
 ref_key: "AiSearch",
 ref: c
 }, null, 42, zy), [
-[ap,
+[ap, k(e).question]
 ])
 ])
 ]),
@@ -18262,12 +18262,12 @@ const xy = /* @__PURE__ */ Ft(Ly, [["__scopeId", "data-v-bbd70acc"]]), Xd = (t)
 console.log("uuid. value", g), e.value = !0, await S(), await C(), c.assessmentQuestion || (console.log("getAssessmentDetails========================>"), await f()), e.value = !1;
 });
 const f = async () => {
-var y, P,
+var y, P, A, I, w, O, F, Q, G, K, $t, ii, si, oi, ai, ci, ui, pi, di, li, hi, vi, fi, gi, mi, Si, Ci, yi, Pi, Ri, Ei, wi, Ai, Ii, _i, Ti, bi, Kt, Jt, Mi, Oi, Di, ki, Ni, zi, ji, Li, Pt;
 console.log("Start getAssessmentDetails");
 try {
 Dt === "prod" ? p = `https://api-v2.genpact.com/api/my-profile/assessment-master/getAssessmentDetailsById?id=${c.simulationSkill}` : p = `https://api-v2-dev.genpact.com/api/my-profile/assessment-master/getAssessmentDetailsById?id=${c.simulationSkill}`;
 const _ = await xn(p);
-o.value = (P = (y = _ == null ? void 0 : _.data) == null ? void 0 : y.data) == null ? void 0 : P.name, c.skillNameForSimulation = o.value, c.generatedPrompt = (I = (
+o.value = (P = (y = _ == null ? void 0 : _.data) == null ? void 0 : y.data) == null ? void 0 : P.name, c.skillNameForSimulation = o.value, c.generatedPrompt = (I = (A = _ == null ? void 0 : _.data) == null ? void 0 : A.data) == null ? void 0 : I.generatedPrompt, c.assessmentType = (O = (w = _ == null ? void 0 : _.data) == null ? void 0 : w.data) == null ? void 0 : O.assessmentType, s.value = (Q = (F = _ == null ? void 0 : _.data) == null ? void 0 : F.data) == null ? void 0 : Q.status, h.value = (K = (G = _ == null ? void 0 : _.data) == null ? void 0 : G.data) == null ? void 0 : K.editMode, c.empSeniority = (ii = ($t = _ == null ? void 0 : _.data) == null ? void 0 : $t.data) == null ? void 0 : ii.empSeniority, c.functionRole = (oi = (si = _ == null ? void 0 : _.data) == null ? void 0 : si.data) == null ? void 0 : oi.functionRole, d = (ci = (ai = _ == null ? void 0 : _.data) == null ? void 0 : ai.data) == null ? void 0 : ci.accessBand, l = (pi = (ui = _ == null ? void 0 : _.data) == null ? void 0 : ui.data) == null ? void 0 : pi.accessOhrList;
 const Le = (li = (di = _ == null ? void 0 : _.data) == null ? void 0 : di.data) == null ? void 0 : li.createdBy;
 if (c.mcqType = (vi = (hi = _ == null ? void 0 : _.data) == null ? void 0 : hi.data) == null ? void 0 : vi.mcqTypes, c.storeFinalFeedback = (gi = (fi = _ == null ? void 0 : _.data) == null ? void 0 : fi.data) == null ? void 0 : gi.storeFinalFeedback, c.storeFinalScore = (Si = (mi = _ == null ? void 0 : _.data) == null ? void 0 : mi.data) == null ? void 0 : Si.storeFinalScore, s.value === "inactive")
 r.value = !0;
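The long assignment chain above is down-compiled optional chaining: each (y = x) == null ? void 0 : y.prop step is one ?. in the source, and the var line merely hoists the temporaries. A worked sketch:

// Editor's sketch; the response shape is invented for illustration.
const response = { data: { data: { name: "Skill", generatedPrompt: "p" } } };

let y, P; // compiled form reuses hoisted temporaries
const nameCompiled =
  (P = (y = response == null ? void 0 : response.data) == null ? void 0 : y.data) ==
  null
    ? void 0
    : P.name;

const nameSource = response?.data?.data?.name; // source form
console.log(nameCompiled === nameSource); // true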
@@ -18319,7 +18319,7 @@ const xy = /* @__PURE__ */ Ft(Ly, [["__scopeId", "data-v-bbd70acc"]]), Xd = (t)
 a.setError("There was an issue, please contact helpmate");
 }
 }, S = async () => {
-var y, P,
+var y, P, A, I, w;
 try {
 if (g === null)
 throw {
@@ -18340,16 +18340,16 @@ const xy = /* @__PURE__ */ Ft(Ly, [["__scopeId", "data-v-bbd70acc"]]), Xd = (t)
 } catch (O) {
 if (console.error(O), e.value = !1, Ce.isAxiosError(O) && ((y = O.response) != null && y.status) || O && typeof O == "object" && "response" in O && O.response && typeof O.response == "object" && "status" in O.response && O.response.status) {
 const F = O;
-if (console.log("Error found!"), console.error((P = F.response) == null ? void 0 : P.data), ((
+if (console.log("Error found!"), console.error((P = F.response) == null ? void 0 : P.data), ((A = F.response) == null ? void 0 : A.status) === 424 || ((I = F.response) == null ? void 0 : I.status) === 425) {
 i.value = !0;
 return;
 }
-a.setError(((
+a.setError(((w = F.response) == null ? void 0 : w.data).message);
 } else
 a.setError("There was an issue, please contact helpmate");
 }
 }, C = async () => {
-var y, P,
+var y, P, A, I, w, O, F, Q;
 try {
 const G = await Ce({
 url: "https://api-v2-dev.genpact.com/api/delphi/v2/myprofile",
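The catch block above special-cases HTTP 424 and 425 (a flag is raised and the function returns) while every other failure surfaces the response message or a generic fallback. A simplified sketch with assumed names; the real block also duck-types non-axios errors carrying a response.status:

// Editor's sketch, not the package source.
import axios from "axios";

function handleAssessmentError(error, setBlocked, setError) {
  if (axios.isAxiosError(error) && error.response?.status) {
    const { status, data } = error.response;
    if (status === 424 || status === 425) {
      setBlocked(true); // special-cased "not ready" statuses
      return;
    }
    setError(data?.message);
    return;
  }
  setError("There was an issue, please contact helpmate");
}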
@@ -18359,7 +18359,7 @@ const xy = /* @__PURE__ */ Ft(Ly, [["__scopeId", "data-v-bbd70acc"]]), Xd = (t)
 },
 method: "get"
 });
-c.genomeFunctionValue = (
+c.genomeFunctionValue = (A = (P = (y = G == null ? void 0 : G.data) == null ? void 0 : y.role_function) == null ? void 0 : P.function) == null ? void 0 : A.name, c.genomeRoleValue = (O = (w = (I = G == null ? void 0 : G.data) == null ? void 0 : I.role_function) == null ? void 0 : w.role) == null ? void 0 : O.name, console.log("profile data", c.genomeFunctionValue, c.genomeRoleValue);
 } catch (G) {
 if (console.error(G), e.value = !1, Ce.isAxiosError(G)) {
 const K = G;
@@ -18386,10 +18386,10 @@ const xy = /* @__PURE__ */ Ft(Ly, [["__scopeId", "data-v-bbd70acc"]]), Xd = (t)
 R("div", Vy, [
 ge(_C),
 R("div", {
-class: kt(
+class: kt(k(c).showVoiceComponent === !0 ? "mt-[160px]" : "mt-[120px]")
 }, null, 2)
 ]),
-
+k(c).assessmentQuestion ? (N(), j("div", $y, [
 ge(Cy)
 ])) : (N(), j("div", Ky, [
 ge(xy)
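This final hunk binds the spacer's top margin to the voice-component flag and swaps the question panel on assessmentQuestion. Assuming kt is Vue's normalizeClass, the margin binding reduces to:

// Editor's sketch of the class binding above.
import { normalizeClass } from "vue";

const showVoiceComponent = true;
const marginClass = normalizeClass(
  showVoiceComponent === true ? "mt-[160px]" : "mt-[120px]"
);
console.log(marginClass); // "mt-[160px]"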