deep-chat-dev 1.1.18 → 1.1.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/custom-elements.json +1837 -1868
- package/dist/deepChat.bundle.js +1 -1
- package/dist/deepChat.js +51 -53
- package/dist/types/demo.d.ts +2 -2
- package/dist/types/demo.d.ts.map +1 -1
- package/dist/utils/demo/demo.d.ts.map +1 -1
- package/package.json +1 -1
package/dist/deepChat.js
CHANGED
@@ -106,8 +106,7 @@ const _TextInputEl = class {
 this.elementRef = _TextInputEl.createContainerElement((r = t == null ? void 0 : t.styles) == null ? void 0 : r.container), this.inputElementRef = this.createInputElement(t), this._config = t, this.elementRef.appendChild(this.inputElementRef), e != null && e.characterLimit && InputLimit.add(this.inputElementRef, e == null ? void 0 : e.characterLimit);
 }
 static processConfig(n, e) {
-
-return e ?? (e = {}), e.disabled ?? (e.disabled = n.isTextInputDisabled), e.placeholder ?? (e.placeholder = {}), (t = e.placeholder).text ?? (t.text = n.textInputPlaceholderText), e;
+return e || (e = {}), e.disabled || (e.disabled = n.isTextInputDisabled), e.placeholder || (e.placeholder = {}), e.placeholder.text || (e.placeholder.text = n.textInputPlaceholderText), e;
 }
 // this is is a bug fix where if the browser is scrolled down and the user types in text that creates new line
 // the browser scrollbar will move up which leads to undesirable UX.
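The `processConfig` hunk above swaps nullish-coalescing defaults (`??`) for `||` guards. As a minimal illustrative sketch (the config shape and placeholder string below are assumptions, not taken from the package), the two styles differ only for falsy-but-defined values such as `false` or an empty string:

```ts
// Illustrative comparison only: not the library's code, just the two guard styles.
interface TextInputConfig { disabled?: boolean; placeholder?: { text?: string } }

// ??= only fills in null/undefined, so explicit falsy values are preserved.
function fillDefaultsNullish(config: TextInputConfig): TextInputConfig {
  config.disabled ??= false;
  (config.placeholder ??= {}).text ??= "Ask me anything";
  return config;
}

// || also overwrites falsy values such as false or "".
function fillDefaultsFalsy(config: TextInputConfig): TextInputConfig {
  config.disabled || (config.disabled = false);
  config.placeholder || (config.placeholder = {});
  config.placeholder.text || (config.placeholder.text = "Ask me anything");
  return config;
}

console.log(fillDefaultsNullish({ placeholder: { text: "" } }).placeholder?.text); // ""
console.log(fillDefaultsFalsy({ placeholder: { text: "" } }).placeholder?.text);   // "Ask me anything"
```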
@@ -304,7 +303,7 @@ function fetchEventSource(n, e) {
 f.accept || (f.accept = EventStreamContentType);
 let b;
 function m() {
-b.abort(), document.hidden ||
+b.abort(), document.hidden || x();
 }
 c || document.addEventListener("visibilitychange", m);
 let g = DefaultRetryInterval, v = 0;
@@ -315,27 +314,27 @@ function fetchEventSource(n, e) {
 y(), h();
 });
 const _ = u ?? window.fetch, E = s ?? defaultOnOpen;
-async function
+async function x() {
 var A;
 b = new AbortController();
 try {
 const T = await _(n, Object.assign(Object.assign({}, d), { headers: f, signal: b.signal }));
-await E(T), await getBytes(T.body, getLines(getMessages((
-
-}, (
-g =
+await E(T), await getBytes(T.body, getLines(getMessages((S) => {
+S ? f[LastEventId] = S : delete f[LastEventId];
+}, (S) => {
+g = S;
 }, o))), a == null || a(), y(), h();
 } catch (T) {
 if (!b.signal.aborted)
 try {
-const
-window.clearTimeout(v), v = window.setTimeout(
-} catch (
-y(), p(
+const S = (A = l == null ? void 0 : l(T)) !== null && A !== void 0 ? A : g;
+window.clearTimeout(v), v = window.setTimeout(x, S);
+} catch (S) {
+y(), p(S);
 }
 }
 }
-
+x();
 });
 }
 function defaultOnOpen(n) {
@@ -366,10 +365,10 @@ class Stream {
 var _;
 if (JSON.stringify(y.data) !== JSON.stringify("[DONE]")) {
 const E = JSON.parse(y.data);
-(_ = e.extractResultData) == null || _.call(e, E).then((
-(
-${ErrorMessages.INVALID_STREAM_RESPONSE}`) : h && r.updateStreamedMessage(
-}).catch((
+(_ = e.extractResultData) == null || _.call(e, E).then((x) => {
+(x == null ? void 0 : x.text) === void 0 ? console.error(`Response data: ${y.data}
+${ErrorMessages.INVALID_STREAM_RESPONSE}`) : h && r.updateStreamedMessage(x.text, h);
+}).catch((x) => RequestUtils.displayError(r, x));
 }
 },
 onerror(y) {
@@ -433,11 +432,11 @@ const _Demo = class {
 return typeof n == "function" ? n(e) : n;
 }
 static getResponse(n) {
-return n.customDemoResponse ? _Demo.getCustomResponse(n.customDemoResponse, n.messages[n.messages.length - 1]) : { text: _Demo.generateResponse(n) };
+return n.customDemoResponse ? _Demo.getCustomResponse(n.customDemoResponse, n.messages[n.messages.length - 1]) : { result: { text: _Demo.generateResponse(n) } };
 }
 // timeout is used to simulate a timeout for a response to come back
 static request(n, e, t) {
-const r =
+const r = _Demo.getResponse(n);
 setTimeout(async () => {
 const s = await (t == null ? void 0 : t(r)) || r;
 s.error ? n.addNewErrorMessage("service", s.error) : n.addNewMessage(s.result, !0, !0), e();
@@ -446,8 +445,8 @@ const _Demo = class {
 // timeout is used to simulate a timeout for a response to come back
 static requestStream(n, e) {
 setTimeout(() => {
-var r;
-const t = (r = _Demo.getResponse(n)) == null ? void 0 : r.text;
+var r, s;
+const t = (s = (r = _Demo.getResponse(n)) == null ? void 0 : r.result) == null ? void 0 : s.text;
 Stream.simulate(n, e, t);
 }, 400);
 }
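In the two `_Demo` hunks above, the built-in demo response moves from a bare `{ text }` object to a `{ result: { text } }` envelope, which is what `request` already unwraps via `s.result` and what `requestStream` now reads through `.result.text`. A rough un-minified sketch of that flow (the descriptive names below stand in for the minified `n`, `e`, `t`, `r`, `s` identifiers and are not the library's own):

```ts
// Sketch reconstructed from the diff; names and simplified types are assumptions.
interface DemoResult { text?: string }
interface DemoServiceResponse { result?: DemoResult; error?: string }

interface MessagesView {
  addNewMessage(result: DemoResult | undefined, isAI: boolean, update: boolean): void;
  addNewErrorMessage(type: string, message: string): void;
}

const generateResponse = (): string => "Hi, this is a demo response.";

// 1.1.18 returned { text: ... }; 1.1.21 wraps the text in a result envelope.
function getResponse(): DemoServiceResponse {
  return { result: { text: generateResponse() } };
}

function request(messages: MessagesView, onFinish: () => void): void {
  const response = getResponse();
  setTimeout(() => {
    response.error
      ? messages.addNewErrorMessage("service", response.error)
      : messages.addNewMessage(response.result, true, true);
    onFinish();
  }, 400);
}

// The streaming path reads the same envelope: getResponse().result?.text is what
// gets handed to the stream simulator in requestStream.
```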
@@ -1445,7 +1444,7 @@ function markTightParagraphs(n, e) {
 n.tokens[t].level === s && n.tokens[t].type === "paragraph_open" && (n.tokens[t + 2].tight = !0, n.tokens[t].tight = !0, t += 2);
 }
 function list(n, e, t, r) {
-var s, o, a, l, c, u, d, h, p, f, b, m, g, v, y, _, E,
+var s, o, a, l, c, u, d, h, p, f, b, m, g, v, y, _, E, x, A = !0, T, S, C, w;
 if ((h = skipOrderedListMarker(n, e)) >= 0)
 g = !0;
 else if ((h = skipBulletListMarker(n, e)) >= 0)
@@ -1467,18 +1466,18 @@ function list(n, e, t, r) {
 level: n.level++
 }), s = e, _ = !1, T = n.parser.ruler.getRules("list"); s < t && (v = n.skipSpaces(h), p = n.eMarks[s], v >= p ? f = 1 : f = v - h, f > 4 && (f = 1), f < 1 && (f = 1), o = h - n.bMarks[s] + f, n.tokens.push({
 type: "list_item_open",
-lines:
+lines: x = [e, 0],
 level: n.level++
 }), l = n.blkIndent, c = n.tight, a = n.tShift[e], u = n.parentType, n.tShift[e] = v - n.bMarks[e], n.blkIndent = o, n.tight = !0, n.parentType = "list", n.parser.tokenize(n, e, t, !0), (!n.tight || _) && (A = !1), _ = n.line - e > 1 && n.isEmpty(n.line - 1), n.blkIndent = l, n.tShift[e] = a, n.tight = c, n.parentType = u, n.tokens.push({
 type: "list_item_close",
 level: --n.level
-}), s = e = n.line,
-for (
-if (T[
-
+}), s = e = n.line, x[1] = s, v = n.bMarks[e], !(s >= t || n.isEmpty(s) || n.tShift[s] < n.blkIndent)); ) {
+for (w = !1, S = 0, C = T.length; S < C; S++)
+if (T[S](n, s, t, !0)) {
+w = !0;
 break;
 }
-if (
+if (w)
 break;
 if (g) {
 if (h = skipOrderedListMarker(n, s), h < 0)
@@ -2687,9 +2686,9 @@ class SetFileTypes {
 // needs to be set after audio to overwrite maxNumberOfFiles
 // prettier-ignore
 static processMicrophone(e, t, r, s) {
-var l, c, u, d
+var l, c, u, d;
 const a = { acceptedFormats: "audio/*", ...((l = e.fileTypes.audio) == null ? void 0 : l.files) || {} };
-r && (navigator.mediaDevices.getUserMedia !== void 0 ? (e.recordAudio = SetFileTypes.parseConfig(e.requestSettings, a, t, r), typeof r == "object" && r.files && (
+r && (navigator.mediaDevices.getUserMedia !== void 0 ? (e.recordAudio = SetFileTypes.parseConfig(e.requestSettings, a, t, r), typeof r == "object" && r.files && (e.recordAudio.files || (e.recordAudio.files = {}), e.recordAudio.files.format = (c = r.files) == null ? void 0 : c.format, e.recordAudio.files.maxDurationSeconds = (u = r.files) == null ? void 0 : u.maxDurationSeconds, (d = e.fileTypes.audio) != null && d.files && !e.fileTypes.audio.files.maxNumberOfFiles && (e.fileTypes.audio.files.maxNumberOfFiles = r.files.maxNumberOfFiles))) : s || (e.fileTypes.audio = SetFileTypes.parseConfig(e.requestSettings, a, t, r)));
 }
 // prettier-ignore
 static processAudioConfig(e, t, r, s) {
@@ -2708,9 +2707,9 @@ class SetFileTypes {
 // needs to be set after images to overwrite maxNumberOfFiles
 // prettier-ignore
 static processCamera(e, t, r, s) {
-var l, c, u
+var l, c, u;
 const a = { acceptedFormats: "image/*", ...((l = e.fileTypes.images) == null ? void 0 : l.files) || {} };
-r && (navigator.mediaDevices.getUserMedia !== void 0 ? (e.camera = SetFileTypes.parseConfig(e.requestSettings, a, t, r), typeof r == "object" && (e.camera.modalContainerStyle = r.modalContainerStyle, r.files && (
+r && (navigator.mediaDevices.getUserMedia !== void 0 ? (e.camera = SetFileTypes.parseConfig(e.requestSettings, a, t, r), typeof r == "object" && (e.camera.modalContainerStyle = r.modalContainerStyle, r.files && (e.camera.files || (e.camera.files = {}), e.camera.files.format = (c = r.files) == null ? void 0 : c.format, e.camera.files.dimensions = (u = r.files) == null ? void 0 : u.dimensions))) : s || (e.fileTypes.images = SetFileTypes.parseConfig(e.requestSettings, a, t, r)));
 }
 // prettier-ignore
 static processImagesConfig(e, t, r, s) {
@@ -2721,8 +2720,7 @@ class SetFileTypes {
 }
 // default for direct service
 static populateDefaultFileIO(e, t) {
-
-e && (e.files ?? (e.files = {}), (r = e.files).acceptedFormats ?? (r.acceptedFormats = t), (s = e.files).maxNumberOfFiles ?? (s.maxNumberOfFiles = 1));
+e && (e.files || (e.files = {}), e.files.acceptedFormats || (e.files.acceptedFormats = t), e.files.maxNumberOfFiles || (e.files.maxNumberOfFiles = 1));
 }
 static set(e, t, r) {
 SetFileTypes.populateDefaultFileIO(r == null ? void 0 : r.audio, ".4a,.mp3,.webm,.mp4,.mpga,.wav,.mpeg,.m4a"), SetFileTypes.populateDefaultFileIO(r == null ? void 0 : r.images, ".png,.jpg");
@@ -2732,8 +2730,8 @@ class SetFileTypes {
 }
 class BaseServiceIO {
 constructor(e, t, r) {
-var s, o, a
-this.rawBody = {}, this.validateConfigKey = !1, this.canSendMessage = BaseServiceIO.canSendMessage, this.requestSettings = {}, this.fileTypes = {}, this.completionsHandlers = {}, this.streamHandlers = {}, this.deepChat = e, this.demo = r, Object.assign(this.rawBody, (s = e.request) == null ? void 0 : s.additionalBodyProps), this.totalMessagesMaxCharLength = (o = e == null ? void 0 : e.requestBodyLimits) == null ? void 0 : o.totalMessagesMaxCharLength, this.maxMessages = (a = e == null ? void 0 : e.requestBodyLimits) == null ? void 0 : a.maxMessages, SetFileTypes.set(e, this, t), e.request && (this.requestSettings = e.request), this.demo &&
+var s, o, a;
+this.rawBody = {}, this.validateConfigKey = !1, this.canSendMessage = BaseServiceIO.canSendMessage, this.requestSettings = {}, this.fileTypes = {}, this.completionsHandlers = {}, this.streamHandlers = {}, this.deepChat = e, this.demo = r, Object.assign(this.rawBody, (s = e.request) == null ? void 0 : s.additionalBodyProps), this.totalMessagesMaxCharLength = (o = e == null ? void 0 : e.requestBodyLimits) == null ? void 0 : o.totalMessagesMaxCharLength, this.maxMessages = (a = e == null ? void 0 : e.requestBodyLimits) == null ? void 0 : a.maxMessages, SetFileTypes.set(e, this, t), e.request && (this.requestSettings = e.request), this.demo && !this.requestSettings.url && (this.requestSettings.url = Demo.URL), this._directServiceRequiresFiles = !!t && Object.keys(t).length > 0, this.requestSettings.websocket && Websocket.setup(this, this.requestSettings.websocket);
 }
 static canSendMessage(e, t) {
 return !!(e && e.trim() !== "") || !!(t && t.length > 0);
@@ -2762,12 +2760,12 @@ class BaseServiceIO {
 return this.fileTypes.mixedFiles;
 }
 callServiceAPI(e, t, r) {
-var l, c
+var l, c;
 const s = { messages: t, ...this.rawBody };
 let o = !1;
-(l = this.requestSettings.headers) != null && l["Content-Type"] || (
+(l = this.requestSettings.headers) != null && l["Content-Type"] || (this.requestSettings.headers || (this.requestSettings.headers = {}), this.requestSettings.headers["Content-Type"] || (this.requestSettings.headers["Content-Type"] = "application/json"), o = !0);
 const { stream: a } = this.deepChat;
-a && (this.demo || typeof a != "object" || !a.simulation) ? Stream.request(this, s, e) : HTTPRequest.request(this, s, e), o && ((
+a && (this.demo || typeof a != "object" || !a.simulation) ? Stream.request(this, s, e) : HTTPRequest.request(this, s, e), o && ((c = this.requestSettings.headers) == null || delete c["Content-Type"]);
 }
 callApiWithFiles(e, t, r, s) {
 const o = BaseServiceIO.createCustomFormDataBody(e, r, s), a = this.requestSettings, l = this.getServiceIOByType(s[0]);
@@ -2805,7 +2803,7 @@ class DirectServiceIO extends BaseServiceIO {
 this.key = e.key, e.validateKeyProperty && (this.validateConfigKey = e.validateKeyProperty);
 }
 buildRequestSettings(e, t) {
-const r = t
+const r = t !== void 0 ? t : {};
 return r.headers = this.buildHeadersFunc(e), r;
 }
 keyAuthenticated(e, t) {
@@ -3057,7 +3055,7 @@ const _HuggingFaceIO = class extends DirectServiceIO {
 preprocessBody(n, e, t) {
 const r = JSON.parse(JSON.stringify(n)), s = e[e.length - 1].text;
 if (s)
-return r.options
+return (r.options === null || r.options === void 0) && (r.options = {}), r.options.wait_for_model = !0, { inputs: s, ...r };
 }
 callServiceAPI(n, e, t) {
 if (!this.requestSettings)
@@ -3587,9 +3585,9 @@ class AzureLanguageIO extends DirectServiceIO {
 }
 class AzureSummarizationIO extends AzureLanguageIO {
 constructor(e) {
-var s, o, a
+var s, o, a;
 const t = (o = (s = e.directConnection) == null ? void 0 : s.azure) == null ? void 0 : o.summarization, r = (a = e.directConnection) == null ? void 0 : a.azure;
-super(e, AzureUtils.buildSummarizationHeader, t.endpoint, r), this.url = "", this.textInputPlaceholderText = "Insert text to summarize",
+super(e, AzureUtils.buildSummarizationHeader, t.endpoint, r), this.url = "", this.textInputPlaceholderText = "Insert text to summarize", this.rawBody.language || (this.rawBody.language = "en"), Object.assign(this.rawBody, t), this.url = `${t.endpoint}/language/analyze-text/jobs?api-version=2022-10-01-preview`;
 }
 preprocessBody(e, t) {
 const r = t[t.length - 1].text;
@@ -3681,11 +3679,11 @@ OpenAIUtils.CONVERSE_MAX_CHAR_LENGTH = 13352;
 OpenAIUtils.FILE_MAX_CHAR_LENGTH = 1e3;
 class OpenAICompletionsIO extends DirectServiceIO {
 constructor(e) {
-var a
+var a;
 const { directConnection: t, textInput: r } = e, s = t == null ? void 0 : t.openAI;
 super(e, OpenAIUtils.buildKeyVerificationDetails(), OpenAIUtils.buildHeaders, s), this.insertKeyPlaceholderText = "OpenAI API Key", this.getKeyLink = "https://platform.openai.com/account/api-keys", this.url = "https://api.openai.com/v1/completions", this.permittedErrorPrefixes = ["Incorrect"], this._maxCharLength = OpenAIUtils.CONVERSE_MAX_CHAR_LENGTH, this.full_transaction_max_tokens = 4e3, this.numberOfCharsPerToken = 3.5;
 const o = (a = t == null ? void 0 : t.openAI) == null ? void 0 : a.completions;
-r != null && r.characterLimit && (this._maxCharLength = r.characterLimit), typeof o == "object" && Object.assign(this.rawBody, o),
+r != null && r.characterLimit && (this._maxCharLength = r.characterLimit), typeof o == "object" && Object.assign(this.rawBody, o), this.rawBody.model || (this.rawBody.model = OpenAIConverseBaseBody.GPT_COMPLETIONS_DAVINCI_MODEL);
 }
 // prettier-ignore
 preprocessBody(e, t) {
@@ -3796,7 +3794,7 @@ class AzureSpeechIO extends DirectServiceIO {
 const _AzureTextToSpeechIO = class extends AzureSpeechIO {
 // prettier-ignore
 constructor(n) {
-var r, s, o
+var r, s, o;
 const e = (s = (r = n.directConnection) == null ? void 0 : r.azure) == null ? void 0 : s.textToSpeech, t = (o = n.directConnection) == null ? void 0 : o.azure;
 super(
 n,
@@ -3808,7 +3806,7 @@ const _AzureTextToSpeechIO = class extends AzureSpeechIO {
 <p>Insert text to synthesize it to audio.
 <p>
 Click <a href="${_AzureTextToSpeechIO.HELP_LINK}">here</a> for more info.
-</p>`, this.url = "", Object.assign(this.rawBody, e),
+</p>`, this.url = "", Object.assign(this.rawBody, e), this.rawBody.lang || (this.rawBody.lang = "en-US"), this.rawBody.name || (this.rawBody.name = "en-US-JennyNeural"), this.rawBody.gender || (this.rawBody.gender = "Female"), this.url = `https://${e.region}.tts.speech.microsoft.com/cognitiveservices/v1`;
 }
 preprocessBody(n, e) {
 const t = e[e.length - 1].text;
@@ -4408,7 +4406,7 @@ class AvatarEl {
 static getPosition(e, t) {
 var s, o, a, l, c, u;
 let r = e ? (o = (s = t == null ? void 0 : t.ai) == null ? void 0 : s.styles) == null ? void 0 : o.position : (l = (a = t == null ? void 0 : t.user) == null ? void 0 : a.styles) == null ? void 0 : l.position;
-return r
+return r || (r = (u = (c = t == null ? void 0 : t.default) == null ? void 0 : c.styles) == null ? void 0 : u.position), r || (r = e ? "left" : "right"), r;
 }
 static add(e, t, r) {
 const s = typeof r == "boolean" ? void 0 : r, o = AvatarEl.createAvatar(t, s), a = AvatarEl.getPosition(t, s);
@@ -4448,7 +4446,7 @@ class Messages {
 });
 }
 static getDisplayLoadingMessage(e, t) {
-return t.websocket ? !1 : e.displayLoadingBubble
+return t.websocket ? !1 : e.displayLoadingBubble !== void 0 ? e.displayLoadingBubble : !0;
 }
 prepareDemo(e) {
 typeof e == "object" && (e.response && (this.customDemoResponse = e.response), e.displayErrors && (e.displayErrors.default && this.addNewErrorMessage("", ""), e.displayErrors.service && this.addNewErrorMessage("service", ""), e.displayErrors.speechToText && this.addNewErrorMessage("speechToText", "")), e.displayLoadingBubble && this.addLoadingMessage());
@@ -6192,10 +6190,10 @@ class SpeechToText extends MicrophoneButton {
 static processConfiguration(e, t) {
 var u;
 const r = typeof t == "object" ? t : {}, s = typeof r.webSpeech == "object" ? r.webSpeech : {}, o = r.azure || {}, a = {
-displayInterimResults: r.displayInterimResults
-textColor: r.textColor
-translations: r.translations
-commands: r.commands
+displayInterimResults: r.displayInterimResults !== void 0 ? r.displayInterimResults : void 0,
+textColor: r.textColor !== void 0 ? r.textColor : void 0,
+translations: r.translations !== void 0 ? r.translations : void 0,
+commands: r.commands !== void 0 ? r.commands : void 0,
 ...s,
 ...o
 }, l = (u = r.commands) == null ? void 0 : u.submit;
package/dist/types/demo.d.ts
CHANGED
@@ -1,9 +1,9 @@
 import { ErrorMessageOverrides, MessageContent } from './messages';
-import {
+import { CustomServiceResponse } from './customService';
 export type DemoErrors = {
 [key in keyof ErrorMessageOverrides]?: boolean;
 };
-export type DemoResponse =
+export type DemoResponse = CustomServiceResponse | ((message: MessageContent) => CustomServiceResponse);
 export type Demo = true | {
 response?: DemoResponse;
 displayErrors?: DemoErrors;
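With the retyped `DemoResponse`, a demo `response` can now be either a fixed `CustomServiceResponse` object or a function that derives one from the last `MessageContent`. A hedged usage sketch follows; the local interfaces are simplified stand-ins rather than the package's actual type definitions, and the `result.text` shape mirrors the bundle change shown earlier:

```ts
// Usage sketch based on the new union type; the interfaces below are simplified
// stand-ins for the package's MessageContent / CustomServiceResponse types.
interface MessageContent { role?: string; text?: string }
interface CustomServiceResponse { result?: { text?: string }; error?: string }
type DemoResponse = CustomServiceResponse | ((message: MessageContent) => CustomServiceResponse);

// Static variant: every demo request gets the same canned reply.
const fixedResponse: DemoResponse = { result: { text: "This is a canned demo reply." } };

// Function variant: the reply is derived from the user's last message.
const echoResponse: DemoResponse = (message) => ({
  result: { text: `You said: ${message.text ?? ""}` },
});

// Either value would slot into a demo config object such as { response: echoResponse }.
```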
package/dist/types/demo.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"demo.d.ts","sourceRoot":"","sources":["../../../src/types/demo.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,qBAAqB,EAAE,cAAc,EAAC,MAAM,YAAY,CAAC;AACjE,OAAO,EAAC,
+{"version":3,"file":"demo.d.ts","sourceRoot":"","sources":["../../../src/types/demo.ts"],"names":[],"mappings":"AAAA,OAAO,EAAC,qBAAqB,EAAE,cAAc,EAAC,MAAM,YAAY,CAAC;AACjE,OAAO,EAAC,qBAAqB,EAAC,MAAM,iBAAiB,CAAC;AAEtD,MAAM,MAAM,UAAU,GAAG;KAAE,GAAG,IAAI,MAAM,qBAAqB,CAAC,CAAC,EAAE,OAAO;CAAC,CAAC;AAE1E,MAAM,MAAM,YAAY,GAAG,qBAAqB,GAAG,CAAC,CAAC,OAAO,EAAE,cAAc,KAAK,qBAAqB,CAAC,CAAC;AAExG,MAAM,MAAM,IAAI,GACZ,IAAI,GACJ;IACE,QAAQ,CAAC,EAAE,YAAY,CAAC;IACxB,aAAa,CAAC,EAAE,UAAU,CAAC;IAC3B,oBAAoB,CAAC,EAAE,OAAO,CAAC;IAC/B,8BAA8B,CAAC,EAAE,OAAO,CAAC;CAC1C,CAAC"}
package/dist/utils/demo/demo.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"demo.d.ts","sourceRoot":"","sources":["../../../../src/utils/demo/demo.ts"],"names":[],"mappings":"
+{"version":3,"file":"demo.d.ts","sourceRoot":"","sources":["../../../../src/utils/demo/demo.ts"],"names":[],"mappings":"AACA,OAAO,EAAC,mBAAmB,EAAC,MAAM,0BAA0B,CAAC;AAC7D,OAAO,EAAC,QAAQ,EAAC,MAAM,oCAAoC,CAAC;AAC5D,OAAO,EAAC,cAAc,EAAC,MAAM,0BAA0B,CAAC;AAKxD,KAAK,MAAM,GAAG,MAAM,IAAI,CAAC;AAEzB,qBAAa,IAAI;IACf,gBAAuB,GAAG,oBAAoB;IAE9C,OAAO,CAAC,MAAM,CAAC,6BAA6B;IAwB5C,OAAO,CAAC,MAAM,CAAC,gBAAgB;IAiB/B,OAAO,CAAC,MAAM,CAAC,iBAAiB;IAKhC,OAAO,CAAC,MAAM,CAAC,WAAW;WAOZ,OAAO,CAAC,QAAQ,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,EAAE,mBAAmB,CAAC,EAAE,mBAAmB;WAcvF,aAAa,CAAC,QAAQ,EAAE,QAAQ,EAAE,EAAE,EAAE,cAAc;CAMnE"}