uneeq-js 2.54.0 → 2.55.0

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
@@ -16,12 +16,14 @@ export declare class SpeechHandler {
  private wsReconnectMessageQueue;
  private storedTranscription?;
  private awaitingFinalTranscript;
+ private interruptionBlocked;
  constructor(options: SpeechHandlerOptions);
  startRecognition(): void;
  stopRecognition(): void;
  pause(): boolean;
  resume(): boolean;
  setChatMetadata(chatMetadata: ChatMetadata): void;
+ pauseVadIfInterruptNotAllowed(): void;
  private loadScript;
  private loadScripts;
  private handleAppMessages;
@@ -29,6 +31,8 @@ export declare class SpeechHandler {
  private sendWebsocketMessageQueue;
  private loadSavedAudioHeaders;
  private initVoiceActivityDetection;
+ private startMicVad;
+ private pauseMicVad;
  private onVadSpeechStart;
  private onVadSpeechEnd;
  private sendStoredTranscriptionIfReady;
@@ -44,4 +48,5 @@ export declare class SpeechHandler {
  private mediaRecorderOnData;
  private handlePttTranscriptionResult;
  private handleVadTranscriptionResult;
+ private sendChatPrompt;
  }
@@ -54,7 +54,8 @@ export declare enum UneeqMessageType {
  DigitalHumanFailedToPlay = "DigitalHumanFailedToPlay",
  DigitalHumanPlayedInMutedModeSuccess = "DigitalHumanPlayedInMutedModeSuccess",
  DigitalHumanUnmuted = "DigitalHumanUnmuted",
- CustomMetadataUpdated = "CustomMetadataUpdated"
+ CustomMetadataUpdated = "CustomMetadataUpdated",
+ VadInterruptionAllowed = "VadInterruptionAllowed"
  }
  /**
  * Service is ready to be used
@@ -257,6 +258,14 @@ export declare class SpeechTranscriptionMessage implements UneeqMessage {
  uneeqMessageType: UneeqMessageType;
  constructor(speechTranscription: SpeechTranscriptionResult);
  }
+ /**
+ * VAD interruption status update
+ */
+ export declare class VadInterruptionAllowedMessage implements UneeqMessage {
+ readonly interruptionAllowed: boolean;
+ uneeqMessageType: UneeqMessageType;
+ constructor(interruptionAllowed: boolean);
+ }
  /**
  * Digital human has started speaking the message
  */
@@ -13,4 +13,5 @@ export interface SpeechHandlerOptions {
  jwtToken: string;
  chatMetadata: ChatMetadata;
  enableVad?: boolean;
+ enableInterrupt?: boolean;
  }
@@ -98,4 +98,11 @@ export interface UneeqOptions {
  * This can be set to any stringified value.
  */
  customMetadata?: string;
+ /**
+ * Enable Interrupt By Speech
+ * When using SPEECH_RECOGNITION mode, this option will allow the user to interrupt the Digital Human by speaking over them.
+ * If this value is false, the Digital Human will not be interrupted by the user. The default value is false - users cannot
+ * interrupt the Digital Human. Note: Sending a text message will interrupt the digital human irrespective of this setting.
+ */
+ enableInterruptBySpeech?: boolean;
  }
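
The declaration changes above add one public option (`enableInterruptBySpeech`), its internal counterpart (`enableInterrupt` on `SpeechHandlerOptions`), and a new `VadInterruptionAllowed` message that reports whether the microphone VAD is currently allowed to interrupt. A minimal consumer-side sketch of reacting to that message follows: only the type names and the `uneeqMessageType` check come from the declarations above (assuming they are re-exported from the package root), while the handler wiring and the DOM hook are illustrative assumptions.

```typescript
import { UneeqMessage, UneeqMessageType, VadInterruptionAllowedMessage } from 'uneeq-js';

// Illustrative handler: plug into whatever message callback the application
// already registers with its Uneeq instance. Only the VadInterruptionAllowed
// branch is new in 2.55.0.
function onUneeqMessage(msg: UneeqMessage): void {
  if (msg.uneeqMessageType === UneeqMessageType.VadInterruptionAllowed) {
    const { interruptionAllowed } = msg as VadInterruptionAllowedMessage;
    // The SDK emits `false` when it pauses the mic VAD (the digital human is
    // speaking and interruption is disabled) and `true` when it resumes.
    setMicIndicatorEnabled(interruptionAllowed);
  }
}

// Hypothetical UI hook, assumed for this sketch.
function setMicIndicatorEnabled(enabled: boolean): void {
  document.getElementById('mic-indicator')?.classList.toggle('muted', !enabled);
}
```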
package/dist/umd/index.js CHANGED
@@ -3880,6 +3880,9 @@ class Uneeq {
  sendTranscript(text) {
  if (this.serviceReadyCheck('sendTranscript')) {
  if (!this.sessionPaused) {
+ if (this.speechHandler) {
+ this.speechHandler.pauseVadIfInterruptNotAllowed();
+ }
  this.api.avatarAsk(text, this.chatMetadata);
  }
  else {
@@ -4001,6 +4004,9 @@ class Uneeq {
  * Trigger this conversation's welcome message
  */
  playWelcomeMessage() {
+ if (this.speechHandler) {
+ this.speechHandler.pauseVadIfInterruptNotAllowed();
+ }
  return this.api.avatarWelcome(this.chatMetadata);
  }
  /* Unmute the digital human video - this might be called if the video started in muted mode.*/
@@ -4073,7 +4079,8 @@ class Uneeq {
  hintPhrasesBoost: this.options.speechRecognitionHintPhrasesBoost,
  jwtToken: token,
  chatMetadata: this.chatMetadata,
- enableVad: this.options.enableVad
+ enableVad: this.options.enableVad,
+ enableInterrupt: this.options.enableInterruptBySpeech
  });
  }
  else {
@@ -13313,7 +13320,7 @@ exports.race = race;
  /***/ ((module) => {

  "use strict";
- module.exports = JSON.parse('{"name":"uneeq-js","version":"2.54.0","description":"","main":"dist/index.js","types":"dist/src/index.d.ts","scripts":{"start":"npx webpack -w","test-local":"npx karma start karma.conf.js -logLevel=DEBUG","test":"npx karma start --browsers ChromeHeadless --single-run","test:windows":"karma start karma.conf.js","build":"webpack --config webpack.config.prod.js && webpack --config webpack.config.umd.js","lint":"npx tslint -p tsconfig.json --fix","docs":"npx typedoc --options"},"files":["dist","!dist/test"],"author":"","license":"ISC","dependencies":{"@stomp/stompjs":"^6.0.0","@uehreka/seriously":"^1.0.1","fast-text-encoding":"^1.0.6","intrinsic-scale":"^3.0.4","onnxruntime-web":"^1.15.1","promjs":"^0.4.1","ring-buffer-ts":"^1.2.0","rxjs":"^7.8.1","rxjs-compat":"^6.6.7","simple-peer":"^9.11.1","webrtc-adapter":"^8.2.3"},"devDependencies":{"@types/dom-mediacapture-record":"^1.0.16","@types/jasmine":"^2.8.8","@types/node":"^10.9.4","fetch-mock":"7.7.3","ignore-styles":"^5.0.1","jasmine":"^5.1.0","jasmine-core":"^5.1.0","karma":"6.4.3","karma-chrome-launcher":"^3.2.0","karma-firefox-launcher":"^2.1.2","karma-jasmine":"^5.1.0","karma-jasmine-html-reporter":"^2.1.0","karma-requirejs":"^1.1.0","karma-typescript":"^5.5.4","karma-typescript-es6-transform":"^5.5.4","nock":"^9.6.1","requirejs":"^2.3.6","ts-loader":"^9.4.4","ts-node":"^7.0.1","tslint":"^5.11.0","tslint-no-focused-test":"^0.5.0","typedoc":"^0.18.0","typescript":"^5.1.6","webpack":"^5.88.2","webpack-cli":"^5.1.4"}}');
+ module.exports = JSON.parse('{"name":"uneeq-js","version":"2.55.0","description":"","main":"dist/index.js","types":"dist/src/index.d.ts","scripts":{"start":"npx webpack -w","test-local":"npx karma start karma.conf.js -logLevel=DEBUG","test":"npx karma start --browsers ChromeHeadless --single-run","test:windows":"karma start karma.conf.js","build":"webpack --config webpack.config.prod.js && webpack --config webpack.config.umd.js","lint":"npx tslint -p tsconfig.json","lint-fix":"npx tslint -p tsconfig.json --fix","docs":"npx typedoc --options"},"files":["dist","!dist/test"],"author":"","license":"ISC","dependencies":{"@stomp/stompjs":"^6.0.0","@uehreka/seriously":"^1.0.1","fast-text-encoding":"^1.0.6","intrinsic-scale":"^3.0.4","onnxruntime-web":"^1.15.1","promjs":"^0.4.1","ring-buffer-ts":"^1.2.0","rxjs":"^7.8.1","rxjs-compat":"^6.6.7","simple-peer":"^9.11.1","webrtc-adapter":"^8.2.3"},"devDependencies":{"@types/dom-mediacapture-record":"^1.0.16","@types/jasmine":"^2.8.8","@types/node":"^10.9.4","fetch-mock":"7.7.3","ignore-styles":"^5.0.1","jasmine":"^5.1.0","jasmine-core":"^5.1.0","karma":"6.4.3","karma-chrome-launcher":"^3.2.0","karma-firefox-launcher":"^2.1.2","karma-jasmine":"^5.1.0","karma-jasmine-html-reporter":"^2.1.0","karma-requirejs":"^1.1.0","karma-typescript":"^5.5.4","karma-typescript-es6-transform":"^5.5.4","nock":"^9.6.1","requirejs":"^2.3.6","ts-loader":"^9.4.4","ts-node":"^7.0.1","tslint":"^5.11.0","tslint-no-focused-test":"^0.5.0","typedoc":"^0.18.0","typescript":"^5.1.6","webpack":"^5.88.2","webpack-cli":"^5.1.4"}}');

  /***/ }),
  /* 242 */
@@ -29975,7 +29982,7 @@ exports.DigitalHumanVideoPlayManager = DigitalHumanVideoPlayManager;
  "use strict";

  Object.defineProperty(exports, "__esModule", ({ value: true }));
- exports.CustomMetadataUpdated = exports.DigitalHumanUnmuted = exports.DigitalHumanPlayedInMutedModeSuccess = exports.DigitalHumanFailedToPlay = exports.DigitalHumanVideoError = exports.ClientPerformanceMessage = exports.ClientMediaStreamUpdateMessage = exports.OnlineStatusUpdateMessage = exports.SessionInfoMessage = exports.AvatarRequestIgnored = exports.AvatarRequestCompleted = exports.SessionErrorMessage = exports.WebRtcStatsMessage = exports.WebRtcDataMessage = exports.SessionLiveMessage = exports.WarningMessage = exports.RecordingStoppedMessage = exports.RecordingStartedMessage = exports.DevicePermissionAllowedMessage = exports.AvatarTextInputFinishedMessage = exports.FinishedSpeakingMessage = exports.StartedSpeakingMessage = exports.UserStoppedSpeakingMessage = exports.UserStartedSpeakingMessage = exports.SpeechTranscriptionMessage = exports.AvatarQuestionMessage = exports.InstructionsMessage = exports.AvatarAnswerContentMessage = exports.AvatarAnswerMessage = exports.ResumeSessionUnavailable = exports.ConnectionLostMessage = exports.VoiceActivityInputModeNotSupportedMessage = exports.MicActivityNotSupportedMessage = exports.MicActivityMessage = exports.MicActivityErrorMessage = exports.ServiceUnavailableMessage = exports.AvatarUnavailableMessage = exports.AvatarAvailableMessage = exports.SessionEndedMessage = exports.SessionResumedMessage = exports.SessionPausedMessage = exports.DeviceListUpdatedMessage = exports.DeviceErrorMessage = exports.SetSpeakerSuccessMessage = exports.SetMicSuccessMessage = exports.SetCameraSuccessMessage = exports.DeviceNotFoundErrorMessage = exports.ReadyMessage = exports.UneeqMessageType = void 0;
+ exports.CustomMetadataUpdated = exports.DigitalHumanUnmuted = exports.DigitalHumanPlayedInMutedModeSuccess = exports.DigitalHumanFailedToPlay = exports.DigitalHumanVideoError = exports.ClientPerformanceMessage = exports.ClientMediaStreamUpdateMessage = exports.OnlineStatusUpdateMessage = exports.SessionInfoMessage = exports.AvatarRequestIgnored = exports.AvatarRequestCompleted = exports.SessionErrorMessage = exports.WebRtcStatsMessage = exports.WebRtcDataMessage = exports.SessionLiveMessage = exports.WarningMessage = exports.RecordingStoppedMessage = exports.RecordingStartedMessage = exports.DevicePermissionAllowedMessage = exports.AvatarTextInputFinishedMessage = exports.FinishedSpeakingMessage = exports.StartedSpeakingMessage = exports.UserStoppedSpeakingMessage = exports.UserStartedSpeakingMessage = exports.VadInterruptionAllowedMessage = exports.SpeechTranscriptionMessage = exports.AvatarQuestionMessage = exports.InstructionsMessage = exports.AvatarAnswerContentMessage = exports.AvatarAnswerMessage = exports.ResumeSessionUnavailable = exports.ConnectionLostMessage = exports.VoiceActivityInputModeNotSupportedMessage = exports.MicActivityNotSupportedMessage = exports.MicActivityMessage = exports.MicActivityErrorMessage = exports.ServiceUnavailableMessage = exports.AvatarUnavailableMessage = exports.AvatarAvailableMessage = exports.SessionEndedMessage = exports.SessionResumedMessage = exports.SessionPausedMessage = exports.DeviceListUpdatedMessage = exports.DeviceErrorMessage = exports.SetSpeakerSuccessMessage = exports.SetMicSuccessMessage = exports.SetCameraSuccessMessage = exports.DeviceNotFoundErrorMessage = exports.ReadyMessage = exports.UneeqMessageType = void 0;
  var UneeqMessageType;
  (function (UneeqMessageType) {
  UneeqMessageType["Ready"] = "Ready";
@@ -30026,6 +30033,7 @@ var UneeqMessageType;
  UneeqMessageType["DigitalHumanPlayedInMutedModeSuccess"] = "DigitalHumanPlayedInMutedModeSuccess";
  UneeqMessageType["DigitalHumanUnmuted"] = "DigitalHumanUnmuted";
  UneeqMessageType["CustomMetadataUpdated"] = "CustomMetadataUpdated";
+ UneeqMessageType["VadInterruptionAllowed"] = "VadInterruptionAllowed";
  })(UneeqMessageType || (exports.UneeqMessageType = UneeqMessageType = {}));
  /**
  * Service is ready to be used
@@ -30276,6 +30284,16 @@ class SpeechTranscriptionMessage {
  }
  }
  exports.SpeechTranscriptionMessage = SpeechTranscriptionMessage;
+ /**
+ * VAD interruption status update
+ */
+ class VadInterruptionAllowedMessage {
+ constructor(interruptionAllowed) {
+ this.interruptionAllowed = interruptionAllowed;
+ this.uneeqMessageType = UneeqMessageType.VadInterruptionAllowed;
+ }
+ }
+ exports.VadInterruptionAllowedMessage = VadInterruptionAllowedMessage;
  /**
  * Digital human has started speaking the message
  */
@@ -51271,6 +51289,7 @@ const websocketReconnectionQueueLimit = 200;
  const transcriptionStabilityThreshold = 0.5;
  class SpeechHandler {
  constructor(options) {
+ var _a, _b, _c;
  this.options = options;
  this.logPrefix = 'UneeQ: Speech Recognition: ';
  this.speechBuffer = new ring_buffer_ts_1.RingBuffer(speechBufferLength);
@@ -51279,8 +51298,10 @@ class SpeechHandler {
  this.reconnectWs = true;
  this.wsReconnectMessageQueue = [];
  this.awaitingFinalTranscript = false;
- this.options.assetBasePath = this.options.assetBasePath || defaultAssetPath;
- this.options.enableVad = this.options.enableVad === undefined ? true : this.options.enableVad;
+ this.interruptionBlocked = false;
+ this.options.assetBasePath = (_a = this.options.assetBasePath) !== null && _a !== void 0 ? _a : defaultAssetPath;
+ this.options.enableVad = (_b = this.options.enableVad) !== null && _b !== void 0 ? _b : true;
+ this.options.enableInterrupt = (_c = this.options.enableInterrupt) !== null && _c !== void 0 ? _c : false;
  this.loadSavedAudioHeaders();
  this.loadScripts();
  this.handleAppMessages();
@@ -51297,15 +51318,13 @@ class SpeechHandler {
  track.stop();
  });
  }
- if (this.micVad) {
- this.micVad.pause();
- }
+ this.pauseMicVad();
  }
  pause() {
  if (this.stream && this.micVad) {
  this.sendStoredTranscriptionIfReady();
  this.stream.getTracks().forEach((track) => track.enabled = false);
- this.micVad.pause();
+ this.pauseMicVad();
  this.onVadSpeechEnd();
  return true;
  }
@@ -51321,8 +51340,7 @@ class SpeechHandler {
  this.storedTranscription = undefined;
  if (this.stream && this.micVad) {
  this.stream.getTracks().forEach((track) => track.enabled = true);
- this.micVad.start();
- return true;
+ return this.startMicVad();
  }
  logger_1.logger.warn(this.logPrefix + 'Could not resume speech recognition.');
  return false;
@@ -51330,6 +51348,15 @@ class SpeechHandler {
  setChatMetadata(chatMetadata) {
  this.options.chatMetadata = chatMetadata;
  }
+ pauseVadIfInterruptNotAllowed() {
+ if (this.micVad && !this.options.enableInterrupt) {
+ this.micVad.pause();
+ this.options.messages.next(new MessageTypes_1.VadInterruptionAllowedMessage(false));
+ }
+ if (!this.options.enableInterrupt) {
+ this.interruptionBlocked = true;
+ }
+ }
  loadScript(url) {
  return new Promise((resolve) => {
  const script = document.createElement('script');
@@ -51356,9 +51383,15 @@ class SpeechHandler {
  switch (msg.uneeqMessageType) {
  case MessageTypes_1.UneeqMessageType.StartedSpeaking:
  this.digitalHumanSpeaking = true;
+ this.pauseVadIfInterruptNotAllowed();
  break;
  case MessageTypes_1.UneeqMessageType.FinishedSpeaking:
  this.digitalHumanSpeaking = false;
+ this.interruptionBlocked = false;
+ this.startMicVad();
+ if (!this.options.enableInterrupt) {
+ this.options.messages.next(new MessageTypes_1.VadInterruptionAllowedMessage(true));
+ }
  break;
  case MessageTypes_1.UneeqMessageType.SessionEnded:
  this.reconnectWs = false;
@@ -51422,12 +51455,35 @@ class SpeechHandler {
  stream: stream,
  }).then((vad) => {
  this.micVad = vad;
- this.micVad.start();
+ // Don't start VAD if interruption has been blocked
+ if (!this.interruptionBlocked) {
+ this.startMicVad();
+ }
+ else {
+ this.options.messages.next(new MessageTypes_1.VadInterruptionAllowedMessage(false));
+ }
  logger_1.logger.log(this.logPrefix + 'VAD module has started');
  }).catch((err) => {
  console.error(this.logPrefix + 'Could not initialize VAD module', err);
  });
  }
+ startMicVad() {
+ if (this.micVad) {
+ if (!this.digitalHumanSpeaking) {
+ this.micVad.start();
+ return true;
+ }
+ }
+ logger_1.logger.log(this.logPrefix + 'startMicVad: VAD has not been started.');
+ return false;
+ }
+ pauseMicVad() {
+ if (this.micVad) {
+ this.micVad.pause();
+ }
+ logger_1.logger.log(this.logPrefix + 'pauseMicVad: VAD has not been paused.');
+ return false;
+ }
  onVadSpeechStart() {
  logger_1.logger.log(this.logPrefix + 'User started speaking');
  this.options.messages.next(new MessageTypes_1.UserStartedSpeakingMessage());
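
The bundle changes above implement the interruption gate: every outgoing prompt and every StartedSpeaking message routes through pauseVadIfInterruptNotAllowed(), FinishedSpeaking clears the block and restarts the VAD, and a VAD instance created while blocked stays paused. Distilled as a sketch, where `MicVad` is a simplified stand-in for the bundled VAD wrapper and the message plumbing is omitted:

```typescript
// Simplified stand-in for the VAD wrapper used by SpeechHandler.
interface MicVad {
  start(): void;
  pause(): void;
}

class InterruptionGate {
  private interruptionBlocked = false;

  constructor(private micVad: MicVad, private enableInterrupt: boolean) {}

  // Mirrors pauseVadIfInterruptNotAllowed(): runs before every chat prompt
  // (sendChatPrompt, sendTranscript, playWelcomeMessage) and on StartedSpeaking.
  pauseIfInterruptNotAllowed(): void {
    if (!this.enableInterrupt) {
      this.micVad.pause();           // stop listening: the user cannot barge in
      this.interruptionBlocked = true;
    }
  }

  // Mirrors the FinishedSpeaking branch: clear the block and resume listening.
  onFinishedSpeaking(): void {
    this.interruptionBlocked = false;
    this.micVad.start();
  }

  // Mirrors initVoiceActivityDetection(): a VAD created mid-utterance must
  // stay paused until FinishedSpeaking arrives.
  onVadReady(): void {
    if (!this.interruptionBlocked) {
      this.micVad.start();
    }
  }
}
```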
@@ -51454,7 +51510,7 @@ class SpeechHandler {
  sendStoredTranscriptionIfReady() {
  if (!this.options.enableVad && this.storedTranscription !== undefined && !this.awaitingFinalTranscript) {
  // send the transcript
- this.options.api.avatarAsk(this.storedTranscription.transcript, this.options.chatMetadata);
+ this.sendChatPrompt(this.storedTranscription.transcript);
  this.options.messages.next(new MessageTypes_1.SpeechTranscriptionMessage(this.storedTranscription));
  this.storedTranscription = undefined;
  }
@@ -51598,7 +51654,7 @@ class SpeechHandler {
  result.transcript = this.storedTranscription.transcript + result.transcript;
  }
  this.options.chatMetadata.userSpokenLocale = result.language_code;
- this.options.api.avatarAsk(result.transcript, this.options.chatMetadata);
+ this.sendChatPrompt(result.transcript);
  this.options.messages.next(new MessageTypes_1.SpeechTranscriptionMessage(result));
  this.storedTranscription = undefined;
  }
@@ -51616,18 +51672,24 @@ class SpeechHandler {
  }
  }
  handleVadTranscriptionResult(result) {
- if (result.final) {
- this.options.chatMetadata.userSpokenLocale = result.language_code;
- this.options.api.avatarAsk(result.transcript, this.options.chatMetadata);
- }
- else if (this.digitalHumanSpeaking) {
- this.options.api.stopSpeaking();
- this.digitalHumanSpeaking = false;
+ if (result.transcript !== '') {
+ if (result.final) {
+ this.options.chatMetadata.userSpokenLocale = result.language_code;
+ this.sendChatPrompt(result.transcript);
+ }
+ else if (this.digitalHumanSpeaking) {
+ this.options.api.stopSpeaking();
+ this.digitalHumanSpeaking = false;
+ }
  }
  if (result.stability > transcriptionStabilityThreshold || result.final) {
  this.options.messages.next(new MessageTypes_1.SpeechTranscriptionMessage(result));
  }
  }
+ sendChatPrompt(transcript) {
+ this.pauseVadIfInterruptNotAllowed();
+ this.options.api.avatarAsk(transcript, this.options.chatMetadata);
+ }
  }
  exports.SpeechHandler = SpeechHandler;
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "uneeq-js",
- "version": "2.54.0",
+ "version": "2.55.0",
  "description": "",
  "main": "dist/index.js",
  "types": "dist/src/index.d.ts",
@@ -10,7 +10,8 @@
  "test": "npx karma start --browsers ChromeHeadless --single-run",
  "test:windows": "karma start karma.conf.js",
  "build": "webpack --config webpack.config.prod.js && webpack --config webpack.config.umd.js",
- "lint": "npx tslint -p tsconfig.json --fix",
+ "lint": "npx tslint -p tsconfig.json",
+ "lint-fix": "npx tslint -p tsconfig.json --fix",
  "docs": "npx typedoc --options"
  },
  "files": [
package/readme.md CHANGED
@@ -8,6 +8,12 @@ https://docs.uneeq.io/build-your-own-experience
  To use uneeq-js with a typescript version < 3.8, 'skipLibCheck' must be set to true.

  ## Release Notes
+ #### 2.55.0
+ * Added a new option 'enableInterrupt' that can be used to enable interruption of the digital human. The default value is false, users cannot interrupt the digital human.
+
+ #### 2.54.1
+ * Fixed an edge case that caused the digital human to stop speaking when a blank STT result was received.
+
  #### 2.54.0
  * Re-enabled echoCancellation and fixed an issue that caused speech recognition to perform poorly in high latency environments.
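
Note that the release note's 'enableInterrupt' is the internal `SpeechHandlerOptions` field; the option exposed on `UneeqOptions` is `enableInterruptBySpeech`, as the declaration diff above shows. A sketch of wiring it up: only `enableVad` and `enableInterruptBySpeech` come from this diff, while `baseOptions` and the constructor call are assumptions standing in for however the application already configures its session.

```typescript
import { Uneeq, UneeqOptions } from 'uneeq-js';

// `baseOptions` is a placeholder for the application's existing UneeqOptions
// (connection details, DOM elements, token plumbing, etc.).
declare const baseOptions: UneeqOptions;

const uneeq = new Uneeq({
  ...baseOptions,
  enableVad: true,               // voice-activity-detection driven input
  enableInterruptBySpeech: true, // new in 2.55.0; defaults to false
});
```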