@webex/internal-plugin-voicea 2.23.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +90 -0
- package/dist/constants.js +40 -0
- package/dist/constants.js.map +1 -0
- package/dist/index.js +33 -0
- package/dist/index.js.map +1 -0
- package/dist/utils.js +19 -0
- package/dist/utils.js.map +1 -0
- package/dist/voicea.js +376 -0
- package/dist/voicea.js.map +1 -0
- package/dist/voicea.types.js +8 -0
- package/dist/voicea.types.js.map +1 -0
- package/package.json +23 -0
- package/src/constants.ts +31 -0
- package/src/index.js +6 -0
- package/src/utils.ts +8 -0
- package/src/voicea.ts +306 -0
- package/src/voicea.types.ts +81 -0
- package/test/unit/spec/utils.js +12 -0
- package/test/unit/spec/voicea.js +580 -0
package/src/constants.ts
ADDED
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
// Events emitted by the voicea plugin. Consumers (e.g. plugin-meetings)
// subscribe to these to react to caption/transcription state changes.
export const EVENT_TRIGGERS = {
  // Voicea joined the meeting and announced its language capabilities.
  VOICEA_ANNOUNCEMENT: 'voicea:announcement',
  // Result of a caption (translation) language change request.
  CAPTION_LANGUAGE_UPDATE: 'voicea:captionLanguageUpdate',
  // Spoken language for the meeting was updated successfully.
  SPOKEN_LANGUAGE_UPDATE: 'voicea:spokenLanguageUpdate',
  // Closed captions were turned on for the meeting.
  CAPTIONS_TURNED_ON: 'voicea:captionOn',
  // Meeting transcription was turned on / off.
  TRANSCRIBING_ON: 'voicea:transcribingOn',
  TRANSCRIBING_OFF: 'voicea:transcribingOff',

  // A new caption (interim or final transcript) arrived.
  NEW_CAPTION: 'voicea:newCaption',
  // Webex Assistant (eva) voice-command state change (wake/thanks/cancel).
  EVA_COMMAND: 'voicea:wxa',
  // A highlight was created during transcription.
  HIGHLIGHT_CREATED: 'voicea:highlightCreated'
};
|
|
13
|
+
|
|
14
|
+
// relayType values used on the LLM/mercury 'relay.event' channel to
// distinguish voicea message kinds (both inbound and outbound).
export const VOICEA_RELAY_TYPES = {
  // Inbound: voicea announces itself (language capabilities).
  ANNOUNCEMENT: 'voicea.annc',
  // Outbound: this client announces itself to voicea.
  CLIENT_ANNOUNCEMENT: 'client.annc',
  // Outbound: request captions in a particular translation language.
  TRANSLATION_REQUEST: 'voicea.transl.req',
  // Inbound: response to a translation request.
  TRANSLATION_RESPONSE: 'voicea.transl.rsp',
  // Inbound: transcription payloads (captions, highlights, eva events).
  TRANSCRIPTION: 'voicea.transcription'
};
|
|
21
|
+
|
|
22
|
+
// Sub-types carried inside a 'voicea.transcription' relay payload
// (the payload's `type` field); see TRANSCRIPTION_TYPES in voicea.types.
export const TRANSCRIPTION_TYPE = {
  UNKNOWN: 'unknown',
  // Webex Assistant wake word detected / command acknowledged / cancelled.
  EVA_WAKE: 'eva_wake',
  EVA_THANKS: 'eva_thanks',
  EVA_CANCEL: 'eva_cancel',
  // A highlight was created from the transcript.
  HIGHLIGHT_CREATED: 'highlight_created',
  // Partial (still-updating) vs. final caption results.
  TRANSCRIPT_INTERIM_RESULTS: 'transcript_interim_results',
  TRANSCRIPT_FINAL_RESULT: 'transcript_final_result'

};
|
package/src/index.js
ADDED
package/src/utils.ts
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
// eslint-disable-next-line import/prefer-default-export
|
|
2
|
+
export const millisToMinutesAndSeconds = (millis: number) => {
|
|
3
|
+
const minutes = Math.floor(millis / 60000);
|
|
4
|
+
const seconds = ((millis % 60000) / 1000).toFixed(0);
|
|
5
|
+
|
|
6
|
+
return `${minutes}:${+(seconds) < 10 ? '0' : ''}${seconds}`;
|
|
7
|
+
};
|
|
8
|
+
|
package/src/voicea.ts
ADDED
|
@@ -0,0 +1,306 @@
|
|
|
1
|
+
import uuid from 'uuid';
|
|
2
|
+
import LLMChannel from '@webex/internal-plugin-llm';
|
|
3
|
+
import config from '@webex/webex-core/src/config';
|
|
4
|
+
import Trigger from '@webex/plugin-meetings/src/common/events/trigger-proxy';
|
|
5
|
+
|
|
6
|
+
import {EVENT_TRIGGERS, VOICEA_RELAY_TYPES, TRANSCRIPTION_TYPE} from './constants';
|
|
7
|
+
// eslint-disable-next-line no-unused-vars
|
|
8
|
+
import {AnnouncementPayload, CaptionLanguageResponse, TranscriptionResponse, IVoiceaChannel} from './voicea.types';
|
|
9
|
+
import {millisToMinutesAndSeconds} from './utils';
|
|
10
|
+
/**
|
|
11
|
+
* VoiceaChannel to hold single instance of LLM
|
|
12
|
+
*/
|
|
13
|
+
export class VoiceaChannel extends LLMChannel implements IVoiceaChannel {
|
|
14
|
+
private seqNum: number;
|
|
15
|
+
|
|
16
|
+
private hasVoiceaJoined: boolean;
|
|
17
|
+
|
|
18
|
+
private areCaptionsEnabled: boolean;
|
|
19
|
+
|
|
20
|
+
private isTranscribingEnabled:boolean;
|
|
21
|
+
|
|
22
|
+
private vmcDeviceId: string;
|
|
23
|
+
|
|
24
|
+
/**
|
|
25
|
+
* Initializes Voicea plugin
|
|
26
|
+
* @param {any} args
|
|
27
|
+
*/
|
|
28
|
+
constructor(...args) {
|
|
29
|
+
super(...args);
|
|
30
|
+
this.seqNum = 1;
|
|
31
|
+
this.hasVoiceaJoined = false;
|
|
32
|
+
this.areCaptionsEnabled = false;
|
|
33
|
+
this.isTranscribingEnabled = false;
|
|
34
|
+
this.vmcDeviceId = undefined;
|
|
35
|
+
|
|
36
|
+
this.on('event:relay.event', (e) => {
|
|
37
|
+
this.seqNum = e.sequenceNumber + 1;
|
|
38
|
+
switch (e.data.relayType) {
|
|
39
|
+
case VOICEA_RELAY_TYPES.ANNOUNCEMENT:
|
|
40
|
+
this.vmcDeviceId = e.headers.from;
|
|
41
|
+
this.hasVoiceaJoined = true;
|
|
42
|
+
this.processAnnouncementMessage(e.voiceaPayload);
|
|
43
|
+
break;
|
|
44
|
+
case VOICEA_RELAY_TYPES.TRANSLATION_RESPONSE:
|
|
45
|
+
this.processCaptionLanguageResponse(e.voiceaPayload);
|
|
46
|
+
break;
|
|
47
|
+
case VOICEA_RELAY_TYPES.TRANSCRIPTION:
|
|
48
|
+
this.processTranscription(e.voiceaPayload);
|
|
49
|
+
break;
|
|
50
|
+
default:
|
|
51
|
+
break;
|
|
52
|
+
}
|
|
53
|
+
});
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
/**
|
|
57
|
+
* Process Transcript and send alert
|
|
58
|
+
* @param {TranscriptionResponse} voiceaPayload
|
|
59
|
+
* @returns {void}
|
|
60
|
+
*/
|
|
61
|
+
private processTranscription = (voiceaPayload: TranscriptionResponse): void => {
|
|
62
|
+
switch (voiceaPayload.type) {
|
|
63
|
+
case TRANSCRIPTION_TYPE.TRANSCRIPT_INTERIM_RESULTS:
|
|
64
|
+
Trigger.trigger(this, {
|
|
65
|
+
file: 'voicea',
|
|
66
|
+
function: 'processTranscription'
|
|
67
|
+
},
|
|
68
|
+
EVENT_TRIGGERS.NEW_CAPTION, {
|
|
69
|
+
isFinal: false,
|
|
70
|
+
transcriptId: voiceaPayload.transcript_id,
|
|
71
|
+
translations: voiceaPayload.translations,
|
|
72
|
+
transcript: {
|
|
73
|
+
csis: voiceaPayload.csis,
|
|
74
|
+
text: voiceaPayload.data
|
|
75
|
+
},
|
|
76
|
+
});
|
|
77
|
+
break;
|
|
78
|
+
|
|
79
|
+
case TRANSCRIPTION_TYPE.TRANSCRIPT_FINAL_RESULT:
|
|
80
|
+
Trigger.trigger(this, {
|
|
81
|
+
file: 'voicea',
|
|
82
|
+
function: 'processTranscription'
|
|
83
|
+
},
|
|
84
|
+
EVENT_TRIGGERS.NEW_CAPTION, {
|
|
85
|
+
isFinal: true,
|
|
86
|
+
transcriptId: voiceaPayload.transcript_id,
|
|
87
|
+
translations: voiceaPayload.translations,
|
|
88
|
+
transcript: {
|
|
89
|
+
csis: voiceaPayload.csis,
|
|
90
|
+
text: voiceaPayload.data
|
|
91
|
+
},
|
|
92
|
+
timestamp: millisToMinutesAndSeconds(voiceaPayload.transcript.end_millis - voiceaPayload.transcript.start_millis)
|
|
93
|
+
});
|
|
94
|
+
break;
|
|
95
|
+
|
|
96
|
+
case TRANSCRIPTION_TYPE.HIGHLIGHT_CREATED:
|
|
97
|
+
Trigger.trigger(this, {
|
|
98
|
+
file: 'voicea',
|
|
99
|
+
function: 'processTranscription'
|
|
100
|
+
},
|
|
101
|
+
EVENT_TRIGGERS.HIGHLIGHT_CREATED, {
|
|
102
|
+
csis: voiceaPayload.highlight.csis,
|
|
103
|
+
highlightId: voiceaPayload.highlight.highlight_id,
|
|
104
|
+
text: voiceaPayload.highlight.transcript,
|
|
105
|
+
highlightLabel: voiceaPayload.highlight.highlight_label,
|
|
106
|
+
highlightSource: voiceaPayload.highlight.highlight_source,
|
|
107
|
+
timestamp: millisToMinutesAndSeconds(voiceaPayload.highlight.end_millis - voiceaPayload.highlight.start_millis)
|
|
108
|
+
});
|
|
109
|
+
break;
|
|
110
|
+
|
|
111
|
+
case TRANSCRIPTION_TYPE.EVA_THANKS:
|
|
112
|
+
Trigger.trigger(this, {
|
|
113
|
+
file: 'voicea',
|
|
114
|
+
function: 'processTranscription'
|
|
115
|
+
},
|
|
116
|
+
EVENT_TRIGGERS.EVA_COMMAND, {
|
|
117
|
+
isListening: false, text: voiceaPayload.command_response
|
|
118
|
+
});
|
|
119
|
+
break;
|
|
120
|
+
|
|
121
|
+
case TRANSCRIPTION_TYPE.EVA_WAKE:
|
|
122
|
+
case TRANSCRIPTION_TYPE.EVA_CANCEL:
|
|
123
|
+
Trigger.trigger(this, {
|
|
124
|
+
file: 'voicea',
|
|
125
|
+
function: 'processTranscription'
|
|
126
|
+
},
|
|
127
|
+
EVENT_TRIGGERS.EVA_COMMAND, {
|
|
128
|
+
isListening: voiceaPayload.type === TRANSCRIPTION_TYPE.EVA_WAKE
|
|
129
|
+
});
|
|
130
|
+
break;
|
|
131
|
+
|
|
132
|
+
default:
|
|
133
|
+
break;
|
|
134
|
+
}
|
|
135
|
+
};
|
|
136
|
+
|
|
137
|
+
/**
|
|
138
|
+
* Processes Caption Language Response
|
|
139
|
+
* @param {CaptionLanguageResponse} voiceaPayload
|
|
140
|
+
* @returns {void}
|
|
141
|
+
*/
|
|
142
|
+
private processCaptionLanguageResponse = (voiceaPayload: CaptionLanguageResponse):void => {
|
|
143
|
+
if (voiceaPayload.statusCode === 200) {
|
|
144
|
+
Trigger.trigger(this, {
|
|
145
|
+
file: 'voicea',
|
|
146
|
+
function: 'processCaptionLanguageResponse'
|
|
147
|
+
},
|
|
148
|
+
EVENT_TRIGGERS.CAPTION_LANGUAGE_UPDATE, {statusCode: 200});
|
|
149
|
+
}
|
|
150
|
+
else {
|
|
151
|
+
Trigger.trigger(this, {
|
|
152
|
+
file: 'voicea',
|
|
153
|
+
function: 'processCaptionLanguageResponse'
|
|
154
|
+
},
|
|
155
|
+
EVENT_TRIGGERS.CAPTION_LANGUAGE_UPDATE, {statusCode: voiceaPayload.errorCode, errorMessage: voiceaPayload.message});
|
|
156
|
+
}
|
|
157
|
+
};
|
|
158
|
+
|
|
159
|
+
/**
|
|
160
|
+
* processes voicea announcement response and triggers event
|
|
161
|
+
* @param {Object} voiceaPayload
|
|
162
|
+
* @returns {void}
|
|
163
|
+
*/
|
|
164
|
+
private processAnnouncementMessage = (voiceaPayload: AnnouncementPayload):void => {
|
|
165
|
+
const voiceaLanguageOptions = {
|
|
166
|
+
captionLanguages: voiceaPayload?.translation?.allowed_languages ?? [],
|
|
167
|
+
maxLanguages: voiceaPayload?.translation?.max_languages ?? 0,
|
|
168
|
+
spokenLanguages: voiceaPayload?.ASR?.spoken_languages ?? [],
|
|
169
|
+
};
|
|
170
|
+
|
|
171
|
+
Trigger.trigger(
|
|
172
|
+
this,
|
|
173
|
+
{
|
|
174
|
+
file: 'voicea',
|
|
175
|
+
function: 'processAnnouncementMessage'
|
|
176
|
+
},
|
|
177
|
+
EVENT_TRIGGERS.VOICEA_ANNOUNCEMENT,
|
|
178
|
+
voiceaLanguageOptions
|
|
179
|
+
);
|
|
180
|
+
};
|
|
181
|
+
|
|
182
|
+
|
|
183
|
+
/**
|
|
184
|
+
* Sends Announcement to add voicea to the meeting
|
|
185
|
+
* @returns {void}
|
|
186
|
+
*/
|
|
187
|
+
private sendAnnouncement = ():void => {
|
|
188
|
+
if (this.hasVoiceaJoined) return;
|
|
189
|
+
this.socket.send({
|
|
190
|
+
id: `${this.seqNum}`,
|
|
191
|
+
type: 'publishRequest',
|
|
192
|
+
recipients: {
|
|
193
|
+
route: this.getBinding(),
|
|
194
|
+
},
|
|
195
|
+
headers: {},
|
|
196
|
+
data: {
|
|
197
|
+
clientPayload: {
|
|
198
|
+
version: 'v2',
|
|
199
|
+
},
|
|
200
|
+
eventType: 'relay.event',
|
|
201
|
+
relayType: VOICEA_RELAY_TYPES.CLIENT_ANNOUNCEMENT,
|
|
202
|
+
},
|
|
203
|
+
trackingId: `${config.trackingIdPrefix}_${uuid.v4().toString()}`
|
|
204
|
+
});
|
|
205
|
+
this.seqNum += 1;
|
|
206
|
+
};
|
|
207
|
+
|
|
208
|
+
/**
|
|
209
|
+
* Set Spoken Language for the meeting
|
|
210
|
+
* @param {string} languageCode
|
|
211
|
+
* @returns {Promise}
|
|
212
|
+
*/
|
|
213
|
+
public setSpokenLanguage = (languageCode: string):Promise<void> => this.request({
|
|
214
|
+
method: 'PUT',
|
|
215
|
+
url: `${this.locusUrl}/controls/`,
|
|
216
|
+
body: {
|
|
217
|
+
languageCode
|
|
218
|
+
}
|
|
219
|
+
}).then(() => {
|
|
220
|
+
Trigger.trigger(this, {
|
|
221
|
+
file: 'voicea',
|
|
222
|
+
function: 'setSpokenLanguage'
|
|
223
|
+
},
|
|
224
|
+
EVENT_TRIGGERS.SPOKEN_LANGUAGE_UPDATE,
|
|
225
|
+
{languageCode});
|
|
226
|
+
});
|
|
227
|
+
|
|
228
|
+
/**
|
|
229
|
+
* Request Language translation
|
|
230
|
+
* @param {string} languageCode
|
|
231
|
+
* @returns {void}
|
|
232
|
+
*/
|
|
233
|
+
public requestLanguage = (languageCode: string): void => {
|
|
234
|
+
this.socket.send({
|
|
235
|
+
id: `${this.seqNum}`,
|
|
236
|
+
type: 'publishRequest',
|
|
237
|
+
recipients: {
|
|
238
|
+
route: this.getBinding(),
|
|
239
|
+
},
|
|
240
|
+
headers: {
|
|
241
|
+
to: this.vmcDeviceId,
|
|
242
|
+
},
|
|
243
|
+
data: {
|
|
244
|
+
clientPayload: {
|
|
245
|
+
translationLanguage: languageCode,
|
|
246
|
+
id: uuid.v4(),
|
|
247
|
+
},
|
|
248
|
+
eventType: 'relay.event',
|
|
249
|
+
relayType: VOICEA_RELAY_TYPES.TRANSLATION_REQUEST,
|
|
250
|
+
},
|
|
251
|
+
trackingId: `${config.trackingIdPrefix}_${uuid.v4().toString()}`
|
|
252
|
+
});
|
|
253
|
+
this.seqNum += 1;
|
|
254
|
+
};
|
|
255
|
+
|
|
256
|
+
/**
|
|
257
|
+
* Turn on Captions
|
|
258
|
+
* @returns {Promise}
|
|
259
|
+
*/
|
|
260
|
+
public turnOnCaptions = async (): undefined | Promise<void> => {
|
|
261
|
+
if (this.hasVoiceaJoined && this.areCaptionsEnabled) return undefined;
|
|
262
|
+
|
|
263
|
+
return this.request({
|
|
264
|
+
method: 'PUT',
|
|
265
|
+
url: `${this.locusUrl}/controls/`,
|
|
266
|
+
body: {
|
|
267
|
+
transcribe: {caption: true}
|
|
268
|
+
}
|
|
269
|
+
}).then(() => {
|
|
270
|
+
Trigger.trigger(this, {
|
|
271
|
+
file: 'voicea',
|
|
272
|
+
function: 'turnOnCaptions'
|
|
273
|
+
},
|
|
274
|
+
EVENT_TRIGGERS.CAPTIONS_TURNED_ON);
|
|
275
|
+
this.areCaptionsEnabled = true;
|
|
276
|
+
this.sendAnnouncement();
|
|
277
|
+
});
|
|
278
|
+
};
|
|
279
|
+
|
|
280
|
+
/**
|
|
281
|
+
* Toggle transcribing for highlights
|
|
282
|
+
* @param {bool} activate if true transcribing is turned on
|
|
283
|
+
* @returns {Promise}
|
|
284
|
+
*/
|
|
285
|
+
public toggleTranscribing = async (activate:boolean):undefined|Promise<void> => {
|
|
286
|
+
if (this.isTranscribingEnabled === activate) return undefined;
|
|
287
|
+
|
|
288
|
+
return this.request({
|
|
289
|
+
method: 'PUT',
|
|
290
|
+
url: `${this.locusUrl}/controls/`,
|
|
291
|
+
body: {
|
|
292
|
+
transcribe: {transcribing: activate}
|
|
293
|
+
}
|
|
294
|
+
}).then(() => {
|
|
295
|
+
Trigger.trigger(this, {
|
|
296
|
+
file: 'voicea',
|
|
297
|
+
function: 'toggleTranscribing'
|
|
298
|
+
},
|
|
299
|
+
activate ? EVENT_TRIGGERS.TRANSCRIBING_ON : EVENT_TRIGGERS.TRANSCRIBING_OFF);
|
|
300
|
+
this.isTranscribingEnabled = activate;
|
|
301
|
+
if (activate && !this.areCaptionsEnabled && !this.hasVoiceaJoined) this.turnOnCaptions();
|
|
302
|
+
});
|
|
303
|
+
};
|
|
304
|
+
}
|
|
305
|
+
|
|
306
|
+
export default VoiceaChannel;
|
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
/* eslint-disable camelcase */
|
|
2
|
+
/* eslint-disable no-undef */
|
|
3
|
+
|
|
4
|
+
/**
|
|
5
|
+
* Type for payload
|
|
6
|
+
*/
|
|
7
|
+
interface AnnouncementPayload {
|
|
8
|
+
translation:{
|
|
9
|
+
max_languages: number;
|
|
10
|
+
allowed_languages: string[]
|
|
11
|
+
};
|
|
12
|
+
|
|
13
|
+
ASR: {
|
|
14
|
+
spoken_languages: string[];
|
|
15
|
+
};
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
// eslint-disable-next-line no-undef
|
|
19
|
+
type TRANSCRIPTION_TYPES = 'unknown'|
|
|
20
|
+
'eva_wake' |
|
|
21
|
+
'eva_thanks' |
|
|
22
|
+
'eva_cancel' |
|
|
23
|
+
'highlight_created'|
|
|
24
|
+
'transcript_interim_results'|
|
|
25
|
+
'transcript_final_result';
|
|
26
|
+
|
|
27
|
+
/**
|
|
28
|
+
* Class for an Transcription Object
|
|
29
|
+
*/
|
|
30
|
+
interface Transcription {
|
|
31
|
+
start_millis:number;
|
|
32
|
+
end_millis:number;
|
|
33
|
+
text:string;
|
|
34
|
+
transcript_language_code: string;
|
|
35
|
+
translations:{[x:string]:string};
|
|
36
|
+
csis:number[];
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
/**
|
|
40
|
+
* Highlights
|
|
41
|
+
*/
|
|
42
|
+
interface Highlight {
|
|
43
|
+
highlight_id: string;
|
|
44
|
+
transcript: string;
|
|
45
|
+
highlight_label: string;
|
|
46
|
+
highlight_source: string;
|
|
47
|
+
start_millis:number;
|
|
48
|
+
end_millis:number;
|
|
49
|
+
csis:number[];
|
|
50
|
+
}
|
|
51
|
+
/**
|
|
52
|
+
* Type for Transcription message
|
|
53
|
+
*/
|
|
54
|
+
interface TranscriptionResponse {
|
|
55
|
+
type: TRANSCRIPTION_TYPES;
|
|
56
|
+
transcript_id: string;
|
|
57
|
+
translations?: {[x:string]:string};
|
|
58
|
+
transcripts?: Transcription[];
|
|
59
|
+
transcript?: Transcription;
|
|
60
|
+
highlight?: Highlight;
|
|
61
|
+
csis: number[];
|
|
62
|
+
data:string;
|
|
63
|
+
command_response: string;
|
|
64
|
+
}
|
|
65
|
+
/**
|
|
66
|
+
* Type for CaptionLanguageResponse
|
|
67
|
+
*/
|
|
68
|
+
interface CaptionLanguageResponse {
|
|
69
|
+
requestId: string;
|
|
70
|
+
statusCode: number;
|
|
71
|
+
errorCode:number;
|
|
72
|
+
message:string;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
interface IVoiceaChannel {
|
|
76
|
+
setSpokenLanguage: (languageCode: string) => Promise<void>;
|
|
77
|
+
requestLanguage: (languageCode: string) => void;
|
|
78
|
+
turnOnCaptions: () => undefined | Promise<void>;
|
|
79
|
+
toggleTranscribing: (activate:boolean) => undefined|Promise<void>;
|
|
80
|
+
}
|
|
81
|
+
export {AnnouncementPayload, CaptionLanguageResponse, TranscriptionResponse, Transcription, Highlight, IVoiceaChannel};
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import {assert} from '@webex/test-helper-chai';
|
|
2
|
+
import {millisToMinutesAndSeconds} from '@webex/internal-plugin-voicea/src/utils';
|
|
3
|
+
|
|
4
|
+
describe('Voicea utils', () => {
  describe('#millisToMinutesAndSeconds()', () => {
    it('returns the correct timestamp', () => {
      // 2000 ms is two seconds -> zero-padded seconds field
      assert.equal(millisToMinutesAndSeconds(2000), '0:02');
    });
  });
});
|