web-speech-cognitive-services 7.1.4-master.151bc9b → 8.0.0-main.15b930d

This diff shows the contents of the two publicly released package versions as they appear in their public registry. It is provided for informational purposes only.
Files changed (100)
  1. package/dist/web-speech-cognitive-services.d.mts +329 -0
  2. package/dist/web-speech-cognitive-services.d.ts +329 -0
  3. package/dist/web-speech-cognitive-services.development.js +25746 -0
  4. package/dist/web-speech-cognitive-services.development.js.map +1 -0
  5. package/dist/web-speech-cognitive-services.js +1409 -0
  6. package/dist/web-speech-cognitive-services.js.map +1 -0
  7. package/dist/web-speech-cognitive-services.mjs +1374 -0
  8. package/dist/web-speech-cognitive-services.mjs.map +1 -0
  9. package/dist/web-speech-cognitive-services.production.min.js +31 -0
  10. package/dist/web-speech-cognitive-services.production.min.js.map +1 -0
  11. package/package.json +67 -47
  12. package/CHANGELOG.md +0 -372
  13. package/lib/BingSpeech/SpeechToText/SpeechGrammarList.js +0 -94
  14. package/lib/BingSpeech/SpeechToText/SpeechGrammarList.js.map +0 -1
  15. package/lib/BingSpeech/SpeechToText/createSpeechRecognitionPonyfill.js +0 -483
  16. package/lib/BingSpeech/SpeechToText/createSpeechRecognitionPonyfill.js.map +0 -1
  17. package/lib/BingSpeech/SpeechToText.js +0 -14
  18. package/lib/BingSpeech/SpeechToText.js.map +0 -1
  19. package/lib/BingSpeech/TextToSpeech/AudioContextConsumer.js +0 -122
  20. package/lib/BingSpeech/TextToSpeech/AudioContextConsumer.js.map +0 -1
  21. package/lib/BingSpeech/TextToSpeech/AudioContextQueue.js +0 -104
  22. package/lib/BingSpeech/TextToSpeech/AudioContextQueue.js.map +0 -1
  23. package/lib/BingSpeech/TextToSpeech/SpeechSynthesisUtterance.js +0 -264
  24. package/lib/BingSpeech/TextToSpeech/SpeechSynthesisUtterance.js.map +0 -1
  25. package/lib/BingSpeech/TextToSpeech/SpeechSynthesisVoice.js +0 -61
  26. package/lib/BingSpeech/TextToSpeech/SpeechSynthesisVoice.js.map +0 -1
  27. package/lib/BingSpeech/TextToSpeech/buildSSML.js +0 -32
  28. package/lib/BingSpeech/TextToSpeech/buildSSML.js.map +0 -1
  29. package/lib/BingSpeech/TextToSpeech/createSpeechSynthesisPonyfill.js +0 -220
  30. package/lib/BingSpeech/TextToSpeech/createSpeechSynthesisPonyfill.js.map +0 -1
  31. package/lib/BingSpeech/TextToSpeech/fetchSpeechData.js +0 -74
  32. package/lib/BingSpeech/TextToSpeech/fetchSpeechData.js.map +0 -1
  33. package/lib/BingSpeech/TextToSpeech/fetchVoices.js +0 -335
  34. package/lib/BingSpeech/TextToSpeech/fetchVoices.js.map +0 -1
  35. package/lib/BingSpeech/TextToSpeech/isSSML.js +0 -13
  36. package/lib/BingSpeech/TextToSpeech/isSSML.js.map +0 -1
  37. package/lib/BingSpeech/TextToSpeech/subscribeEvent.js +0 -14
  38. package/lib/BingSpeech/TextToSpeech/subscribeEvent.js.map +0 -1
  39. package/lib/BingSpeech/TextToSpeech.js +0 -14
  40. package/lib/BingSpeech/TextToSpeech.js.map +0 -1
  41. package/lib/BingSpeech/Util/DOMEventEmitter.js +0 -61
  42. package/lib/BingSpeech/Util/DOMEventEmitter.js.map +0 -1
  43. package/lib/BingSpeech/Util/createFetchTokenUsingSubscriptionKey.js +0 -41
  44. package/lib/BingSpeech/Util/createFetchTokenUsingSubscriptionKey.js.map +0 -1
  45. package/lib/BingSpeech/fetchAuthorizationToken.js +0 -57
  46. package/lib/BingSpeech/fetchAuthorizationToken.js.map +0 -1
  47. package/lib/BingSpeech/index.js +0 -84
  48. package/lib/BingSpeech/index.js.map +0 -1
  49. package/lib/SpeechServices/SpeechSDK.js +0 -19
  50. package/lib/SpeechServices/SpeechSDK.js.map +0 -1
  51. package/lib/SpeechServices/SpeechToText/SpeechGrammarList.js +0 -45
  52. package/lib/SpeechServices/SpeechToText/SpeechGrammarList.js.map +0 -1
  53. package/lib/SpeechServices/SpeechToText/cognitiveServiceEventResultToWebSpeechRecognitionResultList.js +0 -56
  54. package/lib/SpeechServices/SpeechToText/cognitiveServiceEventResultToWebSpeechRecognitionResultList.js.map +0 -1
  55. package/lib/SpeechServices/SpeechToText/createSpeechRecognitionPonyfill.js +0 -984
  56. package/lib/SpeechServices/SpeechToText/createSpeechRecognitionPonyfill.js.map +0 -1
  57. package/lib/SpeechServices/SpeechToText.js +0 -24
  58. package/lib/SpeechServices/SpeechToText.js.map +0 -1
  59. package/lib/SpeechServices/TextToSpeech/AudioContextConsumer.js +0 -92
  60. package/lib/SpeechServices/TextToSpeech/AudioContextConsumer.js.map +0 -1
  61. package/lib/SpeechServices/TextToSpeech/AudioContextQueue.js +0 -111
  62. package/lib/SpeechServices/TextToSpeech/AudioContextQueue.js.map +0 -1
  63. package/lib/SpeechServices/TextToSpeech/SpeechSynthesisEvent.js +0 -40
  64. package/lib/SpeechServices/TextToSpeech/SpeechSynthesisEvent.js.map +0 -1
  65. package/lib/SpeechServices/TextToSpeech/SpeechSynthesisUtterance.js +0 -283
  66. package/lib/SpeechServices/TextToSpeech/SpeechSynthesisUtterance.js.map +0 -1
  67. package/lib/SpeechServices/TextToSpeech/SpeechSynthesisVoice.js +0 -63
  68. package/lib/SpeechServices/TextToSpeech/SpeechSynthesisVoice.js.map +0 -1
  69. package/lib/SpeechServices/TextToSpeech/buildSSML.js +0 -32
  70. package/lib/SpeechServices/TextToSpeech/buildSSML.js.map +0 -1
  71. package/lib/SpeechServices/TextToSpeech/createSpeechSynthesisPonyfill.js +0 -282
  72. package/lib/SpeechServices/TextToSpeech/createSpeechSynthesisPonyfill.js.map +0 -1
  73. package/lib/SpeechServices/TextToSpeech/fetchCustomVoices.js +0 -110
  74. package/lib/SpeechServices/TextToSpeech/fetchCustomVoices.js.map +0 -1
  75. package/lib/SpeechServices/TextToSpeech/fetchSpeechData.js +0 -127
  76. package/lib/SpeechServices/TextToSpeech/fetchSpeechData.js.map +0 -1
  77. package/lib/SpeechServices/TextToSpeech/fetchVoices.js +0 -87
  78. package/lib/SpeechServices/TextToSpeech/fetchVoices.js.map +0 -1
  79. package/lib/SpeechServices/TextToSpeech/isSSML.js +0 -13
  80. package/lib/SpeechServices/TextToSpeech/isSSML.js.map +0 -1
  81. package/lib/SpeechServices/TextToSpeech/subscribeEvent.js +0 -14
  82. package/lib/SpeechServices/TextToSpeech/subscribeEvent.js.map +0 -1
  83. package/lib/SpeechServices/TextToSpeech.js +0 -14
  84. package/lib/SpeechServices/TextToSpeech.js.map +0 -1
  85. package/lib/SpeechServices/fetchAuthorizationToken.js +0 -58
  86. package/lib/SpeechServices/fetchAuthorizationToken.js.map +0 -1
  87. package/lib/SpeechServices/patchOptions.js +0 -213
  88. package/lib/SpeechServices/patchOptions.js.map +0 -1
  89. package/lib/SpeechServices/resolveFunctionOrReturnValue.js +0 -11
  90. package/lib/SpeechServices/resolveFunctionOrReturnValue.js.map +0 -1
  91. package/lib/SpeechServices.js +0 -73
  92. package/lib/SpeechServices.js.map +0 -1
  93. package/lib/Util/arrayToMap.js +0 -28
  94. package/lib/Util/arrayToMap.js.map +0 -1
  95. package/lib/Util/createPromiseQueue.js +0 -40
  96. package/lib/Util/createPromiseQueue.js.map +0 -1
  97. package/lib/index.js +0 -14
  98. package/lib/index.js.map +0 -1
  99. package/umd/web-speech-cognitive-services.development.js +0 -4740
  100. package/umd/web-speech-cognitive-services.production.min.js +0 -2
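The file list above captures the packaging change behind the major-version bump: the Babel-compiled CommonJS tree under lib/ (including the whole deprecated BingSpeech ponyfill) and the umd/ bundles are removed, replaced by bundled entry points under dist/ with CommonJS (.js), ESM (.mjs), development and minified production builds, source maps, and TypeScript declarations (.d.ts / .d.mts), which the old lib/ tree never shipped. A minimal consumption sketch under the new layout — assuming the package root still exposes the documented createSpeechServicesPonyfill factory and that package.json's main/module/exports fields point at the new dist/ files, neither of which is shown in this excerpt of the diff:

```js
// Sketch only: createSpeechServicesPonyfill and the credentials shape are
// assumed from the package's 7.x documentation, not taken from this diff.
import { createSpeechServicesPonyfill } from 'web-speech-cognitive-services';

const { SpeechRecognition, speechSynthesis, SpeechSynthesisUtterance } = createSpeechServicesPonyfill({
  credentials: {
    region: 'westus',                        // any Azure Speech Services region
    subscriptionKey: 'YOUR_SUBSCRIPTION_KEY' // or an authorizationToken instead
  }
});

// Speech-to-text through the ponyfilled Web Speech API surface.
const recognition = new SpeechRecognition();

recognition.lang = 'en-US';
recognition.onresult = ({ results }) => console.log(results[0][0].transcript);
recognition.start();
```

Under Node or a bundler, the resolver would pick the .mjs build for import and the .js build for require; how the development and production.min bundles are selected is governed by package.json, which this excerpt does not show.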
package/lib/BingSpeech/SpeechToText/createSpeechRecognitionPonyfill.js
@@ -1,483 +0,0 @@
- "use strict";
-
- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
- var _typeof = require("@babel/runtime/helpers/typeof");
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.default = void 0;
-
- var _objectWithoutProperties2 = _interopRequireDefault(require("@babel/runtime/helpers/objectWithoutProperties"));
-
- var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
- var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
- var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-
- var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
-
- var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
-
- var _inherits2 = _interopRequireDefault(require("@babel/runtime/helpers/inherits"));
-
- var _possibleConstructorReturn2 = _interopRequireDefault(require("@babel/runtime/helpers/possibleConstructorReturn"));
-
- var _getPrototypeOf2 = _interopRequireDefault(require("@babel/runtime/helpers/getPrototypeOf"));
-
- var CognitiveSpeech = _interopRequireWildcard(require("microsoft-speech-browser-sdk"));
-
- var _eventAsPromise = _interopRequireDefault(require("event-as-promise"));
-
- var _memoizeOne = _interopRequireDefault(require("memoize-one"));
-
- var _DOMEventEmitter2 = _interopRequireDefault(require("../Util/DOMEventEmitter"));
-
- var _fetchAuthorizationToken = _interopRequireDefault(require("../fetchAuthorizationToken"));
-
- var _SpeechGrammarList = _interopRequireDefault(require("./SpeechGrammarList"));
-
- var _excluded = ["eventListener"];
-
- function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function _getRequireWildcardCache(nodeInterop) { return nodeInterop ? cacheNodeInterop : cacheBabelInterop; })(nodeInterop); }
-
- function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || _typeof(obj) !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; }
-
- function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; }
-
- function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; }
-
- function _createSuper(Derived) { var hasNativeReflectConstruct = _isNativeReflectConstruct(); return function _createSuperInternal() { var Super = (0, _getPrototypeOf2.default)(Derived), result; if (hasNativeReflectConstruct) { var NewTarget = (0, _getPrototypeOf2.default)(this).constructor; result = Reflect.construct(Super, arguments, NewTarget); } else { result = Super.apply(this, arguments); } return (0, _possibleConstructorReturn2.default)(this, result); }; }
-
- function _isNativeReflectConstruct() { if (typeof Reflect === "undefined" || !Reflect.construct) return false; if (Reflect.construct.sham) return false; if (typeof Proxy === "function") return true; try { Boolean.prototype.valueOf.call(Reflect.construct(Boolean, [], function () {})); return true; } catch (e) { return false; } }
-
- var VERSION = "7.1.4-master.151bc9b";
-
- function buildSpeechResult(transcript, confidence, isFinal) {
- var result = [{
- confidence: confidence,
- transcript: transcript
- }];
- result.isFinal = isFinal;
- return {
- results: [result],
- type: 'result'
- };
- }
-
- function bingSpeechPromisify(fn) {
- return function () {
- try {
- var _sink = new CognitiveSpeech.Sink();
-
- fn().then(_sink.Resolve, _sink.Reject);
- return new CognitiveSpeech.Promise(_sink);
- } catch (err) {
- sink.Reject(err.message);
- }
- };
- }
-
- var _default = function _default(_ref) {
- var authorizationToken = _ref.authorizationToken,
- subscriptionKey = _ref.subscriptionKey,
- textNormalization = _ref.textNormalization;
-
- if (!authorizationToken && !subscriptionKey) {
- console.warn('Either authorization token or subscription key must be specified');
- return {};
- } else if (!window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {
- console.warn('This browser does not support WebRTC and it will not work with Cognitive Services Speech Services.');
- return {};
- }
-
- var SpeechRecognition = /*#__PURE__*/function (_DOMEventEmitter) {
- (0, _inherits2.default)(SpeechRecognition, _DOMEventEmitter);
-
- var _super = _createSuper(SpeechRecognition);
-
- function SpeechRecognition() {
- var _this;
-
- (0, _classCallCheck2.default)(this, SpeechRecognition);
- _this = _super.call(this, ['audiostart', 'soundstart', 'speechstart', 'speechend', 'soundend', 'audioend', 'result', 'nomatch', 'error', 'start', 'end', 'cognitiveservices']);
- _this._lang = typeof window !== 'undefined' ? window.document.documentElement.getAttribute('lang') || window.navigator.language : 'en-US';
- _this.readyState = 0;
- _this.createRecognizer = (0, _memoizeOne.default)(function (language) {
- var mode = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : CognitiveSpeech.RecognitionMode.Interactive;
- var osPlatform = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : window.navigator.userAgent;
- var osName = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : window.navigator.appName;
- var osVersion = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : window.navigator.appVersion;
- var deviceManufacturer = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : 'microsoft-speech-browser-sdk';
- var deviceModel = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 'web-speech-cognitive-services';
- var deviceVersion = arguments.length > 7 && arguments[7] !== undefined ? arguments[7] : VERSION;
- var config = new CognitiveSpeech.RecognizerConfig(new CognitiveSpeech.SpeechConfig(new CognitiveSpeech.Context(new CognitiveSpeech.OS(osPlatform, osName, osVersion), new CognitiveSpeech.Device(deviceManufacturer, deviceModel, deviceVersion))), mode, language, CognitiveSpeech.SpeechResultFormat.Detailed);
- var fetchToken;
-
- if (authorizationToken) {
- fetchToken = bingSpeechPromisify( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee() {
- return _regenerator.default.wrap(function _callee$(_context) {
- while (1) {
- switch (_context.prev = _context.next) {
- case 0:
- if (!(typeof authorizationToken === 'function')) {
- _context.next = 6;
- break;
- }
-
- _context.next = 3;
- return authorizationToken();
-
- case 3:
- _context.t0 = _context.sent;
- _context.next = 7;
- break;
-
- case 6:
- _context.t0 = authorizationToken;
-
- case 7:
- return _context.abrupt("return", _context.t0);
-
- case 8:
- case "end":
- return _context.stop();
- }
- }
- }, _callee);
- })));
- } else if (subscriptionKey) {
- fetchToken = bingSpeechPromisify( /*#__PURE__*/(0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee2() {
- return _regenerator.default.wrap(function _callee2$(_context2) {
- while (1) {
- switch (_context2.prev = _context2.next) {
- case 0:
- return _context2.abrupt("return", (0, _fetchAuthorizationToken.default)(subscriptionKey));
-
- case 1:
- case "end":
- return _context2.stop();
- }
- }
- }, _callee2);
- })));
- }
-
- return CognitiveSpeech.CreateRecognizer(config, new CognitiveSpeech.CognitiveTokenAuthentication(fetchToken, fetchToken));
- });
- return _this;
- }
-
- (0, _createClass2.default)(SpeechRecognition, [{
- key: "grammars",
- get: function get() {
- return this._grammars;
- },
- set: function set(nextGrammars) {
- if (nextGrammars && !(nextGrammars instanceof _SpeechGrammarList.default)) {
- throw new Error('must be instance of SpeechGrammarList from "web-speech-cognitive-services"');
- }
-
- this._grammars = nextGrammars;
- }
- }, {
- key: "lang",
- get: function get() {
- return this._lang;
- },
- set: function set(nextLang) {
- this._lang = nextLang;
- }
- }, {
- key: "continuous",
- get: function get() {
- return false;
- },
- set: function set(nextContinuous) {
- nextContinuous && console.warn("Bing Speech: Cannot set continuous to ".concat(nextContinuous, ", this feature is not supported."));
- }
- }, {
- key: "interimResults",
- get: function get() {
- return true;
- },
- set: function set(nextInterimResults) {
- !nextInterimResults && console.warn("Bing Speech: Cannot set interimResults to ".concat(nextInterimResults, ", this feature is not supported."));
- }
- }, {
- key: "maxAlternatives",
- get: function get() {
- return 1;
- },
- set: function set(nextMaxAlternatives) {
- nextMaxAlternatives !== 1 && console.warn("Bing Speech: Cannot set maxAlternatives to ".concat(nextMaxAlternatives, ", this feature is not supported."));
- }
- }, {
- key: "serviceURI",
- get: function get() {
- return null;
- },
- set: function set(nextServiceURI) {
- nextServiceURI && console.warn("Bing Speech: Cannot set serviceURI to ".concat(nextServiceURI, ", this feature is not supported."));
- }
- }, {
- key: "abort",
- value: function abort() {
- // TODO: Should redesign how to stop a recognition session
- // After abort is called, we should not saw it is a "success", "silent", or "no match"
- var _ref4 = this.recognizer || {},
- AudioSource = _ref4.AudioSource;
-
- AudioSource && AudioSource.TurnOff();
- this._aborted = true;
- }
- }, {
- key: "emitCognitiveServices",
- value: function emitCognitiveServices(type, event) {
- this.emit('cognitiveservices', _objectSpread(_objectSpread({}, event), {}, {
- subType: type
- }));
- }
- }, {
- key: "stop",
- value: function stop() {
- // TODO: Support stop
- var _ref5 = this.recognizer || {},
- AudioSource = _ref5.AudioSource;
-
- AudioSource && AudioSource.TurnOff();
- }
- }, {
- key: "start",
- value: function () {
- var _start = (0, _asyncToGenerator2.default)( /*#__PURE__*/_regenerator.default.mark(function _callee3() {
- var recognizer, _toPromise, eventListener, promises, speechContext, recognitionTriggered, error, listeningStarted, connectingToService, recognitionStarted, gotFirstHypothesis, speechHypothesis, recognitionEnded, speechDetailedPhrase, recognitionResult, best, _recognitionEnded;
-
- return _regenerator.default.wrap(function _callee3$(_context3) {
- while (1) {
- switch (_context3.prev = _context3.next) {
- case 0:
- recognizer = this.recognizer = this.createRecognizer(this.lang, this.osPlatform || window.navigator.userAgent, this.osName || window.navigator.appName, this.osVersion || window.navigator.appVersion, this.deviceManufacturer || 'web-speech-cognitive-services', this.deviceModel || 'web-speech-cognitive-services', this.deviceVersion || VERSION);
- _toPromise = toPromise(), eventListener = _toPromise.eventListener, promises = (0, _objectWithoutProperties2.default)(_toPromise, _excluded);
- speechContext = this.grammars && this.grammars.createSpeechContext();
- recognizer.Recognize(eventListener, speechContext && JSON.stringify(speechContext));
- this._aborted = false;
- _context3.next = 7;
- return promises.recognitionTriggered;
-
- case 7:
- recognitionTriggered = _context3.sent;
- this.emitCognitiveServices('recognitionTriggered', recognitionTriggered);
- _context3.next = 11;
- return Promise.race([promises.listeningStarted, promises.recognitionEnded]);
-
- case 11:
- listeningStarted = _context3.sent;
- this.emitCognitiveServices(listeningStarted.Name === 'RecognitionEndedEvent' ? 'recognitionEnded' : ' listeningStarted', listeningStarted);
-
- if (!(listeningStarted.Name === 'RecognitionEndedEvent')) {
- _context3.next = 17;
- break;
- }
-
- // Possibly not authorized to use microphone
- if (listeningStarted.Status === CognitiveSpeech.RecognitionCompletionStatus.AudioSourceError) {
- error = 'not-allowed';
- } else {
- error = CognitiveSpeech.RecognitionCompletionStatus[listeningStarted.Status];
- }
-
- _context3.next = 62;
- break;
-
- case 17:
- this.emit('start');
- _context3.next = 20;
- return promises.connectingToService;
-
- case 20:
- connectingToService = _context3.sent;
- this.emitCognitiveServices('connectingToService', connectingToService);
- _context3.next = 24;
- return Promise.race([promises.recognitionStarted, promises.recognitionEnded]);
-
- case 24:
- recognitionStarted = _context3.sent;
- this.emitCognitiveServices(recognitionStarted.Name === 'RecognitionEndedEvent' ? 'recognitionEnded' : 'recognitionStarted', recognitionStarted);
- this.emit('audiostart');
-
- if (!(recognitionStarted.Name === 'RecognitionEndedEvent')) {
- _context3.next = 31;
- break;
- }
-
- // Possibly network error
- if (recognitionStarted.Status === CognitiveSpeech.RecognitionCompletionStatus.ConnectError) {
- error = 'network';
- } else {
- error = CognitiveSpeech.RecognitionCompletionStatus[recognitionStarted.Status];
- }
-
- _context3.next = 42;
- break;
-
- case 31:
- _context3.next = 33;
- return Promise.race([promises.getSpeechHypothesisPromise(), promises.speechEndDetected]);
-
- case 33:
- speechHypothesis = _context3.sent;
- this.emitCognitiveServices(speechHypothesis.Name === 'SpeechEndDetectedEvent' ? 'speechEndDetected' : 'speechHypothesis', speechHypothesis);
-
- if (!(speechHypothesis.Name === 'SpeechEndDetectedEvent')) {
- _context3.next = 37;
- break;
- }
-
- return _context3.abrupt("break", 41);
-
- case 37:
- if (!gotFirstHypothesis) {
- gotFirstHypothesis = true;
- this.emit('soundstart');
- this.emit('speechstart');
- }
-
- this.emit('result', buildSpeechResult(speechHypothesis.Result.Text, .5, false));
-
- case 39:
- _context3.next = 31;
- break;
-
- case 41:
- if (gotFirstHypothesis) {
- this.emit('speechend');
- this.emit('soundend');
- }
-
- case 42:
- this.emit('audioend');
-
- if (!this._aborted) {
- _context3.next = 51;
- break;
- }
-
- error = 'aborted';
- _context3.next = 47;
- return promises.recognitionEnded;
-
- case 47:
- recognitionEnded = _context3.sent;
- this.emitCognitiveServices('recognitionEnded', recognitionEnded);
- _context3.next = 62;
- break;
-
- case 51:
- _context3.next = 53;
- return Promise.race([promises.speechDetailedPhrase, promises.recognitionEnded]);
-
- case 53:
- speechDetailedPhrase = _context3.sent;
- this.emitCognitiveServices(speechDetailedPhrase.Name === 'RecognitionEndedEvent' ? 'recognitionEnded' : 'speechDetailedPhrase', speechDetailedPhrase);
-
- if (!(speechDetailedPhrase.Name !== 'RecognitionEndedEvent')) {
- _context3.next = 62;
- break;
- }
-
- recognitionResult = CognitiveSpeech.RecognitionStatus[speechDetailedPhrase.Result.RecognitionStatus];
-
- if (recognitionResult === CognitiveSpeech.RecognitionStatus.Success) {
- // TODO: [P2] Support maxAlternatives
- best = speechDetailedPhrase.Result.NBest[0];
- this.emit('result', buildSpeechResult(textNormalization === 'itn' ? best.ITN : textNormalization === 'lexical' ? best.Lexical : textNormalization === 'maskeditn' ? best.MaskedITN : best.Display, best.Confidence, true));
- } else if (recognitionResult !== CognitiveSpeech.RecognitionStatus.NoMatch) {
- // Possibly silent or muted
- if (recognitionResult === CognitiveSpeech.RecognitionStatus.InitialSilenceTimeout) {
- error = 'no-speech';
- } else {
- error = speechDetailedPhrase.Result.RecognitionStatus;
- }
- }
-
- _context3.next = 60;
- return promises.recognitionEnded;
-
- case 60:
- _recognitionEnded = _context3.sent;
- this.emitCognitiveServices('recognitionEnded', _recognitionEnded);
-
- case 62:
- error && this.emit('error', {
- error: error
- });
- this.emit('end');
-
- case 64:
- case "end":
- return _context3.stop();
- }
- }
- }, _callee3, this);
- }));
-
- function start() {
- return _start.apply(this, arguments);
- }
-
- return start;
- }()
- }]);
- return SpeechRecognition;
- }(_DOMEventEmitter2.default);
-
- return {
- SpeechGrammarList: _SpeechGrammarList.default,
- SpeechRecognition: SpeechRecognition
- };
- };
-
- exports.default = _default;
-
- function toPromise() {
- var events = {
- ConnectingToServiceEvent: new _eventAsPromise.default(),
- ListeningStartedEvent: new _eventAsPromise.default(),
- RecognitionEndedEvent: new _eventAsPromise.default(),
- RecognitionStartedEvent: new _eventAsPromise.default(),
- RecognitionTriggeredEvent: new _eventAsPromise.default(),
- SpeechDetailedPhraseEvent: new _eventAsPromise.default(),
- SpeechEndDetectedEvent: new _eventAsPromise.default(),
- SpeechHypothesisEvent: new _eventAsPromise.default(),
- SpeechSimplePhraseEvent: new _eventAsPromise.default(),
- SpeechStartDetectedEvent: new _eventAsPromise.default()
- };
- return {
- connectingToService: events.ConnectingToServiceEvent.upcoming(),
- listeningStarted: events.ListeningStartedEvent.upcoming(),
- recognitionEnded: events.RecognitionEndedEvent.upcoming(),
- recognitionStarted: events.RecognitionStartedEvent.upcoming(),
- recognitionTriggered: events.RecognitionTriggeredEvent.upcoming(),
- speechDetailedPhrase: events.SpeechDetailedPhraseEvent.upcoming(),
- speechEndDetected: events.SpeechEndDetectedEvent.upcoming(),
- getSpeechHypothesisPromise: function getSpeechHypothesisPromise() {
- return events.SpeechHypothesisEvent.upcoming();
- },
- speechSimplePhrase: events.SpeechSimplePhraseEvent.upcoming(),
- speechStartDetected: events.SpeechStartDetectedEvent.upcoming(),
- eventListener: function eventListener(event) {
- var name = event.Name;
- var eventAsPromise = events[name];
-
- if (eventAsPromise) {
- eventAsPromise.eventListener.call(null, event);
- } else {
- console.warn("Unexpected event \"".concat(name, "\" from Cognitive Services, please file a bug to https://github.com/compulim/web-speech-cognitive-services"));
- }
- }
- };
- }
- //# sourceMappingURL=createSpeechRecognitionPonyfill.js.map
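For context on the deletion above: the module's default export is a factory that takes { authorizationToken, subscriptionKey, textNormalization } and returns { SpeechGrammarList, SpeechRecognition }, a Web Speech API ponyfill built on the long-retired Bing Speech SDK (microsoft-speech-browser-sdk). A sketch of how it was typically consumed, based only on the deleted source — the deep-import path and the event wiring are assumptions, and the backing service no longer accepts requests:

```js
// Hypothetical usage of the removed Bing Speech ponyfill; the factory options
// and the shape of the 'result' event are taken from the deleted source above.
import createSpeechRecognitionPonyfill from 'web-speech-cognitive-services/lib/BingSpeech/SpeechToText';

const { SpeechRecognition } = createSpeechRecognitionPonyfill({
  subscriptionKey: 'YOUR_BING_SPEECH_KEY',
  textNormalization: 'itn' // or 'lexical' / 'maskeditn'; anything else yields the display form
});

const recognition = new SpeechRecognition();

recognition.lang = 'en-US';

// Each 'result' event carries { results: [[{ confidence, transcript }]] },
// where results[0].isFinal distinguishes interim hypotheses from the final phrase.
recognition.addEventListener('result', ({ results }) => {
  console.log(results[0][0].transcript, results[0].isFinal);
});

recognition.start();
```

Note the guard at the top of the factory: it returns an empty object when neither credential is supplied or when the browser lacks window.navigator.mediaDevices.getUserMedia.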
package/lib/BingSpeech/SpeechToText/createSpeechRecognitionPonyfill.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"createSpeechRecognitionPonyfill.js","names":["VERSION","buildSpeechResult","transcript","confidence","isFinal","result","results","type","bingSpeechPromisify","fn","sink","CognitiveSpeech","Sink","then","Resolve","Reject","Promise","err","message","authorizationToken","subscriptionKey","textNormalization","console","warn","window","navigator","mediaDevices","getUserMedia","SpeechRecognition","_lang","document","documentElement","getAttribute","language","readyState","createRecognizer","memoize","mode","RecognitionMode","Interactive","osPlatform","userAgent","osName","appName","osVersion","appVersion","deviceManufacturer","deviceModel","deviceVersion","config","RecognizerConfig","SpeechConfig","Context","OS","Device","SpeechResultFormat","Detailed","fetchToken","fetchAuthorizationToken","CreateRecognizer","CognitiveTokenAuthentication","_grammars","nextGrammars","SpeechGrammarList","Error","nextLang","nextContinuous","nextInterimResults","nextMaxAlternatives","nextServiceURI","recognizer","AudioSource","TurnOff","_aborted","event","emit","subType","lang","toPromise","eventListener","promises","speechContext","grammars","createSpeechContext","Recognize","JSON","stringify","recognitionTriggered","emitCognitiveServices","race","listeningStarted","recognitionEnded","Name","Status","RecognitionCompletionStatus","AudioSourceError","error","connectingToService","recognitionStarted","ConnectError","getSpeechHypothesisPromise","speechEndDetected","speechHypothesis","gotFirstHypothesis","Result","Text","speechDetailedPhrase","recognitionResult","RecognitionStatus","Success","best","NBest","ITN","Lexical","MaskedITN","Display","Confidence","NoMatch","InitialSilenceTimeout","DOMEventEmitter","events","ConnectingToServiceEvent","EventAsPromise","ListeningStartedEvent","RecognitionEndedEvent","RecognitionStartedEvent","RecognitionTriggeredEvent","SpeechDetailedPhraseEvent","SpeechEndDetectedEvent","SpeechHypothesisEvent","SpeechSimplePhraseEvent","SpeechStartDetectedEvent","upcoming","speechSimplePhrase","speechStartDetected","name","eventAsPromise","call"],"sources":["../../../src/BingSpeech/SpeechToText/createSpeechRecognitionPonyfill.js"],"sourcesContent":["import * as CognitiveSpeech from 'microsoft-speech-browser-sdk';\nimport EventAsPromise from 'event-as-promise';\nimport memoize from 'memoize-one';\n\nimport DOMEventEmitter from '../Util/DOMEventEmitter';\nimport fetchAuthorizationToken from '../fetchAuthorizationToken';\nimport SpeechGrammarList from './SpeechGrammarList';\n\nconst { npm_package_version: VERSION } = process.env;\n\nfunction buildSpeechResult(transcript, confidence, isFinal) {\n const result = [{ confidence, transcript }];\n\n result.isFinal = isFinal;\n\n return { results: [result], type: 'result' };\n}\n\nfunction bingSpeechPromisify(fn) {\n return () => {\n try {\n const sink = new CognitiveSpeech.Sink();\n\n fn().then(sink.Resolve, sink.Reject);\n\n return new CognitiveSpeech.Promise(sink);\n } catch (err) {\n sink.Reject(err.message);\n }\n };\n}\n\nexport default ({\n authorizationToken,\n subscriptionKey,\n textNormalization\n}) => {\n if (!authorizationToken && !subscriptionKey) {\n console.warn('Either authorization token or subscription key must be specified');\n\n return {};\n } else if (!window.navigator.mediaDevices || !window.navigator.mediaDevices.getUserMedia) {\n console.warn('This browser does not support WebRTC and it will not work with Cognitive Services Speech Services.');\n\n return {};\n }\n\n class SpeechRecognition extends 
DOMEventEmitter {\n constructor() {\n super([\n 'audiostart',\n 'soundstart',\n 'speechstart',\n 'speechend',\n 'soundend',\n 'audioend',\n 'result',\n 'nomatch',\n 'error',\n 'start',\n 'end',\n 'cognitiveservices'\n ]);\n\n this._lang = typeof window !== 'undefined' ? (window.document.documentElement.getAttribute('lang') || window.navigator.language) : 'en-US';\n\n this.readyState = 0;\n\n this.createRecognizer = memoize((\n language,\n mode = CognitiveSpeech.RecognitionMode.Interactive,\n osPlatform = window.navigator.userAgent,\n osName = window.navigator.appName,\n osVersion = window.navigator.appVersion,\n deviceManufacturer = 'microsoft-speech-browser-sdk',\n deviceModel = 'web-speech-cognitive-services',\n deviceVersion = VERSION\n ) => {\n const config = new CognitiveSpeech.RecognizerConfig(\n new CognitiveSpeech.SpeechConfig(\n new CognitiveSpeech.Context(\n new CognitiveSpeech.OS(\n osPlatform,\n osName,\n osVersion\n ),\n new CognitiveSpeech.Device(\n deviceManufacturer,\n deviceModel,\n deviceVersion\n )\n )\n ),\n mode,\n language,\n CognitiveSpeech.SpeechResultFormat.Detailed\n );\n\n let fetchToken;\n\n if (authorizationToken) {\n fetchToken = bingSpeechPromisify(async () => typeof authorizationToken === 'function' ? await authorizationToken() : authorizationToken);\n } else if (subscriptionKey) {\n fetchToken = bingSpeechPromisify(async () => fetchAuthorizationToken(subscriptionKey));\n }\n\n return CognitiveSpeech.CreateRecognizer(config, new CognitiveSpeech.CognitiveTokenAuthentication(fetchToken, fetchToken));\n });\n }\n\n get grammars() { return this._grammars; }\n set grammars(nextGrammars) {\n if (nextGrammars && !(nextGrammars instanceof SpeechGrammarList)) {\n throw new Error('must be instance of SpeechGrammarList from \"web-speech-cognitive-services\"');\n }\n\n this._grammars = nextGrammars;\n }\n\n get lang() { return this._lang; }\n set lang(nextLang) { this._lang = nextLang; }\n\n get continuous() { return false; }\n set continuous(nextContinuous) { nextContinuous && console.warn(`Bing Speech: Cannot set continuous to ${ nextContinuous }, this feature is not supported.`); }\n\n get interimResults() { return true; }\n set interimResults(nextInterimResults) { !nextInterimResults && console.warn(`Bing Speech: Cannot set interimResults to ${ nextInterimResults }, this feature is not supported.`); }\n\n get maxAlternatives() { return 1; }\n set maxAlternatives(nextMaxAlternatives) { nextMaxAlternatives !== 1 && console.warn(`Bing Speech: Cannot set maxAlternatives to ${ nextMaxAlternatives }, this feature is not supported.`); }\n\n get serviceURI() { return null; }\n set serviceURI(nextServiceURI) { nextServiceURI && console.warn(`Bing Speech: Cannot set serviceURI to ${ nextServiceURI }, this feature is not supported.`); }\n\n abort() {\n // TODO: Should redesign how to stop a recognition session\n // After abort is called, we should not saw it is a \"success\", \"silent\", or \"no match\"\n const { AudioSource } = this.recognizer || {};\n\n AudioSource && AudioSource.TurnOff();\n\n this._aborted = true;\n }\n\n emitCognitiveServices(type, event) {\n this.emit('cognitiveservices', {\n ...event,\n subType: type\n });\n }\n\n stop() {\n // TODO: Support stop\n\n const { AudioSource } = this.recognizer || {};\n\n AudioSource && AudioSource.TurnOff();\n }\n\n async start() {\n const recognizer = this.recognizer = this.createRecognizer(\n this.lang,\n this.osPlatform || window.navigator.userAgent,\n this.osName || window.navigator.appName,\n this.osVersion || 
window.navigator.appVersion,\n this.deviceManufacturer || 'web-speech-cognitive-services',\n this.deviceModel || 'web-speech-cognitive-services',\n this.deviceVersion || VERSION\n );\n\n const { eventListener, ...promises } = toPromise();\n\n const speechContext = this.grammars && this.grammars.createSpeechContext();\n\n recognizer.Recognize(eventListener, speechContext && JSON.stringify(speechContext));\n this._aborted = false;\n\n const recognitionTriggered = await promises.recognitionTriggered;\n\n this.emitCognitiveServices('recognitionTriggered', recognitionTriggered);\n\n let error;\n\n const listeningStarted = await Promise.race([\n promises.listeningStarted,\n promises.recognitionEnded\n ]);\n\n this.emitCognitiveServices(listeningStarted.Name === 'RecognitionEndedEvent' ? 'recognitionEnded' : ' listeningStarted', listeningStarted);\n\n if (listeningStarted.Name === 'RecognitionEndedEvent') {\n // Possibly not authorized to use microphone\n if (listeningStarted.Status === CognitiveSpeech.RecognitionCompletionStatus.AudioSourceError) {\n error = 'not-allowed';\n } else {\n error = CognitiveSpeech.RecognitionCompletionStatus[listeningStarted.Status];\n }\n } else {\n this.emit('start');\n\n const connectingToService = await promises.connectingToService;\n\n this.emitCognitiveServices('connectingToService', connectingToService);\n\n const recognitionStarted = await Promise.race([\n promises.recognitionStarted,\n promises.recognitionEnded\n ]);\n\n this.emitCognitiveServices(\n recognitionStarted.Name === 'RecognitionEndedEvent' ? 'recognitionEnded' : 'recognitionStarted',\n recognitionStarted\n );\n\n this.emit('audiostart');\n\n if (recognitionStarted.Name === 'RecognitionEndedEvent') {\n // Possibly network error\n if (recognitionStarted.Status === CognitiveSpeech.RecognitionCompletionStatus.ConnectError) {\n error = 'network';\n } else {\n error = CognitiveSpeech.RecognitionCompletionStatus[recognitionStarted.Status];\n }\n } else {\n let gotFirstHypothesis;\n\n for (;;) {\n const speechHypothesis = await Promise.race([\n promises.getSpeechHypothesisPromise(),\n promises.speechEndDetected\n ]);\n\n this.emitCognitiveServices(\n speechHypothesis.Name === 'SpeechEndDetectedEvent' ? 'speechEndDetected' : 'speechHypothesis',\n speechHypothesis\n );\n\n if (speechHypothesis.Name === 'SpeechEndDetectedEvent') {\n break;\n }\n\n if (!gotFirstHypothesis) {\n gotFirstHypothesis = true;\n this.emit('soundstart');\n this.emit('speechstart');\n }\n\n this.emit('result', buildSpeechResult(speechHypothesis.Result.Text, .5, false));\n }\n\n if (gotFirstHypothesis) {\n this.emit('speechend');\n this.emit('soundend');\n }\n }\n\n this.emit('audioend');\n\n if (this._aborted) {\n error = 'aborted';\n\n const recognitionEnded = await promises.recognitionEnded;\n\n this.emitCognitiveServices('recognitionEnded', recognitionEnded);\n } else {\n const speechDetailedPhrase = await Promise.race([\n promises.speechDetailedPhrase,\n promises.recognitionEnded\n ]);\n\n this.emitCognitiveServices(\n speechDetailedPhrase.Name === 'RecognitionEndedEvent' ? 
'recognitionEnded' : 'speechDetailedPhrase',\n speechDetailedPhrase\n );\n\n if (speechDetailedPhrase.Name !== 'RecognitionEndedEvent') {\n const recognitionResult = CognitiveSpeech.RecognitionStatus[speechDetailedPhrase.Result.RecognitionStatus];\n\n if (recognitionResult === CognitiveSpeech.RecognitionStatus.Success) {\n // TODO: [P2] Support maxAlternatives\n const best = speechDetailedPhrase.Result.NBest[0];\n\n this.emit(\n 'result',\n buildSpeechResult(\n textNormalization === 'itn' ?\n best.ITN\n : textNormalization === 'lexical' ?\n best.Lexical\n : textNormalization === 'maskeditn' ?\n best.MaskedITN\n :\n best.Display,\n best.Confidence,\n true\n )\n );\n } else if (recognitionResult !== CognitiveSpeech.RecognitionStatus.NoMatch) {\n // Possibly silent or muted\n if (recognitionResult === CognitiveSpeech.RecognitionStatus.InitialSilenceTimeout) {\n error = 'no-speech';\n } else {\n error = speechDetailedPhrase.Result.RecognitionStatus;\n }\n }\n\n const recognitionEnded = await promises.recognitionEnded;\n\n this.emitCognitiveServices('recognitionEnded', recognitionEnded);\n }\n }\n }\n\n error && this.emit('error', { error });\n this.emit('end');\n }\n }\n\n return {\n SpeechGrammarList,\n SpeechRecognition\n };\n}\n\nfunction toPromise() {\n const events = {\n ConnectingToServiceEvent: new EventAsPromise(),\n ListeningStartedEvent: new EventAsPromise(),\n RecognitionEndedEvent: new EventAsPromise(),\n RecognitionStartedEvent: new EventAsPromise(),\n RecognitionTriggeredEvent: new EventAsPromise(),\n SpeechDetailedPhraseEvent: new EventAsPromise(),\n SpeechEndDetectedEvent: new EventAsPromise(),\n SpeechHypothesisEvent: new EventAsPromise(),\n SpeechSimplePhraseEvent: new EventAsPromise(),\n SpeechStartDetectedEvent: new EventAsPromise()\n };\n\n return {\n connectingToService: events.ConnectingToServiceEvent.upcoming(),\n listeningStarted: events.ListeningStartedEvent.upcoming(),\n recognitionEnded: events.RecognitionEndedEvent.upcoming(),\n recognitionStarted: events.RecognitionStartedEvent.upcoming(),\n recognitionTriggered: events.RecognitionTriggeredEvent.upcoming(),\n speechDetailedPhrase: events.SpeechDetailedPhraseEvent.upcoming(),\n speechEndDetected: events.SpeechEndDetectedEvent.upcoming(),\n getSpeechHypothesisPromise: () => events.SpeechHypothesisEvent.upcoming(),\n speechSimplePhrase: events.SpeechSimplePhraseEvent.upcoming(),\n speechStartDetected: events.SpeechStartDetectedEvent.upcoming(),\n eventListener: event => {\n const { Name: name } = event;\n const eventAsPromise = events[name];\n\n if (eventAsPromise) {\n eventAsPromise.eventListener.call(null, event);\n } else {\n console.warn(`Unexpected event \\\"${ name }\\\" from Cognitive Services, please file a bug to https://github.com/compulim/web-speech-cognitive-services`);\n }\n }\n 
};\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;AACA;;AACA;;AAEA;;AACA;;AACA;;;;;;;;;;;;;;;;AAEA,IAA6BA,OAA7B;;AAEA,SAASC,iBAAT,CAA2BC,UAA3B,EAAuCC,UAAvC,EAAmDC,OAAnD,EAA4D;EAC1D,IAAMC,MAAM,GAAG,CAAC;IAAEF,UAAU,EAAVA,UAAF;IAAcD,UAAU,EAAVA;EAAd,CAAD,CAAf;EAEAG,MAAM,CAACD,OAAP,GAAiBA,OAAjB;EAEA,OAAO;IAAEE,OAAO,EAAE,CAACD,MAAD,CAAX;IAAqBE,IAAI,EAAE;EAA3B,CAAP;AACD;;AAED,SAASC,mBAAT,CAA6BC,EAA7B,EAAiC;EAC/B,OAAO,YAAM;IACX,IAAI;MACF,IAAMC,KAAI,GAAG,IAAIC,eAAe,CAACC,IAApB,EAAb;;MAEAH,EAAE,GAAGI,IAAL,CAAUH,KAAI,CAACI,OAAf,EAAwBJ,KAAI,CAACK,MAA7B;MAEA,OAAO,IAAIJ,eAAe,CAACK,OAApB,CAA4BN,KAA5B,CAAP;IACD,CAND,CAME,OAAOO,GAAP,EAAY;MACZP,IAAI,CAACK,MAAL,CAAYE,GAAG,CAACC,OAAhB;IACD;EACF,CAVD;AAWD;;eAEc,wBAIT;EAAA,IAHJC,kBAGI,QAHJA,kBAGI;EAAA,IAFJC,eAEI,QAFJA,eAEI;EAAA,IADJC,iBACI,QADJA,iBACI;;EACJ,IAAI,CAACF,kBAAD,IAAuB,CAACC,eAA5B,EAA6C;IAC3CE,OAAO,CAACC,IAAR,CAAa,kEAAb;IAEA,OAAO,EAAP;EACD,CAJD,MAIO,IAAI,CAACC,MAAM,CAACC,SAAP,CAAiBC,YAAlB,IAAkC,CAACF,MAAM,CAACC,SAAP,CAAiBC,YAAjB,CAA8BC,YAArE,EAAmF;IACxFL,OAAO,CAACC,IAAR,CAAa,oGAAb;IAEA,OAAO,EAAP;EACD;;EATG,IAWEK,iBAXF;IAAA;;IAAA;;IAYF,6BAAc;MAAA;;MAAA;MACZ,0BAAM,CACJ,YADI,EAEJ,YAFI,EAGJ,aAHI,EAIJ,WAJI,EAKJ,UALI,EAMJ,UANI,EAOJ,QAPI,EAQJ,SARI,EASJ,OATI,EAUJ,OAVI,EAWJ,KAXI,EAYJ,mBAZI,CAAN;MAeA,MAAKC,KAAL,GAAa,OAAOL,MAAP,KAAkB,WAAlB,GAAiCA,MAAM,CAACM,QAAP,CAAgBC,eAAhB,CAAgCC,YAAhC,CAA6C,MAA7C,KAAwDR,MAAM,CAACC,SAAP,CAAiBQ,QAA1G,GAAsH,OAAnI;MAEA,MAAKC,UAAL,GAAkB,CAAlB;MAEA,MAAKC,gBAAL,GAAwB,IAAAC,mBAAA,EAAQ,UAC9BH,QAD8B,EAS3B;QAAA,IAPHI,IAOG,uEAPI1B,eAAe,CAAC2B,eAAhB,CAAgCC,WAOpC;QAAA,IANHC,UAMG,uEANUhB,MAAM,CAACC,SAAP,CAAiBgB,SAM3B;QAAA,IALHC,MAKG,uEALMlB,MAAM,CAACC,SAAP,CAAiBkB,OAKvB;QAAA,IAJHC,SAIG,uEAJSpB,MAAM,CAACC,SAAP,CAAiBoB,UAI1B;QAAA,IAHHC,kBAGG,uEAHkB,8BAGlB;QAAA,IAFHC,WAEG,uEAFW,+BAEX;QAAA,IADHC,aACG,uEADahD,OACb;QACH,IAAMiD,MAAM,GAAG,IAAItC,eAAe,CAACuC,gBAApB,CACb,IAAIvC,eAAe,CAACwC,YAApB,CACE,IAAIxC,eAAe,CAACyC,OAApB,CACE,IAAIzC,eAAe,CAAC0C,EAApB,CACEb,UADF,EAEEE,MAFF,EAGEE,SAHF,CADF,EAME,IAAIjC,eAAe,CAAC2C,MAApB,CACER,kBADF,EAEEC,WAFF,EAGEC,aAHF,CANF,CADF,CADa,EAebX,IAfa,EAgBbJ,QAhBa,EAiBbtB,eAAe,CAAC4C,kBAAhB,CAAmCC,QAjBtB,CAAf;QAoBA,IAAIC,UAAJ;;QAEA,IAAItC,kBAAJ,EAAwB;UACtBsC,UAAU,GAAGjD,mBAAmB,uFAAC;YAAA;cAAA;gBAAA;kBAAA;oBAAA,MAAY,OAAOW,kBAAP,KAA8B,UAA1C;sBAAA;sBAAA;oBAAA;;oBAAA;oBAAA,OAA6DA,kBAAkB,EAA/E;;kBAAA;oBAAA;oBAAA;oBAAA;;kBAAA;oBAAA,cAAoFA,kBAApF;;kBAAA;oBAAA;;kBAAA;kBAAA;oBAAA;gBAAA;cAAA;YAAA;UAAA,CAAD,GAAhC;QACD,CAFD,MAEO,IAAIC,eAAJ,EAAqB;UAC1BqC,UAAU,GAAGjD,mBAAmB,uFAAC;YAAA;cAAA;gBAAA;kBAAA;oBAAA,kCAAY,IAAAkD,gCAAA,EAAwBtC,eAAxB,CAAZ;;kBAAA;kBAAA;oBAAA;gBAAA;cAAA;YAAA;UAAA,CAAD,GAAhC;QACD;;QAED,OAAOT,eAAe,CAACgD,gBAAhB,CAAiCV,MAAjC,EAAyC,IAAItC,eAAe,CAACiD,4BAApB,CAAiDH,UAAjD,EAA6DA,UAA7D,CAAzC,CAAP;MACD,CAvCuB,CAAxB;MApBY;IA4Db;;IAxEC;MAAA;MAAA,KA0EF,eAAe;QAAE,OAAO,KAAKI,SAAZ;MAAwB,CA1EvC;MAAA,KA2EF,aAAaC,YAAb,EAA2B;QACzB,IAAIA,YAAY,IAAI,EAAEA,YAAY,YAAYC,0BAA1B,CAApB,EAAkE;UAChE,MAAM,IAAIC,KAAJ,CAAU,4EAAV,CAAN;QACD;;QAED,KAAKH,SAAL,GAAiBC,YAAjB;MACD;IAjFC;MAAA;MAAA,KAmFF,eAAW;QAAE,OAAO,KAAKjC,KAAZ;MAAoB,CAnF/B;MAAA,KAoFF,aAASoC,QAAT,EAAmB;QAAE,KAAKpC,KAAL,GAAaoC,QAAb;MAAwB;IApF3C;MAAA;MAAA,KAsFF,eAAiB;QAAE,OAAO,KAAP;MAAe,CAtFhC;MAAA,KAuFF,aAAeC,cAAf,EAA+B;QAAEA,cAAc,IAAI5C,OAAO,CAACC,IAAR,iDAAuD2C,cAAvD,sCAAlB;MAA8H;IAvF7J;MAAA;MAAA,KAyFF,eAAqB;QAAE,OAAO,IAAP;MAAc,CAzFnC;MAAA,KA0FF,aAAmBC,kBAAnB,EAAuC;QAAE,CAACA,kBAAD,IAAuB7C,OAAO,CAACC,IAAR,qDAA2D4C,kBAA3D,sCAAvB;MAA2I;IA1FlL;MAAA;MAAA,KA4FF,eAAsB;QAAE,OAAO,CAAP;MAAW,CA5FjC;MAAA,KA6FF,aAAoBC,mBAApB,EAAyC;QAAEA,mBAAmB,KAAK,CAAxB,IAA6B9C,OAAO,CAACC,IAAR,sDAA4D6C,mBAA5
D,sCAA7B;MAAmJ;IA7F5L;MAAA;MAAA,KA+FF,eAAiB;QAAE,OAAO,IAAP;MAAc,CA/F/B;MAAA,KAgGF,aAAeC,cAAf,EAA+B;QAAEA,cAAc,IAAI/C,OAAO,CAACC,IAAR,iDAAuD8C,cAAvD,sCAAlB;MAA8H;IAhG7J;MAAA;MAAA,OAkGF,iBAAQ;QACN;QACA;QACA,YAAwB,KAAKC,UAAL,IAAmB,EAA3C;QAAA,IAAQC,WAAR,SAAQA,WAAR;;QAEAA,WAAW,IAAIA,WAAW,CAACC,OAAZ,EAAf;QAEA,KAAKC,QAAL,GAAgB,IAAhB;MACD;IA1GC;MAAA;MAAA,OA4GF,+BAAsBlE,IAAtB,EAA4BmE,KAA5B,EAAmC;QACjC,KAAKC,IAAL,CAAU,mBAAV,kCACKD,KADL;UAEEE,OAAO,EAAErE;QAFX;MAID;IAjHC;MAAA;MAAA,OAmHF,gBAAO;QACL;QAEA,YAAwB,KAAK+D,UAAL,IAAmB,EAA3C;QAAA,IAAQC,WAAR,SAAQA,WAAR;;QAEAA,WAAW,IAAIA,WAAW,CAACC,OAAZ,EAAf;MACD;IAzHC;MAAA;MAAA;QAAA,qFA2HF;UAAA;;UAAA;YAAA;cAAA;gBAAA;kBACQF,UADR,GACqB,KAAKA,UAAL,GAAkB,KAAKnC,gBAAL,CACnC,KAAK0C,IAD8B,EAEnC,KAAKrC,UAAL,IAAmBhB,MAAM,CAACC,SAAP,CAAiBgB,SAFD,EAGnC,KAAKC,MAAL,IAAelB,MAAM,CAACC,SAAP,CAAiBkB,OAHG,EAInC,KAAKC,SAAL,IAAkBpB,MAAM,CAACC,SAAP,CAAiBoB,UAJA,EAKnC,KAAKC,kBAAL,IAA2B,+BALQ,EAMnC,KAAKC,WAAL,IAAoB,+BANe,EAOnC,KAAKC,aAAL,IAAsBhD,OAPa,CADvC;kBAAA,aAWyC8E,SAAS,EAXlD,EAWUC,aAXV,cAWUA,aAXV,EAW4BC,QAX5B;kBAaQC,aAbR,GAawB,KAAKC,QAAL,IAAiB,KAAKA,QAAL,CAAcC,mBAAd,EAbzC;kBAeEb,UAAU,CAACc,SAAX,CAAqBL,aAArB,EAAoCE,aAAa,IAAII,IAAI,CAACC,SAAL,CAAeL,aAAf,CAArD;kBACA,KAAKR,QAAL,GAAgB,KAAhB;kBAhBF;kBAAA,OAkBqCO,QAAQ,CAACO,oBAlB9C;;gBAAA;kBAkBQA,oBAlBR;kBAoBE,KAAKC,qBAAL,CAA2B,sBAA3B,EAAmDD,oBAAnD;kBApBF;kBAAA,OAwBiCvE,OAAO,CAACyE,IAAR,CAAa,CAC1CT,QAAQ,CAACU,gBADiC,EAE1CV,QAAQ,CAACW,gBAFiC,CAAb,CAxBjC;;gBAAA;kBAwBQD,gBAxBR;kBA6BE,KAAKF,qBAAL,CAA2BE,gBAAgB,CAACE,IAAjB,KAA0B,uBAA1B,GAAoD,kBAApD,GAAyE,mBAApG,EAAyHF,gBAAzH;;kBA7BF,MA+BMA,gBAAgB,CAACE,IAAjB,KAA0B,uBA/BhC;oBAAA;oBAAA;kBAAA;;kBAgCI;kBACA,IAAIF,gBAAgB,CAACG,MAAjB,KAA4BlF,eAAe,CAACmF,2BAAhB,CAA4CC,gBAA5E,EAA8F;oBAC5FC,KAAK,GAAG,aAAR;kBACD,CAFD,MAEO;oBACLA,KAAK,GAAGrF,eAAe,CAACmF,2BAAhB,CAA4CJ,gBAAgB,CAACG,MAA7D,CAAR;kBACD;;kBArCL;kBAAA;;gBAAA;kBAuCI,KAAKlB,IAAL,CAAU,OAAV;kBAvCJ;kBAAA,OAyCsCK,QAAQ,CAACiB,mBAzC/C;;gBAAA;kBAyCUA,mBAzCV;kBA2CI,KAAKT,qBAAL,CAA2B,qBAA3B,EAAkDS,mBAAlD;kBA3CJ;kBAAA,OA6CqCjF,OAAO,CAACyE,IAAR,CAAa,CAC5CT,QAAQ,CAACkB,kBADmC,EAE5ClB,QAAQ,CAACW,gBAFmC,CAAb,CA7CrC;;gBAAA;kBA6CUO,kBA7CV;kBAkDI,KAAKV,qBAAL,CACEU,kBAAkB,CAACN,IAAnB,KAA4B,uBAA5B,GAAsD,kBAAtD,GAA2E,oBAD7E,EAEEM,kBAFF;kBAKA,KAAKvB,IAAL,CAAU,YAAV;;kBAvDJ,MAyDQuB,kBAAkB,CAACN,IAAnB,KAA4B,uBAzDpC;oBAAA;oBAAA;kBAAA;;kBA0DM;kBACA,IAAIM,kBAAkB,CAACL,MAAnB,KAA8BlF,eAAe,CAACmF,2BAAhB,CAA4CK,YAA9E,EAA4F;oBAC1FH,KAAK,GAAG,SAAR;kBACD,CAFD,MAEO;oBACLA,KAAK,GAAGrF,eAAe,CAACmF,2BAAhB,CAA4CI,kBAAkB,CAACL,MAA/D,CAAR;kBACD;;kBA/DP;kBAAA;;gBAAA;kBAAA;kBAAA,OAoEuC7E,OAAO,CAACyE,IAAR,CAAa,CAC1CT,QAAQ,CAACoB,0BAAT,EAD0C,EAE1CpB,QAAQ,CAACqB,iBAFiC,CAAb,CApEvC;;gBAAA;kBAoEcC,gBApEd;kBAyEQ,KAAKd,qBAAL,CACEc,gBAAgB,CAACV,IAAjB,KAA0B,wBAA1B,GAAqD,mBAArD,GAA2E,kBAD7E,EAEEU,gBAFF;;kBAzER,MA8EYA,gBAAgB,CAACV,IAAjB,KAA0B,wBA9EtC;oBAAA;oBAAA;kBAAA;;kBAAA;;gBAAA;kBAkFQ,IAAI,CAACW,kBAAL,EAAyB;oBACvBA,kBAAkB,GAAG,IAArB;oBACA,KAAK5B,IAAL,CAAU,YAAV;oBACA,KAAKA,IAAL,CAAU,aAAV;kBACD;;kBAED,KAAKA,IAAL,CAAU,QAAV,EAAoB1E,iBAAiB,CAACqG,gBAAgB,CAACE,MAAjB,CAAwBC,IAAzB,EAA+B,EAA/B,EAAmC,KAAnC,CAArC;;gBAxFR;kBAAA;kBAAA;;gBAAA;kBA2FM,IAAIF,kBAAJ,EAAwB;oBACtB,KAAK5B,IAAL,CAAU,WAAV;oBACA,KAAKA,IAAL,CAAU,UAAV;kBACD;;gBA9FP;kBAiGI,KAAKA,IAAL,CAAU,UAAV;;kBAjGJ,KAmGQ,KAAKF,QAnGb;oBAAA;oBAAA;kBAAA;;kBAoGMuB,KAAK,GAAG,SAAR;kBApGN;kBAAA,OAsGqChB,QAAQ,CAACW,gBAtG9C;;gBAAA;kBAsGYA,gBAtGZ;kBAwGM,KAAKH,qBAAL,CAA2B,kBAA3B,EAA+CG,gBAA/C;kBAxGN;kBAAA;;gBAAA;kBAAA;kBAAA,OA0GyC3E,OAAO,CAACyE,IAAR,CAAa,CAC9CT,QAAQ,CAAC0B,oBADqC,EAE9C1B,QAAQ,CAACW,gBAFqC,CAAb,CA1GzC;;gBAAA;kBA0GYe,oBA1GZ;kBA+GM,KAAKlB,qBAAL,CACE
kB,oBAAoB,CAACd,IAArB,KAA8B,uBAA9B,GAAwD,kBAAxD,GAA6E,sBAD/E,EAEEc,oBAFF;;kBA/GN,MAoHUA,oBAAoB,CAACd,IAArB,KAA8B,uBApHxC;oBAAA;oBAAA;kBAAA;;kBAqHce,iBArHd,GAqHkChG,eAAe,CAACiG,iBAAhB,CAAkCF,oBAAoB,CAACF,MAArB,CAA4BI,iBAA9D,CArHlC;;kBAuHQ,IAAID,iBAAiB,KAAKhG,eAAe,CAACiG,iBAAhB,CAAkCC,OAA5D,EAAqE;oBACnE;oBACMC,IAF6D,GAEtDJ,oBAAoB,CAACF,MAArB,CAA4BO,KAA5B,CAAkC,CAAlC,CAFsD;oBAInE,KAAKpC,IAAL,CACE,QADF,EAEE1E,iBAAiB,CACfoB,iBAAiB,KAAK,KAAtB,GACEyF,IAAI,CAACE,GADP,GAEE3F,iBAAiB,KAAK,SAAtB,GACAyF,IAAI,CAACG,OADL,GAEA5F,iBAAiB,KAAK,WAAtB,GACAyF,IAAI,CAACI,SADL,GAGAJ,IAAI,CAACK,OARQ,EASfL,IAAI,CAACM,UATU,EAUf,IAVe,CAFnB;kBAeD,CAnBD,MAmBO,IAAIT,iBAAiB,KAAKhG,eAAe,CAACiG,iBAAhB,CAAkCS,OAA5D,EAAqE;oBAC1E;oBACA,IAAIV,iBAAiB,KAAKhG,eAAe,CAACiG,iBAAhB,CAAkCU,qBAA5D,EAAmF;sBACjFtB,KAAK,GAAG,WAAR;oBACD,CAFD,MAEO;sBACLA,KAAK,GAAGU,oBAAoB,CAACF,MAArB,CAA4BI,iBAApC;oBACD;kBACF;;kBAjJT;kBAAA,OAmJuC5B,QAAQ,CAACW,gBAnJhD;;gBAAA;kBAmJcA,iBAnJd;kBAqJQ,KAAKH,qBAAL,CAA2B,kBAA3B,EAA+CG,iBAA/C;;gBArJR;kBA0JEK,KAAK,IAAI,KAAKrB,IAAL,CAAU,OAAV,EAAmB;oBAAEqB,KAAK,EAALA;kBAAF,CAAnB,CAAT;kBACA,KAAKrB,IAAL,CAAU,KAAV;;gBA3JF;gBAAA;kBAAA;cAAA;YAAA;UAAA;QAAA,CA3HE;;QAAA;UAAA;QAAA;;QAAA;MAAA;IAAA;IAAA;EAAA,EAW4B4C,yBAX5B;;EA0RJ,OAAO;IACLxD,iBAAiB,EAAjBA,0BADK;IAELnC,iBAAiB,EAAjBA;EAFK,CAAP;AAID,C;;;;AAED,SAASkD,SAAT,GAAqB;EACnB,IAAM0C,MAAM,GAAG;IACbC,wBAAwB,EAAE,IAAIC,uBAAJ,EADb;IAEbC,qBAAqB,EAAE,IAAID,uBAAJ,EAFV;IAGbE,qBAAqB,EAAE,IAAIF,uBAAJ,EAHV;IAIbG,uBAAuB,EAAE,IAAIH,uBAAJ,EAJZ;IAKbI,yBAAyB,EAAE,IAAIJ,uBAAJ,EALd;IAMbK,yBAAyB,EAAE,IAAIL,uBAAJ,EANd;IAObM,sBAAsB,EAAE,IAAIN,uBAAJ,EAPX;IAQbO,qBAAqB,EAAE,IAAIP,uBAAJ,EARV;IASbQ,uBAAuB,EAAE,IAAIR,uBAAJ,EATZ;IAUbS,wBAAwB,EAAE,IAAIT,uBAAJ;EAVb,CAAf;EAaA,OAAO;IACLzB,mBAAmB,EAAEuB,MAAM,CAACC,wBAAP,CAAgCW,QAAhC,EADhB;IAEL1C,gBAAgB,EAAE8B,MAAM,CAACG,qBAAP,CAA6BS,QAA7B,EAFb;IAGLzC,gBAAgB,EAAE6B,MAAM,CAACI,qBAAP,CAA6BQ,QAA7B,EAHb;IAILlC,kBAAkB,EAAEsB,MAAM,CAACK,uBAAP,CAA+BO,QAA/B,EAJf;IAKL7C,oBAAoB,EAAEiC,MAAM,CAACM,yBAAP,CAAiCM,QAAjC,EALjB;IAML1B,oBAAoB,EAAEc,MAAM,CAACO,yBAAP,CAAiCK,QAAjC,EANjB;IAOL/B,iBAAiB,EAAEmB,MAAM,CAACQ,sBAAP,CAA8BI,QAA9B,EAPd;IAQLhC,0BAA0B,EAAE;MAAA,OAAMoB,MAAM,CAACS,qBAAP,CAA6BG,QAA7B,EAAN;IAAA,CARvB;IASLC,kBAAkB,EAAEb,MAAM,CAACU,uBAAP,CAA+BE,QAA/B,EATf;IAULE,mBAAmB,EAAEd,MAAM,CAACW,wBAAP,CAAgCC,QAAhC,EAVhB;IAWLrD,aAAa,EAAE,uBAAAL,KAAK,EAAI;MACtB,IAAc6D,IAAd,GAAuB7D,KAAvB,CAAQkB,IAAR;MACA,IAAM4C,cAAc,GAAGhB,MAAM,CAACe,IAAD,CAA7B;;MAEA,IAAIC,cAAJ,EAAoB;QAClBA,cAAc,CAACzD,aAAf,CAA6B0D,IAA7B,CAAkC,IAAlC,EAAwC/D,KAAxC;MACD,CAFD,MAEO;QACLpD,OAAO,CAACC,IAAR,8BAAoCgH,IAApC;MACD;IACF;EApBI,CAAP;AAsBD"}
package/lib/BingSpeech/SpeechToText.js
@@ -1,14 +0,0 @@
- "use strict";
-
- var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
- Object.defineProperty(exports, "__esModule", {
- value: true
- });
- exports.default = void 0;
-
- var _createSpeechRecognitionPonyfill = _interopRequireDefault(require("./SpeechToText/createSpeechRecognitionPonyfill"));
-
- var _default = _createSpeechRecognitionPonyfill.default;
- exports.default = _default;
- //# sourceMappingURL=SpeechToText.js.map
package/lib/BingSpeech/SpeechToText.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"SpeechToText.js","names":["createSpeechRecognitionPonyfill"],"sources":["../../src/BingSpeech/SpeechToText.js"],"sourcesContent":["import createSpeechRecognitionPonyfill from './SpeechToText/createSpeechRecognitionPonyfill';\n\nexport default createSpeechRecognitionPonyfill\n"],"mappings":";;;;;;;;;AAAA;;eAEeA,wC"}