@superinterface/react 3.14.1 → 3.14.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -11068,7 +11068,7 @@ var useWebrtcAudioRuntime = function() {
  };
  openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
  openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
- var _ref4 = _asyncToGenerator12(function(e) {
+ var _ref6 = _asyncToGenerator12(function(e) {
  var parsedData, searchParams, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
  return _ts_generator(this, function(_state) {
  switch(_state.label){
@@ -11077,6 +11077,9 @@ var useWebrtcAudioRuntime = function() {
  if (!sentTypes.includes(parsedData.type)) return [
  2
  ];
+ if (parsedData.type === "response.done" && parsedData.response.status !== "completed") return [
+ 2
+ ];
  searchParams = new URLSearchParams(variableParams({
  variables: superinterfaceContext.variables,
  superinterfaceContext: superinterfaceContext
@@ -11165,7 +11168,7 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function(_x) {
- return _ref4.apply(this, arguments);
+ return _ref6.apply(this, arguments);
  };
  }());
  return [
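The only behavioral change in the three hunks above is the new guard; the `_ref4` → `_ref6` rename simply reflects the bundler renumbering its generated async helpers after the new shared `start`/`pause` functions added further down claimed `_ref4` and `_ref5`. A rough source-level sketch of the guard, assuming the handler JSON-parses the data-channel payload into `parsedData` (the handler name below is hypothetical; `sentTypes` comes from the surrounding hook):

// Context stub: declared elsewhere in the hook (visible in the diff context).
declare const sentTypes: string[];

// Hypothetical name for the "oai-events" data-channel message handler.
const handleOaiEvent = async (e: MessageEvent) => {
  const parsedData = JSON.parse(e.data); // assumption: the payload is a JSON string
  if (!sentTypes.includes(parsedData.type)) return;
  // New in 3.14.3: drop "response.done" events whose response did not finish
  // with status "completed" (e.g. cancelled or failed responses), so they are
  // not forwarded to the events endpoint.
  if (parsedData.type === "response.done" && parsedData.response.status !== "completed") return;
  // ...build searchParams and forward the event as before...
};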
@@ -11260,6 +11263,61 @@ var useWebrtcAudioRuntime = function() {
  });
  return _initRealtimeSession.apply(this, arguments);
  }
+ var start = /* @__PURE__ */ function() {
+ var _ref4 = _asyncToGenerator12(function() {
+ return _ts_generator(this, function(_state) {
+ switch(_state.label){
+ case 0:
+ return [
+ 4,
+ startSessionIfNeeded()
+ ];
+ case 1:
+ _state.sent();
+ setAssistantPaused(false);
+ setAssistantPlaying(true);
+ if (assistantAudioElRef.current) {
+ assistantAudioElRef.current.play().catch(function(err_1) {
+ console.error("Assistant play error:", err_1);
+ });
+ }
+ if (localStreamRef.current) {
+ localStreamRef.current.getAudioTracks().forEach(function(t_0) {
+ return t_0.enabled = true;
+ });
+ }
+ return [
+ 2
+ ];
+ }
+ });
+ });
+ return function start2() {
+ return _ref4.apply(this, arguments);
+ };
+ }();
+ var pause = /* @__PURE__ */ function() {
+ var _ref5 = _asyncToGenerator12(function() {
+ return _ts_generator(this, function(_state) {
+ if (!sessionStartedRef.current) return [
+ 2
+ ];
+ setAssistantPaused(true);
+ setAssistantPlaying(false);
+ if (localStreamRef.current) {
+ localStreamRef.current.getAudioTracks().forEach(function(t_1) {
+ return t_1.enabled = false;
+ });
+ }
+ return [
+ 2
+ ];
+ });
+ });
+ return function pause2() {
+ return _ref5.apply(this, arguments);
+ };
+ }();
  return (0, import_react68.useMemo)(function() {
  return {
  webrtcAudioRuntime: {
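The hunk above introduces two hook-level helpers (compiled as the `_ref4`/`_ref5` IIFEs) that the user and assistant controls now share. In async/await form, using the refs and setters visible in the diff, this is roughly:

// Context stubs: these refs, setters, and helpers exist in the surrounding hook.
declare const localStreamRef: { current: MediaStream | null };
declare const assistantAudioElRef: { current: HTMLAudioElement | null };
declare const sessionStartedRef: { current: boolean };
declare function startSessionIfNeeded(): Promise<void>;
declare function setAssistantPaused(value: boolean): void;
declare function setAssistantPlaying(value: boolean): void;

// Shared "start": open the realtime session if needed, resume assistant
// playback, and re-enable the local microphone tracks.
const start = async () => {
  await startSessionIfNeeded();
  setAssistantPaused(false);
  setAssistantPlaying(true);
  if (assistantAudioElRef.current) {
    assistantAudioElRef.current.play().catch((err) => {
      console.error("Assistant play error:", err);
    });
  }
  if (localStreamRef.current) {
    localStreamRef.current.getAudioTracks().forEach((t) => {
      t.enabled = true;
    });
  }
};

// Shared "pause": flip the paused/playing flags and mute the microphone
// tracks, but keep the session and tracks alive so "start" can resume them.
const pause = async () => {
  if (!sessionStartedRef.current) return;
  setAssistantPaused(true);
  setAssistantPlaying(false);
  if (localStreamRef.current) {
    localStreamRef.current.getAudioTracks().forEach((t) => {
      t.enabled = false;
    });
  }
};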
@@ -11267,174 +11325,27 @@ var useWebrtcAudioRuntime = function() {
  start: function() {
  var _start = _asyncToGenerator12(function() {
  return _ts_generator(this, function(_state) {
- switch(_state.label){
- case 0:
- return [
- 4,
- startSessionIfNeeded()
- ];
- case 1:
- _state.sent();
- setRecorderStatus("recording");
- if (localStreamRef.current) {
- localStreamRef.current.getAudioTracks().forEach(function(t_0) {
- return t_0.enabled = true;
- });
- }
- return [
- 2
- ];
- }
- });
- });
- function start() {
- return _start.apply(this, arguments);
- }
- return start;
- }(),
- pause: function() {
- var _pause = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setRecorderStatus("paused");
- if (localStreamRef.current) {
- localStreamRef.current.getAudioTracks().forEach(function(t_1) {
- return t_1.enabled = false;
- });
- }
  return [
  2
  ];
  });
  });
- function pause() {
- return _pause.apply(this, arguments);
- }
- return pause;
- }(),
- resume: function() {
- var _resume = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setRecorderStatus("recording");
- if (localStreamRef.current) {
- localStreamRef.current.getAudioTracks().forEach(function(t_2) {
- return t_2.enabled = true;
- });
- }
- return [
- 2
- ];
- });
- });
- function resume() {
- return _resume.apply(this, arguments);
- }
- return resume;
- }(),
- stop: function() {
- var _stop = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setRecorderStatus("stopped");
- if (localStreamRef.current) {
- localStreamRef.current.getTracks().forEach(function(track) {
- return track.stop();
- });
- }
- return [
- 2
- ];
- });
- });
- function stop() {
- return _stop.apply(this, arguments);
+ function start2() {
+ return _start.apply(this, arguments);
  }
- return stop;
+ return start2;
  }(),
+ pause: pause,
+ resume: start,
+ stop: pause,
  isPending: userIsPending,
  visualizationAnalyser: userAnalyserRef.current,
  rawStatus: recorderStatus
  },
  assistant: {
- play: function() {
- var _play = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- switch(_state.label){
- case 0:
- return [
- 4,
- startSessionIfNeeded()
- ];
- case 1:
- _state.sent();
- setAssistantPaused(false);
- setAssistantPlaying(true);
- if (assistantAudioElRef.current) {
- assistantAudioElRef.current.play().catch(function(err_1) {
- console.error("Assistant play error:", err_1);
- });
- }
- return [
- 2
- ];
- }
- });
- });
- function play() {
- return _play.apply(this, arguments);
- }
- return play;
- }(),
- pause: function() {
- var _pause2 = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setAssistantPaused(true);
- setAssistantPlaying(false);
- if (assistantAudioElRef.current) {
- assistantAudioElRef.current.pause();
- }
- return [
- 2
- ];
- });
- });
- function pause() {
- return _pause2.apply(this, arguments);
- }
- return pause;
- }(),
- stop: function() {
- var _stop2 = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setAssistantPaused(false);
- setAssistantPlaying(false);
- if (assistantAudioElRef.current) {
- assistantAudioElRef.current.pause();
- assistantAudioElRef.current.currentTime = 0;
- }
- return [
- 2
- ];
- });
- });
- function stop() {
- return _stop2.apply(this, arguments);
- }
- return stop;
- }(),
+ play: start,
+ pause: pause,
+ stop: pause,
  visualizationAnalyser: assistantAnalyserRef.current,
  playing: assistantPlaying,
  paused: assistantPaused,
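Net effect of the last hunk: the memoized user-side `start` becomes an async no-op, while `pause`, `resume`, `stop` and the assistant's `play`, `pause`, `stop` all point at the shared helpers, so stopping no longer calls `track.stop()`, resets `currentTime`, or touches `setRecorderStatus`; it only mutes the microphone tracks and flips the paused flags. A sketch of the resulting wiring (the `user` nesting is an assumption; that line falls between the hunks and is not shown):

// Context stubs: the shared helpers from the previous hunk and the values
// visible in the diff context above.
declare const start: () => Promise<void>;
declare const pause: () => Promise<void>;
declare const userIsPending: boolean;
declare const recorderStatus: string;
declare const userAnalyserRef: { current: AnalyserNode | null };
declare const assistantAnalyserRef: { current: AnalyserNode | null };
declare const assistantPlaying: boolean;
declare const assistantPaused: boolean;

const webrtcAudioRuntime = {
  user: {                    // assumed nesting; this line is not shown in the hunks
    start: async () => {},   // reduced to an async no-op
    pause,
    resume: start,
    stop: pause,             // no longer stops the tracks outright
    isPending: userIsPending,
    visualizationAnalyser: userAnalyserRef.current,
    rawStatus: recorderStatus,
  },
  assistant: {
    play: start,
    pause,
    stop: pause,             // no longer resets currentTime to 0
    visualizationAnalyser: assistantAnalyserRef.current,
    playing: assistantPlaying,
    paused: assistantPaused,
    // ...remaining fields unchanged
  },
};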