@superinterface/react 3.14.3 → 3.14.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -10947,12 +10947,20 @@ var handleThreadEvent = function(_ref) {
  });
  }
  };
- var handleOpenaiEvent = function(_ref2) {
- var event = _ref2.event, openaiEventsDataChannel = _ref2.openaiEventsDataChannel;
+ var createAnalyser = function(_ref2) {
+ var mediaStream = _ref2.mediaStream;
+ var audioCtx = new AudioContext();
+ var source = audioCtx.createMediaStreamSource(mediaStream);
+ var analyser = audioCtx.createAnalyser();
+ source.connect(analyser);
+ return analyser;
+ };
+ var handleOpenaiEvent = function(_ref3) {
+ var event = _ref3.event, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
  openaiEventsDataChannel.send(JSON.stringify(event.data));
  };
- var handleEvent = function(_ref3) {
- var event = _ref3.event, superinterfaceContext = _ref3.superinterfaceContext, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
+ var handleEvent = function(_ref4) {
+ var event = _ref4.event, superinterfaceContext = _ref4.superinterfaceContext, openaiEventsDataChannel = _ref4.openaiEventsDataChannel;
  if (event.type === "openaiEvent") {
  return handleOpenaiEvent({
  event: event,
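
For context on the hunk above: the new createAnalyser helper wraps the standard Web Audio chain (an AudioContext feeding a MediaStreamAudioSourceNode into an AnalyserNode), so the same setup now serves both the local and the remote stream. A minimal sketch of how such an AnalyserNode is typically sampled for a level meter; the loop below is illustrative, not part of the package, and someMediaStream is a placeholder:

// Illustrative consumer of an AnalyserNode built by createAnalyser (assumed usage, not from the diff).
var analyser = createAnalyser({ mediaStream: someMediaStream }); // someMediaStream is a placeholder
var samples = new Uint8Array(analyser.fftSize);
function readLevel() {
  analyser.getByteTimeDomainData(samples); // waveform samples, 0-255 with 128 as silence
  var peak = 0;
  for (var i = 0; i < samples.length; i++) {
    peak = Math.max(peak, Math.abs(samples[i] - 128) / 128);
  }
  // `peak` (0..1) would drive a volume indicator for the user or the assistant.
  requestAnimationFrame(readLevel);
}
readLevel();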
@@ -10972,20 +10980,14 @@ var useWebrtcAudioRuntime = function() {
  var initRealtimeSession = function initRealtimeSession() {
  return _initRealtimeSession.apply(this, arguments);
  };
- var buildAnalyzers = function buildAnalyzers(localStream, audioEl_0) {
+ var buildAnalyzers = function buildAnalyzers(_ref5) {
+ var localStream = _ref5.localStream, remoteStream = _ref5.remoteStream;
  try {
- var audioCtx1 = new AudioContext();
- var micSource = audioCtx1.createMediaStreamSource(localStream);
- var micAnalyser = audioCtx1.createAnalyser();
- micSource.connect(micAnalyser);
- userAnalyserRef.current = micAnalyser;
- audioEl_0.addEventListener("canplay", function() {
- var audioCtx2 = new AudioContext();
- var remoteSource = audioCtx2.createMediaElementSource(audioEl_0);
- var remoteAnalyser = audioCtx2.createAnalyser();
- remoteSource.connect(remoteAnalyser);
- remoteSource.connect(audioCtx2.destination);
- assistantAnalyserRef.current = remoteAnalyser;
+ userAnalyserRef.current = createAnalyser({
+ mediaStream: localStream
+ });
+ assistantAnalyserRef.current = createAnalyser({
+ mediaStream: remoteStream
  });
  } catch (err_0) {
  console.warn("Could not build analyzers:", err_0);
@@ -11068,7 +11070,7 @@ var useWebrtcAudioRuntime = function() {
  };
  openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
  openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
- var _ref6 = _asyncToGenerator12(function(e) {
+ var _ref8 = _asyncToGenerator12(function(e) {
  var parsedData, searchParams, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
  return _ts_generator(this, function(_state) {
  switch(_state.label){
@@ -11168,7 +11170,7 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function(_x) {
- return _ref6.apply(this, arguments);
+ return _ref8.apply(this, arguments);
  };
  }());
  return [
@@ -11231,7 +11233,10 @@ var useWebrtcAudioRuntime = function() {
  ];
  case 6:
  _state.sent();
- buildAnalyzers(ms, audioEl);
+ buildAnalyzers({
+ localStream: ms,
+ remoteStream: remoteStreamRef.current
+ });
  setUserIsPending(false);
  setAssistantIsPending(false);
  setAssistantIsReady(true);
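
At this call site the hook now passes both streams to buildAnalyzers instead of the audio element, so the assistant analyser can be built as soon as the remote stream exists rather than after the element fires "canplay". A sketch of the surrounding session setup, assuming ms is the microphone stream from getUserMedia (the variable name matches the diff; the getUserMedia and addTrack calls are illustrative, not the package's exact code):

// Illustrative session-setup flow around the updated call (assumed):
navigator.mediaDevices.getUserMedia({ audio: true }).then(function (ms) {
  ms.getTracks().forEach(function (track) {
    peerConn.addTrack(track, ms); // send the microphone to the realtime peer
  });
  // ...offer/answer exchange happens here...
  buildAnalyzers({
    localStream: ms,                       // microphone MediaStream
    remoteStream: remoteStreamRef.current  // set by the "track" handler shown earlier
  });
});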
@@ -11264,7 +11269,7 @@ var useWebrtcAudioRuntime = function() {
  return _initRealtimeSession.apply(this, arguments);
  }
  var start = /* @__PURE__ */ function() {
- var _ref4 = _asyncToGenerator12(function() {
+ var _ref6 = _asyncToGenerator12(function() {
  return _ts_generator(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -11293,11 +11298,11 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function start2() {
- return _ref4.apply(this, arguments);
+ return _ref6.apply(this, arguments);
  };
  }();
  var pause = /* @__PURE__ */ function() {
- var _ref5 = _asyncToGenerator12(function() {
+ var _ref7 = _asyncToGenerator12(function() {
  return _ts_generator(this, function(_state) {
  if (!sessionStartedRef.current) return [
  2
@@ -11315,7 +11320,7 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function pause2() {
- return _ref5.apply(this, arguments);
+ return _ref7.apply(this, arguments);
  };
  }();
  return (0, import_react68.useMemo)(function() {