@superinterface/react 3.14.3 → 3.14.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -10776,12 +10776,20 @@ var handleThreadEvent = function(_ref) {
  });
  }
  };
- var handleOpenaiEvent = function(_ref2) {
- var event = _ref2.event, openaiEventsDataChannel = _ref2.openaiEventsDataChannel;
+ var createAnalyser = function(_ref2) {
+ var mediaStream = _ref2.mediaStream;
+ var audioCtx = new AudioContext();
+ var source = audioCtx.createMediaStreamSource(mediaStream);
+ var analyser = audioCtx.createAnalyser();
+ source.connect(analyser);
+ return analyser;
+ };
+ var handleOpenaiEvent = function(_ref3) {
+ var event = _ref3.event, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
  openaiEventsDataChannel.send(JSON.stringify(event.data));
  };
- var handleEvent = function(_ref3) {
- var event = _ref3.event, superinterfaceContext = _ref3.superinterfaceContext, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
+ var handleEvent = function(_ref4) {
+ var event = _ref4.event, superinterfaceContext = _ref4.superinterfaceContext, openaiEventsDataChannel = _ref4.openaiEventsDataChannel;
  if (event.type === "openaiEvent") {
  return handleOpenaiEvent({
  event: event,
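
Note: the hunk above factors analyser construction into a shared createAnalyser helper that takes a MediaStream. A rough un-minified restatement in TypeScript, for readability only (the dist output above is authoritative):

```ts
// Readable equivalent of the new createAnalyser helper in the hunk above.
const createAnalyser = ({ mediaStream }: { mediaStream: MediaStream }): AnalyserNode => {
  // Each call creates its own AudioContext, mirroring the dist code.
  const audioCtx = new AudioContext();
  // Wrap the stream (local mic or remote WebRTC audio) as a Web Audio source node.
  const source = audioCtx.createMediaStreamSource(mediaStream);
  // AnalyserNode exposes time/frequency data without changing playback.
  const analyser = audioCtx.createAnalyser();
  source.connect(analyser);
  return analyser;
};
```
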
@@ -10801,20 +10809,14 @@ var useWebrtcAudioRuntime = function() {
  var initRealtimeSession = function initRealtimeSession() {
  return _initRealtimeSession.apply(this, arguments);
  };
- var buildAnalyzers = function buildAnalyzers(localStream, audioEl_0) {
+ var buildAnalyzers = function buildAnalyzers(_ref5) {
+ var localStream = _ref5.localStream, remoteStream = _ref5.remoteStream;
  try {
- var audioCtx1 = new AudioContext();
- var micSource = audioCtx1.createMediaStreamSource(localStream);
- var micAnalyser = audioCtx1.createAnalyser();
- micSource.connect(micAnalyser);
- userAnalyserRef.current = micAnalyser;
- audioEl_0.addEventListener("canplay", function() {
- var audioCtx2 = new AudioContext();
- var remoteSource = audioCtx2.createMediaElementSource(audioEl_0);
- var remoteAnalyser = audioCtx2.createAnalyser();
- remoteSource.connect(remoteAnalyser);
- remoteSource.connect(audioCtx2.destination);
- assistantAnalyserRef.current = remoteAnalyser;
+ userAnalyserRef.current = createAnalyser({
+ mediaStream: localStream
+ });
+ assistantAnalyserRef.current = createAnalyser({
+ mediaStream: remoteStream
  });
  } catch (err_0) {
  console.warn("Could not build analyzers:", err_0);
@@ -10897,7 +10899,7 @@ var useWebrtcAudioRuntime = function() {
  };
  openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
  openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
- var _ref6 = _asyncToGenerator12(function(e) {
+ var _ref8 = _asyncToGenerator12(function(e) {
  var parsedData, searchParams, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
  return _ts_generator(this, function(_state) {
  switch(_state.label){
@@ -10997,7 +10999,7 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function(_x) {
- return _ref6.apply(this, arguments);
+ return _ref8.apply(this, arguments);
  };
  }());
  return [
@@ -11060,7 +11062,10 @@ var useWebrtcAudioRuntime = function() {
  ];
  case 6:
  _state.sent();
- buildAnalyzers(ms, audioEl);
+ buildAnalyzers({
+ localStream: ms,
+ remoteStream: remoteStreamRef.current
+ });
  setUserIsPending(false);
  setAssistantIsPending(false);
  setAssistantIsReady(true);
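
Note: the call site now passes remoteStreamRef.current instead of the audio element. How that ref is populated is outside this diff; the following is a hypothetical sketch of the standard WebRTC pattern (attachRemoteStream and its handler are illustrative names, not package APIs):

```ts
// Hypothetical: in the package this would presumably be a React ref;
// a plain object with a "current" field keeps the sketch self-contained.
const remoteStreamRef = { current: null as MediaStream | null };

const attachRemoteStream = (peerConn: RTCPeerConnection): void => {
  peerConn.addEventListener("track", (event: RTCTrackEvent) => {
    // event.streams[0] is the remote MediaStream associated with the incoming track.
    remoteStreamRef.current = event.streams[0] ?? null;
  });
};
```
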
@@ -11093,7 +11098,7 @@ var useWebrtcAudioRuntime = function() {
  return _initRealtimeSession.apply(this, arguments);
  }
  var start = /* @__PURE__ */ function() {
- var _ref4 = _asyncToGenerator12(function() {
+ var _ref6 = _asyncToGenerator12(function() {
  return _ts_generator(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -11122,11 +11127,11 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function start2() {
- return _ref4.apply(this, arguments);
+ return _ref6.apply(this, arguments);
  };
  }();
  var pause = /* @__PURE__ */ function() {
- var _ref5 = _asyncToGenerator12(function() {
+ var _ref7 = _asyncToGenerator12(function() {
  return _ts_generator(this, function(_state) {
  if (!sessionStartedRef.current) return [
  2
@@ -11144,7 +11149,7 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function pause2() {
- return _ref5.apply(this, arguments);
+ return _ref7.apply(this, arguments);
  };
  }();
  return useMemo20(function() {