@superinterface/react 3.13.1 → 3.14.0

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -10757,6 +10757,43 @@ function _asyncToGenerator12(n) {
  });
  };
  }
+ var sentTypes = [
+ "session.created",
+ "response.done",
+ "conversation.item.input_audio_transcription.completed"
+ ];
+ var handleThreadEvent = function(_ref) {
+ var event = _ref.event, superinterfaceContext = _ref.superinterfaceContext;
+ if (event.data.event === "thread.created") {
+ threadCreated({
+ value: event.data,
+ superinterfaceContext: superinterfaceContext
+ });
+ } else if (event.data.event === "thread.run.requires_action") {
+ threadRunRequiresAction({
+ value: event.data,
+ superinterfaceContext: superinterfaceContext
+ });
+ }
+ };
+ var handleOpenaiEvent = function(_ref2) {
+ var event = _ref2.event, openaiEventsDataChannel = _ref2.openaiEventsDataChannel;
+ openaiEventsDataChannel.send(JSON.stringify(event.data));
+ };
+ var handleEvent = function(_ref3) {
+ var event = _ref3.event, superinterfaceContext = _ref3.superinterfaceContext, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
+ if (event.type === "openaiEvent") {
+ return handleOpenaiEvent({
+ event: event,
+ openaiEventsDataChannel: openaiEventsDataChannel
+ });
+ } else if (event.type === "threadEvent") {
+ return handleThreadEvent({
+ event: event,
+ superinterfaceContext: superinterfaceContext
+ });
+ }
+ };
  var useWebrtcAudioRuntime = function() {
  var startSessionIfNeeded = function startSessionIfNeeded() {
  return _startSessionIfNeeded.apply(this, arguments);
@@ -10834,7 +10871,7 @@ var useWebrtcAudioRuntime = function() {
  }
  function _initRealtimeSession() {
  _initRealtimeSession = _asyncToGenerator12(function() {
- var iceServers, peerConn, audioEl, ms, offer, searchParams, sdpResponse, reader, decoder, answerSdp, _ref, value, done, answer, err;
+ var searchParams, peerConn, audioEl, openaiEventsDataChannel, ms, offer, sdpResponse, answerSdp, answer, err1;
  return _ts_generator(this, function(_state) {
  switch(_state.label){
  case 0:
@@ -10845,30 +10882,11 @@ var useWebrtcAudioRuntime = function() {
  8
  ]);
  setUserIsPending(true);
- iceServers = [
- {
- url: "stun:global.stun.twilio.com:3478",
- urls: "stun:global.stun.twilio.com:3478"
- },
- {
- urls: "stun:stun.l.google.com:19302"
- },
- {
- urls: "stun:stun1.l.google.com:19302"
- },
- {
- urls: "stun:stun2.l.google.com:19302"
- },
- {
- urls: "stun:stun4.l.google.com:19302"
- },
- {
- urls: "stun:stun.stunprotocol.org:3478"
- }
- ];
- peerConn = new RTCPeerConnection({
- iceServers: iceServers
- });
+ searchParams = new URLSearchParams(variableParams({
+ variables: superinterfaceContext.variables,
+ superinterfaceContext: superinterfaceContext
+ }));
+ peerConn = new RTCPeerConnection();
  pcRef.current = peerConn;
  audioEl = document.createElement("audio");
  audioEl.autoplay = true;
@@ -10881,28 +10899,104 @@ var useWebrtcAudioRuntime = function() {
  setAssistantPaused(false);
  setAssistantAudioPlayed(true);
  };
- peerConn.createDataChannel("unused-negotiation-only");
- peerConn.addEventListener("datachannel", function(event) {
- var channel = event.channel;
- if (channel.label === "thread-events") {
- channel.onmessage = function(_ref) {
- var data2 = _ref.data;
- console.log("Data channel message:", data2);
- var parsedData = JSON.parse(data2);
- if (parsedData.event === "thread.created") {
- threadCreated({
- value: parsedData,
- superinterfaceContext: superinterfaceContext
- });
- } else if (parsedData.event === "thread.run.requires_action") {
- threadRunRequiresAction({
- value: parsedData,
- superinterfaceContext: superinterfaceContext
- });
+ openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
+ openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
+ var _ref4 = _asyncToGenerator12(function(e) {
+ var parsedData, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
+ return _ts_generator(this, function(_state) {
+ switch(_state.label){
+ case 0:
+ parsedData = JSON.parse(e.data);
+ if (!sentTypes.includes(parsedData.type)) return [
+ 2
+ ];
+ return [
+ 4,
+ fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc/events?").concat(searchParams), {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json"
+ },
+ body: e.data
+ })
+ ];
+ case 1:
+ eventsResponse = _state.sent();
+ if (!eventsResponse.body) {
+ throw new Error("No body in events response");
+ }
+ reader = eventsResponse.body.getReader();
+ decoder = new TextDecoder("utf-8");
+ return [
+ 4,
+ reader.read()
+ ];
+ case 2:
+ _ref = _state.sent(), value = _ref.value, done = _ref.done;
+ buffer = "";
+ _state.label = 3;
+ case 3:
+ if (!!done) return [
+ 3,
+ 5
+ ];
+ buffer += decoder.decode(value, {
+ stream: true
+ });
+ lines = buffer.split("\n");
+ buffer = lines.pop() || "";
+ _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined;
+ try {
+ for(_iterator = lines[Symbol.iterator](); !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){
+ line = _step.value;
+ if (line.trim()) {
+ try {
+ event = JSON.parse(line);
+ handleEvent({
+ event: event,
+ superinterfaceContext: superinterfaceContext,
+ openaiEventsDataChannel: openaiEventsDataChannel
+ });
+ } catch (error) {
+ console.error("JSON parse error:", error, "Line:", line);
+ }
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally{
+ try {
+ if (!_iteratorNormalCompletion && _iterator.return != null) {
+ _iterator.return();
+ }
+ } finally{
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+ return [
+ 4,
+ reader.read()
+ ];
+ case 4:
+ ref = _state.sent(), value = ref.value, done = ref.done, ref;
+ return [
+ 3,
+ 3
+ ];
+ case 5:
+ return [
+ 2
+ ];
  }
- };
- }
- });
+ });
+ });
+ return function(_x) {
+ return _ref4.apply(this, arguments);
+ };
+ }());
  return [
  4,
  navigator.mediaDevices.getUserMedia({
@@ -10928,10 +11022,6 @@ var useWebrtcAudioRuntime = function() {
  ];
  case 3:
  _state.sent();
- searchParams = new URLSearchParams(variableParams({
- variables: superinterfaceContext.variables,
- superinterfaceContext: superinterfaceContext
- }));
  return [
  4,
  fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc?").concat(searchParams), {
@@ -10947,25 +11037,12 @@ var useWebrtcAudioRuntime = function() {
  if (!sdpResponse.ok) {
  throw new Error("Server responded with status ".concat(sdpResponse.status));
  }
- if (!sdpResponse.body) {
- throw new Error("ReadableStream not supported in this browser.");
- }
- reader = sdpResponse.body.getReader();
- decoder = new TextDecoder("utf-8");
- answerSdp = "";
  return [
  4,
- reader.read()
+ sdpResponse.text()
  ];
  case 5:
- _ref = _state.sent(), value = _ref.value, done = _ref.done;
- if (done) {
- throw new Error("Stream closed before SDP was received");
- }
- answerSdp += decoder.decode(value, {
- stream: true
- });
- console.log("Received SDP Answer:", answerSdp);
+ answerSdp = _state.sent();
  answer = {
  type: "answer",
  sdp: answerSdp
@@ -10986,8 +11063,8 @@ var useWebrtcAudioRuntime = function() {
  8
  ];
  case 7:
- err = _state.sent();
- console.error("Error initRealtimeSession:", err);
+ err1 = _state.sent();
+ console.error("Error initRealtimeSession:", err1);
  setUserIsPending(false);
  setRecorderStatus("stopped");
  setAssistantPlaying(false);
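
In readable form, the new code replaces the server-created "thread-events" data channel with a client-created "oai-events" channel: selected realtime event types (sentTypes) are relayed to the backend's /audio-runtimes/webrtc/events endpoint, and the newline-delimited JSON events streamed back are dispatched either onto the data channel ("openaiEvent") or to the thread handlers ("threadEvent"). The sketch below is a rough de-transpilation of that flow, not the package's actual source; it assumes the surrounding initRealtimeSession scope (peerConn, searchParams, superinterfaceContext) and the helpers named in the diff (threadCreated, threadRunRequiresAction).

const sentTypes = [
  "session.created",
  "response.done",
  "conversation.item.input_audio_transcription.completed",
];

const openaiEventsDataChannel = peerConn.createDataChannel("oai-events");

openaiEventsDataChannel.addEventListener("message", async (e) => {
  // Only relay the realtime event types the backend listens for.
  const parsedData = JSON.parse(e.data);
  if (!sentTypes.includes(parsedData.type)) return;

  const eventsResponse = await fetch(
    `${superinterfaceContext.baseUrl}/audio-runtimes/webrtc/events?${searchParams}`,
    { method: "POST", headers: { "Content-Type": "application/json" }, body: e.data },
  );
  if (!eventsResponse.body) throw new Error("No body in events response");

  // The response body is a stream of newline-delimited JSON events.
  const reader = eventsResponse.body.getReader();
  const decoder = new TextDecoder("utf-8");
  let buffer = "";
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    buffer += decoder.decode(value, { stream: true });
    const lines = buffer.split("\n");
    buffer = lines.pop() || ""; // keep any partial trailing line for the next chunk
    for (const line of lines) {
      if (!line.trim()) continue;
      try {
        const event = JSON.parse(line);
        if (event.type === "openaiEvent") {
          // Events addressed to the realtime session go back out on the channel.
          openaiEventsDataChannel.send(JSON.stringify(event.data));
        } else if (event.type === "threadEvent") {
          // Thread lifecycle events update client-side state.
          if (event.data.event === "thread.created") {
            threadCreated({ value: event.data, superinterfaceContext });
          } else if (event.data.event === "thread.run.requires_action") {
            threadRunRequiresAction({ value: event.data, superinterfaceContext });
          }
        }
      } catch (error) {
        console.error("JSON parse error:", error, "Line:", line);
      }
    }
  }
});

Two related simplifications are visible elsewhere in the diff: the hard-coded STUN server list is gone (new RTCPeerConnection() presumably relies on browser defaults), and the SDP answer is now read with sdpResponse.text() instead of a manual reader/decoder loop.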