@superinterface/react 3.13.1 → 3.14.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -10928,6 +10928,43 @@ function _asyncToGenerator12(n) {
 });
 };
 }
+var sentTypes = [
+  "session.created",
+  "response.done",
+  "conversation.item.input_audio_transcription.completed"
+];
+var handleThreadEvent = function(_ref) {
+  var event = _ref.event, superinterfaceContext = _ref.superinterfaceContext;
+  if (event.data.event === "thread.created") {
+    threadCreated({
+      value: event.data,
+      superinterfaceContext: superinterfaceContext
+    });
+  } else if (event.data.event === "thread.run.requires_action") {
+    threadRunRequiresAction({
+      value: event.data,
+      superinterfaceContext: superinterfaceContext
+    });
+  }
+};
+var handleOpenaiEvent = function(_ref2) {
+  var event = _ref2.event, openaiEventsDataChannel = _ref2.openaiEventsDataChannel;
+  openaiEventsDataChannel.send(JSON.stringify(event.data));
+};
+var handleEvent = function(_ref3) {
+  var event = _ref3.event, superinterfaceContext = _ref3.superinterfaceContext, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
+  if (event.type === "openaiEvent") {
+    return handleOpenaiEvent({
+      event: event,
+      openaiEventsDataChannel: openaiEventsDataChannel
+    });
+  } else if (event.type === "threadEvent") {
+    return handleThreadEvent({
+      event: event,
+      superinterfaceContext: superinterfaceContext
+    });
+  }
+};
 var useWebrtcAudioRuntime = function() {
   var startSessionIfNeeded = function startSessionIfNeeded() {
     return _startSessionIfNeeded.apply(this, arguments);
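
This hunk hoists data-channel event routing into module-level helpers. A de-transpiled sketch of the added code, roughly equivalent to the transpiled output above (the destructured parameter shapes are inferred from call sites, not declared in the package):

    // Sketch only: mirrors the added helpers, modern syntax for readability.
    const sentTypes = [
      'session.created',
      'response.done',
      'conversation.item.input_audio_transcription.completed',
    ]

    const handleEvent = ({ event, superinterfaceContext, openaiEventsDataChannel }) => {
      if (event.type === 'openaiEvent') {
        // Relay server-emitted OpenAI events back over the "oai-events" data channel.
        return openaiEventsDataChannel.send(JSON.stringify(event.data))
      } else if (event.type === 'threadEvent') {
        // Route thread lifecycle events to the existing handlers.
        if (event.data.event === 'thread.created') {
          threadCreated({ value: event.data, superinterfaceContext })
        } else if (event.data.event === 'thread.run.requires_action') {
          threadRunRequiresAction({ value: event.data, superinterfaceContext })
        }
      }
    }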
@@ -11005,7 +11042,7 @@ var useWebrtcAudioRuntime = function() {
 }
 function _initRealtimeSession() {
   _initRealtimeSession = _asyncToGenerator12(function() {
-    var iceServers, peerConn, audioEl, ms, offer, searchParams, sdpResponse, reader, decoder, answerSdp, _ref, value, done, answer, err;
+    var searchParams, peerConn, audioEl, openaiEventsDataChannel, ms, offer, sdpResponse, answerSdp, answer, err1;
     return _ts_generator(this, function(_state) {
       switch(_state.label){
         case 0:
@@ -11016,30 +11053,11 @@ var useWebrtcAudioRuntime = function() {
   8
 ]);
 setUserIsPending(true);
-iceServers = [
-  {
-    url: "stun:global.stun.twilio.com:3478",
-    urls: "stun:global.stun.twilio.com:3478"
-  },
-  {
-    urls: "stun:stun.l.google.com:19302"
-  },
-  {
-    urls: "stun:stun1.l.google.com:19302"
-  },
-  {
-    urls: "stun:stun2.l.google.com:19302"
-  },
-  {
-    urls: "stun:stun4.l.google.com:19302"
-  },
-  {
-    urls: "stun:stun.stunprotocol.org:3478"
-  }
-];
-peerConn = new RTCPeerConnection({
-  iceServers: iceServers
-});
+searchParams = new URLSearchParams(variableParams({
+  variables: superinterfaceContext.variables,
+  superinterfaceContext: superinterfaceContext
+}));
+peerConn = new RTCPeerConnection();
 pcRef.current = peerConn;
 audioEl = document.createElement("audio");
 audioEl.autoplay = true;
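
Two changes here: the hardcoded STUN list (Twilio, Google, stunprotocol.org) is dropped, and searchParams construction moves up from a later step (see the removal at case 3 below) so the new data-channel message handler can close over it. The peer connection is now built with no configuration, leaving ICE candidate gathering to the browser's defaults. Roughly:

    // 3.13.1: explicit public STUN servers were passed in.
    // 3.14.0: no RTCConfiguration at all; no STUN/TURN servers are configured.
    const peerConn = new RTCPeerConnection()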
@@ -11052,28 +11070,104 @@ var useWebrtcAudioRuntime = function() {
 setAssistantPaused(false);
 setAssistantAudioPlayed(true);
 };
-peerConn.createDataChannel("unused-negotiation-only");
-peerConn.addEventListener("datachannel", function(event) {
-  var channel = event.channel;
-  if (channel.label === "thread-events") {
-    channel.onmessage = function(_ref) {
-      var data2 = _ref.data;
-      console.log("Data channel message:", data2);
-      var parsedData = JSON.parse(data2);
-      if (parsedData.event === "thread.created") {
-        threadCreated({
-          value: parsedData,
-          superinterfaceContext: superinterfaceContext
-        });
-      } else if (parsedData.event === "thread.run.requires_action") {
-        threadRunRequiresAction({
-          value: parsedData,
-          superinterfaceContext: superinterfaceContext
-        });
+openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
+openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
+  var _ref4 = _asyncToGenerator12(function(e) {
+    var parsedData, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
+    return _ts_generator(this, function(_state) {
+      switch(_state.label){
+        case 0:
+          parsedData = JSON.parse(e.data);
+          if (!sentTypes.includes(parsedData.type)) return [
+            2
+          ];
+          return [
+            4,
+            fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc/events?").concat(searchParams), {
+              method: "POST",
+              headers: {
+                "Content-Type": "application/json"
+              },
+              body: e.data
+            })
+          ];
+        case 1:
+          eventsResponse = _state.sent();
+          if (!eventsResponse.body) {
+            throw new Error("No body in events response");
+          }
+          reader = eventsResponse.body.getReader();
+          decoder = new TextDecoder("utf-8");
+          return [
+            4,
+            reader.read()
+          ];
+        case 2:
+          _ref = _state.sent(), value = _ref.value, done = _ref.done;
+          buffer = "";
+          _state.label = 3;
+        case 3:
+          if (!!done) return [
+            3,
+            5
+          ];
+          buffer += decoder.decode(value, {
+            stream: true
+          });
+          lines = buffer.split("\n");
+          buffer = lines.pop() || "";
+          _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined;
+          try {
+            for(_iterator = lines[Symbol.iterator](); !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){
+              line = _step.value;
+              if (line.trim()) {
+                try {
+                  event = JSON.parse(line);
+                  handleEvent({
+                    event: event,
+                    superinterfaceContext: superinterfaceContext,
+                    openaiEventsDataChannel: openaiEventsDataChannel
+                  });
+                } catch (error) {
+                  console.error("JSON parse error:", error, "Line:", line);
+                }
+              }
+            }
+          } catch (err) {
+            _didIteratorError = true;
+            _iteratorError = err;
+          } finally{
+            try {
+              if (!_iteratorNormalCompletion && _iterator.return != null) {
+                _iterator.return();
+              }
+            } finally{
+              if (_didIteratorError) {
+                throw _iteratorError;
+              }
+            }
+          }
+          return [
+            4,
+            reader.read()
+          ];
+        case 4:
+          ref = _state.sent(), value = ref.value, done = ref.done, ref;
+          return [
+            3,
+            3
+          ];
+        case 5:
+          return [
+            2
+          ];
       }
-    };
-  }
-});
+    });
+  });
+  return function(_x) {
+    return _ref4.apply(this, arguments);
+  };
+}());
 return [
   4,
   navigator.mediaDevices.getUserMedia({
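
This is the core behavioral change of the release: instead of a passive "thread-events" channel created by the remote side, the client now opens an "oai-events" data channel itself, filters incoming events by sentTypes, POSTs each matching event to the /audio-runtimes/webrtc/events endpoint, and consumes the newline-delimited JSON stream in the response, dispatching every parsed line through handleEvent. A readable async/await equivalent of the transpiled state machine above (error handling kept as in the diff):

    openaiEventsDataChannel.addEventListener('message', async (e) => {
      // Only session/response/transcription events are relayed to the backend.
      const parsedData = JSON.parse(e.data)
      if (!sentTypes.includes(parsedData.type)) return

      // POST the raw event; the endpoint streams back newline-delimited JSON.
      const eventsResponse = await fetch(
        `${superinterfaceContext.baseUrl}/audio-runtimes/webrtc/events?${searchParams}`,
        { method: 'POST', headers: { 'Content-Type': 'application/json' }, body: e.data },
      )
      if (!eventsResponse.body) throw new Error('No body in events response')

      const reader = eventsResponse.body.getReader()
      const decoder = new TextDecoder('utf-8')
      let buffer = ''
      let { value, done } = await reader.read()
      while (!done) {
        buffer += decoder.decode(value, { stream: true })
        const lines = buffer.split('\n')
        buffer = lines.pop() || '' // hold a partial trailing line for the next chunk
        for (const line of lines) {
          if (!line.trim()) continue
          try {
            handleEvent({ event: JSON.parse(line), superinterfaceContext, openaiEventsDataChannel })
          } catch (error) {
            console.error('JSON parse error:', error, 'Line:', line)
          }
        }
        ;({ value, done } = await reader.read())
      }
    })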
@@ -11099,10 +11193,6 @@ var useWebrtcAudioRuntime = function() {
   ];
 case 3:
   _state.sent();
-  searchParams = new URLSearchParams(variableParams({
-    variables: superinterfaceContext.variables,
-    superinterfaceContext: superinterfaceContext
-  }));
   return [
     4,
     fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc?").concat(searchParams), {
@@ -11118,25 +11208,12 @@ var useWebrtcAudioRuntime = function() {
 if (!sdpResponse.ok) {
   throw new Error("Server responded with status ".concat(sdpResponse.status));
 }
-if (!sdpResponse.body) {
-  throw new Error("ReadableStream not supported in this browser.");
-}
-reader = sdpResponse.body.getReader();
-decoder = new TextDecoder("utf-8");
-answerSdp = "";
 return [
   4,
-  reader.read()
+  sdpResponse.text()
 ];
 case 5:
-_ref = _state.sent(), value = _ref.value, done = _ref.done;
-if (done) {
-  throw new Error("Stream closed before SDP was received");
-}
-answerSdp += decoder.decode(value, {
-  stream: true
-});
-console.log("Received SDP Answer:", answerSdp);
+answerSdp = _state.sent();
 answer = {
   type: "answer",
   sdp: answerSdp
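
The SDP answer handling is also simplified: 3.13.1 created a reader and consumed exactly one chunk (which could truncate an answer delivered across multiple chunks), while 3.14.0 awaits the full body. Roughly equivalent to:

    // 3.14.0: read the whole SDP answer body at once, then build the answer.
    const answerSdp = await sdpResponse.text()
    const answer = { type: 'answer', sdp: answerSdp }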
@@ -11157,8 +11234,8 @@ var useWebrtcAudioRuntime = function() {
   8
 ];
 case 7:
-  err = _state.sent();
-  console.error("Error initRealtimeSession:", err);
+  err1 = _state.sent();
+  console.error("Error initRealtimeSession:", err1);
   setUserIsPending(false);
   setRecorderStatus("stopped");
   setAssistantPlaying(false);