@superinterface/react 3.13.2 → 3.14.0

package/dist/index.cjs CHANGED
@@ -10928,6 +10928,43 @@ function _asyncToGenerator12(n) {
  });
  };
  }
+ var sentTypes = [
+ "session.created",
+ "response.done",
+ "conversation.item.input_audio_transcription.completed"
+ ];
+ var handleThreadEvent = function(_ref) {
+ var event = _ref.event, superinterfaceContext = _ref.superinterfaceContext;
+ if (event.data.event === "thread.created") {
+ threadCreated({
+ value: event.data,
+ superinterfaceContext: superinterfaceContext
+ });
+ } else if (event.data.event === "thread.run.requires_action") {
+ threadRunRequiresAction({
+ value: event.data,
+ superinterfaceContext: superinterfaceContext
+ });
+ }
+ };
+ var handleOpenaiEvent = function(_ref2) {
+ var event = _ref2.event, openaiEventsDataChannel = _ref2.openaiEventsDataChannel;
+ openaiEventsDataChannel.send(JSON.stringify(event.data));
+ };
+ var handleEvent = function(_ref3) {
+ var event = _ref3.event, superinterfaceContext = _ref3.superinterfaceContext, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
+ if (event.type === "openaiEvent") {
+ return handleOpenaiEvent({
+ event: event,
+ openaiEventsDataChannel: openaiEventsDataChannel
+ });
+ } else if (event.type === "threadEvent") {
+ return handleThreadEvent({
+ event: event,
+ superinterfaceContext: superinterfaceContext
+ });
+ }
+ };
  var useWebrtcAudioRuntime = function() {
  var startSessionIfNeeded = function startSessionIfNeeded() {
  return _startSessionIfNeeded.apply(this, arguments);
@@ -11005,41 +11042,22 @@ var useWebrtcAudioRuntime = function() {
  }
  function _initRealtimeSession() {
  _initRealtimeSession = _asyncToGenerator12(function() {
- var searchParams, iceServersResponse, iceServersData, peerConn, audioEl, ms, offer, sdpResponse, reader, decoder, answerSdp, _ref, value, done, answer, err;
+ var searchParams, peerConn, audioEl, openaiEventsDataChannel, ms, offer, sdpResponse, answerSdp, answer, err1;
  return _ts_generator(this, function(_state) {
  switch(_state.label){
  case 0:
  _state.trys.push([
  0,
- 9,
+ 7,
  ,
- 10
+ 8
  ]);
  setUserIsPending(true);
  searchParams = new URLSearchParams(variableParams({
  variables: superinterfaceContext.variables,
  superinterfaceContext: superinterfaceContext
  }));
- return [
- 4,
- fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc/ice-servers?").concat(searchParams), {
- method: "GET",
- headers: {
- "Content-Type": "application/json"
- }
- })
- ];
- case 1:
- iceServersResponse = _state.sent();
- return [
- 4,
- iceServersResponse.json()
- ];
- case 2:
- iceServersData = _state.sent();
- peerConn = new RTCPeerConnection({
- iceServers: iceServersData.iceServers
- });
+ peerConn = new RTCPeerConnection();
  pcRef.current = peerConn;
  audioEl = document.createElement("audio");
  audioEl.autoplay = true;
@@ -11052,35 +11070,111 @@ var useWebrtcAudioRuntime = function() {
  setAssistantPaused(false);
  setAssistantAudioPlayed(true);
  };
- peerConn.createDataChannel("unused-negotiation-only");
- peerConn.addEventListener("datachannel", function(event) {
- var channel = event.channel;
- if (channel.label === "thread-events") {
- channel.onmessage = function(_ref) {
- var data2 = _ref.data;
- console.log("Data channel message:", data2);
- var parsedData = JSON.parse(data2);
- if (parsedData.event === "thread.created") {
- threadCreated({
- value: parsedData,
- superinterfaceContext: superinterfaceContext
- });
- } else if (parsedData.event === "thread.run.requires_action") {
- threadRunRequiresAction({
- value: parsedData,
- superinterfaceContext: superinterfaceContext
- });
+ openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
+ openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
+ var _ref4 = _asyncToGenerator12(function(e) {
+ var parsedData, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
+ return _ts_generator(this, function(_state) {
+ switch(_state.label){
+ case 0:
+ parsedData = JSON.parse(e.data);
+ if (!sentTypes.includes(parsedData.type)) return [
+ 2
+ ];
+ return [
+ 4,
+ fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc/events?").concat(searchParams), {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json"
+ },
+ body: e.data
+ })
+ ];
+ case 1:
+ eventsResponse = _state.sent();
+ if (!eventsResponse.body) {
+ throw new Error("No body in events response");
+ }
+ reader = eventsResponse.body.getReader();
+ decoder = new TextDecoder("utf-8");
+ return [
+ 4,
+ reader.read()
+ ];
+ case 2:
+ _ref = _state.sent(), value = _ref.value, done = _ref.done;
+ buffer = "";
+ _state.label = 3;
+ case 3:
+ if (!!done) return [
+ 3,
+ 5
+ ];
+ buffer += decoder.decode(value, {
+ stream: true
+ });
+ lines = buffer.split("\n");
+ buffer = lines.pop() || "";
+ _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined;
+ try {
+ for(_iterator = lines[Symbol.iterator](); !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){
+ line = _step.value;
+ if (line.trim()) {
+ try {
+ event = JSON.parse(line);
+ handleEvent({
+ event: event,
+ superinterfaceContext: superinterfaceContext,
+ openaiEventsDataChannel: openaiEventsDataChannel
+ });
+ } catch (error) {
+ console.error("JSON parse error:", error, "Line:", line);
+ }
+ }
+ }
+ } catch (err) {
+ _didIteratorError = true;
+ _iteratorError = err;
+ } finally{
+ try {
+ if (!_iteratorNormalCompletion && _iterator.return != null) {
+ _iterator.return();
+ }
+ } finally{
+ if (_didIteratorError) {
+ throw _iteratorError;
+ }
+ }
+ }
+ return [
+ 4,
+ reader.read()
+ ];
+ case 4:
+ ref = _state.sent(), value = ref.value, done = ref.done, ref;
+ return [
+ 3,
+ 3
+ ];
+ case 5:
+ return [
+ 2
+ ];
  }
- };
- }
- });
+ });
+ });
+ return function(_x) {
+ return _ref4.apply(this, arguments);
+ };
+ }());
  return [
  4,
  navigator.mediaDevices.getUserMedia({
  audio: true
  })
  ];
- case 3:
+ case 1:
  ms = _state.sent();
  localStreamRef.current = ms;
  ms.getTracks().forEach(function(t) {
@@ -11091,13 +11185,13 @@ var useWebrtcAudioRuntime = function() {
  4,
  peerConn.createOffer()
  ];
- case 4:
+ case 2:
  offer = _state.sent();
  return [
  4,
  peerConn.setLocalDescription(offer)
  ];
- case 5:
+ case 3:
  _state.sent();
  return [
  4,
@@ -11109,30 +11203,17 @@ var useWebrtcAudioRuntime = function() {
  }
  })
  ];
- case 6:
+ case 4:
  sdpResponse = _state.sent();
  if (!sdpResponse.ok) {
  throw new Error("Server responded with status ".concat(sdpResponse.status));
  }
- if (!sdpResponse.body) {
- throw new Error("ReadableStream not supported in this browser.");
- }
- reader = sdpResponse.body.getReader();
- decoder = new TextDecoder("utf-8");
- answerSdp = "";
  return [
  4,
- reader.read()
+ sdpResponse.text()
  ];
- case 7:
- _ref = _state.sent(), value = _ref.value, done = _ref.done;
- if (done) {
- throw new Error("Stream closed before SDP was received");
- }
- answerSdp += decoder.decode(value, {
- stream: true
- });
- console.log("Received SDP Answer:", answerSdp);
+ case 5:
+ answerSdp = _state.sent();
  answer = {
  type: "answer",
  sdp: answerSdp
@@ -11141,7 +11222,7 @@ var useWebrtcAudioRuntime = function() {
  4,
  peerConn.setRemoteDescription(answer)
  ];
- case 8:
+ case 6:
  _state.sent();
  buildAnalyzers(ms, audioEl);
  setUserIsPending(false);
@@ -11150,11 +11231,11 @@ var useWebrtcAudioRuntime = function() {
  setAssistantPlaying(true);
  return [
  3,
- 10
+ 8
  ];
- case 9:
- err = _state.sent();
- console.error("Error initRealtimeSession:", err);
+ case 7:
+ err1 = _state.sent();
+ console.error("Error initRealtimeSession:", err1);
  setUserIsPending(false);
  setRecorderStatus("stopped");
  setAssistantPlaying(false);
@@ -11164,9 +11245,9 @@ var useWebrtcAudioRuntime = function() {
  setAssistantAudioPlayed(false);
  return [
  3,
- 10
+ 8
  ];
- case 10:
+ case 8:
  return [
  2
  ];
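
For orientation, the de-transpiled shape of this change: the runtime no longer fetches ICE servers before building the RTCPeerConnection, it reads the SDP answer with response.text() instead of a manual stream read, and it now creates a client-side "oai-events" data channel that forwards a whitelisted set of OpenAI realtime events to the backend, then streams back the backend's newline-delimited JSON events and dispatches them. Below is a minimal TypeScript sketch of that relay loop; only sentTypes, the "oai-events" channel name, and the /audio-runtimes/webrtc/events endpoint come from the diff, while the type shapes and the attachEventRelay wrapper are illustrative assumptions, not the package's actual API.

// Sketch of the new event relay loop, under assumed types.
type SuperinterfaceContext = { baseUrl: string }
type ServerEvent =
  | { type: 'openaiEvent'; data: unknown }
  | { type: 'threadEvent'; data: { event: string } }

// Whitelisted OpenAI realtime event types forwarded to the backend (from the diff).
const sentTypes = [
  'session.created',
  'response.done',
  'conversation.item.input_audio_transcription.completed',
]

// Hypothetical wrapper: attach the relay loop to the client-created "oai-events" channel.
function attachEventRelay({
  channel,
  context,
  searchParams,
  onThreadEvent,
}: {
  channel: RTCDataChannel
  context: SuperinterfaceContext
  searchParams: URLSearchParams
  onThreadEvent: (event: ServerEvent) => void
}) {
  channel.addEventListener('message', async (e: MessageEvent<string>) => {
    // Only relay the whitelisted subset of realtime events.
    const parsed = JSON.parse(e.data) as { type: string }
    if (!sentTypes.includes(parsed.type)) return

    // POST the raw event to the backend events endpoint.
    const response = await fetch(
      `${context.baseUrl}/audio-runtimes/webrtc/events?${searchParams}`,
      {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: e.data,
      },
    )
    if (!response.body) throw new Error('No body in events response')

    // The backend streams newline-delimited JSON events back.
    const reader = response.body.getReader()
    const decoder = new TextDecoder('utf-8')
    let buffer = ''
    let { value, done } = await reader.read()
    while (!done) {
      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n')
      buffer = lines.pop() ?? '' // hold a partial trailing line for the next chunk
      for (const line of lines) {
        if (!line.trim()) continue
        const event = JSON.parse(line) as ServerEvent
        if (event.type === 'openaiEvent') {
          // Server-originated OpenAI events go back over the data channel.
          channel.send(JSON.stringify(event.data))
        } else if (event.type === 'threadEvent') {
          // thread.created / thread.run.requires_action update local thread state.
          onThreadEvent(event)
        }
      }
      const next = await reader.read()
      value = next.value
      done = next.done
    }
  })
}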