@superinterface/react 3.13.2 → 3.14.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.cjs CHANGED
@@ -10928,6 +10928,43 @@ function _asyncToGenerator12(n) {
     });
   };
 }
+var sentTypes = [
+  "session.created",
+  "response.done",
+  "conversation.item.input_audio_transcription.completed"
+];
+var handleThreadEvent = function(_ref) {
+  var event = _ref.event, superinterfaceContext = _ref.superinterfaceContext;
+  if (event.data.event === "thread.created") {
+    threadCreated({
+      value: event.data,
+      superinterfaceContext: superinterfaceContext
+    });
+  } else if (event.data.event === "thread.run.requires_action") {
+    threadRunRequiresAction({
+      value: event.data,
+      superinterfaceContext: superinterfaceContext
+    });
+  }
+};
+var handleOpenaiEvent = function(_ref2) {
+  var event = _ref2.event, openaiEventsDataChannel = _ref2.openaiEventsDataChannel;
+  openaiEventsDataChannel.send(JSON.stringify(event.data));
+};
+var handleEvent = function(_ref3) {
+  var event = _ref3.event, superinterfaceContext = _ref3.superinterfaceContext, openaiEventsDataChannel = _ref3.openaiEventsDataChannel;
+  if (event.type === "openaiEvent") {
+    return handleOpenaiEvent({
+      event: event,
+      openaiEventsDataChannel: openaiEventsDataChannel
+    });
+  } else if (event.type === "threadEvent") {
+    return handleThreadEvent({
+      event: event,
+      superinterfaceContext: superinterfaceContext
+    });
+  }
+};
 var useWebrtcAudioRuntime = function() {
   var startSessionIfNeeded = function startSessionIfNeeded() {
     return _startSessionIfNeeded.apply(this, arguments);
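Note: the added helpers above are SWC-transpiled output. De-transpiled, they plausibly correspond to source like the sketch below; this is a reconstruction for readability, not the package's actual source, and the event type shapes are assumptions inferred from the branches in the generated code.

// Only these OpenAI realtime event types get forwarded to the Superinterface server.
const sentTypes = [
  'session.created',
  'response.done',
  'conversation.item.input_audio_transcription.completed',
] as const

// Assumed shapes, inferred from the transpiled branches above.
type ThreadEvent = { type: 'threadEvent'; data: { event: string } }
type OpenaiEvent = { type: 'openaiEvent'; data: unknown }

const handleEvent = ({
  event,
  openaiEventsDataChannel,
}: {
  event: ThreadEvent | OpenaiEvent
  openaiEventsDataChannel: RTCDataChannel
}) => {
  if (event.type === 'openaiEvent') {
    // Events the server addresses to OpenAI are relayed straight back into
    // the realtime session over the "oai-events" data channel.
    openaiEventsDataChannel.send(JSON.stringify(event.data))
  } else if (event.type === 'threadEvent') {
    // "thread.created" and "thread.run.requires_action" update client-side
    // thread state via the internal threadCreated / threadRunRequiresAction
    // helpers (both also receive superinterfaceContext, omitted here).
  }
}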
@@ -11005,41 +11042,18 @@ var useWebrtcAudioRuntime = function() {
   }
   function _initRealtimeSession() {
     _initRealtimeSession = _asyncToGenerator12(function() {
-      var searchParams, iceServersResponse, iceServersData, peerConn, audioEl, ms, offer, sdpResponse, reader, decoder, answerSdp, _ref, value, done, answer, err;
+      var peerConn, audioEl, openaiEventsDataChannel, ms, offer, searchParams_0, sdpResponse, answerSdp, answer, err1;
       return _ts_generator(this, function(_state) {
         switch(_state.label){
           case 0:
             _state.trys.push([
               0,
-              9,
+              7,
               ,
-              10
+              8
             ]);
             setUserIsPending(true);
-            searchParams = new URLSearchParams(variableParams({
-              variables: superinterfaceContext.variables,
-              superinterfaceContext: superinterfaceContext
-            }));
-            return [
-              4,
-              fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc/ice-servers?").concat(searchParams), {
-                method: "GET",
-                headers: {
-                  "Content-Type": "application/json"
-                }
-              })
-            ];
-          case 1:
-            iceServersResponse = _state.sent();
-            return [
-              4,
-              iceServersResponse.json()
-            ];
-          case 2:
-            iceServersData = _state.sent();
-            peerConn = new RTCPeerConnection({
-              iceServers: iceServersData.iceServers
-            });
+            peerConn = new RTCPeerConnection();
             pcRef.current = peerConn;
             audioEl = document.createElement("audio");
             audioEl.autoplay = true;
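Note: the net effect of this hunk is that 3.14.1 drops the pre-flight GET to /audio-runtimes/webrtc/ice-servers and builds the peer connection with the browser's default configuration, i.e. no ICE servers are passed in. As a sketch:

// 3.13.2 (removed): fetch ICE servers, then configure the connection.
// const { iceServers } = await (await fetch(`${baseUrl}/audio-runtimes/webrtc/ice-servers?${searchParams}`)).json()
// const peerConn = new RTCPeerConnection({ iceServers })

// 3.14.1: no RTCConfiguration at all.
const peerConn = new RTCPeerConnection()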
@@ -11052,35 +11066,115 @@ var useWebrtcAudioRuntime = function() {
               setAssistantPaused(false);
               setAssistantAudioPlayed(true);
             };
-            peerConn.createDataChannel("unused-negotiation-only");
-            peerConn.addEventListener("datachannel", function(event) {
-              var channel = event.channel;
-              if (channel.label === "thread-events") {
-                channel.onmessage = function(_ref) {
-                  var data2 = _ref.data;
-                  console.log("Data channel message:", data2);
-                  var parsedData = JSON.parse(data2);
-                  if (parsedData.event === "thread.created") {
-                    threadCreated({
-                      value: parsedData,
-                      superinterfaceContext: superinterfaceContext
-                    });
-                  } else if (parsedData.event === "thread.run.requires_action") {
-                    threadRunRequiresAction({
-                      value: parsedData,
-                      superinterfaceContext: superinterfaceContext
-                    });
+            openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
+            openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
+              var _ref4 = _asyncToGenerator12(function(e) {
+                var parsedData, searchParams, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
+                return _ts_generator(this, function(_state) {
+                  switch(_state.label){
+                    case 0:
+                      parsedData = JSON.parse(e.data);
+                      if (!sentTypes.includes(parsedData.type)) return [
+                        2
+                      ];
+                      searchParams = new URLSearchParams(variableParams({
+                        variables: superinterfaceContext.variables,
+                        superinterfaceContext: superinterfaceContext
+                      }));
+                      return [
+                        4,
+                        fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc/events?").concat(searchParams), {
+                          method: "POST",
+                          headers: {
+                            "Content-Type": "application/json"
+                          },
+                          body: e.data
+                        })
+                      ];
+                    case 1:
+                      eventsResponse = _state.sent();
+                      if (!eventsResponse.body) {
+                        throw new Error("No body in events response");
+                      }
+                      reader = eventsResponse.body.getReader();
+                      decoder = new TextDecoder("utf-8");
+                      return [
+                        4,
+                        reader.read()
+                      ];
+                    case 2:
+                      _ref = _state.sent(), value = _ref.value, done = _ref.done;
+                      buffer = "";
+                      _state.label = 3;
+                    case 3:
+                      if (!!done) return [
+                        3,
+                        5
+                      ];
+                      buffer += decoder.decode(value, {
+                        stream: true
+                      });
+                      lines = buffer.split("\n");
+                      buffer = lines.pop() || "";
+                      _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined;
+                      try {
+                        for(_iterator = lines[Symbol.iterator](); !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){
+                          line = _step.value;
+                          if (line.trim()) {
+                            try {
+                              event = JSON.parse(line);
+                              handleEvent({
+                                event: event,
+                                superinterfaceContext: superinterfaceContext,
+                                openaiEventsDataChannel: openaiEventsDataChannel
+                              });
+                            } catch (error) {
+                              console.error("JSON parse error:", error, "Line:", line);
+                            }
+                          }
+                        }
+                      } catch (err) {
+                        _didIteratorError = true;
+                        _iteratorError = err;
+                      } finally{
+                        try {
+                          if (!_iteratorNormalCompletion && _iterator.return != null) {
+                            _iterator.return();
+                          }
+                        } finally{
+                          if (_didIteratorError) {
+                            throw _iteratorError;
+                          }
+                        }
+                      }
+                      return [
+                        4,
+                        reader.read()
+                      ];
+                    case 4:
+                      ref = _state.sent(), value = ref.value, done = ref.done, ref;
+                      return [
+                        3,
+                        3
+                      ];
+                    case 5:
+                      return [
+                        2
+                      ];
                   }
-                };
-              }
-            });
+                });
+              });
+              return function(_x) {
+                return _ref4.apply(this, arguments);
+              };
+            }());
             return [
               4,
               navigator.mediaDevices.getUserMedia({
                 audio: true
               })
             ];
-          case 3:
+          case 1:
             ms = _state.sent();
             localStreamRef.current = ms;
             ms.getTracks().forEach(function(t) {
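Note: cases 0 through 5 in the added listener are an SWC _ts_generator state machine; as async/await the handler plausibly reads like the sketch below. The endpoint path, allow-list filter, and newline-delimited JSON framing are taken from the diff; the function name and eventsUrl parameter are assumptions, and sentTypes / handleEvent refer to the sketch after the first hunk.

const attachOaiEventsHandler = (
  openaiEventsDataChannel: RTCDataChannel,
  // `${superinterfaceContext.baseUrl}/audio-runtimes/webrtc/events?${searchParams}` in the diff
  eventsUrl: string,
) => {
  openaiEventsDataChannel.addEventListener('message', async (e) => {
    // Forward only the allow-listed realtime event types to the server.
    const parsedData = JSON.parse(e.data)
    if (!sentTypes.includes(parsedData.type)) return

    const eventsResponse = await fetch(eventsUrl, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: e.data,
    })
    if (!eventsResponse.body) {
      throw new Error('No body in events response')
    }

    // The server streams back newline-delimited JSON: buffer partial lines
    // across chunks and dispatch each complete line as an event.
    const reader = eventsResponse.body.getReader()
    const decoder = new TextDecoder('utf-8')
    let buffer = ''
    for (;;) {
      const { value, done } = await reader.read()
      if (done) break
      buffer += decoder.decode(value, { stream: true })
      const lines = buffer.split('\n')
      buffer = lines.pop() || ''
      for (const line of lines) {
        if (!line.trim()) continue
        try {
          handleEvent({ event: JSON.parse(line), openaiEventsDataChannel })
        } catch (error) {
          console.error('JSON parse error:', error, 'Line:', line)
        }
      }
    }
  })
}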
@@ -11091,17 +11185,21 @@ var useWebrtcAudioRuntime = function() {
               4,
               peerConn.createOffer()
             ];
-          case 4:
+          case 2:
             offer = _state.sent();
             return [
               4,
               peerConn.setLocalDescription(offer)
             ];
-          case 5:
+          case 3:
             _state.sent();
+            searchParams_0 = new URLSearchParams(variableParams({
+              variables: superinterfaceContext.variables,
+              superinterfaceContext: superinterfaceContext
+            }));
             return [
               4,
-              fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc?").concat(searchParams), {
+              fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc?").concat(searchParams_0), {
                 method: "POST",
                 body: offer.sdp,
                 headers: {
@@ -11109,30 +11207,17 @@ var useWebrtcAudioRuntime = function() {
                 }
               })
             ];
-          case 6:
+          case 4:
             sdpResponse = _state.sent();
             if (!sdpResponse.ok) {
               throw new Error("Server responded with status ".concat(sdpResponse.status));
             }
-            if (!sdpResponse.body) {
-              throw new Error("ReadableStream not supported in this browser.");
-            }
-            reader = sdpResponse.body.getReader();
-            decoder = new TextDecoder("utf-8");
-            answerSdp = "";
             return [
               4,
-              reader.read()
+              sdpResponse.text()
             ];
-          case 7:
-            _ref = _state.sent(), value = _ref.value, done = _ref.done;
-            if (done) {
-              throw new Error("Stream closed before SDP was received");
-            }
-            answerSdp += decoder.decode(value, {
-              stream: true
-            });
-            console.log("Received SDP Answer:", answerSdp);
+          case 5:
+            answerSdp = _state.sent();
             answer = {
               type: "answer",
               sdp: answerSdp
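Note: cases 4 and 5 above replace the manual body read of the SDP answer with Response.text(). The removed 3.13.2 code performed exactly one reader.read() and decoded only that first chunk, so a multi-chunk answer would have been truncated; text() resolves with the complete body. A minimal sketch of the new flow (the function name and webrtcUrl parameter are assumed, and the Content-Type value is not visible in this hunk):

// webrtcUrl corresponds to `${superinterfaceContext.baseUrl}/audio-runtimes/webrtc?${searchParams_0}`.
const exchangeSdp = async (peerConn: RTCPeerConnection, webrtcUrl: string) => {
  const offer = await peerConn.createOffer()
  await peerConn.setLocalDescription(offer)
  const sdpResponse = await fetch(webrtcUrl, {
    method: 'POST',
    body: offer.sdp,
    headers: { 'Content-Type': 'application/sdp' }, // assumed; hidden between hunks
  })
  if (!sdpResponse.ok) {
    throw new Error(`Server responded with status ${sdpResponse.status}`)
  }
  // Await the full body instead of decoding a single stream chunk.
  const answerSdp = await sdpResponse.text()
  await peerConn.setRemoteDescription({ type: 'answer', sdp: answerSdp })
}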
@@ -11141,7 +11226,7 @@ var useWebrtcAudioRuntime = function() {
               4,
               peerConn.setRemoteDescription(answer)
             ];
-          case 8:
+          case 6:
             _state.sent();
             buildAnalyzers(ms, audioEl);
             setUserIsPending(false);
@@ -11150,11 +11235,11 @@ var useWebrtcAudioRuntime = function() {
             setAssistantPlaying(true);
             return [
               3,
-              10
+              8
             ];
-          case 9:
-            err = _state.sent();
-            console.error("Error initRealtimeSession:", err);
+          case 7:
+            err1 = _state.sent();
+            console.error("Error initRealtimeSession:", err1);
             setUserIsPending(false);
             setRecorderStatus("stopped");
             setAssistantPlaying(false);
@@ -11164,9 +11249,9 @@ var useWebrtcAudioRuntime = function() {
             setAssistantAudioPlayed(false);
             return [
               3,
-              10
+              8
             ];
-          case 10:
+          case 8:
             return [
               2
             ];