@superinterface/react 3.14.1 → 3.14.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -10897,7 +10897,7 @@ var useWebrtcAudioRuntime = function() {
  };
  openaiEventsDataChannel = peerConn.createDataChannel("oai-events");
  openaiEventsDataChannel.addEventListener("message", /* @__PURE__ */ function() {
- var _ref4 = _asyncToGenerator12(function(e) {
+ var _ref6 = _asyncToGenerator12(function(e) {
  var parsedData, searchParams, eventsResponse, reader, decoder, _ref, value, done, buffer, lines, _iteratorNormalCompletion, _didIteratorError, _iteratorError, _iterator, _step, line, event, ref;
  return _ts_generator(this, function(_state) {
  switch(_state.label){
@@ -10906,6 +10906,9 @@ var useWebrtcAudioRuntime = function() {
  if (!sentTypes.includes(parsedData.type)) return [
  2
  ];
+ if (parsedData.type === "response.done" && parsedData.response.status !== "completed") return [
+ 2
+ ];
  searchParams = new URLSearchParams(variableParams({
  variables: superinterfaceContext.variables,
  superinterfaceContext: superinterfaceContext
@@ -10994,7 +10997,7 @@ var useWebrtcAudioRuntime = function() {
  });
  });
  return function(_x) {
- return _ref4.apply(this, arguments);
+ return _ref6.apply(this, arguments);
  };
  }());
  return [
@@ -11089,6 +11092,61 @@ var useWebrtcAudioRuntime = function() {
  });
  return _initRealtimeSession.apply(this, arguments);
  }
+ var start = /* @__PURE__ */ function() {
+ var _ref4 = _asyncToGenerator12(function() {
+ return _ts_generator(this, function(_state) {
+ switch(_state.label){
+ case 0:
+ return [
+ 4,
+ startSessionIfNeeded()
+ ];
+ case 1:
+ _state.sent();
+ setAssistantPaused(false);
+ setAssistantPlaying(true);
+ if (assistantAudioElRef.current) {
+ assistantAudioElRef.current.play().catch(function(err_1) {
+ console.error("Assistant play error:", err_1);
+ });
+ }
+ if (localStreamRef.current) {
+ localStreamRef.current.getAudioTracks().forEach(function(t_0) {
+ return t_0.enabled = true;
+ });
+ }
+ return [
+ 2
+ ];
+ }
+ });
+ });
+ return function start2() {
+ return _ref4.apply(this, arguments);
+ };
+ }();
+ var pause = /* @__PURE__ */ function() {
+ var _ref5 = _asyncToGenerator12(function() {
+ return _ts_generator(this, function(_state) {
+ if (!sessionStartedRef.current) return [
+ 2
+ ];
+ setAssistantPaused(true);
+ setAssistantPlaying(false);
+ if (localStreamRef.current) {
+ localStreamRef.current.getAudioTracks().forEach(function(t_1) {
+ return t_1.enabled = false;
+ });
+ }
+ return [
+ 2
+ ];
+ });
+ });
+ return function pause2() {
+ return _ref5.apply(this, arguments);
+ };
+ }();
  return useMemo20(function() {
  return {
  webrtcAudioRuntime: {
@@ -11096,174 +11154,27 @@ var useWebrtcAudioRuntime = function() {
  start: function() {
  var _start = _asyncToGenerator12(function() {
  return _ts_generator(this, function(_state) {
- switch(_state.label){
- case 0:
- return [
- 4,
- startSessionIfNeeded()
- ];
- case 1:
- _state.sent();
- setRecorderStatus("recording");
- if (localStreamRef.current) {
- localStreamRef.current.getAudioTracks().forEach(function(t_0) {
- return t_0.enabled = true;
- });
- }
- return [
- 2
- ];
- }
- });
- });
- function start() {
- return _start.apply(this, arguments);
- }
- return start;
- }(),
- pause: function() {
- var _pause = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setRecorderStatus("paused");
- if (localStreamRef.current) {
- localStreamRef.current.getAudioTracks().forEach(function(t_1) {
- return t_1.enabled = false;
- });
- }
  return [
  2
  ];
  });
  });
- function pause() {
- return _pause.apply(this, arguments);
- }
- return pause;
- }(),
- resume: function() {
- var _resume = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setRecorderStatus("recording");
- if (localStreamRef.current) {
- localStreamRef.current.getAudioTracks().forEach(function(t_2) {
- return t_2.enabled = true;
- });
- }
- return [
- 2
- ];
- });
- });
- function resume() {
- return _resume.apply(this, arguments);
- }
- return resume;
- }(),
- stop: function() {
- var _stop = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setRecorderStatus("stopped");
- if (localStreamRef.current) {
- localStreamRef.current.getTracks().forEach(function(track) {
- return track.stop();
- });
- }
- return [
- 2
- ];
- });
- });
- function stop() {
- return _stop.apply(this, arguments);
+ function start2() {
+ return _start.apply(this, arguments);
  }
- return stop;
+ return start2;
  }(),
+ pause: pause,
+ resume: start,
+ stop: pause,
  isPending: userIsPending,
  visualizationAnalyser: userAnalyserRef.current,
  rawStatus: recorderStatus
  },
  assistant: {
- play: function() {
- var _play = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- switch(_state.label){
- case 0:
- return [
- 4,
- startSessionIfNeeded()
- ];
- case 1:
- _state.sent();
- setAssistantPaused(false);
- setAssistantPlaying(true);
- if (assistantAudioElRef.current) {
- assistantAudioElRef.current.play().catch(function(err_1) {
- console.error("Assistant play error:", err_1);
- });
- }
- return [
- 2
- ];
- }
- });
- });
- function play() {
- return _play.apply(this, arguments);
- }
- return play;
- }(),
- pause: function() {
- var _pause2 = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setAssistantPaused(true);
- setAssistantPlaying(false);
- if (assistantAudioElRef.current) {
- assistantAudioElRef.current.pause();
- }
- return [
- 2
- ];
- });
- });
- function pause() {
- return _pause2.apply(this, arguments);
- }
- return pause;
- }(),
- stop: function() {
- var _stop2 = _asyncToGenerator12(function() {
- return _ts_generator(this, function(_state) {
- if (!sessionStartedRef.current) return [
- 2
- ];
- setAssistantPaused(false);
- setAssistantPlaying(false);
- if (assistantAudioElRef.current) {
- assistantAudioElRef.current.pause();
- assistantAudioElRef.current.currentTime = 0;
- }
- return [
- 2
- ];
- });
- });
- function stop() {
- return _stop2.apply(this, arguments);
- }
- return stop;
- }(),
+ play: start,
+ pause: pause,
+ stop: pause,
  visualizationAnalyser: assistantAnalyserRef.current,
  playing: assistantPlaying,
  paused: assistantPaused,
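
Read together, the hunks above do two things inside useWebrtcAudioRuntime: the "oai-events" data-channel handler now ignores response.done events whose response status is not "completed", and the duplicated per-control async functions are replaced by shared start/pause helpers that the memoized runtime reuses for both the user and assistant sides. The following is a rough de-transpiled TypeScript sketch of that behavior, not the package's actual source; the names (sentTypes, startSessionIfNeeded, sessionStartedRef, localStreamRef, assistantAudioElRef and the state setters) are taken from the diff, and the event-forwarding body is elided.

// Declarations stand in for values that exist in the hook's scope in the diff above.
declare const sentTypes: string[];
declare function startSessionIfNeeded(): Promise<void>;
declare const sessionStartedRef: { current: boolean };
declare const localStreamRef: { current: MediaStream | null };
declare const assistantAudioElRef: { current: HTMLAudioElement | null };
declare function setAssistantPaused(value: boolean): void;
declare function setAssistantPlaying(value: boolean): void;

// Data-channel handler: 3.14.3 additionally drops "response.done" events
// whose response did not finish with status "completed".
const onOpenaiEvent = async (e: MessageEvent) => {
  const parsedData = JSON.parse(e.data);
  if (!sentTypes.includes(parsedData.type)) return;
  if (parsedData.type === "response.done" && parsedData.response.status !== "completed") return;
  // ...forward the event to the events endpoint, unchanged from 3.14.1...
};

// Shared helpers introduced in 3.14.3.
const start = async () => {
  await startSessionIfNeeded();
  setAssistantPaused(false);
  setAssistantPlaying(true);
  if (assistantAudioElRef.current) {
    assistantAudioElRef.current.play().catch((err) => console.error("Assistant play error:", err));
  }
  localStreamRef.current?.getAudioTracks().forEach((t) => { t.enabled = true; });
};

const pause = async () => {
  if (!sessionStartedRef.current) return;
  setAssistantPaused(true);
  setAssistantPlaying(false);
  localStreamRef.current?.getAudioTracks().forEach((t) => { t.enabled = false; });
};

// The memoized runtime then wires the helpers into both sides:
//   user:      pause, resume: start, stop: pause (its own start becomes an awaited no-op)
//   assistant: play: start, pause, stop: pause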