@superinterface/react 3.10.3 → 3.11.1

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their public registry.
package/dist/index.js CHANGED
@@ -2713,11 +2713,11 @@ function _asyncToGenerator3(n) {
2713
2713
  }
2714
2714
  var threadRunRequiresAction = /* @__PURE__ */ function() {
2715
2715
  var _ref2 = _asyncToGenerator3(function(_ref) {
2716
- var value, queryClient, messagesQueryKey, superinterfaceContext, toolCalls, toolOutputs;
2716
+ var value, superinterfaceContext, toolCalls, toolOutputs;
2717
2717
  return _ts_generator(this, function(_state) {
2718
2718
  switch(_state.label){
2719
2719
  case 0:
2720
- value = _ref.value, queryClient = _ref.queryClient, messagesQueryKey = _ref.messagesQueryKey, superinterfaceContext = _ref.superinterfaceContext;
2720
+ value = _ref.value, superinterfaceContext = _ref.superinterfaceContext;
2721
2721
  if (!(value.data.required_action.type === "submit_client_tool_outputs")) return [
2722
2722
  3,
2723
2723
  2
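This hunk narrows the internal handler's input: threadRunRequiresAction now receives only the streamed value and the Superinterface context, with no queryClient or messagesQueryKey. The realtime hunk further below invokes it (and threadCreated) exactly that way; a minimal sketch of the new call shape:

  threadRunRequiresAction({ value, superinterfaceContext })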
@@ -8605,121 +8605,17 @@ import { c as _c51 } from "react-compiler-runtime";
8605
8605
  import { Flex as Flex29 } from "@radix-ui/themes";
8606
8606
  // src/contexts/threads/AudioThreadContext/index.ts
8607
8607
  import { createContext as createContext13 } from "react";
8608
- function asyncGeneratorStep10(n, t, e, r, o, a, c) {
8609
- try {
8610
- var i = n[a](c), u = i.value;
8611
- } catch (n2) {
8612
- return void e(n2);
8613
- }
8614
- i.done ? t(u) : Promise.resolve(u).then(r, o);
8615
- }
8616
- function _asyncToGenerator10(n) {
8617
- return function() {
8618
- var t = this, e = arguments;
8619
- return new Promise(function(r, o) {
8620
- var a = n.apply(t, e);
8621
- function _next(n2) {
8622
- asyncGeneratorStep10(a, r, o, _next, _throw, "next", n2);
8623
- }
8624
- function _throw(n2) {
8625
- asyncGeneratorStep10(a, r, o, _next, _throw, "throw", n2);
8626
- }
8627
- _next(void 0);
8628
- });
8629
- };
8630
- }
8631
8608
  var AudioThreadContext = /* @__PURE__ */ createContext13({
8632
- status: "idle",
8633
- recorderProps: {
8634
- status: "idle",
8635
- start: function() {
8636
- var _start = _asyncToGenerator10(function() {
8637
- return _ts_generator(this, function(_state) {
8638
- return [
8639
- 2
8640
- ];
8641
- });
8642
- });
8643
- function start() {
8644
- return _start.apply(this, arguments);
8645
- }
8646
- return start;
8647
- }(),
8648
- stop: function() {
8649
- var _stop = _asyncToGenerator10(function() {
8650
- return _ts_generator(this, function(_state) {
8651
- return [
8652
- 2
8653
- ];
8654
- });
8655
- });
8656
- function stop() {
8657
- return _stop.apply(this, arguments);
8658
- }
8659
- return stop;
8660
- }(),
8661
- pause: function() {
8662
- var _pause = _asyncToGenerator10(function() {
8663
- return _ts_generator(this, function(_state) {
8664
- return [
8665
- 2
8666
- ];
8667
- });
8668
- });
8669
- function pause() {
8670
- return _pause.apply(this, arguments);
8671
- }
8672
- return pause;
8673
- }(),
8674
- resume: function() {
8675
- var _resume = _asyncToGenerator10(function() {
8676
- return _ts_generator(this, function(_state) {
8677
- return [
8678
- 2
8679
- ];
8680
- });
8681
- });
8682
- function resume() {
8683
- return _resume.apply(this, arguments);
8684
- }
8685
- return resume;
8686
- }(),
8687
- visualizationAnalyser: null
8688
- },
8689
- // @ts-ignore-next-line
8690
- messageAudioProps: {
8691
- visualizationAnalyser: null,
8692
- playing: false,
8693
- paused: false,
8694
- isReady: false,
8695
- play: function() {
8696
- var _play = _asyncToGenerator10(function() {
8697
- return _ts_generator(this, function(_state) {
8698
- return [
8699
- 2
8700
- ];
8701
- });
8702
- });
8703
- function play() {
8704
- return _play.apply(this, arguments);
8705
- }
8706
- return play;
8707
- }(),
8708
- pause: function() {
8709
- var _pause2 = _asyncToGenerator10(function() {
8710
- return _ts_generator(this, function(_state) {
8711
- return [
8712
- 2
8713
- ];
8714
- });
8715
- });
8716
- function pause() {
8717
- return _pause2.apply(this, arguments);
8718
- }
8719
- return pause;
8720
- }()
8721
- }
8609
+ audioRuntime: null
8722
8610
  });
8611
+ // src/hooks/threads/useAudioThreadContext/index.ts
8612
+ import { useContext as useContext15 } from "react";
8613
+ var useAudioThreadContext = function() {
8614
+ return useContext15(AudioThreadContext);
8615
+ };
8616
+ // src/hooks/audioRuntimes/useTtsAudioRuntime/index.ts
8617
+ import { c as _c50 } from "react-compiler-runtime";
8618
+ import { useMemo as useMemo18 } from "react";
8723
8619
  // src/hooks/misc/usePermission/index.ts
8724
8620
  import { c as _c47 } from "react-compiler-runtime";
8725
8621
  import { useEffect as useEffect7, useState as useState7 } from "react";
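As of 3.11.1 the default AudioThreadContext value shrinks to { audioRuntime: null } and is read through the new useAudioThreadContext hook. A minimal consumption sketch, assuming a runtime has already been provided (names are taken from the compiled output above, not from published docs):

  const { audioRuntime } = useAudioThreadContext()
  // recorder side: start/stop/pause/resume, isPending, visualizationAnalyser, rawStatus
  await audioRuntime?.user.start()
  // playback side: play/pause/stop, playing, paused, isPending, isReady, isAudioPlayed
  await audioRuntime?.assistant.play()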
@@ -8788,83 +8684,16 @@ var usePermission = function(permissionDesc) {
8788
8684
  useEffect7(t0, t1);
8789
8685
  return state;
8790
8686
  };
8791
- // src/components/threads/AudioThread/Root/lib/blobToData.ts
8792
- var blobToData = function(blob) {
8793
- return new Promise(function(resolve) {
8794
- var reader = new FileReader();
8795
- reader.onloadend = function() {
8796
- return resolve(reader.result);
8797
- };
8798
- reader.readAsDataURL(blob);
8799
- });
8800
- };
8801
- // src/hooks/audioThreads/useStatus/index.ts
8802
- import { c as _c48 } from "react-compiler-runtime";
8803
- import { useMemo as useMemo16 } from "react";
8804
- var useStatus = function(t0) {
8805
- var $ = _c48(2);
8806
- var messageAudioProps = t0.messageAudioProps, recorderProps = t0.recorderProps, createMessageProps = t0.createMessageProps;
8807
- var latestMessageProps = useLatestMessage();
8808
- var t1;
8809
- bb0: {
8810
- var _latestMessageProps$l;
8811
- if (recorderProps.status === "recording") {
8812
- t1 = "recording";
8813
- break bb0;
8814
- }
8815
- if (createMessageProps.isPending) {
8816
- t1 = "creatingMessage";
8817
- break bb0;
8818
- }
8819
- if (messageAudioProps.paused || !messageAudioProps.isAudioPlayed) {
8820
- t1 = "playerPaused";
8821
- break bb0;
8822
- }
8823
- if (messageAudioProps.playing || messageAudioProps.isPending) {
8824
- t1 = "playing";
8825
- break bb0;
8826
- }
8827
- if (!messageAudioProps.isAudioPlayed && !messageAudioProps.isReady) {
8828
- t1 = "loading";
8829
- break bb0;
8830
- }
8831
- if (((_latestMessageProps$l = latestMessageProps.latestMessage) === null || _latestMessageProps$l === void 0 ? void 0 : _latestMessageProps$l.status) === "in_progress") {
8832
- t1 = "creatingMessage";
8833
- break bb0;
8834
- }
8835
- if (recorderProps.status === "idle") {
8836
- t1 = "idle";
8837
- break bb0;
8838
- }
8839
- if (recorderProps.status === "paused") {
8840
- t1 = "recorderPaused";
8841
- break bb0;
8842
- }
8843
- t1 = "loading";
8844
- }
8845
- var status = t1;
8846
- var t2;
8847
- if ($[0] !== status) {
8848
- t2 = {
8849
- status: status
8850
- };
8851
- $[0] = status;
8852
- $[1] = t2;
8853
- } else {
8854
- t2 = $[1];
8855
- }
8856
- return t2;
8857
- };
8858
8687
  // src/hooks/audioThreads/useRecorder/index.ts
8859
8688
  import dayjs3 from "dayjs";
8860
8689
  import { useAudioCapture } from "use-audio-capture";
8861
- import { useMemo as useMemo17, useRef as useRef7, useState as useState8, useCallback as useCallback5, useEffect as useEffect9 } from "react";
8690
+ import { useMemo as useMemo16, useRef as useRef7, useState as useState8, useCallback as useCallback5, useEffect as useEffect9 } from "react";
8862
8691
  import { useAudioPlayer } from "react-use-audio-player";
8863
8692
  // src/hooks/misc/useInterval.ts
8864
- import { c as _c49 } from "react-compiler-runtime";
8693
+ import { c as _c48 } from "react-compiler-runtime";
8865
8694
  import { useEffect as useEffect8, useRef as useRef6 } from "react";
8866
8695
  var useInterval = function(callback, delay) {
8867
- var $ = _c49(5);
8696
+ var $ = _c48(5);
8868
8697
  var savedCallback = useRef6(_temp10);
8869
8698
  var t0;
8870
8699
  if ($[0] !== callback) {
@@ -8947,7 +8776,7 @@ function _toPrimitive44(t, r) {
8947
8776
  }
8948
8777
  return ("string" === r ? String : Number)(t);
8949
8778
  }
8950
- function asyncGeneratorStep11(n, t, e, r, o, a, c) {
8779
+ function asyncGeneratorStep10(n, t, e, r, o, a, c) {
8951
8780
  try {
8952
8781
  var i = n[a](c), u = i.value;
8953
8782
  } catch (n2) {
@@ -8955,16 +8784,16 @@ function asyncGeneratorStep11(n, t, e, r, o, a, c) {
8955
8784
  }
8956
8785
  i.done ? t(u) : Promise.resolve(u).then(r, o);
8957
8786
  }
8958
- function _asyncToGenerator11(n) {
8787
+ function _asyncToGenerator10(n) {
8959
8788
  return function() {
8960
8789
  var t = this, e = arguments;
8961
8790
  return new Promise(function(r, o) {
8962
8791
  var a = n.apply(t, e);
8963
8792
  function _next(n2) {
8964
- asyncGeneratorStep11(a, r, o, _next, _throw, "next", n2);
8793
+ asyncGeneratorStep10(a, r, o, _next, _throw, "next", n2);
8965
8794
  }
8966
8795
  function _throw(n2) {
8967
- asyncGeneratorStep11(a, r, o, _next, _throw, "throw", n2);
8796
+ asyncGeneratorStep10(a, r, o, _next, _throw, "throw", n2);
8968
8797
  }
8969
8798
  _next(void 0);
8970
8799
  });
@@ -8997,7 +8826,7 @@ var useRecorder = function(_ref) {
8997
8826
  setNoiseStart(null);
8998
8827
  },
8999
8828
  onStop: function() {
9000
- var _onStop2 = _asyncToGenerator11(function() {
8829
+ var _onStop2 = _asyncToGenerator10(function() {
9001
8830
  var _arguments = arguments;
9002
8831
  return _ts_generator(this, function(_state) {
9003
8832
  switch(_state.label){
@@ -9054,7 +8883,7 @@ var useRecorder = function(_ref) {
9054
8883
  isInited,
9055
8884
  mediaStream
9056
8885
  ]);
9057
- var visualizationAnalyser = useMemo17(function() {
8886
+ var visualizationAnalyser = useMemo16(function() {
9058
8887
  if (!audioEngine) return null;
9059
8888
  var result = audioEngine.audioContext.createAnalyser();
9060
8889
  audioEngine.source.connect(result);
@@ -9062,7 +8891,7 @@ var useRecorder = function(_ref) {
9062
8891
  }, [
9063
8892
  audioEngine
9064
8893
  ]);
9065
- var silenceAnalyser = useMemo17(function() {
8894
+ var silenceAnalyser = useMemo16(function() {
9066
8895
  if (!audioEngine) return null;
9067
8896
  var result_0 = audioEngine.audioContext.createAnalyser();
9068
8897
  result_0.minDecibels = -60;
@@ -9117,8 +8946,8 @@ var useRecorder = function(_ref) {
9117
8946
  });
9118
8947
  };
9119
8948
  // src/hooks/audioThreads/useMessageAudio/index.ts
9120
- import { c as _c50 } from "react-compiler-runtime";
9121
- import { useMemo as useMemo18, useRef as useRef8, useState as useState9, useEffect as useEffect10, useCallback as useCallback6 } from "react";
8949
+ import { c as _c49 } from "react-compiler-runtime";
8950
+ import { useMemo as useMemo17, useRef as useRef8, useState as useState9, useEffect as useEffect10, useCallback as useCallback6 } from "react";
9122
8951
  import nlp from "compromise";
9123
8952
  import { Howler } from "howler";
9124
8953
  import { useAudioPlayer as useAudioPlayer2 } from "react-use-audio-player";
@@ -9199,7 +9028,8 @@ var getMessageSentences = function(_ref) {
9199
9028
  });
9200
9029
  };
9201
9030
  var useMessageAudio = function(t0) {
9202
- var $ = _c50(50);
9031
+ var _latestMessageProps$l;
9032
+ var $ = _c49(50);
9203
9033
  var _onEnd = t0.onEnd, passedPlay = t0.play;
9204
9034
  var _useState9 = _sliced_to_array(useState9(false), 2), isAudioPlayed = _useState9[0], setIsAudioPlayed = _useState9[1];
9205
9035
  var t1;
@@ -9309,7 +9139,7 @@ var useMessageAudio = function(t0) {
9309
9139
  var searchParams = new URLSearchParams(_objectSpread45({
9310
9140
  input: input_0
9311
9141
  }, superinterfaceContext.variables));
9312
- audioPlayer.load("".concat(superinterfaceContext.baseUrl, "/tts?").concat(searchParams), {
9142
+ audioPlayer.load("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/tts?").concat(searchParams), {
9313
9143
  format: "mp3",
9314
9144
  autoplay: isAudioPlayed,
9315
9145
  html5: isHtmlAudioSupported,
@@ -9327,7 +9157,7 @@ var useMessageAudio = function(t0) {
9327
9157
  var nextSearchParams = new URLSearchParams(_objectSpread45({
9328
9158
  input: nextUnplayedMessageSentence.sentence
9329
9159
  }, superinterfaceContext.variables));
9330
- nextAudioPlayer.load("".concat(superinterfaceContext.baseUrl, "/tts?").concat(nextSearchParams), {
9160
+ nextAudioPlayer.load("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/tts?").concat(nextSearchParams), {
9331
9161
  format: "mp3",
9332
9162
  autoplay: false,
9333
9163
  html5: isHtmlAudioSupported
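Both load() calls above now point the bundled TTS player at /audio-runtimes/tts instead of /tts. A hedged sketch of the URL it requests, mirroring the code above (baseUrl and variables come from the Superinterface context):

  const searchParams = new URLSearchParams({ input, ...superinterfaceContext.variables })
  const ttsUrl = `${superinterfaceContext.baseUrl}/audio-runtimes/tts?${searchParams}`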
@@ -9514,7 +9344,7 @@ var useMessageAudio = function(t0) {
9514
9344
  }
9515
9345
  var visualizationAnalyser = t11;
9516
9346
  var t12;
9517
- t12 = isPlaying || unplayedMessageSentences.length > 0;
9347
+ t12 = isPlaying || unplayedMessageSentences.length > 0 || ((_latestMessageProps$l = latestMessageProps.latestMessage) === null || _latestMessageProps$l === void 0 ? void 0 : _latestMessageProps$l.status) === "in_progress";
9518
9348
  var isPending = t12;
9519
9349
  var t13;
9520
9350
  if ($[45] !== audioPlayer || $[46] !== isAudioPlayed || $[47] !== isPending || $[48] !== visualizationAnalyser) {
@@ -9543,74 +9373,20 @@ function _temp11() {
9543
9373
  Howler._howls[0]._sounds[0]._node.crossOrigin = "anonymous";
9544
9374
  }
9545
9375
  }
9546
- // src/components/threads/AudioThread/Root/index.tsx
9376
+ // src/hooks/audioRuntimes/useTtsAudioRuntime/index.ts
9547
9377
  import { useQueryClient as useQueryClient6 } from "@tanstack/react-query";
9548
- import { jsx as _jsx82 } from "react/jsx-runtime";
9549
- var _excluded5 = [
9550
- "children"
9551
- ];
9552
- function ownKeys46(e, r) {
9553
- var t = Object.keys(e);
9554
- if (Object.getOwnPropertySymbols) {
9555
- var o = Object.getOwnPropertySymbols(e);
9556
- r && (o = o.filter(function(r2) {
9557
- return Object.getOwnPropertyDescriptor(e, r2).enumerable;
9558
- })), t.push.apply(t, o);
9559
- }
9560
- return t;
9561
- }
9562
- function _objectSpread46(e) {
9563
- for(var r = 1; r < arguments.length; r++){
9564
- var t = null != arguments[r] ? arguments[r] : {};
9565
- r % 2 ? ownKeys46(Object(t), true).forEach(function(r2) {
9566
- _defineProperty46(e, r2, t[r2]);
9567
- }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys46(Object(t)).forEach(function(r2) {
9568
- Object.defineProperty(e, r2, Object.getOwnPropertyDescriptor(t, r2));
9569
- });
9570
- }
9571
- return e;
9572
- }
9573
- function _defineProperty46(e, r, t) {
9574
- return (r = _toPropertyKey46(r)) in e ? Object.defineProperty(e, r, {
9575
- value: t,
9576
- enumerable: true,
9577
- configurable: true,
9578
- writable: true
9579
- }) : e[r] = t, e;
9580
- }
9581
- function _toPropertyKey46(t) {
9582
- var i = _toPrimitive46(t, "string");
9583
- return "symbol" == (typeof i === "undefined" ? "undefined" : _type_of(i)) ? i : i + "";
9584
- }
9585
- function _toPrimitive46(t, r) {
9586
- if ("object" != (typeof t === "undefined" ? "undefined" : _type_of(t)) || !t) return t;
9587
- var e = t[Symbol.toPrimitive];
9588
- if (void 0 !== e) {
9589
- var i = e.call(t, r || "default");
9590
- if ("object" != (typeof i === "undefined" ? "undefined" : _type_of(i))) return i;
9591
- throw new TypeError("@@toPrimitive must return a primitive value.");
9592
- }
9593
- return ("string" === r ? String : Number)(t);
9594
- }
9595
- function _objectWithoutProperties5(e, t) {
9596
- if (null == e) return {};
9597
- var o, r, i = _objectWithoutPropertiesLoose5(e, t);
9598
- if (Object.getOwnPropertySymbols) {
9599
- var s = Object.getOwnPropertySymbols(e);
9600
- for(r = 0; r < s.length; r++)o = s[r], t.includes(o) || ({}).propertyIsEnumerable.call(e, o) && (i[o] = e[o]);
9601
- }
9602
- return i;
9603
- }
9604
- function _objectWithoutPropertiesLoose5(r, e) {
9605
- if (null == r) return {};
9606
- var t = {};
9607
- for(var n in r)if (({}).hasOwnProperty.call(r, n)) {
9608
- if (e.includes(n)) continue;
9609
- t[n] = r[n];
9610
- }
9611
- return t;
9612
- }
9613
- function asyncGeneratorStep12(n, t, e, r, o, a, c) {
9378
+ // src/hooks/audioRuntimes/useTtsAudioRuntime/blobToData.ts
9379
+ var blobToData = function(blob) {
9380
+ return new Promise(function(resolve) {
9381
+ var reader = new FileReader();
9382
+ reader.onloadend = function() {
9383
+ return resolve(reader.result);
9384
+ };
9385
+ reader.readAsDataURL(blob);
9386
+ });
9387
+ };
9388
+ // src/hooks/audioRuntimes/useTtsAudioRuntime/index.ts
9389
+ function asyncGeneratorStep11(n, t, e, r, o, a, c) {
9614
9390
  try {
9615
9391
  var i = n[a](c), u = i.value;
9616
9392
  } catch (n2) {
@@ -9618,27 +9394,37 @@ function asyncGeneratorStep12(n, t, e, r, o, a, c) {
9618
9394
  }
9619
9395
  i.done ? t(u) : Promise.resolve(u).then(r, o);
9620
9396
  }
9621
- function _asyncToGenerator12(n) {
9397
+ function _asyncToGenerator11(n) {
9622
9398
  return function() {
9623
9399
  var t = this, e = arguments;
9624
9400
  return new Promise(function(r, o) {
9625
9401
  var a = n.apply(t, e);
9626
9402
  function _next(n2) {
9627
- asyncGeneratorStep12(a, r, o, _next, _throw, "next", n2);
9403
+ asyncGeneratorStep11(a, r, o, _next, _throw, "next", n2);
9628
9404
  }
9629
9405
  function _throw(n2) {
9630
- asyncGeneratorStep12(a, r, o, _next, _throw, "throw", n2);
9406
+ asyncGeneratorStep11(a, r, o, _next, _throw, "throw", n2);
9631
9407
  }
9632
9408
  _next(void 0);
9633
9409
  });
9634
9410
  };
9635
9411
  }
9636
- var Content8 = function(t0) {
9637
- var $ = _c51(24);
9638
- var children = t0.children, className = t0.className, style = t0.style, play = t0.play;
9412
+ var useTtsAudioRuntime = function(t0) {
9413
+ var $ = _c50(30);
9414
+ var play = t0.play;
9639
9415
  var addToast = useToasts().addToast;
9640
9416
  var queryClient = useQueryClient6();
9641
9417
  var threadContext = useSuperinterfaceContext();
9418
+ var t1;
9419
+ if ($[0] === Symbol.for("react.memo_cache_sentinel")) {
9420
+ t1 = {
9421
+ name: "microphone"
9422
+ };
9423
+ $[0] = t1;
9424
+ } else {
9425
+ t1 = $[0];
9426
+ }
9427
+ var microphonePermission = usePermission(t1);
9642
9428
  var createMessageProps = useCreateMessage({
9643
9429
  onError: function(error) {
9644
9430
  createMessageDefaultOnError({
@@ -9649,13 +9435,13 @@ var Content8 = function(t0) {
9649
9435
  recorderProps.start();
9650
9436
  }
9651
9437
  });
9652
- var t1;
9653
- if ($[0] !== createMessageProps) {
9654
- t1 = {
9438
+ var t2;
9439
+ if ($[1] !== createMessageProps) {
9440
+ t2 = {
9655
9441
  isStopOnSilence: true,
9656
9442
  onStart: _temp12,
9657
9443
  onStop: function() {
9658
- var _onStop = _asyncToGenerator12(function(_event, chunks) {
9444
+ var _onStop = _asyncToGenerator11(function(_event, chunks) {
9659
9445
  var blob, audioContent;
9660
9446
  return _ts_generator(this, function(_state) {
9661
9447
  switch(_state.label){
@@ -9684,22 +9470,12 @@ var Content8 = function(t0) {
9684
9470
  return onStop;
9685
9471
  }()
9686
9472
  };
9687
- $[0] = createMessageProps;
9688
- $[1] = t1;
9689
- } else {
9690
- t1 = $[1];
9691
- }
9692
- var recorderProps = useRecorder(t1);
9693
- var t2;
9694
- if ($[2] === Symbol.for("react.memo_cache_sentinel")) {
9695
- t2 = {
9696
- name: "microphone"
9697
- };
9473
+ $[1] = createMessageProps;
9698
9474
  $[2] = t2;
9699
9475
  } else {
9700
9476
  t2 = $[2];
9701
9477
  }
9702
- var microphonePermission = usePermission(t2);
9478
+ var recorderProps = useRecorder(t2);
9703
9479
  var t3;
9704
9480
  if ($[3] !== microphonePermission || $[4] !== recorderProps) {
9705
9481
  t3 = function() {
@@ -9726,81 +9502,91 @@ var Content8 = function(t0) {
9726
9502
  t4 = $[8];
9727
9503
  }
9728
9504
  var messageAudioProps = useMessageAudio(t4);
9729
- var t5 = recorderProps;
9730
- var t6;
9731
- if ($[9] !== createMessageProps || $[10] !== messageAudioProps || $[11] !== t5) {
9732
- t6 = {
9733
- recorderProps: t5,
9734
- createMessageProps: createMessageProps,
9735
- messageAudioProps: messageAudioProps
9736
- };
9737
- $[9] = createMessageProps;
9738
- $[10] = messageAudioProps;
9739
- $[11] = t5;
9740
- $[12] = t6;
9741
- } else {
9742
- t6 = $[12];
9743
- }
9744
- var status = useStatus(t6).status;
9505
+ recorderProps;
9506
+ recorderProps;
9507
+ recorderProps;
9508
+ recorderProps;
9509
+ recorderProps;
9510
+ recorderProps;
9511
+ recorderProps;
9512
+ var t5;
9513
+ var t6 = recorderProps;
9745
9514
  var t7 = recorderProps;
9746
- var t8;
9747
- if ($[13] !== messageAudioProps || $[14] !== status || $[15] !== t7) {
9748
- t8 = {
9749
- status: status,
9750
- recorderProps: t7,
9751
- messageAudioProps: messageAudioProps
9515
+ var t8 = recorderProps;
9516
+ var t9 = recorderProps;
9517
+ var t10 = recorderProps;
9518
+ var t11 = recorderProps;
9519
+ var t12;
9520
+ if ($[9] !== createMessageProps.isPending || $[10] !== t10.visualizationAnalyser || $[11] !== t11.status || $[12] !== t6.start || $[13] !== t7.stop || $[14] !== t8.pause || $[15] !== t9.resume) {
9521
+ t12 = {
9522
+ start: t6.start,
9523
+ stop: t7.stop,
9524
+ pause: t8.pause,
9525
+ resume: t9.resume,
9526
+ isPending: createMessageProps.isPending,
9527
+ visualizationAnalyser: t10.visualizationAnalyser,
9528
+ rawStatus: t11.status
9752
9529
  };
9753
- $[13] = messageAudioProps;
9754
- $[14] = status;
9755
- $[15] = t7;
9756
- $[16] = t8;
9757
- } else {
9758
- t8 = $[16];
9759
- }
9760
- var t9;
9761
- if ($[17] !== children || $[18] !== className || $[19] !== style) {
9762
- t9 = /* @__PURE__ */ _jsx82(Flex29, {
9763
- direction: "column",
9764
- flexGrow: "1",
9765
- p: "9",
9766
- className: className,
9767
- style: style,
9768
- children: children
9769
- });
9770
- $[17] = children;
9771
- $[18] = className;
9772
- $[19] = style;
9773
- $[20] = t9;
9530
+ $[9] = createMessageProps.isPending;
9531
+ $[10] = t10.visualizationAnalyser;
9532
+ $[11] = t11.status;
9533
+ $[12] = t6.start;
9534
+ $[13] = t7.stop;
9535
+ $[14] = t8.pause;
9536
+ $[15] = t9.resume;
9537
+ $[16] = t12;
9774
9538
  } else {
9775
- t9 = $[20];
9539
+ t12 = $[16];
9776
9540
  }
9777
- var t10;
9778
- if ($[21] !== t8 || $[22] !== t9) {
9779
- t10 = /* @__PURE__ */ _jsx82(AudioThreadContext.Provider, {
9780
- value: t8,
9781
- children: t9
9782
- });
9783
- $[21] = t8;
9784
- $[22] = t9;
9785
- $[23] = t10;
9541
+ var t13;
9542
+ if ($[17] !== messageAudioProps.isAudioPlayed || $[18] !== messageAudioProps.isPending || $[19] !== messageAudioProps.isReady || $[20] !== messageAudioProps.pause || $[21] !== messageAudioProps.paused || $[22] !== messageAudioProps.play || $[23] !== messageAudioProps.playing || $[24] !== messageAudioProps.stop || $[25] !== messageAudioProps.visualizationAnalyser) {
9543
+ t13 = {
9544
+ play: messageAudioProps.play,
9545
+ pause: messageAudioProps.pause,
9546
+ stop: messageAudioProps.stop,
9547
+ visualizationAnalyser: messageAudioProps.visualizationAnalyser,
9548
+ playing: messageAudioProps.playing,
9549
+ paused: messageAudioProps.paused,
9550
+ isPending: messageAudioProps.isPending,
9551
+ isReady: messageAudioProps.isReady,
9552
+ isAudioPlayed: messageAudioProps.isAudioPlayed,
9553
+ rawStatus: void 0
9554
+ };
9555
+ $[17] = messageAudioProps.isAudioPlayed;
9556
+ $[18] = messageAudioProps.isPending;
9557
+ $[19] = messageAudioProps.isReady;
9558
+ $[20] = messageAudioProps.pause;
9559
+ $[21] = messageAudioProps.paused;
9560
+ $[22] = messageAudioProps.play;
9561
+ $[23] = messageAudioProps.playing;
9562
+ $[24] = messageAudioProps.stop;
9563
+ $[25] = messageAudioProps.visualizationAnalyser;
9564
+ $[26] = t13;
9565
+ } else {
9566
+ t13 = $[26];
9567
+ }
9568
+ var t14;
9569
+ if ($[27] !== t12 || $[28] !== t13) {
9570
+ t14 = {
9571
+ ttsAudioRuntime: {
9572
+ user: t12,
9573
+ assistant: t13
9574
+ }
9575
+ };
9576
+ $[27] = t12;
9577
+ $[28] = t13;
9578
+ $[29] = t14;
9786
9579
  } else {
9787
- t10 = $[23];
9580
+ t14 = $[29];
9788
9581
  }
9789
- return t10;
9790
- };
9791
- var Root12 = function(_ref) {
9792
- var children = _ref.children, rest = _objectWithoutProperties5(_ref, _excluded5);
9793
- return /* @__PURE__ */ _jsx82(ToastsProvider, {
9794
- children: /* @__PURE__ */ _jsx82(Content8, _objectSpread46(_objectSpread46({}, rest), {}, {
9795
- children: children
9796
- }))
9797
- });
9582
+ t5 = t14;
9583
+ return t5;
9798
9584
  };
9799
9585
  function _temp12() {
9800
9586
  return _temp23.apply(this, arguments);
9801
9587
  }
9802
9588
  function _temp23() {
9803
- _temp23 = _asyncToGenerator12(function() {
9589
+ _temp23 = _asyncToGenerator11(function() {
9804
9590
  return _ts_generator(this, function(_state) {
9805
9591
  return [
9806
9592
  2
@@ -9809,15 +9595,207 @@ function _temp23() {
9809
9595
  });
9810
9596
  return _temp23.apply(this, arguments);
9811
9597
  }
9598
+ // src/components/threads/AudioThread/Root/index.tsx
9599
+ import { jsx as _jsx82 } from "react/jsx-runtime";
9600
+ var _excluded5 = [
9601
+ "children"
9602
+ ];
9603
+ var _excluded22 = [
9604
+ "children",
9605
+ "play",
9606
+ "className",
9607
+ "style"
9608
+ ];
9609
+ function ownKeys46(e, r) {
9610
+ var t = Object.keys(e);
9611
+ if (Object.getOwnPropertySymbols) {
9612
+ var o = Object.getOwnPropertySymbols(e);
9613
+ r && (o = o.filter(function(r2) {
9614
+ return Object.getOwnPropertyDescriptor(e, r2).enumerable;
9615
+ })), t.push.apply(t, o);
9616
+ }
9617
+ return t;
9618
+ }
9619
+ function _objectSpread46(e) {
9620
+ for(var r = 1; r < arguments.length; r++){
9621
+ var t = null != arguments[r] ? arguments[r] : {};
9622
+ r % 2 ? ownKeys46(Object(t), true).forEach(function(r2) {
9623
+ _defineProperty46(e, r2, t[r2]);
9624
+ }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys46(Object(t)).forEach(function(r2) {
9625
+ Object.defineProperty(e, r2, Object.getOwnPropertyDescriptor(t, r2));
9626
+ });
9627
+ }
9628
+ return e;
9629
+ }
9630
+ function _defineProperty46(e, r, t) {
9631
+ return (r = _toPropertyKey46(r)) in e ? Object.defineProperty(e, r, {
9632
+ value: t,
9633
+ enumerable: true,
9634
+ configurable: true,
9635
+ writable: true
9636
+ }) : e[r] = t, e;
9637
+ }
9638
+ function _toPropertyKey46(t) {
9639
+ var i = _toPrimitive46(t, "string");
9640
+ return "symbol" == (typeof i === "undefined" ? "undefined" : _type_of(i)) ? i : i + "";
9641
+ }
9642
+ function _toPrimitive46(t, r) {
9643
+ if ("object" != (typeof t === "undefined" ? "undefined" : _type_of(t)) || !t) return t;
9644
+ var e = t[Symbol.toPrimitive];
9645
+ if (void 0 !== e) {
9646
+ var i = e.call(t, r || "default");
9647
+ if ("object" != (typeof i === "undefined" ? "undefined" : _type_of(i))) return i;
9648
+ throw new TypeError("@@toPrimitive must return a primitive value.");
9649
+ }
9650
+ return ("string" === r ? String : Number)(t);
9651
+ }
9652
+ function _objectWithoutProperties5(e, t) {
9653
+ if (null == e) return {};
9654
+ var o, r, i = _objectWithoutPropertiesLoose5(e, t);
9655
+ if (Object.getOwnPropertySymbols) {
9656
+ var s = Object.getOwnPropertySymbols(e);
9657
+ for(r = 0; r < s.length; r++)o = s[r], t.includes(o) || ({}).propertyIsEnumerable.call(e, o) && (i[o] = e[o]);
9658
+ }
9659
+ return i;
9660
+ }
9661
+ function _objectWithoutPropertiesLoose5(r, e) {
9662
+ if (null == r) return {};
9663
+ var t = {};
9664
+ for(var n in r)if (({}).hasOwnProperty.call(r, n)) {
9665
+ if (e.includes(n)) continue;
9666
+ t[n] = r[n];
9667
+ }
9668
+ return t;
9669
+ }
9670
+ var Content8 = function(_ref) {
9671
+ var children = _ref.children, className = _ref.className, style = _ref.style;
9672
+ return /* @__PURE__ */ _jsx82(Flex29, {
9673
+ direction: "column",
9674
+ flexGrow: "1",
9675
+ p: "9",
9676
+ className: className,
9677
+ style: style,
9678
+ children: children
9679
+ });
9680
+ };
9681
+ var TtsAudioRuntime = function(t0) {
9682
+ var $ = _c51(7);
9683
+ var children = t0.children, play = t0.play;
9684
+ var t1;
9685
+ if ($[0] !== play) {
9686
+ t1 = {
9687
+ play: play
9688
+ };
9689
+ $[0] = play;
9690
+ $[1] = t1;
9691
+ } else {
9692
+ t1 = $[1];
9693
+ }
9694
+ var ttsAudioRuntime = useTtsAudioRuntime(t1).ttsAudioRuntime;
9695
+ var t2;
9696
+ if ($[2] !== ttsAudioRuntime) {
9697
+ t2 = {
9698
+ audioRuntime: ttsAudioRuntime
9699
+ };
9700
+ $[2] = ttsAudioRuntime;
9701
+ $[3] = t2;
9702
+ } else {
9703
+ t2 = $[3];
9704
+ }
9705
+ var t3;
9706
+ if ($[4] !== children || $[5] !== t2) {
9707
+ t3 = /* @__PURE__ */ _jsx82(AudioThreadContext.Provider, {
9708
+ value: t2,
9709
+ children: children
9710
+ });
9711
+ $[4] = children;
9712
+ $[5] = t2;
9713
+ $[6] = t3;
9714
+ } else {
9715
+ t3 = $[6];
9716
+ }
9717
+ return t3;
9718
+ };
9719
+ var AudioRuntimeProvider = function(t0) {
9720
+ var $ = _c51(3);
9721
+ var children = t0.children, play = t0.play;
9722
+ var audioThreadContext = useAudioThreadContext();
9723
+ if (audioThreadContext.audioRuntime) {
9724
+ return children;
9725
+ }
9726
+ var t1;
9727
+ if ($[0] !== children || $[1] !== play) {
9728
+ t1 = /* @__PURE__ */ _jsx82(TtsAudioRuntime, {
9729
+ play: play,
9730
+ children: children
9731
+ });
9732
+ $[0] = children;
9733
+ $[1] = play;
9734
+ $[2] = t1;
9735
+ } else {
9736
+ t1 = $[2];
9737
+ }
9738
+ return t1;
9739
+ };
9740
+ var Provider5 = function(t0) {
9741
+ var $ = _c51(9);
9742
+ var children;
9743
+ var rest;
9744
+ if ($[0] !== t0) {
9745
+ var _t = t0;
9746
+ children = _t.children;
9747
+ rest = _objectWithoutProperties5(_t, _excluded5);
9748
+ _t;
9749
+ $[0] = t0;
9750
+ $[1] = children;
9751
+ $[2] = rest;
9752
+ } else {
9753
+ children = $[1];
9754
+ rest = $[2];
9755
+ }
9756
+ var audioThreadContext = useAudioThreadContext();
9757
+ var t1;
9758
+ if ($[3] !== audioThreadContext || $[4] !== rest) {
9759
+ t1 = _objectSpread46(_objectSpread46({}, audioThreadContext), rest);
9760
+ $[3] = audioThreadContext;
9761
+ $[4] = rest;
9762
+ $[5] = t1;
9763
+ } else {
9764
+ t1 = $[5];
9765
+ }
9766
+ var t2;
9767
+ if ($[6] !== children || $[7] !== t1) {
9768
+ t2 = /* @__PURE__ */ _jsx82(AudioThreadContext.Provider, {
9769
+ value: t1,
9770
+ children: children
9771
+ });
9772
+ $[6] = children;
9773
+ $[7] = t1;
9774
+ $[8] = t2;
9775
+ } else {
9776
+ t2 = $[8];
9777
+ }
9778
+ return t2;
9779
+ };
9780
+ var Root12 = function(_ref2) {
9781
+ var children = _ref2.children, play = _ref2.play, className = _ref2.className, style = _ref2.style, rest = _objectWithoutProperties5(_ref2, _excluded22);
9782
+ return /* @__PURE__ */ _jsx82(Provider5, _objectSpread46(_objectSpread46({}, rest), {}, {
9783
+ children: /* @__PURE__ */ _jsx82(AudioRuntimeProvider, {
9784
+ play: play,
9785
+ children: /* @__PURE__ */ _jsx82(ToastsProvider, {
9786
+ children: /* @__PURE__ */ _jsx82(Content8, {
9787
+ className: className,
9788
+ style: style,
9789
+ children: children
9790
+ })
9791
+ })
9792
+ })
9793
+ }));
9794
+ };
9812
9795
  // src/components/threads/AudioThread/Visualization/index.tsx
9813
9796
  import { useContext as useContext16, useState as useState11, useCallback as useCallback8, useEffect as useEffect12 } from "react";
9814
9797
  import _9 from "lodash";
9815
9798
  import { Flex as Flex31 } from "@radix-ui/themes";
9816
- // src/hooks/threads/useAudioThreadContext/index.ts
9817
- import { useContext as useContext15 } from "react";
9818
- var useAudioThreadContext = function() {
9819
- return useContext15(AudioThreadContext);
9820
- };
9821
9799
  // src/components/threads/AudioThread/BarsVisualizer/index.tsx
9822
9800
  import _8 from "lodash";
9823
9801
  import { Flex as Flex30, Grid } from "@radix-ui/themes";
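The hunk above rebuilds AudioThread's Root as Provider5 → AudioRuntimeProvider → ToastsProvider → Content8: Provider5 merges any extra props into AudioThreadContext, and AudioRuntimeProvider only falls back to the built-in TTS runtime when no audioRuntime is already set. A hedged usage sketch; the public name AudioThread.Root is an assumption, since only the compiled Root12 is visible here:

  <AudioThread.Root play={play}>
    {children}
  </AudioThread.Root>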
@@ -9882,6 +9860,57 @@ var BarsVisualizer = function(_ref) {
9882
9860
  })
9883
9861
  });
9884
9862
  };
9863
+ // src/hooks/audioThreads/useStatus/index.ts
9864
+ import { c as _c52 } from "react-compiler-runtime";
9865
+ import { useMemo as useMemo19 } from "react";
9866
+ var useStatus = function() {
9867
+ var $ = _c52(2);
9868
+ var audioRuntime = useAudioThreadContext().audioRuntime;
9869
+ var t0;
9870
+ bb0: {
9871
+ if (audioRuntime.user.rawStatus === "recording") {
9872
+ t0 = "recording";
9873
+ break bb0;
9874
+ }
9875
+ if (audioRuntime.user.isPending) {
9876
+ t0 = "creatingMessage";
9877
+ break bb0;
9878
+ }
9879
+ if (audioRuntime.assistant.paused || !audioRuntime.assistant.isAudioPlayed) {
9880
+ t0 = "playerPaused";
9881
+ break bb0;
9882
+ }
9883
+ if (audioRuntime.assistant.playing || audioRuntime.assistant.isPending) {
9884
+ t0 = "playing";
9885
+ break bb0;
9886
+ }
9887
+ if (!audioRuntime.assistant.isAudioPlayed && !audioRuntime.assistant.isReady) {
9888
+ t0 = "loading";
9889
+ break bb0;
9890
+ }
9891
+ if (audioRuntime.user.rawStatus === "idle") {
9892
+ t0 = "idle";
9893
+ break bb0;
9894
+ }
9895
+ if (audioRuntime.user.rawStatus === "paused") {
9896
+ t0 = "recorderPaused";
9897
+ break bb0;
9898
+ }
9899
+ t0 = "loading";
9900
+ }
9901
+ var status = t0;
9902
+ var t1;
9903
+ if ($[0] !== status) {
9904
+ t1 = {
9905
+ status: status
9906
+ };
9907
+ $[0] = status;
9908
+ $[1] = t1;
9909
+ } else {
9910
+ t1 = $[1];
9911
+ }
9912
+ return t1;
9913
+ };
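useStatus no longer takes recorder and message-audio props; it derives the status from audioRuntime in AudioThreadContext, so it needs to run under a provider that supplies the runtime. A minimal sketch of the new call:

  const { status } = useStatus()
  // status: "recording" | "creatingMessage" | "playerPaused" | "playing"
  //         | "loading" | "idle" | "recorderPaused"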
9885
9914
  // src/components/threads/AudioThread/Visualization/index.tsx
9886
9915
  import { jsx as _jsx84, jsxs as _jsxs31 } from "react/jsx-runtime";
9887
9916
  function ownKeys47(e, r) {
@@ -9928,6 +9957,7 @@ function _toPrimitive47(t, r) {
9928
9957
  return ("string" === r ? String : Number)(t);
9929
9958
  }
9930
9959
  var Visualization = function(props) {
9960
+ var status = useStatus().status;
9931
9961
  var audioThreadContext = useAudioThreadContext();
9932
9962
  var assistantNameContext = useContext16(AssistantNameContext);
9933
9963
  var _useState11 = _sliced_to_array(useState11(0), 2), scale = _useState11[0], setScale = _useState11[1];
@@ -9948,7 +9978,7 @@ var Visualization = function(props) {
9948
9978
  }, []);
9949
9979
  useEffect12(function() {
9950
9980
  draw({
9951
- visualizationAnalyser: audioThreadContext.recorderProps.visualizationAnalyser
9981
+ visualizationAnalyser: audioThreadContext.audioRuntime.user.visualizationAnalyser
9952
9982
  });
9953
9983
  }, [
9954
9984
  draw,
@@ -9968,13 +9998,13 @@ var Visualization = function(props) {
9968
9998
  height: "200px",
9969
9999
  width: "200px",
9970
10000
  style: {
9971
- backgroundColor: audioThreadContext.status === "playing" ? "var(--accent-4)" : "var(--gray-4)",
10001
+ backgroundColor: status === "playing" ? "var(--accent-4)" : "var(--gray-4)",
9972
10002
  borderRadius: "9999px",
9973
10003
  scale: scale
9974
10004
  },
9975
10005
  children: /* @__PURE__ */ _jsx84(BarsVisualizer, {
9976
- visualizationAnalyser: audioThreadContext.messageAudioProps.visualizationAnalyser,
9977
- backgroundColor: audioThreadContext.status === "playing" ? "var(--accent-11)" : "var(--gray-11)",
10006
+ visualizationAnalyser: audioThreadContext.audioRuntime.assistant.visualizationAnalyser,
10007
+ backgroundColor: status === "playing" ? "var(--accent-11)" : "var(--gray-11)",
9978
10008
  height: "40px",
9979
10009
  barWidth: "24px"
9980
10010
  })
@@ -9994,7 +10024,7 @@ var Visualization = function(props) {
9994
10024
  }));
9995
10025
  };
9996
10026
  // src/components/threads/AudioThread/Status/index.tsx
9997
- import { c as _c52 } from "react-compiler-runtime";
10027
+ import { c as _c53 } from "react-compiler-runtime";
9998
10028
  // src/components/threads/AudioThread/Status/StatusMessages.tsx
9999
10029
  import { Flex as Flex32, Text as Text9 } from "@radix-ui/themes";
10000
10030
  import { jsx as _jsx85, jsxs as _jsxs32 } from "react/jsx-runtime";
@@ -10074,9 +10104,9 @@ function _toPrimitive48(t, r) {
10074
10104
  return ("string" === r ? String : Number)(t);
10075
10105
  }
10076
10106
  var Status = function(props) {
10077
- var $ = _c52(12);
10078
- var audioThreadContext = useAudioThreadContext();
10079
- if (audioThreadContext.status === "recording") {
10107
+ var $ = _c53(12);
10108
+ var status = useStatus().status;
10109
+ if (status === "recording") {
10080
10110
  var _t;
10081
10111
  if ($[0] === Symbol.for("react.memo_cache_sentinel")) {
10082
10112
  _t = [
@@ -10105,7 +10135,7 @@ var Status = function(props) {
10105
10135
  "recorderPaused",
10106
10136
  "idle",
10107
10137
  "playerPaused"
10108
- ].includes(audioThreadContext.status)) {
10138
+ ].includes(status)) {
10109
10139
  var _t3;
10110
10140
  if ($[3] === Symbol.for("react.memo_cache_sentinel")) {
10111
10141
  _t3 = [
@@ -10127,7 +10157,7 @@ var Status = function(props) {
10127
10157
  }
10128
10158
  return _t4;
10129
10159
  }
10130
- if (audioThreadContext.status === "playing") {
10160
+ if (status === "playing") {
10131
10161
  var _t5;
10132
10162
  if ($[6] === Symbol.for("react.memo_cache_sentinel")) {
10133
10163
  _t5 = [
@@ -10171,7 +10201,7 @@ var Status = function(props) {
10171
10201
  return t1;
10172
10202
  };
10173
10203
  // src/components/threads/AudioThread/Form/index.tsx
10174
- import { c as _c54 } from "react-compiler-runtime";
10204
+ import { c as _c55 } from "react-compiler-runtime";
10175
10205
  import { Flex as Flex34 } from "@radix-ui/themes";
10176
10206
  // src/components/threads/AudioThread/Form/MicIcon.tsx
10177
10207
  import { jsx as _jsx87 } from "react/jsx-runtime";
@@ -10235,15 +10265,16 @@ var MicIcon = function(props) {
10235
10265
  }));
10236
10266
  };
10237
10267
  // src/components/threads/AudioThread/Form/ActionButton/index.tsx
10238
- import { c as _c53 } from "react-compiler-runtime";
10268
+ import { c as _c54 } from "react-compiler-runtime";
10239
10269
  import { Flex as Flex33, IconButton as IconButton10 } from "@radix-ui/themes";
10240
10270
  import { StopIcon as StopIcon2, PauseIcon as PauseIcon2, ArrowUpIcon as ArrowUpIcon3, ResumeIcon } from "@radix-ui/react-icons";
10241
10271
  import { jsx as _jsx88, jsxs as _jsxs33 } from "react/jsx-runtime";
10242
10272
  var ActionButton = function() {
10243
- var $ = _c53(27);
10273
+ var $ = _c54(27);
10274
+ var status = useStatus().status;
10244
10275
  var audioThreadContext = useAudioThreadContext();
10245
10276
  var superinterfaceContext = useSuperinterfaceContext();
10246
- if (audioThreadContext.status === "recording") {
10277
+ if (status === "recording") {
10247
10278
  var _t;
10248
10279
  if ($[0] === Symbol.for("react.memo_cache_sentinel")) {
10249
10280
  _t = /* @__PURE__ */ _jsx88(PauseIcon2, {});
@@ -10252,19 +10283,19 @@ var ActionButton = function() {
10252
10283
  _t = $[0];
10253
10284
  }
10254
10285
  var t1;
10255
- if ($[1] !== audioThreadContext.recorderProps.pause) {
10286
+ if ($[1] !== audioThreadContext.audioRuntime.user.pause) {
10256
10287
  t1 = /* @__PURE__ */ _jsx88(Flex33, {
10257
10288
  mr: "3",
10258
10289
  ml: "-7",
10259
10290
  children: /* @__PURE__ */ _jsx88(IconButton10, {
10260
- onClick: audioThreadContext.recorderProps.pause,
10291
+ onClick: audioThreadContext.audioRuntime.user.pause,
10261
10292
  color: "gray",
10262
10293
  variant: "soft",
10263
10294
  size: "1",
10264
10295
  children: _t
10265
10296
  })
10266
10297
  });
10267
- $[1] = audioThreadContext.recorderProps.pause;
10298
+ $[1] = audioThreadContext.audioRuntime.user.pause;
10268
10299
  $[2] = t1;
10269
10300
  } else {
10270
10301
  t1 = $[2];
@@ -10277,15 +10308,15 @@ var ActionButton = function() {
10277
10308
  t2 = $[3];
10278
10309
  }
10279
10310
  var t3;
10280
- if ($[4] !== audioThreadContext.recorderProps.stop) {
10311
+ if ($[4] !== audioThreadContext.audioRuntime.user.stop) {
10281
10312
  t3 = /* @__PURE__ */ _jsx88(IconButton10, {
10282
- onClick: audioThreadContext.recorderProps.stop,
10313
+ onClick: audioThreadContext.audioRuntime.user.stop,
10283
10314
  highContrast: true,
10284
10315
  variant: "soft",
10285
10316
  size: "4",
10286
10317
  children: t2
10287
10318
  });
10288
- $[4] = audioThreadContext.recorderProps.stop;
10319
+ $[4] = audioThreadContext.audioRuntime.user.stop;
10289
10320
  $[5] = t3;
10290
10321
  } else {
10291
10322
  t3 = $[5];
@@ -10307,7 +10338,7 @@ var ActionButton = function() {
10307
10338
  }
10308
10339
  return t4;
10309
10340
  }
10310
- if (audioThreadContext.status === "recorderPaused") {
10341
+ if (status === "recorderPaused") {
10311
10342
  var _t2;
10312
10343
  if ($[9] === Symbol.for("react.memo_cache_sentinel")) {
10313
10344
  _t2 = /* @__PURE__ */ _jsx88(ResumeIcon, {});
@@ -10316,48 +10347,48 @@ var ActionButton = function() {
10316
10347
  _t2 = $[9];
10317
10348
  }
10318
10349
  var _t3;
10319
- if ($[10] !== audioThreadContext.recorderProps.resume) {
10350
+ if ($[10] !== audioThreadContext.audioRuntime.user.resume) {
10320
10351
  _t3 = /* @__PURE__ */ _jsx88(IconButton10, {
10321
- onClick: audioThreadContext.recorderProps.resume,
10352
+ onClick: audioThreadContext.audioRuntime.user.resume,
10322
10353
  color: "red",
10323
10354
  size: "4",
10324
10355
  children: _t2
10325
10356
  });
10326
- $[10] = audioThreadContext.recorderProps.resume;
10357
+ $[10] = audioThreadContext.audioRuntime.user.resume;
10327
10358
  $[11] = _t3;
10328
10359
  } else {
10329
10360
  _t3 = $[11];
10330
10361
  }
10331
10362
  return _t3;
10332
10363
  }
10333
- if (audioThreadContext.status === "idle") {
10364
+ if (status === "idle") {
10334
10365
  var _t4;
10335
- if ($[12] !== audioThreadContext.recorderProps) {
10366
+ if ($[12] !== audioThreadContext.audioRuntime.user) {
10336
10367
  _t4 = /* @__PURE__ */ _jsx88(IconButton10, {
10337
10368
  onClick: function() {
10338
- return audioThreadContext.recorderProps.start();
10369
+ return audioThreadContext.audioRuntime.user.start();
10339
10370
  },
10340
10371
  size: "4",
10341
10372
  color: "red"
10342
10373
  });
10343
- $[12] = audioThreadContext.recorderProps;
10374
+ $[12] = audioThreadContext.audioRuntime.user;
10344
10375
  $[13] = _t4;
10345
10376
  } else {
10346
10377
  _t4 = $[13];
10347
10378
  }
10348
10379
  return _t4;
10349
10380
  }
10350
- if (audioThreadContext.status === "playing") {
10381
+ if (status === "playing") {
10351
10382
  var _t5;
10352
- if ($[14] !== audioThreadContext.messageAudioProps || $[15] !== audioThreadContext.recorderProps || $[16] !== superinterfaceContext.createMessageAbortControllerRef.current) {
10383
+ if ($[14] !== audioThreadContext.audioRuntime.assistant || $[15] !== audioThreadContext.audioRuntime.user || $[16] !== superinterfaceContext.createMessageAbortControllerRef.current) {
10353
10384
  _t5 = function() {
10354
10385
  var _superinterfaceContex;
10355
- audioThreadContext.messageAudioProps.stop();
10386
+ audioThreadContext.audioRuntime.assistant.stop();
10356
10387
  (_superinterfaceContex = superinterfaceContext.createMessageAbortControllerRef.current) === null || _superinterfaceContex === void 0 || _superinterfaceContex.abort();
10357
- audioThreadContext.recorderProps.start();
10388
+ audioThreadContext.audioRuntime.user.start();
10358
10389
  };
10359
- $[14] = audioThreadContext.messageAudioProps;
10360
- $[15] = audioThreadContext.recorderProps;
10390
+ $[14] = audioThreadContext.audioRuntime.assistant;
10391
+ $[15] = audioThreadContext.audioRuntime.user;
10361
10392
  $[16] = superinterfaceContext.createMessageAbortControllerRef.current;
10362
10393
  $[17] = _t5;
10363
10394
  } else {
@@ -10386,13 +10417,13 @@ var ActionButton = function() {
10386
10417
  }
10387
10418
  return _t7;
10388
10419
  }
10389
- if (audioThreadContext.status === "playerPaused") {
10420
+ if (status === "playerPaused") {
10390
10421
  var _t8;
10391
- if ($[21] !== audioThreadContext.messageAudioProps) {
10422
+ if ($[21] !== audioThreadContext.audioRuntime.assistant) {
10392
10423
  _t8 = function() {
10393
- return audioThreadContext.messageAudioProps.play();
10424
+ return audioThreadContext.audioRuntime.assistant.play();
10394
10425
  };
10395
- $[21] = audioThreadContext.messageAudioProps;
10426
+ $[21] = audioThreadContext.audioRuntime.assistant;
10396
10427
  $[22] = _t8;
10397
10428
  } else {
10398
10429
  _t8 = $[22];
@@ -10477,9 +10508,10 @@ function _toPrimitive50(t, r) {
10477
10508
  return ("string" === r ? String : Number)(t);
10478
10509
  }
10479
10510
  var Form = function(props) {
10480
- var $ = _c54(17);
10511
+ var $ = _c55(17);
10512
+ var status = useStatus().status;
10481
10513
  var audioThreadContext = useAudioThreadContext();
10482
- var t0 = audioThreadContext.status === "recording" ? "var(--accent-11)" : "var(--gray-11)";
10514
+ var t0 = status === "recording" ? "var(--accent-11)" : "var(--gray-11)";
10483
10515
  var t1;
10484
10516
  if ($[0] !== t0) {
10485
10517
  t1 = /* @__PURE__ */ _jsx89(Flex34, {
@@ -10497,7 +10529,7 @@ var Form = function(props) {
10497
10529
  } else {
10498
10530
  t1 = $[1];
10499
10531
  }
10500
- var t2 = audioThreadContext.status === "recording" ? "var(--accent-4)" : "var(--gray-4)";
10532
+ var t2 = status === "recording" ? "var(--accent-4)" : "var(--gray-4)";
10501
10533
  var t3;
10502
10534
  if ($[2] !== t2) {
10503
10535
  t3 = {
@@ -10509,16 +10541,16 @@ var Form = function(props) {
10509
10541
  } else {
10510
10542
  t3 = $[3];
10511
10543
  }
10512
- var t4 = audioThreadContext.status === "recording" ? "var(--accent-11)" : "var(--gray-11)";
10544
+ var t4 = status === "recording" ? "var(--accent-11)" : "var(--gray-11)";
10513
10545
  var t5;
10514
- if ($[4] !== audioThreadContext.recorderProps.visualizationAnalyser || $[5] !== t4) {
10546
+ if ($[4] !== audioThreadContext.audioRuntime.user.visualizationAnalyser || $[5] !== t4) {
10515
10547
  t5 = /* @__PURE__ */ _jsx89(BarsVisualizer, {
10516
- visualizationAnalyser: audioThreadContext.recorderProps.visualizationAnalyser,
10548
+ visualizationAnalyser: audioThreadContext.audioRuntime.user.visualizationAnalyser,
10517
10549
  backgroundColor: t4,
10518
10550
  height: "20px",
10519
10551
  barWidth: "12px"
10520
10552
  });
10521
- $[4] = audioThreadContext.recorderProps.visualizationAnalyser;
10553
+ $[4] = audioThreadContext.audioRuntime.user.visualizationAnalyser;
10522
10554
  $[5] = t4;
10523
10555
  $[6] = t5;
10524
10556
  } else {
@@ -10696,9 +10728,441 @@ var AudioThreadDialog = function(props) {
10696
10728
  AudioThreadDialog.Root = Root9;
10697
10729
  AudioThreadDialog.Trigger = Trigger;
10698
10730
  AudioThreadDialog.Content = Content7;
10731
+ // src/hooks/audioRuntimes/useRealtimeWebRTCAudioRuntime/index.ts
10732
+ import { useEffect as useEffect13, useMemo as useMemo20, useRef as useRef9, useState as useState12 } from "react";
10733
+ function asyncGeneratorStep12(n, t, e, r, o, a, c) {
10734
+ try {
10735
+ var i = n[a](c), u = i.value;
10736
+ } catch (n2) {
10737
+ return void e(n2);
10738
+ }
10739
+ i.done ? t(u) : Promise.resolve(u).then(r, o);
10740
+ }
10741
+ function _asyncToGenerator12(n) {
10742
+ return function() {
10743
+ var t = this, e = arguments;
10744
+ return new Promise(function(r, o) {
10745
+ var a = n.apply(t, e);
10746
+ function _next(n2) {
10747
+ asyncGeneratorStep12(a, r, o, _next, _throw, "next", n2);
10748
+ }
10749
+ function _throw(n2) {
10750
+ asyncGeneratorStep12(a, r, o, _next, _throw, "throw", n2);
10751
+ }
10752
+ _next(void 0);
10753
+ });
10754
+ };
10755
+ }
10756
+ var useRealtimeWebRTCAudioRuntime = function() {
10757
+ var startSessionIfNeeded = function startSessionIfNeeded() {
10758
+ return _startSessionIfNeeded.apply(this, arguments);
10759
+ };
10760
+ var initRealtimeSession = function initRealtimeSession() {
10761
+ return _initRealtimeSession.apply(this, arguments);
10762
+ };
10763
+ var buildAnalyzers = function buildAnalyzers(localStream, audioEl_0) {
10764
+ try {
10765
+ var audioCtx1 = new AudioContext();
10766
+ var micSource = audioCtx1.createMediaStreamSource(localStream);
10767
+ var micAnalyser = audioCtx1.createAnalyser();
10768
+ micSource.connect(micAnalyser);
10769
+ userAnalyserRef.current = micAnalyser;
10770
+ audioEl_0.addEventListener("canplay", function() {
10771
+ var audioCtx2 = new AudioContext();
10772
+ var remoteSource = audioCtx2.createMediaElementSource(audioEl_0);
10773
+ var remoteAnalyser = audioCtx2.createAnalyser();
10774
+ remoteSource.connect(remoteAnalyser);
10775
+ remoteSource.connect(audioCtx2.destination);
10776
+ assistantAnalyserRef.current = remoteAnalyser;
10777
+ });
10778
+ } catch (err_0) {
10779
+ console.warn("Could not build analyzers:", err_0);
10780
+ }
10781
+ };
10782
+ var _useState12 = _sliced_to_array(useState12("idle"), 2), recorderStatus = _useState12[0], setRecorderStatus = _useState12[1];
10783
+ var superinterfaceContext = useSuperinterfaceContext();
10784
+ var _useState121 = _sliced_to_array(useState12(false), 2), userIsPending = _useState121[0], setUserIsPending = _useState121[1];
10785
+ var _useState122 = _sliced_to_array(useState12(false), 2), assistantPlaying = _useState122[0], setAssistantPlaying = _useState122[1];
10786
+ var _useState123 = _sliced_to_array(useState12(false), 2), assistantPaused = _useState123[0], setAssistantPaused = _useState123[1];
10787
+ var _useState124 = _sliced_to_array(useState12(true), 2), assistantIsPending = _useState124[0], setAssistantIsPending = _useState124[1];
10788
+ var _useState125 = _sliced_to_array(useState12(false), 2), assistantIsReady = _useState125[0], setAssistantIsReady = _useState125[1];
10789
+ var _useState126 = _sliced_to_array(useState12(false), 2), assistantAudioPlayed = _useState126[0], setAssistantAudioPlayed = _useState126[1];
10790
+ var sessionStartedRef = useRef9(false);
10791
+ var pcRef = useRef9(null);
10792
+ var localStreamRef = useRef9(null);
10793
+ var remoteStreamRef = useRef9(null);
10794
+ var userAnalyserRef = useRef9(null);
10795
+ var assistantAnalyserRef = useRef9(null);
10796
+ var assistantAudioElRef = useRef9(null);
10797
+ useEffect13(function() {
10798
+ return function() {
10799
+ if (pcRef.current) {
10800
+ pcRef.current.close();
10801
+ pcRef.current = null;
10802
+ }
10803
+ if (assistantAudioElRef.current) {
10804
+ assistantAudioElRef.current.srcObject = null;
10805
+ }
10806
+ };
10807
+ }, []);
10808
+ function _startSessionIfNeeded() {
10809
+ _startSessionIfNeeded = _asyncToGenerator12(function() {
10810
+ return _ts_generator(this, function(_state) {
10811
+ switch(_state.label){
10812
+ case 0:
10813
+ if (sessionStartedRef.current) return [
10814
+ 2
10815
+ ];
10816
+ sessionStartedRef.current = true;
10817
+ return [
10818
+ 4,
10819
+ initRealtimeSession()
10820
+ ];
10821
+ case 1:
10822
+ _state.sent();
10823
+ return [
10824
+ 2
10825
+ ];
10826
+ }
10827
+ });
10828
+ });
10829
+ return _startSessionIfNeeded.apply(this, arguments);
10830
+ }
10831
+ function _initRealtimeSession() {
10832
+ _initRealtimeSession = _asyncToGenerator12(function() {
10833
+ var peerConn, audioEl, ms, offer, searchParams, sdpResponse, answerSdp, answer, err;
10834
+ return _ts_generator(this, function(_state) {
10835
+ switch(_state.label){
10836
+ case 0:
10837
+ _state.trys.push([
10838
+ 0,
10839
+ 7,
10840
+ ,
10841
+ 8
10842
+ ]);
10843
+ setUserIsPending(true);
10844
+ peerConn = new RTCPeerConnection();
10845
+ pcRef.current = peerConn;
10846
+ audioEl = document.createElement("audio");
10847
+ audioEl.autoplay = true;
10848
+ assistantAudioElRef.current = audioEl;
10849
+ peerConn.ontrack = function(evt) {
10850
+ remoteStreamRef.current = evt.streams[0];
10851
+ audioEl.srcObject = evt.streams[0];
10852
+ setAssistantIsPending(false);
10853
+ setAssistantPlaying(true);
10854
+ setAssistantPaused(false);
10855
+ setAssistantAudioPlayed(true);
10856
+ };
10857
+ peerConn.createDataChannel("unused-negotiation-only");
10858
+ peerConn.addEventListener("datachannel", function(event) {
10859
+ var channel = event.channel;
10860
+ if (channel.label === "thread-events") {
10861
+ channel.onmessage = function(_ref) {
10862
+ var data2 = _ref.data;
10863
+ console.log("Data channel message:", data2);
10864
+ var parsedData = JSON.parse(data2);
10865
+ if (parsedData.event === "thread.created") {
10866
+ threadCreated({
10867
+ value: parsedData,
10868
+ superinterfaceContext: superinterfaceContext
10869
+ });
10870
+ } else if (parsedData.event === "thread.run.requires_action") {
10871
+ threadRunRequiresAction({
10872
+ value: parsedData,
10873
+ superinterfaceContext: superinterfaceContext
10874
+ });
10875
+ }
10876
+ };
10877
+ }
10878
+ });
10879
+ return [
10880
+ 4,
10881
+ navigator.mediaDevices.getUserMedia({
10882
+ audio: true
10883
+ })
10884
+ ];
10885
+ case 1:
10886
+ ms = _state.sent();
10887
+ localStreamRef.current = ms;
10888
+ ms.getTracks().forEach(function(t) {
10889
+ peerConn.addTrack(t, ms);
10890
+ });
10891
+ setRecorderStatus("idle");
10892
+ return [
10893
+ 4,
10894
+ peerConn.createOffer()
10895
+ ];
10896
+ case 2:
10897
+ offer = _state.sent();
10898
+ return [
10899
+ 4,
10900
+ peerConn.setLocalDescription(offer)
10901
+ ];
10902
+ case 3:
10903
+ _state.sent();
10904
+ searchParams = new URLSearchParams(variableParams({
10905
+ variables: superinterfaceContext.variables,
10906
+ superinterfaceContext: superinterfaceContext
10907
+ }));
10908
+ return [
10909
+ 4,
10910
+ fetch("".concat(superinterfaceContext.baseUrl, "/audio-runtimes/webrtc?").concat(searchParams), {
10911
+ method: "POST",
10912
+ body: offer.sdp,
10913
+ headers: {
10914
+ "Content-Type": "application/sdp"
10915
+ }
10916
+ })
10917
+ ];
10918
+ case 4:
10919
+ sdpResponse = _state.sent();
10920
+ return [
10921
+ 4,
10922
+ sdpResponse.text()
10923
+ ];
10924
+ case 5:
10925
+ answerSdp = _state.sent();
10926
+ answer = {
10927
+ type: "answer",
10928
+ sdp: answerSdp
10929
+ };
10930
+ return [
10931
+ 4,
10932
+ peerConn.setRemoteDescription(answer)
10933
+ ];
10934
+ case 6:
10935
+ _state.sent();
10936
+ buildAnalyzers(ms, audioEl);
10937
+ setUserIsPending(false);
10938
+ setAssistantIsPending(false);
10939
+ setAssistantIsReady(true);
10940
+ setAssistantPlaying(true);
10941
+ return [
10942
+ 3,
10943
+ 8
10944
+ ];
10945
+ case 7:
10946
+ err = _state.sent();
10947
+ console.error("Error initRealtimeSession:", err);
10948
+ setUserIsPending(false);
10949
+ setRecorderStatus("stopped");
10950
+ setAssistantPlaying(false);
10951
+ setAssistantPaused(false);
10952
+ setAssistantIsPending(false);
10953
+ setAssistantIsReady(false);
10954
+ setAssistantAudioPlayed(false);
10955
+ return [
10956
+ 3,
10957
+ 8
10958
+ ];
10959
+ case 8:
10960
+ return [
10961
+ 2
10962
+ ];
10963
+ }
10964
+ });
10965
+ });
10966
+ return _initRealtimeSession.apply(this, arguments);
10967
+ }
10968
+ return useMemo20(function() {
10969
+ return {
10970
+ realtimeWebRTCAudioRuntime: {
10971
+ user: {
10972
+ start: function() {
10973
+ var _start = _asyncToGenerator12(function() {
10974
+ return _ts_generator(this, function(_state) {
10975
+ switch(_state.label){
10976
+ case 0:
10977
+ return [
10978
+ 4,
10979
+ startSessionIfNeeded()
10980
+ ];
10981
+ case 1:
10982
+ _state.sent();
10983
+ setRecorderStatus("recording");
10984
+ if (localStreamRef.current) {
10985
+ localStreamRef.current.getAudioTracks().forEach(function(t_0) {
10986
+ return t_0.enabled = true;
10987
+ });
10988
+ }
10989
+ return [
10990
+ 2
10991
+ ];
10992
+ }
10993
+ });
10994
+ });
10995
+ function start() {
10996
+ return _start.apply(this, arguments);
10997
+ }
10998
+ return start;
10999
+ }(),
11000
+ pause: function() {
11001
+ var _pause = _asyncToGenerator12(function() {
11002
+ return _ts_generator(this, function(_state) {
11003
+ if (!sessionStartedRef.current) return [
11004
+ 2
11005
+ ];
11006
+ setRecorderStatus("paused");
11007
+ if (localStreamRef.current) {
11008
+ localStreamRef.current.getAudioTracks().forEach(function(t_1) {
11009
+ return t_1.enabled = false;
11010
+ });
11011
+ }
11012
+ return [
11013
+ 2
11014
+ ];
11015
+ });
11016
+ });
11017
+ function pause() {
11018
+ return _pause.apply(this, arguments);
11019
+ }
11020
+ return pause;
11021
+ }(),
11022
+ resume: function() {
11023
+ var _resume = _asyncToGenerator12(function() {
11024
+ return _ts_generator(this, function(_state) {
11025
+ if (!sessionStartedRef.current) return [
11026
+ 2
11027
+ ];
11028
+ setRecorderStatus("recording");
11029
+ if (localStreamRef.current) {
11030
+ localStreamRef.current.getAudioTracks().forEach(function(t_2) {
11031
+ return t_2.enabled = true;
11032
+ });
11033
+ }
11034
+ return [
11035
+ 2
11036
+ ];
11037
+ });
11038
+ });
11039
+ function resume() {
11040
+ return _resume.apply(this, arguments);
11041
+ }
11042
+ return resume;
11043
+ }(),
11044
+ stop: function() {
11045
+ var _stop = _asyncToGenerator12(function() {
11046
+ return _ts_generator(this, function(_state) {
11047
+ if (!sessionStartedRef.current) return [
11048
+ 2
11049
+ ];
11050
+ setRecorderStatus("stopped");
11051
+ if (localStreamRef.current) {
11052
+ localStreamRef.current.getTracks().forEach(function(track) {
11053
+ return track.stop();
11054
+ });
11055
+ }
11056
+ return [
11057
+ 2
11058
+ ];
11059
+ });
11060
+ });
11061
+ function stop() {
11062
+ return _stop.apply(this, arguments);
11063
+ }
11064
+ return stop;
11065
+ }(),
11066
+ isPending: userIsPending,
11067
+ visualizationAnalyser: userAnalyserRef.current,
11068
+ rawStatus: recorderStatus
11069
+ },
11070
+ assistant: {
11071
+ play: function() {
11072
+ var _play = _asyncToGenerator12(function() {
11073
+ return _ts_generator(this, function(_state) {
11074
+ switch(_state.label){
11075
+ case 0:
11076
+ return [
11077
+ 4,
11078
+ startSessionIfNeeded()
11079
+ ];
11080
+ case 1:
11081
+ _state.sent();
11082
+ setAssistantPaused(false);
11083
+ setAssistantPlaying(true);
11084
+ if (assistantAudioElRef.current) {
11085
+ assistantAudioElRef.current.play().catch(function(err_1) {
11086
+ console.error("Assistant play error:", err_1);
11087
+ });
11088
+ }
11089
+ return [
11090
+ 2
11091
+ ];
11092
+ }
11093
+ });
11094
+ });
11095
+ function play() {
11096
+ return _play.apply(this, arguments);
11097
+ }
11098
+ return play;
11099
+ }(),
11100
+ pause: function() {
11101
+ var _pause2 = _asyncToGenerator12(function() {
11102
+ return _ts_generator(this, function(_state) {
11103
+ if (!sessionStartedRef.current) return [
11104
+ 2
11105
+ ];
11106
+ setAssistantPaused(true);
11107
+ setAssistantPlaying(false);
11108
+ if (assistantAudioElRef.current) {
11109
+ assistantAudioElRef.current.pause();
11110
+ }
11111
+ return [
11112
+ 2
11113
+ ];
11114
+ });
11115
+ });
11116
+ function pause() {
11117
+ return _pause2.apply(this, arguments);
11118
+ }
11119
+ return pause;
11120
+ }(),
11121
+ stop: function() {
11122
+ var _stop2 = _asyncToGenerator12(function() {
11123
+ return _ts_generator(this, function(_state) {
11124
+ if (!sessionStartedRef.current) return [
11125
+ 2
11126
+ ];
11127
+ setAssistantPaused(false);
11128
+ setAssistantPlaying(false);
11129
+ if (assistantAudioElRef.current) {
11130
+ assistantAudioElRef.current.pause();
11131
+ assistantAudioElRef.current.currentTime = 0;
11132
+ }
11133
+ return [
11134
+ 2
11135
+ ];
11136
+ });
11137
+ });
11138
+ function stop() {
11139
+ return _stop2.apply(this, arguments);
11140
+ }
11141
+ return stop;
11142
+ }(),
11143
+ visualizationAnalyser: assistantAnalyserRef.current,
11144
+ playing: assistantPlaying,
11145
+ paused: assistantPaused,
11146
+ isPending: assistantIsPending,
11147
+ isReady: assistantIsReady,
11148
+ isAudioPlayed: assistantAudioPlayed,
11149
+ rawStatus: void 0
11150
+ }
11151
+ }
11152
+ };
11153
+ }, [
11154
+ recorderStatus,
11155
+ userIsPending,
11156
+ assistantPlaying,
11157
+ assistantPaused,
11158
+ assistantIsPending,
11159
+ assistantIsReady,
11160
+ assistantAudioPlayed
11161
+ ]);
11162
+ };
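The memoized value above is what the hook hands to audio runtime consumers. Reconstructed from the compiled output (not from the package's published .d.ts), its shape is approximately the following; treat the types as an informed guess.

// Approximate shape of the value returned by useRealtimeWebRTCAudioRuntime,
// reconstructed from the compiled output above rather than the published types.
type RealtimeWebRTCAudioRuntime = {
  realtimeWebRTCAudioRuntime: {
    user: {
      start: () => Promise<void>
      pause: () => Promise<void>
      resume: () => Promise<void>
      stop: () => Promise<void>
      isPending: boolean
      visualizationAnalyser: AnalyserNode | null
      rawStatus: string // "recording" | "paused" | "stopped" in the compiled code
    }
    assistant: {
      play: () => Promise<void>
      pause: () => Promise<void>
      stop: () => Promise<void>
      visualizationAnalyser: AnalyserNode | null
      playing: boolean
      paused: boolean
      isPending: boolean
      isReady: boolean
      isAudioPlayed: boolean
      rawStatus: undefined
    }
  }
}

One design detail worth noting from the compiled code: user.pause and user.resume only toggle enabled on the local audio tracks, while user.stop stops the tracks outright, so pausing keeps the peer connection alive.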
10699
11163
  // src/components/markdown/MarkdownProvider/index.tsx
10700
- import { c as _c55 } from "react-compiler-runtime";
10701
- import { useMemo as useMemo19 } from "react";
11164
+ import { c as _c56 } from "react-compiler-runtime";
11165
+ import { useMemo as useMemo21 } from "react";
10702
11166
  import { jsx as _jsx92 } from "react/jsx-runtime";
10703
11167
  var _excluded6 = [
10704
11168
  "children"
@@ -10722,7 +11186,7 @@ function _objectWithoutPropertiesLoose6(r, e) {
10722
11186
  return t;
10723
11187
  }
10724
11188
  var MarkdownProvider = function(t0) {
10725
- var $ = _c55(9);
11189
+ var $ = _c56(9);
10726
11190
  var children;
10727
11191
  var rest;
10728
11192
  if ($[0] !== t0) {
@@ -10765,18 +11229,18 @@ var MarkdownProvider = function(t0) {
10765
11229
  return t3;
10766
11230
  };
10767
11231
  // src/components/annotations/SourceAnnotation/index.tsx
10768
- import { c as _c58 } from "react-compiler-runtime";
11232
+ import { c as _c59 } from "react-compiler-runtime";
10769
11233
  // src/components/annotations/SourceAnnotation/FileCitation/index.tsx
10770
- import { c as _c57 } from "react-compiler-runtime";
10771
- import { useState as useState12 } from "react";
11234
+ import { c as _c58 } from "react-compiler-runtime";
11235
+ import { useState as useState13 } from "react";
10772
11236
  import { QuoteIcon as QuoteIcon2 } from "@radix-ui/react-icons";
10773
11237
  import { Dialog, VisuallyHidden, IconButton as IconButton11 } from "@radix-ui/themes";
10774
11238
  // src/components/annotations/SourceAnnotation/FileCitation/Content.tsx
10775
- import { c as _c56 } from "react-compiler-runtime";
11239
+ import { c as _c57 } from "react-compiler-runtime";
10776
11240
  import { Flex as Flex35, Card as Card5, Inset as Inset3 } from "@radix-ui/themes";
10777
11241
  import { jsx as _jsx93 } from "react/jsx-runtime";
10778
11242
  var Content9 = function(t0) {
10779
- var $ = _c56(5);
11243
+ var $ = _c57(5);
10780
11244
  var fileId = t0.fileId;
10781
11245
  var superinterfaceContext = useSuperinterfaceContext();
10782
11246
  var nextSearchParams = new URLSearchParams(superinterfaceContext.variables);
@@ -10842,9 +11306,9 @@ var Content9 = function(t0) {
10842
11306
  // src/components/annotations/SourceAnnotation/FileCitation/index.tsx
10843
11307
  import { jsx as _jsx94, jsxs as _jsxs37, Fragment as _Fragment4 } from "react/jsx-runtime";
10844
11308
  var FileCitation = function(t0) {
10845
- var $ = _c57(18);
11309
+ var $ = _c58(18);
10846
11310
  var annotation = t0.annotation;
10847
- var _useState12 = _sliced_to_array(useState12(null), 2), activeFileId = _useState12[0], setActiveFileId = _useState12[1];
11311
+ var _useState13 = _sliced_to_array(useState13(null), 2), activeFileId = _useState13[0], setActiveFileId = _useState13[1];
10848
11312
  var t1;
10849
11313
  if ($[0] !== annotation.file_citation.file_id) {
10850
11314
  t1 = function() {
@@ -10991,7 +11455,7 @@ function _objectWithoutPropertiesLoose7(r, e) {
10991
11455
  return t;
10992
11456
  }
10993
11457
  var SourceAnnotation = function(t0) {
10994
- var $ = _c58(10);
11458
+ var $ = _c59(10);
10995
11459
  var children;
10996
11460
  var rest;
10997
11461
  if ($[0] !== t0) {
@@ -11048,7 +11512,7 @@ var SourceAnnotation = function(t0) {
11048
11512
  return null;
11049
11513
  };
11050
11514
  // src/components/avatars/Avatar.tsx
11051
- import { c as _c61 } from "react-compiler-runtime";
11515
+ import { c as _c62 } from "react-compiler-runtime";
11052
11516
  // src/lib/enums/index.ts
11053
11517
  var IconAvatarName = /* @__PURE__ */ function(IconAvatarName2) {
11054
11518
  IconAvatarName2["BACKPACK"] = "BACKPACK";
@@ -11073,7 +11537,7 @@ var AvatarType = /* @__PURE__ */ function(AvatarType2) {
11073
11537
  // src/components/avatars/Avatar.tsx
11074
11538
  import { Avatar as RadixAvatar } from "@radix-ui/themes";
11075
11539
  // src/components/imageAvatars/ImageAvatar/index.tsx
11076
- import { c as _c59 } from "react-compiler-runtime";
11540
+ import { c as _c60 } from "react-compiler-runtime";
11077
11541
  import { Avatar as Avatar4 } from "@radix-ui/themes";
11078
11542
  // src/components/imageAvatars/ImageAvatar/lib/optimizedSrc/path.ts
11079
11543
  var width = function(_ref) {
@@ -11127,7 +11591,7 @@ var optimizedSrc = function(_ref) {
11127
11591
  // src/components/imageAvatars/ImageAvatar/index.tsx
11128
11592
  import { jsx as _jsx96 } from "react/jsx-runtime";
11129
11593
  var ImageAvatar = function(t0) {
11130
- var $ = _c59(9);
11594
+ var $ = _c60(9);
11131
11595
  var imageAvatar = t0.imageAvatar, size = t0.size, className = t0.className, style = t0.style;
11132
11596
  var superinterfaceContext = useSuperinterfaceContext();
11133
11597
  var t1;
@@ -11164,8 +11628,8 @@ var ImageAvatar = function(t0) {
11164
11628
  return t2;
11165
11629
  };
11166
11630
  // src/components/iconAvatars/IconAvatar.tsx
11167
- import { c as _c60 } from "react-compiler-runtime";
11168
- import { useMemo as useMemo20 } from "react";
11631
+ import { c as _c61 } from "react-compiler-runtime";
11632
+ import { useMemo as useMemo22 } from "react";
11169
11633
  import { Avatar as Avatar5 } from "@radix-ui/themes";
11170
11634
  // src/lib/iconAvatars/iconAvatarComponents.ts
11171
11635
  import { BackpackIcon, RocketIcon, MagicWandIcon, CubeIcon, TargetIcon, DiscIcon, GlobeIcon, StarIcon, LightningBoltIcon as LightningBoltIcon2, FaceIcon, PersonIcon as PersonIcon2, HeartIcon } from "@radix-ui/react-icons";
@@ -11174,7 +11638,7 @@ var iconAvatarComponents = (_obj = {}, _define_property(_obj, IconAvatarName.BAC
11174
11638
  // src/components/iconAvatars/IconAvatar.tsx
11175
11639
  import { jsx as _jsx97 } from "react/jsx-runtime";
11176
11640
  var IconAvatar = function(t0) {
11177
- var $ = _c60(7);
11641
+ var $ = _c61(7);
11178
11642
  var iconAvatar = t0.iconAvatar, size = t0.size, className = t0.className, style = t0.style;
11179
11643
  var t1;
11180
11644
  t1 = iconAvatarComponents[iconAvatar.name];
@@ -11208,7 +11672,7 @@ var IconAvatar = function(t0) {
11208
11672
  // src/components/avatars/Avatar.tsx
11209
11673
  import { jsx as _jsx98 } from "react/jsx-runtime";
11210
11674
  var Avatar6 = function(t0) {
11211
- var $ = _c61(14);
11675
+ var $ = _c62(14);
11212
11676
  var avatar = t0.avatar, t1 = t0.size, className = t0.className, style = t0.style;
11213
11677
  var size = t1 === void 0 ? "1" : t1;
11214
11678
  if (avatar) {
@@ -11269,8 +11733,8 @@ var Avatar6 = function(t0) {
11269
11733
  return t2;
11270
11734
  };
11271
11735
  // src/components/components/ComponentsProvider.tsx
11272
- import { c as _c62 } from "react-compiler-runtime";
11273
- import { useMemo as useMemo21 } from "react";
11736
+ import { c as _c63 } from "react-compiler-runtime";
11737
+ import { useMemo as useMemo23 } from "react";
11274
11738
  // src/hooks/components/useComponents.ts
11275
11739
  import { useContext as useContext17 } from "react";
11276
11740
  var useComponents = function() {
@@ -11300,7 +11764,7 @@ function _objectWithoutPropertiesLoose8(r, e) {
11300
11764
  return t;
11301
11765
  }
11302
11766
  var ComponentsProvider = function(t0) {
11303
- var $ = _c62(9);
11767
+ var $ = _c63(9);
11304
11768
  var children;
11305
11769
  var rest;
11306
11770
  if ($[0] !== t0) {
@@ -11343,11 +11807,11 @@ var ComponentsProvider = function(t0) {
11343
11807
  return t3;
11344
11808
  };
11345
11809
  // src/components/assistants/AssistantProvider/index.tsx
11346
- import { c as _c63 } from "react-compiler-runtime";
11810
+ import { c as _c64 } from "react-compiler-runtime";
11347
11811
  import { jsx as _jsx100 } from "react/jsx-runtime";
11348
11812
  var AssistantProvider = function(t0) {
11349
11813
  var _assistant$name;
11350
- var $ = _c63(10);
11814
+ var $ = _c64(10);
11351
11815
  var children = t0.children;
11352
11816
  var superinterfaceContext = useSuperinterfaceContext();
11353
11817
  var t1;
@@ -11399,5 +11863,5 @@ var AssistantProvider = function(t0) {
11399
11863
  }
11400
11864
  return t6;
11401
11865
  };
11402
- export { AssistantAvatarContext, AssistantNameContext, AssistantProvider, AudioThread, AudioThreadDialog, Avatar6 as Avatar, ComponentsProvider, FunctionBase, FunctionComponentsContext, MarkdownContext, MarkdownProvider, RunStep, SourceAnnotation, Suggestions, SuperinterfaceProvider, Thread, ThreadDialog, ThreadDialogContext, UserAvatarContext, useAssistant, useAudioThreadContext, useComponents, useCreateMessage, useIsMutatingMessage, useLatestMessage, useMarkdownContext, useMessageContext, useMessageFormContext, useMessages, useSuperinterfaceContext, useSuperinterfaceContext as useThreadContext, useThreadDialogContext };
11866
+ export { AssistantAvatarContext, AssistantNameContext, AssistantProvider, AudioThread, AudioThreadDialog, Avatar6 as Avatar, ComponentsProvider, FunctionBase, FunctionComponentsContext, MarkdownContext, MarkdownProvider, RunStep, SourceAnnotation, Suggestions, SuperinterfaceProvider, Thread, ThreadDialog, ThreadDialogContext, UserAvatarContext, useAssistant, useAudioThreadContext, useComponents, useCreateMessage, useIsMutatingMessage, useLatestMessage, useMarkdownContext, useMessageContext, useMessageFormContext, useMessages, useRealtimeWebRTCAudioRuntime, useSuperinterfaceContext, useSuperinterfaceContext as useThreadContext, useThreadDialogContext, useTtsAudioRuntime };
11403
11867
  //# sourceMappingURL=index.js.map
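The updated export list adds useRealtimeWebRTCAudioRuntime and useTtsAudioRuntime to the public API. A hedged consumer sketch for the WebRTC runtime follows; the zero-argument call, the component, and the UI strings are illustrative assumptions, and the hook presumably needs to run under the package's providers (SuperinterfaceProvider at minimum), so check the published TypeScript types for the actual signature before relying on it.

import { useRealtimeWebRTCAudioRuntime } from '@superinterface/react'

// Hypothetical consumer; render inside <SuperinterfaceProvider> and any other
// providers the package requires. The argument-free call is an assumption.
const PushToTalkButton = () => {
  const { realtimeWebRTCAudioRuntime } = useRealtimeWebRTCAudioRuntime()
  const { user } = realtimeWebRTCAudioRuntime
  const isRecording = user.rawStatus === 'recording'

  return (
    <button
      disabled={user.isPending}
      onClick={() => (isRecording ? user.pause() : user.start())}
    >
      {isRecording ? 'Mute microphone' : 'Start talking'}
    </button>
  )
}

export { PushToTalkButton }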